/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgcleanup.h"
#include "alias.h"
#include "toplev.h"
#include "params.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "rtl-iter.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `REG_QTY (N)' records what quantity register N is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, `REG_QTY (N)' will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions
   with the qty_table `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the mode recorded for the
   quantity of one of the registers differs from the mode of those
   expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   `REG_TICK' and `REG_IN_TABLE', accessors for members of
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
   incremented whenever a value is stored in register i.
   REG_IN_TABLE (i) holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value REG_TICK (i)
   had when the references were entered.  If we want to enter a
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
   remove old references.  Until we want to enter a new entry, the
   mere fact that the two vectors don't match causes the entries to
   be ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, `REG_TICK' and
   `REG_IN_TABLE' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
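
/* As an illustrative sketch of the quantity-number scheme (the insns
   and register numbers below are hypothetical), consider scanning

     (set (reg:SI 100) (mem:SI ...))    ; reg 100 gets a fresh quantity q5
     (set (reg:SI 101) (reg:SI 100))    ; copy: REG_QTY (101) becomes q5 too
     (set (reg:SI 100) (plus:SI ...))   ; new value: reg 100 gets a fresh q6

   After the third insn, reg 101 still carries q5, so later uses of the
   old value can still be canonicalized to reg 101 even though reg 100
   has been overwritten.  */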

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx_insn *const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};
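
/* For illustration (a hypothetical sequence, not taken from any target),
   suppose the scan passes a conditional branch such as

     (set (pc) (if_then_else (ne (reg:SI 100) (const_int 0)) ...))

   On the fall-through path, where the branch was not taken, the quantity
   of reg 100 can record comparison_code == EQ with comparison_const ==
   (const_int 0) and comparison_qty == -1, so a later test of reg 100
   against zero is known to be true and can be folded.  */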

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the current and last value assigned to CC0.
   If it should happen to be a constant, it is stored in preference
   to the actual assigned value.  In case it is a constant, we store
   the mode in which the constant should be interpreted.  */

static rtx this_insn_cc0, prev_insn_cc0;
static machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;

/* Insn being scanned.  */

static rtx_insn *this_insn;
static bool optimize_this_for_speed_p;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;

/* The timestamp at the beginning of the current run of
   cse_extended_basic_block.  We increment this variable at the beginning of
   the current run of cse_extended_basic_block.  The timestamp field of a
   cse_reg_info entry matches the value of this variable if and only
   if the entry has been initialized during the current run of
   cse_extended_basic_block.  */
static unsigned int cse_reg_info_timestamp;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* True if CSE has altered the CFG.  */
static bool cse_cfg_altered;

/* True if CSE has altered conditional jump insns in such a way
   that jump optimization should be redone.  */
static bool cse_jumps_altered;

/* True if we have put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL_OPERAND; we then have to rerun jump after CSE
   to add the note.  */
static bool recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT	5
#define HASH_SIZE	(1 << HASH_SHIFT)
#define HASH_MASK	(HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : safe_hash (X, M)) & HASH_MASK)
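
/* A worked example of the pseudo-register special case above (the
   register and quantity numbers are invented): if REG_QTY (100) and
   REG_QTY (101) are both 7, then HASH ((reg:SI 100), SImode) and
   HASH ((reg:SI 101), SImode) both come out to
   (((unsigned) REG << 7) + 7) & HASH_MASK.  Hashing by quantity rather
   than by register number is what lets equivalent registers land in the
   same bucket without rehashing when one is copied into the other.  */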

/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N)							\
  (REGNO_PTR_FRAME_P (N)						\
   || (HARD_REGISTER_NUM_P (N)						\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X, MODE)							\
  (REG_P (X) ? 0 : notreg_cost (X, MODE, SET, 1))
#define COST_IN(X, MODE, OUTER, OPNO)					\
  (REG_P (X) ? 0 : notreg_cost (X, MODE, OUTER, OPNO))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)

/* Compare table_elt X and Y and return true iff X is cheaper than Y.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* Trace a path through the CFG.  */

struct branch_path
{
  /* The basic block for this path entry.  */
  basic_block bb;
};

/* This data describes a block that will be processed by
   cse_extended_basic_block.  */

struct cse_basic_block_data
{
  /* Total number of SETs in block.  */
  int nsets;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current path, indicating which basic_blocks will be processed.  */
  struct branch_path *path;
};


/* Pointers to the live in/live out bitmaps for the boundaries of the
   current EBB.  */
static bitmap cse_ebb_live_in, cse_ebb_live_out;

/* A simple bitmap to track which basic blocks have been visited
   already as part of an already processed extended basic block.  */
static sbitmap cse_visited_basic_blocks;

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, machine_mode, enum rtx_code, int);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static void remove_pseudo_from_table (rtx, unsigned);
static struct table_elt *lookup (rtx, unsigned, machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
					    machine_mode, int, int);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, machine_mode);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
					machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, machine_mode);
static inline unsigned safe_hash (rtx, machine_mode);
static inline unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx_insn *);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   machine_mode *,
					   machine_mode *);
static rtx fold_rtx (rtx, rtx_insn *);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx_insn *, bool);
static void record_jump_cond (enum rtx_code, machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx_insn *);
static void cse_prescan_path (struct cse_basic_block_data *);
static void invalidate_from_clobbers (rtx_insn *);
static void invalidate_from_sets_and_clobbers (rtx_insn *);
static rtx cse_process_notes (rtx, rtx, bool *);
static void cse_extended_basic_block (struct cse_basic_block_data *);
extern void dump_class (struct table_elt*);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);

static void flush_hash_table (void);
static bool insn_live_p (rtx_insn *, int *);
static bool set_live_p (rtx, rtx_insn *, int *);
static void cse_change_cc_mode_insn (rtx_insn *, rtx);
static void cse_change_cc_mode_insns (rtx_insn *, rtx_insn *, rtx);
static machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
				       bool);


#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART		gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;

/* Nonzero if X has the form (PLUS frame-pointer integer).  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      return false;

    case PLUS:
      if (!CONST_INT_P (XEXP (x, 1)))
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
DEBUG_FUNCTION void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (const_rtx x)
{
  int cost = 0;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x))
	{
	  unsigned int regno = REGNO (x);
	  if (!CHEAP_REGNO (regno))
	    {
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
		    return MAX_COST;
		  cost += 2;
		}
	      else
		cost += 1;
	    }
	}
    }
  return cost;
}
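
/* For example (an illustrative sketch with invented register numbers):
   approx_reg_cost of (plus:SI (reg:SI 100) (reg:SI 101)) is 2, one per
   pseudo.  Replace reg 101 with a non-fixed hard register and the
   estimate becomes 3; on a target whose
   small_register_classes_for_mode_p hook returns true for SImode, the
   whole expression is instead priced at MAX_COST, discouraging anything
   that would extend the hard register's lifetime.  */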

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, machine_mode mode, enum rtx_code outer, int opno)
{
  return ((GET_CODE (x) == SUBREG
	   && REG_P (SUBREG_REG (x))
	   && GET_MODE_CLASS (mode) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (SUBREG_REG (x))))
	  ? 0
	  : rtx_cost (x, mode, outer, opno, optimize_this_for_speed_p) * 2);
}
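
/* To illustrate the special case above with a hypothetical pseudo:
   (subreg:SI (reg:DI 100) 0) is the lowpart of a wider integer
   register, so on a target where truncating DImode to SImode is a
   no-op it is priced at 0, just like a plain register; any other
   non-register expression is priced at twice its rtx_cost.  */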


/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
	{
	  /* Compute a new size that is a power of 2 and no smaller
	     than the larger of NREGS and 64.  */
	  new_size = (cse_reg_info_table_size
		      ? cse_reg_info_table_size : 64);

	  while (new_size < nregs)
	    new_size *= 2;
	}
      else
	{
	  /* If we need a big table, allocate just enough to hold
	     NREGS registers.  */
	  new_size = nregs;
	}

      /* Reallocate the table with NEW_SIZE entries.  */
      free (cse_reg_info_table);
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
	 will all be considered out of date.  We do not touch those
	 entries beyond the first NREGS entries to be nice to the
	 virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
	cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}

/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}

/* Find a cse_reg_info entry for REGNO.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this hash bucket's entire chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

  prev_insn_cc0 = 0;
}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW_REG equivalent to reg OLD_REG.
   OLD_REG is not changing; NEW_REG is.  */

static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW_REG will live longer than any other
     reg of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
	  || (new_reg >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_out, firstr))
		  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW_REG is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new_reg >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
	qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}
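
/* A sketch of the resulting chain ordering (the register numbers are
   invented): if a quantity's chain is currently fp -> r100 -> r3, with
   fp fixed and r3 a non-fixed hard reg, then linking in a new pseudo
   r101 leaves fp at the head and inserts r101 before r3, giving
   fp -> r100 -> r101 -> r3: fixed hard regs are the most preferred
   replacements and non-fixed hard regs the least.  */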

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  unsigned c_regno = REGNO (classp->exp);

		  gcc_assert (REGNO_QTY_VALID_P (c_regno));

		  /* Suppose that 5 is hard reg and 100 and 101 are
		     pseudos.  Consider

		     (set (reg:si 100) (reg:si 5))
		     (set (reg:si 5) (reg:si 100))
		     (set (reg:di 101) (reg:di 5))

		     We would now set REG_QTY (101) = REG_QTY (5), but the
		     entry for 5 is in SImode.  When we use this later in
		     copy propagation, we get the register in wrong mode.  */
		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
		    continue;

		  make_regs_eqv (regno, c_regno);
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}


/* Compute upper and lower anchors for CST.  Also compute the offset of CST
   from these anchors/bases such that *_BASE + *_OFFS = CST.  Return false iff
   CST is equal to an anchor.  */

static bool
compute_const_anchors (rtx cst,
		       HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
		       HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
{
  HOST_WIDE_INT n = INTVAL (cst);

  *lower_base = n & ~(targetm.const_anchor - 1);
  if (*lower_base == n)
    return false;

  *upper_base =
    (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1);
  *upper_offs = n - *upper_base;
  *lower_offs = n - *lower_base;
  return true;
}

/* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE.  */

static void
insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
		     machine_mode mode)
{
  struct table_elt *elt;
  unsigned hash;
  rtx anchor_exp;
  rtx exp;

  anchor_exp = GEN_INT (anchor);
  hash = HASH (anchor_exp, mode);
  elt = lookup (anchor_exp, hash, mode);
  if (!elt)
    elt = insert (anchor_exp, NULL, hash, mode);

  exp = plus_constant (mode, reg, offs);
  /* REG has just been inserted and the hash codes recomputed.  */
  mention_regs (exp);
  hash = HASH (exp, mode);

  /* Use the cost of the register rather than the whole expression.  When
     looking up constant anchors we will further offset the corresponding
     expression; therefore it does not make sense to prefer REGs over
     reg-immediate additions.  Prefer instead the oldest expression.  Also
     don't prefer pseudos over hard regs so that we derive constants in
     argument registers from other argument registers rather than from the
     original pseudo that was used to synthesize the constant.  */
  insert_with_costs (exp, elt, hash, mode, COST (reg, mode), 1);
}

/* The constant CST is equivalent to the register REG.  Create
   equivalences between the two anchors of CST and the corresponding
   register-offset expressions using REG.  */

static void
insert_const_anchors (rtx reg, rtx cst, machine_mode mode)
{
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;

  if (!compute_const_anchors (cst, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
      return;

  /* Ignore anchors of value 0.  Constants accessible from zero are
     simple.  */
  if (lower_base != 0)
    insert_const_anchor (lower_base, reg, -lower_offs, mode);

  if (upper_base != 0)
    insert_const_anchor (upper_base, reg, -upper_offs, mode);
}

/* We need to express ANCHOR_ELT->exp + OFFS.  Walk the equivalence list of
   ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
   valid expression.  Return the cheapest and oldest of such expressions.  In
   *OLD, return how old the resulting expression is compared to the other
   equivalent expressions.  */

static rtx
find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
			   unsigned *old)
{
  struct table_elt *elt;
  unsigned idx;
  struct table_elt *match_elt;
  rtx match;

  /* Find the cheapest and *oldest* expression to maximize the chance of
     reusing the same pseudo.  */

  match_elt = NULL;
  match = NULL_RTX;
  for (elt = anchor_elt->first_same_value, idx = 0;
       elt;
       elt = elt->next_same_value, idx++)
    {
      if (match_elt && CHEAPER (match_elt, elt))
	return match;

      if (REG_P (elt->exp)
	  || (GET_CODE (elt->exp) == PLUS
	      && REG_P (XEXP (elt->exp, 0))
	      && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
	{
	  rtx x;

	  /* Ignore expressions that are no longer valid.  */
	  if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
	    continue;

	  x = plus_constant (GET_MODE (elt->exp), elt->exp, offs);
	  if (REG_P (x)
	      || (GET_CODE (x) == PLUS
		  && IN_RANGE (INTVAL (XEXP (x, 1)),
			       -targetm.const_anchor,
			       targetm.const_anchor - 1)))
	    {
	      match = x;
	      match_elt = elt;
	      *old = idx;
	    }
	}
    }

  return match;
}

/* Try to express the constant SRC_CONST using a register+offset expression
   derived from a constant anchor.  Return it if successful, or NULL_RTX
   otherwise.  */

static rtx
try_const_anchors (rtx src_const, machine_mode mode)
{
  struct table_elt *lower_elt, *upper_elt;
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
  rtx lower_anchor_rtx, upper_anchor_rtx;
  rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
  unsigned lower_old, upper_old;

  /* CONST_INT is used for CC modes, but we should leave those alone.  */
  if (GET_MODE_CLASS (mode) == MODE_CC)
    return NULL_RTX;

  gcc_assert (SCALAR_INT_MODE_P (mode));
  if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
    return NULL_RTX;

  lower_anchor_rtx = GEN_INT (lower_base);
  upper_anchor_rtx = GEN_INT (upper_base);
  lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
  upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);

  if (lower_elt)
    lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
  if (upper_elt)
    upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);

  if (!lower_exp)
    return upper_exp;
  if (!upper_exp)
    return lower_exp;

  /* Return the older expression.  */
  return (upper_old > lower_old ? upper_exp : lower_exp);
}
1356 /* Look in or update the hash table.  */
1357 
1358 /* Remove table element ELT from use in the table.
1359    HASH is its hash code, made using the HASH macro.
1360    It's an argument because often that is known in advance
1361    and we save much time not recomputing it.  */
1362 
1363 static void
1364 remove_from_table (struct table_elt *elt, unsigned int hash)
1365 {
1366   if (elt == 0)
1367     return;
1368 
1369   /* Mark this element as removed.  See cse_insn.  */
1370   elt->first_same_value = 0;
1371 
1372   /* Remove the table element from its equivalence class.  */
1373 
1374   {
1375     struct table_elt *prev = elt->prev_same_value;
1376     struct table_elt *next = elt->next_same_value;
1377 
1378     if (next)
1379       next->prev_same_value = prev;
1380 
1381     if (prev)
1382       prev->next_same_value = next;
1383     else
1384       {
1385 	struct table_elt *newfirst = next;
1386 	while (next)
1387 	  {
1388 	    next->first_same_value = newfirst;
1389 	    next = next->next_same_value;
1390 	  }
1391       }
1392   }
1393 
1394   /* Remove the table element from its hash bucket.  */
1395 
1396   {
1397     struct table_elt *prev = elt->prev_same_hash;
1398     struct table_elt *next = elt->next_same_hash;
1399 
1400     if (next)
1401       next->prev_same_hash = prev;
1402 
1403     if (prev)
1404       prev->next_same_hash = next;
1405     else if (table[hash] == elt)
1406       table[hash] = next;
1407     else
1408       {
1409 	/* This entry is not in the proper hash bucket.  This can happen
1410 	   when two classes were merged by `merge_equiv_classes'.  Search
1411 	   for the hash bucket that it heads.  This happens only very
1412 	   rarely, so the cost is acceptable.  */
1413 	for (hash = 0; hash < HASH_SIZE; hash++)
1414 	  if (table[hash] == elt)
1415 	    table[hash] = next;
1416       }
1417   }
1418 
1419   /* Remove the table element from its related-value circular chain.  */
1420 
1421   if (elt->related_value != 0 && elt->related_value != elt)
1422     {
1423       struct table_elt *p = elt->related_value;
1424 
1425       while (p->related_value != elt)
1426 	p = p->related_value;
1427       p->related_value = elt->related_value;
1428       if (p->related_value == p)
1429 	p->related_value = 0;
1430     }
1431 
1432   /* Now add it to the free element chain.  */
1433   elt->next_same_hash = free_element_chain;
1434   free_element_chain = elt;
1435 }
1436 
1437 /* Same as above, but X is a pseudo-register.  */
1438 
1439 static void
1440 remove_pseudo_from_table (rtx x, unsigned int hash)
1441 {
1442   struct table_elt *elt;
1443 
1444   /* Because a pseudo-register can be referenced in more than one
1445      mode, we might have to remove more than one table entry.  */
1446   while ((elt = lookup_for_remove (x, hash, VOIDmode)))
1447     remove_from_table (elt, hash);
1448 }
1449 
1450 /* Look up X in the hash table and return its table element,
1451    or 0 if X is not in the table.
1452 
1453    MODE is the machine-mode of X, or if X is an integer constant
1454    with VOIDmode then MODE is the mode with which X will be used.
1455 
1456    Here we are satisfied to find an expression whose tree structure
1457    looks like X.  */
1458 
1459 static struct table_elt *
1460 lookup (rtx x, unsigned int hash, machine_mode mode)
1461 {
1462   struct table_elt *p;
1463 
1464   for (p = table[hash]; p; p = p->next_same_hash)
1465     if (mode == p->mode && ((x == p->exp && REG_P (x))
1466 			    || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1467       return p;
1468 
1469   return 0;
1470 }
1471 
1472 /* Like `lookup' but don't care whether the table element uses invalid regs.
1473    Also ignore discrepancies in the machine mode of a register.  */
1474 
1475 static struct table_elt *
1476 lookup_for_remove (rtx x, unsigned int hash, machine_mode mode)
1477 {
1478   struct table_elt *p;
1479 
1480   if (REG_P (x))
1481     {
1482       unsigned int regno = REGNO (x);
1483 
1484       /* Don't check the machine mode when comparing registers;
1485 	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
1486       for (p = table[hash]; p; p = p->next_same_hash)
1487 	if (REG_P (p->exp)
1488 	    && REGNO (p->exp) == regno)
1489 	  return p;
1490     }
1491   else
1492     {
1493       for (p = table[hash]; p; p = p->next_same_hash)
1494 	if (mode == p->mode
1495 	    && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1496 	  return p;
1497     }
1498 
1499   return 0;
1500 }
1501 
1502 /* Look for an expression equivalent to X and with code CODE.
1503    If one is found, return that expression.  */
1504 
1505 static rtx
1506 lookup_as_function (rtx x, enum rtx_code code)
1507 {
1508   struct table_elt *p
1509     = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1510 
1511   if (p == 0)
1512     return 0;
1513 
1514   for (p = p->first_same_value; p; p = p->next_same_value)
1515     if (GET_CODE (p->exp) == code
1516 	/* Make sure this is a valid entry in the table.  */
1517 	&& exp_equiv_p (p->exp, p->exp, 1, false))
1518       return p->exp;
1519 
1520   return 0;
1521 }
1522 
1523 /* Insert X in the hash table, assuming HASH is its hash code and
1524    CLASSP is an element of the class it should go in (or 0 if a new
1525    class should be made).  COST is the code of X and reg_cost is the
1526    cost of registers in X.  It is inserted at the proper position to
1527    keep the class in the order cheapest first.
1528 
1529    MODE is the machine-mode of X, or if X is an integer constant
1530    with VOIDmode then MODE is the mode with which X will be used.
1531 
1532    For elements of equal cheapness, the most recent one
1533    goes in front, except that the first element in the list
1534    remains first unless a cheaper element is added.  The order of
1535    pseudo-registers does not matter, as canon_reg will be called to
1536    find the cheapest when a register is retrieved from the table.
1537 
1538    The in_memory field in the hash table element is set to 0.
1539    The caller must set it nonzero if appropriate.
1540 
1541    You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1542    and if insert_regs returns a nonzero value
1543    you must then recompute its hash code before calling here.
1544 
1545    If necessary, update table showing constant values of quantities.  */
1546 
1547 static struct table_elt *
1548 insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
1549 		   machine_mode mode, int cost, int reg_cost)
1550 {
1551   struct table_elt *elt;
1552 
1553   /* If X is a register and we haven't made a quantity for it,
1554      something is wrong.  */
1555   gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1556 
1557   /* If X is a hard register, show it is being put in the table.  */
1558   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1559     add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));
1560 
1561   /* Put an element for X into the right hash bucket.  */
1562 
1563   elt = free_element_chain;
1564   if (elt)
1565     free_element_chain = elt->next_same_hash;
1566   else
1567     elt = XNEW (struct table_elt);
1568 
1569   elt->exp = x;
1570   elt->canon_exp = NULL_RTX;
1571   elt->cost = cost;
1572   elt->regcost = reg_cost;
1573   elt->next_same_value = 0;
1574   elt->prev_same_value = 0;
1575   elt->next_same_hash = table[hash];
1576   elt->prev_same_hash = 0;
1577   elt->related_value = 0;
1578   elt->in_memory = 0;
1579   elt->mode = mode;
1580   elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1581 
1582   if (table[hash])
1583     table[hash]->prev_same_hash = elt;
1584   table[hash] = elt;
1585 
1586   /* Put it into the proper value-class.  */
1587   if (classp)
1588     {
1589       classp = classp->first_same_value;
1590       if (CHEAPER (elt, classp))
1591 	/* Insert at the head of the class.  */
1592 	{
1593 	  struct table_elt *p;
1594 	  elt->next_same_value = classp;
1595 	  classp->prev_same_value = elt;
1596 	  elt->first_same_value = elt;
1597 
1598 	  for (p = classp; p; p = p->next_same_value)
1599 	    p->first_same_value = elt;
1600 	}
1601       else
1602 	{
1603 	  /* Insert not at the head of the class.  */
1604 	  /* Put it after the last element cheaper than X.  */
1605 	  struct table_elt *p, *next;
1606 
1607 	  for (p = classp;
1608 	       (next = p->next_same_value) && CHEAPER (next, elt);
1609 	       p = next)
1610 	    ;
1611 
1612 	  /* Put it after P and before NEXT.  */
1613 	  elt->next_same_value = next;
1614 	  if (next)
1615 	    next->prev_same_value = elt;
1616 
1617 	  elt->prev_same_value = p;
1618 	  p->next_same_value = elt;
1619 	  elt->first_same_value = classp;
1620 	}
1621     }
1622   else
1623     elt->first_same_value = elt;
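  /* Illustration with hypothetical costs: inserting an element of cost 3
     into a class ordered 1 -> 4 -> 6 places it between the elements of
     cost 1 and 4; inserting one of cost 0 makes it the new head, and the
     first_same_value pointer of every old element is redirected to it.  */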
1624 
1625   /* If this is a constant being set equivalent to a register or a register
1626      being set equivalent to a constant, note the constant equivalence.
1627 
1628      If this is a constant, it cannot be equivalent to a different constant,
1629      and a constant is the only thing that can be cheaper than a register.  So
1630      we know the register is the head of the class (before the constant was
1631      inserted).
1632 
1633      If this is a register that is not already known equivalent to a
1634      constant, we must check the entire class.
1635 
1636      If this is a register that is already known equivalent to a constant,
1637      update the quantity's `const_insn' to show that `this_insn' is the latest
1638      insn making that quantity equivalent to the constant.  */
1639 
1640   if (elt->is_const && classp && REG_P (classp->exp)
1641       && !REG_P (x))
1642     {
1643       int exp_q = REG_QTY (REGNO (classp->exp));
1644       struct qty_table_elem *exp_ent = &qty_table[exp_q];
1645 
1646       exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1647       exp_ent->const_insn = this_insn;
1648     }
1649 
1650   else if (REG_P (x)
1651 	   && classp
1652 	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1653 	   && ! elt->is_const)
1654     {
1655       struct table_elt *p;
1656 
1657       for (p = classp; p != 0; p = p->next_same_value)
1658 	{
1659 	  if (p->is_const && !REG_P (p->exp))
1660 	    {
1661 	      int x_q = REG_QTY (REGNO (x));
1662 	      struct qty_table_elem *x_ent = &qty_table[x_q];
1663 
1664 	      x_ent->const_rtx
1665 		= gen_lowpart (GET_MODE (x), p->exp);
1666 	      x_ent->const_insn = this_insn;
1667 	      break;
1668 	    }
1669 	}
1670     }
1671 
1672   else if (REG_P (x)
1673 	   && qty_table[REG_QTY (REGNO (x))].const_rtx
1674 	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1675     qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1676 
1677   /* If this is a constant with symbolic value,
1678      and it has a term with an explicit integer value,
1679      link it up with related expressions.  */
1680   if (GET_CODE (x) == CONST)
1681     {
1682       rtx subexp = get_related_value (x);
1683       unsigned subhash;
1684       struct table_elt *subelt, *subelt_prev;
1685 
1686       if (subexp != 0)
1687 	{
1688 	  /* Get the integer-free subexpression in the hash table.  */
1689 	  subhash = SAFE_HASH (subexp, mode);
1690 	  subelt = lookup (subexp, subhash, mode);
1691 	  if (subelt == 0)
1692 	    subelt = insert (subexp, NULL, subhash, mode);
1693 	  /* Initialize SUBELT's circular chain if it has none.  */
1694 	  if (subelt->related_value == 0)
1695 	    subelt->related_value = subelt;
1696 	  /* Find the element in the circular chain that precedes SUBELT.  */
1697 	  subelt_prev = subelt;
1698 	  while (subelt_prev->related_value != subelt)
1699 	    subelt_prev = subelt_prev->related_value;
1700 	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1701 	     This way the element that follows SUBELT is the oldest one.  */
1702 	  elt->related_value = subelt_prev->related_value;
1703 	  subelt_prev->related_value = elt;
1704 	}
1705     }
1706 
1707   return elt;
1708 }
1709 
1710 /* Wrap insert_with_costs by passing the default costs.  */
1711 
1712 static struct table_elt *
1713 insert (rtx x, struct table_elt *classp, unsigned int hash,
1714 	machine_mode mode)
1715 {
1716   return insert_with_costs (x, classp, hash, mode,
1717 			    COST (x, mode), approx_reg_cost (x));
1718 }
1719 
1720 
1721 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1722    CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1723    the two classes equivalent.
1724 
1725    CLASS1 will be the surviving class; CLASS2 should not be used after this
1726    call.
1727 
1728    Any invalid entries in CLASS2 will not be copied.  */
1729 
1730 static void
1731 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1732 {
1733   struct table_elt *elt, *next, *new_elt;
1734 
1735   /* Ensure we start with the head of the classes.  */
1736   class1 = class1->first_same_value;
1737   class2 = class2->first_same_value;
1738 
1739   /* If they were already equal, forget it.  */
1740   if (class1 == class2)
1741     return;
1742 
1743   for (elt = class2; elt; elt = next)
1744     {
1745       unsigned int hash;
1746       rtx exp = elt->exp;
1747       machine_mode mode = elt->mode;
1748 
1749       next = elt->next_same_value;
1750 
1751       /* Remove old entry, make a new one in CLASS1's class.
1752 	 Don't do this for invalid entries as we cannot find their
1753 	 hash code (it also isn't necessary).  */
1754       if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1755 	{
1756 	  bool need_rehash = false;
1757 
1758 	  hash_arg_in_memory = 0;
1759 	  hash = HASH (exp, mode);
1760 
1761 	  if (REG_P (exp))
1762 	    {
1763 	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1764 	      delete_reg_equiv (REGNO (exp));
1765 	    }
1766 
1767 	  if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
1768 	    remove_pseudo_from_table (exp, hash);
1769 	  else
1770 	    remove_from_table (elt, hash);
1771 
1772 	  if (insert_regs (exp, class1, 0) || need_rehash)
1773 	    {
1774 	      rehash_using_reg (exp);
1775 	      hash = HASH (exp, mode);
1776 	    }
1777 	  new_elt = insert (exp, class1, hash, mode);
1778 	  new_elt->in_memory = hash_arg_in_memory;
1779 	  if (GET_CODE (exp) == ASM_OPERANDS && elt->cost == MAX_COST)
1780 	    new_elt->cost = MAX_COST;
1781 	}
1782     }
1783 }
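/* For example, when an insn establishes (reg 100) == (reg 101), the
   class of (reg 101) is drained into the class of (reg 100): each valid
   entry is removed, rehashed if its register equivalences changed, and
   reinserted as an equivalent of CLASS1.  */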
1784 
1785 /* Flush the entire hash table.  */
1786 
1787 static void
1788 flush_hash_table (void)
1789 {
1790   int i;
1791   struct table_elt *p;
1792 
1793   for (i = 0; i < HASH_SIZE; i++)
1794     for (p = table[i]; p; p = table[i])
1795       {
1796 	/* Note that invalidate can remove elements
1797 	   after P in the current hash chain.  */
1798 	if (REG_P (p->exp))
1799 	  invalidate (p->exp, VOIDmode);
1800 	else
1801 	  remove_from_table (p, i);
1802       }
1803 }
1804 
1805 /* Check whether an anti-dependence exists between X and EXP.  MODE and
1806    ADDR are as for canon_anti_dependence.  */
1807 
1808 static bool
1809 check_dependence (const_rtx x, rtx exp, machine_mode mode, rtx addr)
1810 {
1811   subrtx_iterator::array_type array;
1812   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
1813     {
1814       const_rtx x = *iter;
1815       if (MEM_P (x) && canon_anti_dependence (x, true, exp, mode, addr))
1816 	return true;
1817     }
1818   return false;
1819 }
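/* E.g. for X = (plus:SI (mem:SI A) (reg:SI B)), the iterator visits every
   subrtx, and only the MEM can establish an anti-dependence against the
   store to EXP.  */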
1820 
1821 /* Remove from the hash table, or mark as invalid, all expressions whose
1822    values could be altered by storing in X.  X is a register, a subreg, or
1823    a memory reference with nonvarying address (because, when a memory
1824    reference with a varying address is stored in, all memory references are
1825    removed by invalidate_memory so specific invalidation is superfluous).
1826    FULL_MODE, if not VOIDmode, indicates that this much should be
1827    invalidated instead of just the amount indicated by the mode of X.  This
1828    is only used for bitfield stores into memory.
1829 
1830    A nonvarying address may be just a register or just a symbol reference,
1831    or it may be either of those plus a numeric offset.  */
1832 
1833 static void
1834 invalidate (rtx x, machine_mode full_mode)
1835 {
1836   int i;
1837   struct table_elt *p;
1838   rtx addr;
1839 
1840   switch (GET_CODE (x))
1841     {
1842     case REG:
1843       {
1844 	/* If X is a register, dependencies on its contents are recorded
1845 	   through the qty number mechanism.  Just change the qty number of
1846 	   the register, mark it as invalid for expressions that refer to it,
1847 	   and remove it itself.  */
1848 	unsigned int regno = REGNO (x);
1849 	unsigned int hash = HASH (x, GET_MODE (x));
1850 
1851 	/* Remove REGNO from any quantity list it might be on and indicate
1852 	   that its value might have changed.  If it is a pseudo, remove its
1853 	   entry from the hash table.
1854 
1855 	   For a hard register, we do the first two actions above for any
1856 	   additional hard registers corresponding to X.  Then, if any of these
1857 	   registers are in the table, we must remove any REG entries that
1858 	   overlap these registers.  */
1859 
1860 	delete_reg_equiv (regno);
1861 	REG_TICK (regno)++;
1862 	SUBREG_TICKED (regno) = -1;
1863 
1864 	if (regno >= FIRST_PSEUDO_REGISTER)
1865 	  remove_pseudo_from_table (x, hash);
1866 	else
1867 	  {
1868 	    HOST_WIDE_INT in_table
1869 	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1870 	    unsigned int endregno = END_REGNO (x);
1871 	    unsigned int tregno, tendregno, rn;
1872 	    struct table_elt *p, *next;
1873 
1874 	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1875 
1876 	    for (rn = regno + 1; rn < endregno; rn++)
1877 	      {
1878 		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1879 		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1880 		delete_reg_equiv (rn);
1881 		REG_TICK (rn)++;
1882 		SUBREG_TICKED (rn) = -1;
1883 	      }
1884 
1885 	    if (in_table)
1886 	      for (hash = 0; hash < HASH_SIZE; hash++)
1887 		for (p = table[hash]; p; p = next)
1888 		  {
1889 		    next = p->next_same_hash;
1890 
1891 		    if (!REG_P (p->exp)
1892 			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1893 		      continue;
1894 
1895 		    tregno = REGNO (p->exp);
1896 		    tendregno = END_REGNO (p->exp);
1897 		    if (tendregno > regno && tregno < endregno)
1898 		      remove_from_table (p, hash);
1899 		  }
1900 	  }
1901       }
1902       return;
1903 
1904     case SUBREG:
1905       invalidate (SUBREG_REG (x), VOIDmode);
1906       return;
1907 
1908     case PARALLEL:
1909       for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1910 	invalidate (XVECEXP (x, 0, i), VOIDmode);
1911       return;
1912 
1913     case EXPR_LIST:
1914       /* This is part of a disjoint return value; extract the location in
1915 	 question ignoring the offset.  */
1916       invalidate (XEXP (x, 0), VOIDmode);
1917       return;
1918 
1919     case MEM:
1920       addr = canon_rtx (get_addr (XEXP (x, 0)));
1921       /* Calculate the canonical version of X here so that
1922 	 true_dependence doesn't generate new RTL for X on each call.  */
1923       x = canon_rtx (x);
1924 
1925       /* Remove all hash table elements that refer to overlapping pieces of
1926 	 memory.  */
1927       if (full_mode == VOIDmode)
1928 	full_mode = GET_MODE (x);
1929 
1930       for (i = 0; i < HASH_SIZE; i++)
1931 	{
1932 	  struct table_elt *next;
1933 
1934 	  for (p = table[i]; p; p = next)
1935 	    {
1936 	      next = p->next_same_hash;
1937 	      if (p->in_memory)
1938 		{
1939 		  /* Just canonicalize the expression once;
1940 		     otherwise each time we call invalidate
1941 		     true_dependence will canonicalize the
1942 		     expression again.  */
1943 		  if (!p->canon_exp)
1944 		    p->canon_exp = canon_rtx (p->exp);
1945 		  if (check_dependence (p->canon_exp, x, full_mode, addr))
1946 		    remove_from_table (p, i);
1947 		}
1948 	    }
1949 	}
1950       return;
1951 
1952     default:
1953       gcc_unreachable ();
1954     }
1955 }
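/* For instance, on a hypothetical target where (reg:DI 0) spans hard
   regs 0 and 1, invalidating it bumps REG_TICK for both regnos and
   removes any table entry for (reg:SI 0), (reg:SI 1) or (reg:DI 0),
   since each overlaps the invalidated range.  */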
1956 
1957 /* Invalidate DEST.  Used when DEST is not going to be added
1958    into the hash table for some reason, e.g. do_not_record
1959    flagged on it.  */
1960 
1961 static void
1962 invalidate_dest (rtx dest)
1963 {
1964   if (REG_P (dest)
1965       || GET_CODE (dest) == SUBREG
1966       || MEM_P (dest))
1967     invalidate (dest, VOIDmode);
1968   else if (GET_CODE (dest) == STRICT_LOW_PART
1969 	   || GET_CODE (dest) == ZERO_EXTRACT)
1970     invalidate (XEXP (dest, 0), GET_MODE (dest));
1971 }
1972 
1973 /* Remove all expressions that refer to register REGNO,
1974    since they are already invalid, and we are about to
1975    mark that register valid again and don't want the old
1976    expressions to reappear as valid.  */
1977 
1978 static void
1979 remove_invalid_refs (unsigned int regno)
1980 {
1981   unsigned int i;
1982   struct table_elt *p, *next;
1983 
1984   for (i = 0; i < HASH_SIZE; i++)
1985     for (p = table[i]; p; p = next)
1986       {
1987 	next = p->next_same_hash;
1988 	if (!REG_P (p->exp) && refers_to_regno_p (regno, p->exp))
1989 	  remove_from_table (p, i);
1990       }
1991 }
1992 
1993 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1994    and mode MODE.  */
1995 static void
1996 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1997 			    machine_mode mode)
1998 {
1999   unsigned int i;
2000   struct table_elt *p, *next;
2001   unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2002 
2003   for (i = 0; i < HASH_SIZE; i++)
2004     for (p = table[i]; p; p = next)
2005       {
2006 	rtx exp = p->exp;
2007 	next = p->next_same_hash;
2008 
2009 	if (!REG_P (exp)
2010 	    && (GET_CODE (exp) != SUBREG
2011 		|| !REG_P (SUBREG_REG (exp))
2012 		|| REGNO (SUBREG_REG (exp)) != regno
2013 		|| (((SUBREG_BYTE (exp)
2014 		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2015 		    && SUBREG_BYTE (exp) <= end))
2016 	    && refers_to_regno_p (regno, p->exp))
2017 	  remove_from_table (p, i);
2018       }
2019 }
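/* E.g. for REGNO 100, OFFSET 4 and MODE SImode (bytes 4-7 on a target
   with 4-byte SImode), an entry (subreg:SI (reg:DI 100) 0) covers
   bytes 0-3, does not overlap, and is kept, whereas
   (subreg:SI (reg:DI 100) 4) is removed.  */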
2020 
2021 /* Recompute the hash codes of any valid entries in the hash table that
2022    reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2023 
2024    This is called when we make a jump equivalence.  */
2025 
2026 static void
2027 rehash_using_reg (rtx x)
2028 {
2029   unsigned int i;
2030   struct table_elt *p, *next;
2031   unsigned hash;
2032 
2033   if (GET_CODE (x) == SUBREG)
2034     x = SUBREG_REG (x);
2035 
2036   /* If X is not a register or if the register is known not to be in any
2037      valid entries in the table, we have no work to do.  */
2038 
2039   if (!REG_P (x)
2040       || REG_IN_TABLE (REGNO (x)) < 0
2041       || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2042     return;
2043 
2044   /* Scan all hash chains looking for valid entries that mention X.
2045      If we find one and it is in the wrong hash chain, move it.  */
2046 
2047   for (i = 0; i < HASH_SIZE; i++)
2048     for (p = table[i]; p; p = next)
2049       {
2050 	next = p->next_same_hash;
2051 	if (reg_mentioned_p (x, p->exp)
2052 	    && exp_equiv_p (p->exp, p->exp, 1, false)
2053 	    && i != (hash = SAFE_HASH (p->exp, p->mode)))
2054 	  {
2055 	    if (p->next_same_hash)
2056 	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
2057 
2058 	    if (p->prev_same_hash)
2059 	      p->prev_same_hash->next_same_hash = p->next_same_hash;
2060 	    else
2061 	      table[i] = p->next_same_hash;
2062 
2063 	    p->next_same_hash = table[hash];
2064 	    p->prev_same_hash = 0;
2065 	    if (table[hash])
2066 	      table[hash]->prev_same_hash = p;
2067 	    table[hash] = p;
2068 	  }
2069       }
2070 }
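/* For example, once a jump establishes that (reg 80) equals some other
   quantity, the hash of any valid entry mentioning (reg 80), such as
   (plus (reg 80) (reg 81)), changes because registers hash by quantity
   number; such entries are moved to their new chains here.  */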
2071 
2072 /* Remove from the hash table any expression that is a call-clobbered
2073    register.  Also update their TICK values.  */
2074 
2075 static void
2076 invalidate_for_call (void)
2077 {
2078   unsigned int regno, endregno;
2079   unsigned int i;
2080   unsigned hash;
2081   struct table_elt *p, *next;
2082   int in_table = 0;
2083   hard_reg_set_iterator hrsi;
2084 
2085   /* Go through all the hard registers.  For each that is clobbered in
2086      a CALL_INSN, remove the register from quantity chains and update
2087      reg_tick if defined.  Also see if any of these registers is currently
2088      in the table.  */
2089   EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
2090     {
2091       delete_reg_equiv (regno);
2092       if (REG_TICK (regno) >= 0)
2093 	{
2094 	  REG_TICK (regno)++;
2095 	  SUBREG_TICKED (regno) = -1;
2096 	}
2097       in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2098     }
2099 
2100   /* In the case where we have no call-clobbered hard registers in the
2101      table, we are done.  Otherwise, scan the table and remove any
2102      entry that overlaps a call-clobbered register.  */
2103 
2104   if (in_table)
2105     for (hash = 0; hash < HASH_SIZE; hash++)
2106       for (p = table[hash]; p; p = next)
2107 	{
2108 	  next = p->next_same_hash;
2109 
2110 	  if (!REG_P (p->exp)
2111 	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2112 	    continue;
2113 
2114 	  regno = REGNO (p->exp);
2115 	  endregno = END_REGNO (p->exp);
2116 
2117 	  for (i = regno; i < endregno; i++)
2118 	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2119 	      {
2120 		remove_from_table (p, hash);
2121 		break;
2122 	      }
2123 	}
2124 }
2125 
2126 /* Given an expression X of type CONST,
2127    and ELT which is its table entry (or 0 if it
2128    is not in the hash table),
2129    return an alternate expression for X as a register plus integer.
2130    If none can be found, return 0.  */
2131 
2132 static rtx
2133 use_related_value (rtx x, struct table_elt *elt)
2134 {
2135   struct table_elt *relt = 0;
2136   struct table_elt *p, *q;
2137   HOST_WIDE_INT offset;
2138 
2139   /* First, is there anything related known?
2140      If we have a table element, we can tell from that.
2141      Otherwise, we must look it up.  */
2142 
2143   if (elt != 0 && elt->related_value != 0)
2144     relt = elt;
2145   else if (elt == 0 && GET_CODE (x) == CONST)
2146     {
2147       rtx subexp = get_related_value (x);
2148       if (subexp != 0)
2149 	relt = lookup (subexp,
2150 		       SAFE_HASH (subexp, GET_MODE (subexp)),
2151 		       GET_MODE (subexp));
2152     }
2153 
2154   if (relt == 0)
2155     return 0;
2156 
2157   /* Search all related table entries for one that has an
2158      equivalent register.  */
2159 
2160   p = relt;
2161   while (1)
2162     {
2163       /* This loop is strange in that it is executed in two different cases.
2164 	 The first is when X is already in the table.  Then it is searching
2165 	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2166 	 X is not in the table.  Then RELT points to a class for the related
2167 	 value.
2168 
2169 	 Ensure that, whatever case we are in, we ignore classes that have
2170 	 the same value as X.  */
2171 
2172       if (rtx_equal_p (x, p->exp))
2173 	q = 0;
2174       else
2175 	for (q = p->first_same_value; q; q = q->next_same_value)
2176 	  if (REG_P (q->exp))
2177 	    break;
2178 
2179       if (q)
2180 	break;
2181 
2182       p = p->related_value;
2183 
2184       /* We went all the way around, so there is nothing to be found.
2185 	 Alternatively, perhaps RELT was in the table for some other reason
2186 	 and it has no related values recorded.  */
2187       if (p == relt || p == 0)
2188 	break;
2189     }
2190 
2191   if (q == 0)
2192     return 0;
2193 
2194   offset = (get_integer_term (x) - get_integer_term (p->exp));
2195   /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2196   return plus_constant (q->mode, q->exp, offset);
2197 }
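/* Worked example (hypothetical values): if X is
   (const (plus (symbol_ref "s") (const_int 12))) and the table records
   (reg 7) as equivalent to (const (plus (symbol_ref "s") (const_int 4))),
   the integer terms differ by 8 and the result is
   (plus (reg 7) (const_int 8)).  */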
2198 
2199 
2200 /* Hash a string.  Just add its bytes up.  */
2201 static inline unsigned
2202 hash_rtx_string (const char *ps)
2203 {
2204   unsigned hash = 0;
2205   const unsigned char *p = (const unsigned char *) ps;
2206 
2207   if (p)
2208     while (*p)
2209       hash += *p++;
2210 
2211   return hash;
2212 }
2213 
2214 /* Same as hash_rtx, but if CB is not NULL, call it on each rtx.
2215    When the callback returns true, continue with the new rtx and mode.  */
2216 
2217 unsigned
2218 hash_rtx_cb (const_rtx x, machine_mode mode,
2219              int *do_not_record_p, int *hash_arg_in_memory_p,
2220              bool have_reg_qty, hash_rtx_callback_function cb)
2221 {
2222   int i, j;
2223   unsigned hash = 0;
2224   enum rtx_code code;
2225   const char *fmt;
2226   machine_mode newmode;
2227   rtx newx;
2228 
2229   /* Used to turn recursion into iteration.  We can't rely on GCC's
2230      tail-recursion elimination since we need to keep accumulating values
2231      in HASH.  */
2232  repeat:
2233   if (x == 0)
2234     return hash;
2235 
2236   /* Invoke the callback first.  */
2237   if (cb != NULL
2238       && ((*cb) (x, mode, &newx, &newmode)))
2239     {
2240       hash += hash_rtx_cb (newx, newmode, do_not_record_p,
2241                            hash_arg_in_memory_p, have_reg_qty, cb);
2242       return hash;
2243     }
2244 
2245   code = GET_CODE (x);
2246   switch (code)
2247     {
2248     case REG:
2249       {
2250 	unsigned int regno = REGNO (x);
2251 
2252 	if (do_not_record_p && !reload_completed)
2253 	  {
2254 	    /* On some machines, we can't record any non-fixed hard register,
2255 	       because extending its life will cause reload problems.  We
2256 	       consider ap, fp, sp, gp to be fixed for this purpose.
2257 
2258 	       We also consider CCmode registers to be fixed for this purpose;
2259 	       failure to do so leads to failure to simplify 0<100 type of
2260 	       conditionals.
2261 
2262 	       On all machines, we can't record any global registers.
2263 	       Nor should we record any register that is in a small
2264 	       class, as defined by TARGET_CLASS_LIKELY_SPILLED_P.  */
2265 	    bool record;
2266 
2267 	    if (regno >= FIRST_PSEUDO_REGISTER)
2268 	      record = true;
2269 	    else if (x == frame_pointer_rtx
2270 		     || x == hard_frame_pointer_rtx
2271 		     || x == arg_pointer_rtx
2272 		     || x == stack_pointer_rtx
2273 		     || x == pic_offset_table_rtx)
2274 	      record = true;
2275 	    else if (global_regs[regno])
2276 	      record = false;
2277 	    else if (fixed_regs[regno])
2278 	      record = true;
2279 	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2280 	      record = true;
2281 	    else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
2282 	      record = false;
2283 	    else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
2284 	      record = false;
2285 	    else
2286 	      record = true;
2287 
2288 	    if (!record)
2289 	      {
2290 		*do_not_record_p = 1;
2291 		return 0;
2292 	      }
2293 	  }
2294 
2295 	hash += ((unsigned int) REG << 7);
2296 	hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2297 	return hash;
2298       }
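      /* Consequently, when HAVE_REG_QTY is true, two registers currently
	 in the same quantity (hence known to hold the same value) receive
	 identical hash codes.  */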
2299 
2300     /* We handle SUBREG of a REG specially because the underlying
2301        reg changes its hash value with every value change; we don't
2302        want to have to forget unrelated subregs when one subreg changes.  */
2303     case SUBREG:
2304       {
2305 	if (REG_P (SUBREG_REG (x)))
2306 	  {
2307 	    hash += (((unsigned int) SUBREG << 7)
2308 		     + REGNO (SUBREG_REG (x))
2309 		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2310 	    return hash;
2311 	  }
2312 	break;
2313       }
2314 
2315     case CONST_INT:
2316       hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2317                + (unsigned int) INTVAL (x));
2318       return hash;
2319 
2320     case CONST_WIDE_INT:
2321       for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
2322 	hash += CONST_WIDE_INT_ELT (x, i);
2323       return hash;
2324 
2325     case CONST_DOUBLE:
2326       /* This is like the general case, except that it only counts
2327 	 the integers representing the constant.  */
2328       hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2329       if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
2330 	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2331 		 + (unsigned int) CONST_DOUBLE_HIGH (x));
2332       else
2333 	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2334       return hash;
2335 
2336     case CONST_FIXED:
2337       hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2338       hash += fixed_hash (CONST_FIXED_VALUE (x));
2339       return hash;
2340 
2341     case CONST_VECTOR:
2342       {
2343 	int units;
2344 	rtx elt;
2345 
2346 	units = CONST_VECTOR_NUNITS (x);
2347 
2348 	for (i = 0; i < units; ++i)
2349 	  {
2350 	    elt = CONST_VECTOR_ELT (x, i);
2351 	    hash += hash_rtx_cb (elt, GET_MODE (elt),
2352                                  do_not_record_p, hash_arg_in_memory_p,
2353                                  have_reg_qty, cb);
2354 	  }
2355 
2356 	return hash;
2357       }
2358 
2359       /* Assume there is only one rtx object for any given label.  */
2360     case LABEL_REF:
2361       /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2362 	 differences and differences between each stage's debugging dumps.  */
2363       hash += (((unsigned int) LABEL_REF << 7)
2364 	       + CODE_LABEL_NUMBER (label_ref_label (x)));
2365       return hash;
2366 
2367     case SYMBOL_REF:
2368       {
2369 	/* Don't hash on the symbol's address to avoid bootstrap differences.
2370 	   Different hash values may cause expressions to be recorded in
2371 	   different orders and thus different registers to be used in the
2372 	   final assembler.  This also avoids differences in the dump files
2373 	   between various stages.  */
2374 	unsigned int h = 0;
2375 	const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2376 
2377 	while (*p)
2378 	  h += (h << 7) + *p++; /* ??? revisit */
2379 
2380 	hash += ((unsigned int) SYMBOL_REF << 7) + h;
2381 	return hash;
2382       }
2383 
2384     case MEM:
2385       /* We don't record if marked volatile or if BLKmode since we don't
2386 	 know the size of the move.  */
2387       if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2388 	{
2389 	  *do_not_record_p = 1;
2390 	  return 0;
2391 	}
2392       if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2393 	*hash_arg_in_memory_p = 1;
2394 
2395       /* Now that we have already found this special case,
2396 	 might as well speed it up as much as possible.  */
2397       hash += (unsigned) MEM;
2398       x = XEXP (x, 0);
2399       goto repeat;
2400 
2401     case USE:
2402       /* A USE that mentions non-volatile memory needs special
2403 	 handling since the MEM may be BLKmode which normally
2404 	 prevents an entry from being made.  Pure calls are
2405 	 marked by a USE which mentions BLKmode memory.
2406 	 See calls.c:emit_call_1.  */
2407       if (MEM_P (XEXP (x, 0))
2408 	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
2409 	{
2410 	  hash += (unsigned) USE;
2411 	  x = XEXP (x, 0);
2412 
2413 	  if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2414 	    *hash_arg_in_memory_p = 1;
2415 
2416 	  /* Now that we have already found this special case,
2417 	     might as well speed it up as much as possible.  */
2418 	  hash += (unsigned) MEM;
2419 	  x = XEXP (x, 0);
2420 	  goto repeat;
2421 	}
2422       break;
2423 
2424     case PRE_DEC:
2425     case PRE_INC:
2426     case POST_DEC:
2427     case POST_INC:
2428     case PRE_MODIFY:
2429     case POST_MODIFY:
2430     case PC:
2431     case CC0:
2432     case CALL:
2433     case UNSPEC_VOLATILE:
2434       if (do_not_record_p)
2435 	{
2436 	  *do_not_record_p = 1;
2437 	  return 0;
2438 	}
2439       else
2440 	return hash;
2441 
2442     case ASM_OPERANDS:
2443       if (do_not_record_p && MEM_VOLATILE_P (x))
2444 	{
2445 	  *do_not_record_p = 1;
2446 	  return 0;
2447 	}
2448       else
2449 	{
2450 	  /* We don't want to take the filename and line into account.  */
2451 	  hash += (unsigned) code + (unsigned) GET_MODE (x)
2452 	    + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2453 	    + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2454 	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2455 
2456 	  if (ASM_OPERANDS_INPUT_LENGTH (x))
2457 	    {
2458 	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2459 		{
2460 		  hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
2461                                         GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2462                                         do_not_record_p, hash_arg_in_memory_p,
2463                                         have_reg_qty, cb)
2464 			   + hash_rtx_string
2465                            (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2466 		}
2467 
2468 	      hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2469 	      x = ASM_OPERANDS_INPUT (x, 0);
2470 	      mode = GET_MODE (x);
2471 	      goto repeat;
2472 	    }
2473 
2474 	  return hash;
2475 	}
2476       break;
2477 
2478     default:
2479       break;
2480     }
2481 
2482   i = GET_RTX_LENGTH (code) - 1;
2483   hash += (unsigned) code + (unsigned) GET_MODE (x);
2484   fmt = GET_RTX_FORMAT (code);
2485   for (; i >= 0; i--)
2486     {
2487       switch (fmt[i])
2488 	{
2489 	case 'e':
2490 	  /* If we are about to do the last recursive call
2491 	     needed at this level, change it into iteration.
2492 	     This function is called enough to be worth it.  */
2493 	  if (i == 0)
2494 	    {
2495 	      x = XEXP (x, i);
2496 	      goto repeat;
2497 	    }
2498 
2499 	  hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
2500                                hash_arg_in_memory_p,
2501                                have_reg_qty, cb);
2502 	  break;
2503 
2504 	case 'E':
2505 	  for (j = 0; j < XVECLEN (x, i); j++)
2506 	    hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
2507                                  hash_arg_in_memory_p,
2508                                  have_reg_qty, cb);
2509 	  break;
2510 
2511 	case 's':
2512 	  hash += hash_rtx_string (XSTR (x, i));
2513 	  break;
2514 
2515 	case 'i':
2516 	  hash += (unsigned int) XINT (x, i);
2517 	  break;
2518 
2519 	case '0': case 't':
2520 	  /* Unused.  */
2521 	  break;
2522 
2523 	default:
2524 	  gcc_unreachable ();
2525 	}
2526     }
2527 
2528   return hash;
2529 }
2530 
2531 /* Hash an rtx.  We are careful to make sure the value is never negative.
2532    Equivalent registers hash identically.
2533    MODE is used in hashing for CONST_INTs only;
2534    otherwise the mode of X is used.
2535 
2536    Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2537 
2538    If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2539    a MEM rtx which does not have the MEM_READONLY_P flag set.
2540 
2541    Note that cse_insn knows that the hash code of a MEM expression
2542    is just (int) MEM plus the hash code of the address.  */
2543 
2544 unsigned
2545 hash_rtx (const_rtx x, machine_mode mode, int *do_not_record_p,
2546 	  int *hash_arg_in_memory_p, bool have_reg_qty)
2547 {
2548   return hash_rtx_cb (x, mode, do_not_record_p,
2549                       hash_arg_in_memory_p, have_reg_qty, NULL);
2550 }
2551 
2552 /* Hash an rtx X for cse via hash_rtx.
2553    Stores 1 in do_not_record if any subexpression is volatile.
2554    Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2555    does not have the MEM_READONLY_P flag set.  */
2556 
2557 static inline unsigned
2558 canon_hash (rtx x, machine_mode mode)
2559 {
2560   return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2561 }
2562 
2563 /* Like canon_hash but with no side effects, i.e. do_not_record
2564    and hash_arg_in_memory are not changed.  */
2565 
2566 static inline unsigned
2567 safe_hash (rtx x, machine_mode mode)
2568 {
2569   int dummy_do_not_record;
2570   return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2571 }
2572 
2573 /* Return 1 iff X and Y would canonicalize into the same thing,
2574    without actually constructing the canonicalization of either one.
2575    If VALIDATE is nonzero,
2576    we assume X is an expression being processed from the rtl
2577    and Y was found in the hash table.  We check register refs
2578    in Y for being marked as valid.
2579 
2580    If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */
2581 
2582 int
2583 exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
2584 {
2585   int i, j;
2586   enum rtx_code code;
2587   const char *fmt;
2588 
2589   /* Note: it is incorrect to assume an expression is equivalent to itself
2590      if VALIDATE is nonzero.  */
2591   if (x == y && !validate)
2592     return 1;
2593 
2594   if (x == 0 || y == 0)
2595     return x == y;
2596 
2597   code = GET_CODE (x);
2598   if (code != GET_CODE (y))
2599     return 0;
2600 
2601   /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2602   if (GET_MODE (x) != GET_MODE (y))
2603     return 0;
2604 
2605   /* MEMs referring to different address space are not equivalent.  */
2606   if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2607     return 0;
2608 
2609   switch (code)
2610     {
2611     case PC:
2612     case CC0:
2613     CASE_CONST_UNIQUE:
2614       return x == y;
2615 
2616     case LABEL_REF:
2617       return label_ref_label (x) == label_ref_label (y);
2618 
2619     case SYMBOL_REF:
2620       return XSTR (x, 0) == XSTR (y, 0);
2621 
2622     case REG:
2623       if (for_gcse)
2624 	return REGNO (x) == REGNO (y);
2625       else
2626 	{
2627 	  unsigned int regno = REGNO (y);
2628 	  unsigned int i;
2629 	  unsigned int endregno = END_REGNO (y);
2630 
2631 	  /* If the quantities are not the same, the expressions are not
2632 	     equivalent.  If they are and we are not to validate, they
2633 	     are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2634 
2635 	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2636 	    return 0;
2637 
2638 	  if (! validate)
2639 	    return 1;
2640 
2641 	  for (i = regno; i < endregno; i++)
2642 	    if (REG_IN_TABLE (i) != REG_TICK (i))
2643 	      return 0;
2644 
2645 	  return 1;
2646 	}
2647 
2648     case MEM:
2649       if (for_gcse)
2650 	{
2651 	  /* A volatile mem should not be considered equivalent to any
2652 	     other.  */
2653 	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2654 	    return 0;
2655 
2656 	  /* Can't merge two expressions in different alias sets, since we
2657 	     can decide that the expression is transparent in a block when
2658 	     it isn't, due to it being set with the different alias set.
2659 
2660 	     Also, can't merge two expressions with different MEM_ATTRS.
2661 	     They could e.g. be two different entities allocated into the
2662 	     same space on the stack (see e.g. PR25130).  In that case, the
2663 	     MEM addresses can be the same, even though the two MEMs are
2664 	     absolutely not equivalent.
2665 
2666 	     But because really all MEM attributes should be the same for
2667 	     equivalent MEMs, we just use the invariant that MEMs that have
2668 	     the same attributes share the same mem_attrs data structure.  */
2669 	  if (!mem_attrs_eq_p (MEM_ATTRS (x), MEM_ATTRS (y)))
2670 	    return 0;
2671 
2672 	  /* If we are handling exceptions, we cannot consider two expressions
2673 	     with different trapping status as equivalent, because simple_mem
2674 	     might accept one and reject the other.  */
2675 	  if (cfun->can_throw_non_call_exceptions
2676 	      && (MEM_NOTRAP_P (x) != MEM_NOTRAP_P (y)))
2677 	    return 0;
2678 	}
2679       break;
2680 
2681     /*  For commutative operations, check both orders.  */
2682     case PLUS:
2683     case MULT:
2684     case AND:
2685     case IOR:
2686     case XOR:
2687     case NE:
2688     case EQ:
2689       return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2690 			     validate, for_gcse)
2691 	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2692 				validate, for_gcse))
2693 	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2694 				validate, for_gcse)
2695 		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2696 				   validate, for_gcse)));
2697 
2698     case ASM_OPERANDS:
2699       /* We don't use the generic code below because we want to
2700 	 disregard filename and line numbers.  */
2701 
2702       /* A volatile asm isn't equivalent to any other.  */
2703       if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2704 	return 0;
2705 
2706       if (GET_MODE (x) != GET_MODE (y)
2707 	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2708 	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2709 		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2710 	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2711 	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2712 	return 0;
2713 
2714       if (ASM_OPERANDS_INPUT_LENGTH (x))
2715 	{
2716 	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2717 	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2718 			       ASM_OPERANDS_INPUT (y, i),
2719 			       validate, for_gcse)
2720 		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2721 			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2722 	      return 0;
2723 	}
2724 
2725       return 1;
2726 
2727     default:
2728       break;
2729     }
2730 
2731   /* Compare the elements.  If any pair of corresponding elements
2732      fail to match, return 0 for the whole thing.  */
2733 
2734   fmt = GET_RTX_FORMAT (code);
2735   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2736     {
2737       switch (fmt[i])
2738 	{
2739 	case 'e':
2740 	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2741 			      validate, for_gcse))
2742 	    return 0;
2743 	  break;
2744 
2745 	case 'E':
2746 	  if (XVECLEN (x, i) != XVECLEN (y, i))
2747 	    return 0;
2748 	  for (j = 0; j < XVECLEN (x, i); j++)
2749 	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2750 				validate, for_gcse))
2751 	      return 0;
2752 	  break;
2753 
2754 	case 's':
2755 	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2756 	    return 0;
2757 	  break;
2758 
2759 	case 'i':
2760 	  if (XINT (x, i) != XINT (y, i))
2761 	    return 0;
2762 	  break;
2763 
2764 	case 'w':
2765 	  if (XWINT (x, i) != XWINT (y, i))
2766 	    return 0;
2767 	  break;
2768 
2769 	case '0':
2770 	case 't':
2771 	  break;
2772 
2773 	default:
2774 	  gcc_unreachable ();
2775 	}
2776     }
2777 
2778   return 1;
2779 }
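/* For instance, (plus (reg 5) (const_int 1)) and
   (plus (const_int 1) (reg 5)) compare equal via the commutative case
   above, provided the register operands pass the quantity check.  */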
2780 
2781 /* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
2782    the result if necessary.  INSN is as for canon_reg.  */
2783 
2784 static void
2785 validate_canon_reg (rtx *xloc, rtx_insn *insn)
2786 {
2787   if (*xloc)
2788     {
2789       rtx new_rtx = canon_reg (*xloc, insn);
2790 
2791       /* If replacing pseudo with hard reg or vice versa, ensure the
2792          insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2793       gcc_assert (insn && new_rtx);
2794       validate_change (insn, xloc, new_rtx, 1);
2795     }
2796 }
2797 
2798 /* Canonicalize an expression:
2799    replace each register reference inside it
2800    with the "oldest" equivalent register.
2801 
2802    If INSN is nonzero validate_change is used to ensure that INSN remains valid
2803    after we make our substitution.  The calls are made with IN_GROUP nonzero
2804    so apply_change_group must be called upon the outermost return from this
2805    function (unless INSN is zero).  The result of apply_change_group can
2806    generally be discarded since the changes we are making are optional.  */
2807 
2808 static rtx
2809 canon_reg (rtx x, rtx_insn *insn)
2810 {
2811   int i;
2812   enum rtx_code code;
2813   const char *fmt;
2814 
2815   if (x == 0)
2816     return x;
2817 
2818   code = GET_CODE (x);
2819   switch (code)
2820     {
2821     case PC:
2822     case CC0:
2823     case CONST:
2824     CASE_CONST_ANY:
2825     case SYMBOL_REF:
2826     case LABEL_REF:
2827     case ADDR_VEC:
2828     case ADDR_DIFF_VEC:
2829       return x;
2830 
2831     case REG:
2832       {
2833 	int first;
2834 	int q;
2835 	struct qty_table_elem *ent;
2836 
2837 	/* Never replace a hard reg, because hard regs can appear
2838 	   in more than one machine mode, and we must preserve the mode
2839 	   of each occurrence.  Also, some hard regs appear in
2840 	   MEMs that are shared and mustn't be altered.  Don't try to
2841 	   replace any reg that maps to a reg of class NO_REGS.  */
2842 	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2843 	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2844 	  return x;
2845 
2846 	q = REG_QTY (REGNO (x));
2847 	ent = &qty_table[q];
2848 	first = ent->first_reg;
2849 	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2850 		: REGNO_REG_CLASS (first) == NO_REGS ? x
2851 		: gen_rtx_REG (ent->mode, first));
2852       }
2853 
2854     default:
2855       break;
2856     }
2857 
2858   fmt = GET_RTX_FORMAT (code);
2859   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2860     {
2861       int j;
2862 
2863       if (fmt[i] == 'e')
2864 	validate_canon_reg (&XEXP (x, i), insn);
2865       else if (fmt[i] == 'E')
2866 	for (j = 0; j < XVECLEN (x, i); j++)
2867 	  validate_canon_reg (&XVECEXP (x, i, j), insn);
2868     }
2869 
2870   return x;
2871 }
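/* E.g. if (reg 120) was copied from (reg 100) and both still share a
   quantity, canon_reg rewrites uses of (reg 120) as (reg 100), the
   oldest (first) register recorded for that quantity.  */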
2872 
2873 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2874    operation (EQ, NE, GT, etc.), follow it back through the hash table
2875    to find what values are actually being compared.
2876 
2877    *PARG1 and *PARG2 are updated to contain the rtx representing the values
2878    actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
2879    was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2880    compared to produce cc0.
2881 
2882    The return value is the comparison operator: either the code of the
2883    comparison found or the code corresponding to its inverse.  */
2884 
2885 static enum rtx_code
2886 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
2887 		      machine_mode *pmode1, machine_mode *pmode2)
2888 {
2889   rtx arg1, arg2;
2890   hash_set<rtx> *visited = NULL;
2891   /* Set nonzero when we find something of interest.  */
2892   rtx x = NULL;
2893 
2894   arg1 = *parg1, arg2 = *parg2;
2895 
2896   /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
2897 
2898   while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2899     {
2900       int reverse_code = 0;
2901       struct table_elt *p = 0;
2902 
2903       /* Remember state from previous iteration.  */
2904       if (x)
2905 	{
2906 	  if (!visited)
2907 	    visited = new hash_set<rtx>;
2908 	  visited->add (x);
2909 	  x = 0;
2910 	}
2911 
2912       /* If arg1 is a COMPARE, extract the comparison arguments from it.
2913 	 On machines with CC0, this is the only case that can occur, since
2914 	 fold_rtx will return the COMPARE or item being compared with zero
2915 	 when given CC0.  */
2916 
2917       if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2918 	x = arg1;
2919 
2920       /* If ARG1 is a comparison operator and CODE is testing for
2921 	 STORE_FLAG_VALUE, get the inner arguments.  */
2922 
2923       else if (COMPARISON_P (arg1))
2924 	{
2925 #ifdef FLOAT_STORE_FLAG_VALUE
2926 	  REAL_VALUE_TYPE fsfv;
2927 #endif
2928 
2929 	  if (code == NE
2930 	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2931 		  && code == LT && STORE_FLAG_VALUE == -1)
2932 #ifdef FLOAT_STORE_FLAG_VALUE
2933 	      || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
2934 		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2935 		      REAL_VALUE_NEGATIVE (fsfv)))
2936 #endif
2937 	      )
2938 	    x = arg1;
2939 	  else if (code == EQ
2940 		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2941 		       && code == GE && STORE_FLAG_VALUE == -1)
2942 #ifdef FLOAT_STORE_FLAG_VALUE
2943 		   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
2944 		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2945 			   REAL_VALUE_NEGATIVE (fsfv)))
2946 #endif
2947 		   )
2948 	    x = arg1, reverse_code = 1;
2949 	}
2950 
2951       /* ??? We could also check for
2952 
2953 	 (ne (and (eq (...) (const_int 1))) (const_int 0))
2954 
2955 	 and related forms, but let's wait until we see them occurring.  */
2956 
2957       if (x == 0)
2958 	/* Look up ARG1 in the hash table and see if it has an equivalence
2959 	   that lets us see what is being compared.  */
2960 	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
2961       if (p)
2962 	{
2963 	  p = p->first_same_value;
2964 
2965 	  /* If what we compare is already known to be constant, that is as
2966 	     good as it gets.
2967 	     We need to break the loop in this case, because otherwise we
2968 	     can have an infinite loop when looking at a reg that is known
2969 	     to be a constant which is the same as a comparison of a reg
2970 	     against zero which appears later in the insn stream, which in
2971 	     turn is constant and the same as the comparison of the first reg
2972 	     against zero...  */
2973 	  if (p->is_const)
2974 	    break;
2975 	}
2976 
2977       for (; p; p = p->next_same_value)
2978 	{
2979 	  machine_mode inner_mode = GET_MODE (p->exp);
2980 #ifdef FLOAT_STORE_FLAG_VALUE
2981 	  REAL_VALUE_TYPE fsfv;
2982 #endif
2983 
2984 	  /* If the entry isn't valid, skip it.  */
2985 	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
2986 	    continue;
2987 
2988 	  /* If it's a comparison we've used before, skip it.  */
2989 	  if (visited && visited->contains (p->exp))
2990 	    continue;
2991 
2992 	  if (GET_CODE (p->exp) == COMPARE
2993 	      /* Another possibility is that this machine has a compare insn
2994 		 that includes the comparison code.  In that case, ARG1 would
2995 		 be equivalent to a comparison operation that would set ARG1 to
2996 		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
2997 		 ORIG_CODE is the actual comparison being done; if it is an EQ,
2998 		 we must reverse ORIG_CODE.  On machines with a negative value
2999 		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3000 	      || ((code == NE
3001 		   || (code == LT
3002 		       && val_signbit_known_set_p (inner_mode,
3003 						   STORE_FLAG_VALUE))
3004 #ifdef FLOAT_STORE_FLAG_VALUE
3005 		   || (code == LT
3006 		       && SCALAR_FLOAT_MODE_P (inner_mode)
3007 		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3008 			   REAL_VALUE_NEGATIVE (fsfv)))
3009 #endif
3010 		   )
3011 		  && COMPARISON_P (p->exp)))
3012 	    {
3013 	      x = p->exp;
3014 	      break;
3015 	    }
3016 	  else if ((code == EQ
3017 		    || (code == GE
3018 			&& val_signbit_known_set_p (inner_mode,
3019 						    STORE_FLAG_VALUE))
3020 #ifdef FLOAT_STORE_FLAG_VALUE
3021 		    || (code == GE
3022 			&& SCALAR_FLOAT_MODE_P (inner_mode)
3023 			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3024 			    REAL_VALUE_NEGATIVE (fsfv)))
3025 #endif
3026 		    )
3027 		   && COMPARISON_P (p->exp))
3028 	    {
3029 	      reverse_code = 1;
3030 	      x = p->exp;
3031 	      break;
3032 	    }
3033 
3034 	  /* If this is a non-trapping address, e.g. fp + constant, the
3035 	     equivalent is a better operand since it may let us predict
3036 	     the value of the comparison.  */
3037 	  else if (!rtx_addr_can_trap_p (p->exp))
3038 	    {
3039 	      arg1 = p->exp;
3040 	      continue;
3041 	    }
3042 	}
3043 
3044       /* If we didn't find a useful equivalence for ARG1, we are done.
3045 	 Otherwise, set up for the next iteration.  */
3046       if (x == 0)
3047 	break;
3048 
3049       /* If we need to reverse the comparison, make sure that is
3050 	 possible -- we can't necessarily infer the value of GE from LT
3051 	 with floating-point operands.  */
3052       if (reverse_code)
3053 	{
3054 	  enum rtx_code reversed = reversed_comparison_code (x, NULL);
3055 	  if (reversed == UNKNOWN)
3056 	    break;
3057 	  else
3058 	    code = reversed;
3059 	}
3060       else if (COMPARISON_P (x))
3061 	code = GET_CODE (x);
3062       arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3063     }
3064 
3065   /* Return our results.  Return the modes from before fold_rtx
3066      because fold_rtx might produce const_int, and then it's too late.  */
3067   *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3068   *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3069 
3070   if (visited)
3071     delete visited;
3072   return code;
3073 }
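/* Illustration: given (eq (reg 90) (const_int 0)) where (reg 90) is
   known equivalent to (lt (reg 91) (reg 92)) in an integer mode, the EQ
   against zero selects the reversed comparison, so the function returns
   GE with *PARG1 = (reg 91) and *PARG2 = (reg 92).  */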
3074 
3075 /* If X is a nontrivial arithmetic operation on an argument for which
3076    a constant value can be determined, return the result of operating
3077    on that value, as a constant.  Otherwise, return X, possibly with
3078    one or more operands changed to a forward-propagated constant.
3079 
3080    If X is a register whose contents are known, we do NOT return
3081    those contents here; equiv_constant is called to perform that task.
3082    For SUBREGs and MEMs, we do that both here and in equiv_constant.
3083 
3084    INSN is the insn that we may be modifying.  If it is 0, make a copy
3085    of X before modifying it.  */
3086 
3087 static rtx
3088 fold_rtx (rtx x, rtx_insn *insn)
3089 {
3090   enum rtx_code code;
3091   machine_mode mode;
3092   const char *fmt;
3093   int i;
3094   rtx new_rtx = 0;
3095   int changed = 0;
3096 
3097   /* Operands of X.  */
3098   /* Workaround -Wmaybe-uninitialized false positive during
3099      profiledbootstrap by initializing them.  */
3100   rtx folded_arg0 = NULL_RTX;
3101   rtx folded_arg1 = NULL_RTX;
3102 
3103   /* Constant equivalents of first three operands of X;
3104      0 when no such equivalent is known.  */
3105   rtx const_arg0;
3106   rtx const_arg1;
3107   rtx const_arg2;
3108 
3109   /* The mode of the first operand of X.  We need this for sign and zero
3110      extends.  */
3111   machine_mode mode_arg0;
3112 
3113   if (x == 0)
3114     return x;
3115 
3116   /* Try to perform some initial simplifications on X.  */
3117   code = GET_CODE (x);
3118   switch (code)
3119     {
3120     case MEM:
3121     case SUBREG:
3122     /* The first operand of a SIGN/ZERO_EXTRACT has a different meaning
3123        than it would in other contexts.  Basically its mode does not
3124        signify the size of the object read.  That information is carried
3125        by the size operand.  If we happen to have a MEM of the appropriate
3126        mode in our tables with a constant value we could simplify the
3127        extraction incorrectly if we allowed substitution of that value
3128        for the MEM.  */
3129     case ZERO_EXTRACT:
3130     case SIGN_EXTRACT:
3131       if ((new_rtx = equiv_constant (x)) != NULL_RTX)
3132         return new_rtx;
3133       return x;
3134 
3135     case CONST:
3136     CASE_CONST_ANY:
3137     case SYMBOL_REF:
3138     case LABEL_REF:
3139     case REG:
3140     case PC:
3141       /* No use simplifying an EXPR_LIST
3142 	 since it is used only for lists of args
3143 	 in a function call's REG_EQUAL note.  */
3144     case EXPR_LIST:
3145       return x;
3146 
3147     case CC0:
3148       return prev_insn_cc0;
3149 
3150     case ASM_OPERANDS:
3151       if (insn)
3152 	{
3153 	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3154 	    validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3155 			     fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3156 	}
3157       return x;
3158 
3159     case CALL:
3160       if (NO_FUNCTION_CSE && CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3161 	return x;
3162       break;
3163 
3164     /* Anything else goes through the loop below.  */
3165     default:
3166       break;
3167     }
3168 
3169   mode = GET_MODE (x);
3170   const_arg0 = 0;
3171   const_arg1 = 0;
3172   const_arg2 = 0;
3173   mode_arg0 = VOIDmode;
3174 
3175   /* Try folding our operands.
3176      Then see which ones have constant values known.  */
3177 
3178   fmt = GET_RTX_FORMAT (code);
3179   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3180     if (fmt[i] == 'e')
3181       {
3182 	rtx folded_arg = XEXP (x, i), const_arg;
3183 	machine_mode mode_arg = GET_MODE (folded_arg);
3184 
3185 	switch (GET_CODE (folded_arg))
3186 	  {
3187 	  case MEM:
3188 	  case REG:
3189 	  case SUBREG:
3190 	    const_arg = equiv_constant (folded_arg);
3191 	    break;
3192 
3193 	  case CONST:
3194 	  CASE_CONST_ANY:
3195 	  case SYMBOL_REF:
3196 	  case LABEL_REF:
3197 	    const_arg = folded_arg;
3198 	    break;
3199 
3200 	  case CC0:
3201 	    /* The cc0-user and cc0-setter may be in different blocks if
3202 	       the cc0-setter potentially traps.  In that case PREV_INSN_CC0
3203 	       will have been cleared as we exited the block with the
3204 	       setter.
3205 
3206 	       While we could potentially track cc0 in this case, it just
3207 	       doesn't seem to be worth it given that cc0 targets are not
3208 	       terribly common or important these days and trapping math
3209 	       is rarely used.  The combination of those two conditions
3210 	       necessary to trip this situation is exceedingly rare in the
3211 	       real world.  */
3212 	    if (!prev_insn_cc0)
3213 	      {
3214 		const_arg = NULL_RTX;
3215 	      }
3216 	    else
3217 	      {
3218 		folded_arg = prev_insn_cc0;
3219 		mode_arg = prev_insn_cc0_mode;
3220 		const_arg = equiv_constant (folded_arg);
3221 	      }
3222 	    break;
3223 
3224 	  default:
3225 	    folded_arg = fold_rtx (folded_arg, insn);
3226 	    const_arg = equiv_constant (folded_arg);
3227 	    break;
3228 	  }
3229 
3230 	/* For the first three operands, see if the operand
3231 	   is constant or equivalent to a constant.  */
3232 	switch (i)
3233 	  {
3234 	  case 0:
3235 	    folded_arg0 = folded_arg;
3236 	    const_arg0 = const_arg;
3237 	    mode_arg0 = mode_arg;
3238 	    break;
3239 	  case 1:
3240 	    folded_arg1 = folded_arg;
3241 	    const_arg1 = const_arg;
3242 	    break;
3243 	  case 2:
3244 	    const_arg2 = const_arg;
3245 	    break;
3246 	  }
3247 
3248 	/* Pick the least expensive of the argument and an equivalent constant
3249 	   argument.  */
3250 	if (const_arg != 0
3251 	    && const_arg != folded_arg
3252 	    && (COST_IN (const_arg, mode_arg, code, i)
3253 		<= COST_IN (folded_arg, mode_arg, code, i))
3254 
3255 	    /* It's not safe to substitute the operand of a conversion
3256 	       operator with a constant, as the conversion's identity
3257 	       depends upon the mode of its operand.  This optimization
3258 	       is handled by the call to simplify_unary_operation.  */
3259 	    && (GET_RTX_CLASS (code) != RTX_UNARY
3260 		|| GET_MODE (const_arg) == mode_arg0
3261 		|| (code != ZERO_EXTEND
3262 		    && code != SIGN_EXTEND
3263 		    && code != TRUNCATE
3264 		    && code != FLOAT_TRUNCATE
3265 		    && code != FLOAT_EXTEND
3266 		    && code != FLOAT
3267 		    && code != FIX
3268 		    && code != UNSIGNED_FLOAT
3269 		    && code != UNSIGNED_FIX)))
3270 	  folded_arg = const_arg;
3271 
3272 	if (folded_arg == XEXP (x, i))
3273 	  continue;
3274 
3275 	if (insn == NULL_RTX && !changed)
3276 	  x = copy_rtx (x);
3277 	changed = 1;
3278 	validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
3279       }
3280 
3281   if (changed)
3282     {
3283       /* Canonicalize X if necessary, and keep const_argN and folded_argN
3284 	 consistent with the order in X.  */
3285       if (canonicalize_change_group (insn, x))
3286 	{
3287 	  std::swap (const_arg0, const_arg1);
3288 	  std::swap (folded_arg0, folded_arg1);
3289 	}
3290 
3291       apply_change_group ();
3292     }
3293 
3294   /* If X is an arithmetic operation, see if we can simplify it.  */
3295 
3296   switch (GET_RTX_CLASS (code))
3297     {
3298     case RTX_UNARY:
3299       {
3300 	/* We can't simplify extension ops unless we know the
3301 	   original mode.  */
3302 	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3303 	    && mode_arg0 == VOIDmode)
3304 	  break;
3305 
3306 	new_rtx = simplify_unary_operation (code, mode,
3307 					    const_arg0 ? const_arg0 : folded_arg0,
3308 					    mode_arg0);
3309       }
3310       break;
3311 
3312     case RTX_COMPARE:
3313     case RTX_COMM_COMPARE:
3314       /* See what items are actually being compared and set FOLDED_ARG[01]
3315 	 to those values and CODE to the actual comparison code.  If any are
3316 	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
3317 	 do anything if both operands are already known to be constant.  */
3318 
3319       /* ??? Vector mode comparisons are not supported yet.  */
3320       if (VECTOR_MODE_P (mode))
3321 	break;
3322 
3323       if (const_arg0 == 0 || const_arg1 == 0)
3324 	{
3325 	  struct table_elt *p0, *p1;
3326 	  rtx true_rtx, false_rtx;
3327 	  machine_mode mode_arg1;
3328 
3329 	  if (SCALAR_FLOAT_MODE_P (mode))
3330 	    {
3331 #ifdef FLOAT_STORE_FLAG_VALUE
3332 	      true_rtx = (const_double_from_real_value
3333 			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
3334 #else
3335 	      true_rtx = NULL_RTX;
3336 #endif
3337 	      false_rtx = CONST0_RTX (mode);
3338 	    }
3339 	  else
3340 	    {
3341 	      true_rtx = const_true_rtx;
3342 	      false_rtx = const0_rtx;
3343 	    }
3344 
3345 	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3346 				       &mode_arg0, &mode_arg1);
3347 
3348 	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3349 	     what kinds of things are being compared, so we can't do
3350 	     anything with this comparison.  */
3351 
3352 	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3353 	    break;
3354 
3355 	  const_arg0 = equiv_constant (folded_arg0);
3356 	  const_arg1 = equiv_constant (folded_arg1);
3357 
3358 	  /* If we do not now have two constants being compared, see
3359 	     if we can nevertheless deduce some things about the
3360 	     comparison.  */
3361 	  if (const_arg0 == 0 || const_arg1 == 0)
3362 	    {
3363 	      if (const_arg1 != NULL)
3364 		{
3365 		  rtx cheapest_simplification;
3366 		  int cheapest_cost;
3367 		  rtx simp_result;
3368 		  struct table_elt *p;
3369 
3370 		  /* See if we can find an equivalent of folded_arg0
3371 		     that gets us a cheaper expression, possibly a
3372 		     constant through simplifications.  */
3373 		  p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
3374 			      mode_arg0);
3375 
3376 		  if (p != NULL)
3377 		    {
3378 		      cheapest_simplification = x;
3379 		      cheapest_cost = COST (x, mode);
3380 
3381 		      for (p = p->first_same_value; p != NULL; p = p->next_same_value)
3382 			{
3383 			  int cost;
3384 
3385 			  /* If the entry isn't valid, skip it.  */
3386 			  if (! exp_equiv_p (p->exp, p->exp, 1, false))
3387 			    continue;
3388 
3389 			  /* Try to simplify using this equivalence.  */
3390 			  simp_result
3391 			    = simplify_relational_operation (code, mode,
3392 							     mode_arg0,
3393 							     p->exp,
3394 							     const_arg1);
3395 
3396 			  if (simp_result == NULL)
3397 			    continue;
3398 
3399 			  cost = COST (simp_result, mode);
3400 			  if (cost < cheapest_cost)
3401 			    {
3402 			      cheapest_cost = cost;
3403 			      cheapest_simplification = simp_result;
3404 			    }
3405 			}
3406 
3407 		      /* If we have a cheaper expression now, use that
3408 			 and try folding it further, from the top.  */
3409 		      if (cheapest_simplification != x)
3410 			return fold_rtx (copy_rtx (cheapest_simplification),
3411 					 insn);
3412 		    }
3413 		}
3414 
3415 	      /* See if the two operands are the same.  */
3416 
3417 	      if ((REG_P (folded_arg0)
3418 		   && REG_P (folded_arg1)
3419 		   && (REG_QTY (REGNO (folded_arg0))
3420 		       == REG_QTY (REGNO (folded_arg1))))
3421 		  || ((p0 = lookup (folded_arg0,
3422 				    SAFE_HASH (folded_arg0, mode_arg0),
3423 				    mode_arg0))
3424 		      && (p1 = lookup (folded_arg1,
3425 				       SAFE_HASH (folded_arg1, mode_arg0),
3426 				       mode_arg0))
3427 		      && p0->first_same_value == p1->first_same_value))
3428 		folded_arg1 = folded_arg0;
3429 
3430 	      /* If FOLDED_ARG0 is a register, see if the comparison we are
3431 		 doing now is either the same as we did before or the reverse
3432 		 (we only check the reverse if not floating-point).  */
3433 	      else if (REG_P (folded_arg0))
3434 		{
3435 		  int qty = REG_QTY (REGNO (folded_arg0));
3436 
3437 		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3438 		    {
3439 		      struct qty_table_elem *ent = &qty_table[qty];
3440 
3441 		      if ((comparison_dominates_p (ent->comparison_code, code)
3442 			   || (! FLOAT_MODE_P (mode_arg0)
3443 			       && comparison_dominates_p (ent->comparison_code,
3444 						          reverse_condition (code))))
3445 			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
3446 			      || (const_arg1
3447 				  && rtx_equal_p (ent->comparison_const,
3448 						  const_arg1))
3449 			      || (REG_P (folded_arg1)
3450 				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3451 			{
3452 			  if (comparison_dominates_p (ent->comparison_code, code))
3453 			    {
3454 			      if (true_rtx)
3455 				return true_rtx;
3456 			      else
3457 				break;
3458 			    }
3459 			  else
3460 			    return false_rtx;
3461 			}
3462 		    }
3463 		}
3464 	    }
3465 	}
3466 
3467       /* If we are comparing against zero, see if the first operand is
3468 	 equivalent to an IOR with a constant.  If so, we may be able to
3469 	 determine the result of this comparison.  */
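      /* E.g. if FOLDED_ARG0 is known to equal (ior Y (const_int 4)),
	 some bit of it is always set, so its comparison against zero
	 can be decided without knowing Y.  */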
3470       if (const_arg1 == const0_rtx && !const_arg0)
3471 	{
3472 	  rtx y = lookup_as_function (folded_arg0, IOR);
3473 	  rtx inner_const;
3474 
3475 	  if (y != 0
3476 	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3477 	      && CONST_INT_P (inner_const)
3478 	      && INTVAL (inner_const) != 0)
3479 	    folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
3480 	}
3481 
3482       {
3483 	rtx op0 = const_arg0 ? const_arg0 : copy_rtx (folded_arg0);
3484 	rtx op1 = const_arg1 ? const_arg1 : copy_rtx (folded_arg1);
3485 	new_rtx = simplify_relational_operation (code, mode, mode_arg0,
3486 						 op0, op1);
3487       }
3488       break;
3489 
3490     case RTX_BIN_ARITH:
3491     case RTX_COMM_ARITH:
3492       switch (code)
3493 	{
3494 	case PLUS:
3495 	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
3496 	     with that LABEL_REF as its second operand.  If so, the result is
3497 	     the first operand of that MINUS.  This handles switches with an
3498 	     ADDR_DIFF_VEC table.  */
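	  /* E.g. (plus (minus X (label_ref L)) (label_ref L))
	     folds to X here.  */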
3499 	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3500 	    {
3501 	      rtx y
3502 		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
3503 		: lookup_as_function (folded_arg0, MINUS);
3504 
3505 	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3506 		  && label_ref_label (XEXP (y, 1)) == label_ref_label (const_arg1))
3507 		return XEXP (y, 0);
3508 
3509 	      /* Now try for a CONST of a MINUS like the above.  */
3510 	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3511 			: lookup_as_function (folded_arg0, CONST))) != 0
3512 		  && GET_CODE (XEXP (y, 0)) == MINUS
3513 		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3514 		  && label_ref_label (XEXP (XEXP (y, 0), 1)) == label_ref_label (const_arg1))
3515 		return XEXP (XEXP (y, 0), 0);
3516 	    }
3517 
3518 	  /* Likewise if the operands are in the other order.  */
3519 	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3520 	    {
3521 	      rtx y
3522 		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
3523 		: lookup_as_function (folded_arg1, MINUS);
3524 
3525 	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3526 		  && label_ref_label (XEXP (y, 1)) == label_ref_label (const_arg0))
3527 		return XEXP (y, 0);
3528 
3529 	      /* Now try for a CONST of a MINUS like the above.  */
3530 	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3531 			: lookup_as_function (folded_arg1, CONST))) != 0
3532 		  && GET_CODE (XEXP (y, 0)) == MINUS
3533 		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3534 		  && label_ref_label (XEXP (XEXP (y, 0), 1)) == label_ref_label (const_arg0))
3535 		return XEXP (XEXP (y, 0), 0);
3536 	    }
3537 
3538 	  /* If second operand is a register equivalent to a negative
3539 	     CONST_INT, see if we can find a register equivalent to the
3540 	     positive constant.  Make a MINUS if so.  Don't do this for
3541 	     a non-negative constant since we might then alternate between
3542 	     choosing positive and negative constants.  Having the positive
3543 	     constant previously-used is the more common case.  Be sure
3544 	     the resulting constant is non-negative; if const_arg1 were
3545 	     the smallest negative number this would overflow: depending
3546 	     on the mode, this would either just be the same value (and
3547 	     hence not save anything) or be incorrect.  */
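	  /* E.g. if the second operand is a register holding
	     (const_int -4) and another register R is already known to
	     hold (const_int 4), the sum is rewritten as
	     (minus FOLDED_ARG0 R).  */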
3548 	  if (const_arg1 != 0 && CONST_INT_P (const_arg1)
3549 	      && INTVAL (const_arg1) < 0
3550 	      /* This used to test
3551 
3552 	         -INTVAL (const_arg1) >= 0
3553 
3554 		 But the Sun V5.0 compilers mis-compiled that test.  So
3555 		 instead we test for the problematic value in a more direct
3556 		 manner and hope the Sun compilers get it correct.  */
3557 	      && INTVAL (const_arg1) !=
3558 	        (HOST_WIDE_INT_1 << (HOST_BITS_PER_WIDE_INT - 1))
3559 	      && REG_P (folded_arg1))
3560 	    {
3561 	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
3562 	      struct table_elt *p
3563 		= lookup (new_const, SAFE_HASH (new_const, mode), mode);
3564 
3565 	      if (p)
3566 		for (p = p->first_same_value; p; p = p->next_same_value)
3567 		  if (REG_P (p->exp))
3568 		    return simplify_gen_binary (MINUS, mode, folded_arg0,
3569 						canon_reg (p->exp, NULL));
3570 	    }
3571 	  goto from_plus;
3572 
3573 	case MINUS:
3574 	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3575 	     If so, produce (PLUS Z C2-C).  */
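	  /* E.g. (minus Y (const_int 3)) with Y known to be
	     (plus Z (const_int 10)) folds to (plus Z (const_int 7)).  */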
3576 	  if (const_arg1 != 0 && CONST_INT_P (const_arg1))
3577 	    {
3578 	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3579 	      if (y && CONST_INT_P (XEXP (y, 1)))
3580 		return fold_rtx (plus_constant (mode, copy_rtx (y),
3581 						-INTVAL (const_arg1)),
3582 				 NULL);
3583 	    }
3584 
3585 	  /* Fall through.  */
3586 
3587 	from_plus:
3588 	case SMIN:    case SMAX:      case UMIN:    case UMAX:
3589 	case IOR:     case AND:       case XOR:
3590 	case MULT:
3591 	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
3592 	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3593 	     is known to be of similar form, we may be able to replace the
3594 	     operation with a combined operation.  This may eliminate the
3595 	     intermediate operation if every use is simplified in this way.
3596 	     Note that the similar optimization done by combine.c only works
3597 	     if the intermediate operation's result has only one reference.  */
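	  /* E.g. if REG is known to equal (ashift X (const_int 2)),
	     (ashift REG (const_int 3)) becomes (ashift X (const_int 5));
	     likewise (plus REG (const_int 8)) with REG equal to
	     (plus X (const_int 4)) becomes (plus X (const_int 12)).  */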
3598 
3599 	  if (REG_P (folded_arg0)
3600 	      && const_arg1 && CONST_INT_P (const_arg1))
3601 	    {
3602 	      int is_shift
3603 		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3604 	      rtx y, inner_const, new_const;
3605 	      rtx canon_const_arg1 = const_arg1;
3606 	      enum rtx_code associate_code;
3607 
3608 	      if (is_shift
3609 		  && (INTVAL (const_arg1) >= GET_MODE_PRECISION (mode)
3610 		      || INTVAL (const_arg1) < 0))
3611 		{
3612 		  if (SHIFT_COUNT_TRUNCATED)
3613 		    canon_const_arg1 = GEN_INT (INTVAL (const_arg1)
3614 						& (GET_MODE_BITSIZE (mode)
3615 						   - 1));
3616 		  else
3617 		    break;
3618 		}
3619 
3620 	      y = lookup_as_function (folded_arg0, code);
3621 	      if (y == 0)
3622 		break;
3623 
3624 	      /* If we have compiled a statement like
3625 		 "if (x == (x & mask1))", and now are looking at
3626 		 "x & mask2", we will have a case where the first operand
3627 		 of Y is the same as our first operand.  Unless we detect
3628 		 this case, an infinite loop will result.  */
3629 	      if (XEXP (y, 0) == folded_arg0)
3630 		break;
3631 
3632 	      inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
3633 	      if (!inner_const || !CONST_INT_P (inner_const))
3634 		break;
3635 
3636 	      /* Don't associate these operations if they are a PLUS with the
3637 		 same constant and it is a power of two.  These might be doable
3638 		 with a pre- or post-increment.  Similarly for two subtracts of
3639 		 identical powers of two with post decrement.  */
3640 
3641 	      if (code == PLUS && const_arg1 == inner_const
3642 		  && ((HAVE_PRE_INCREMENT
3643 			  && pow2p_hwi (INTVAL (const_arg1)))
3644 		      || (HAVE_POST_INCREMENT
3645 			  && pow2p_hwi (INTVAL (const_arg1)))
3646 		      || (HAVE_PRE_DECREMENT
3647 			  && pow2p_hwi (- INTVAL (const_arg1)))
3648 		      || (HAVE_POST_DECREMENT
3649 			  && pow2p_hwi (- INTVAL (const_arg1)))))
3650 		break;
3651 
3652 	      /* ??? Vector mode shifts by scalar
3653 		 shift operand are not supported yet.  */
3654 	      if (is_shift && VECTOR_MODE_P (mode))
3655 		break;
3656 
3657 	      if (is_shift
3658 		  && (INTVAL (inner_const) >= GET_MODE_PRECISION (mode)
3659 		      || INTVAL (inner_const) < 0))
3660 		{
3661 		  if (SHIFT_COUNT_TRUNCATED)
3662 		    inner_const = GEN_INT (INTVAL (inner_const)
3663 					   & (GET_MODE_BITSIZE (mode) - 1));
3664 		  else
3665 		    break;
3666 		}
3667 
3668 	      /* Compute the code used to compose the constants.  For example,
3669 		 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */
3670 
3671 	      associate_code = (is_shift || code == MINUS ? PLUS : code);
3672 
3673 	      new_const = simplify_binary_operation (associate_code, mode,
3674 						     canon_const_arg1,
3675 						     inner_const);
3676 
3677 	      if (new_const == 0)
3678 		break;
3679 
3680 	      /* If we are associating shift operations, don't let this
3681 		 produce a shift of the size of the object or larger.
3682 		 This could occur when we follow a sign-extend by a right
3683 		 shift on a machine that does a sign-extend as a pair
3684 		 of shifts.  */
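	      /* E.g. composing two SImode ASHIFTRTs by 20 each would
		 give a count of 40; the ASHIFTRT case below instead
		 shifts by 31, which produces the same result (every
		 bit a copy of the sign bit).  */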
3685 
3686 	      if (is_shift
3687 		  && CONST_INT_P (new_const)
3688 		  && INTVAL (new_const) >= GET_MODE_PRECISION (mode))
3689 		{
3690 		  /* As an exception, we can turn an ASHIFTRT of this
3691 		     form into a shift of the number of bits - 1.  */
3692 		  if (code == ASHIFTRT)
3693 		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
3694 		  else if (!side_effects_p (XEXP (y, 0)))
3695 		    return CONST0_RTX (mode);
3696 		  else
3697 		    break;
3698 		}
3699 
3700 	      y = copy_rtx (XEXP (y, 0));
3701 
3702 	      /* If Y contains our first operand (the most common way this
3703 		 can happen is if Y is a MEM), we would go into an infinite
3704 		 loop if we tried to fold it.  So don't in that case.  */
3705 
3706 	      if (! reg_mentioned_p (folded_arg0, y))
3707 		y = fold_rtx (y, insn);
3708 
3709 	      return simplify_gen_binary (code, mode, y, new_const);
3710 	    }
3711 	  break;
3712 
3713 	case DIV:       case UDIV:
3714 	  /* ??? The associative optimization performed immediately above is
3715 	     also possible for DIV and UDIV using associate_code of MULT.
3716 	     However, we would need extra code to verify that the
3717 	     multiplication does not overflow, that is, there is no overflow
3718 	     in the calculation of new_const.  */
3719 	  break;
3720 
3721 	default:
3722 	  break;
3723 	}
3724 
3725       new_rtx = simplify_binary_operation (code, mode,
3726 				       const_arg0 ? const_arg0 : folded_arg0,
3727 				       const_arg1 ? const_arg1 : folded_arg1);
3728       break;
3729 
3730     case RTX_OBJ:
3731       /* (lo_sum (high X) X) is simply X.  */
3732       if (code == LO_SUM && const_arg0 != 0
3733 	  && GET_CODE (const_arg0) == HIGH
3734 	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
3735 	return const_arg1;
3736       break;
3737 
3738     case RTX_TERNARY:
3739     case RTX_BITFIELD_OPS:
3740       new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
3741 					const_arg0 ? const_arg0 : folded_arg0,
3742 					const_arg1 ? const_arg1 : folded_arg1,
3743 					const_arg2 ? const_arg2 : XEXP (x, 2));
3744       break;
3745 
3746     default:
3747       break;
3748     }
3749 
3750   return new_rtx ? new_rtx : x;
3751 }
3752 
3753 /* Return a constant value currently equivalent to X.
3754    Return 0 if we don't know one.  */
3755 
3756 static rtx
3757 equiv_constant (rtx x)
3758 {
3759   if (REG_P (x)
3760       && REGNO_QTY_VALID_P (REGNO (x)))
3761     {
3762       int x_q = REG_QTY (REGNO (x));
3763       struct qty_table_elem *x_ent = &qty_table[x_q];
3764 
3765       if (x_ent->const_rtx)
3766 	x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
3767     }
3768 
3769   if (x == 0 || CONSTANT_P (x))
3770     return x;
3771 
3772   if (GET_CODE (x) == SUBREG)
3773     {
3774       machine_mode mode = GET_MODE (x);
3775       machine_mode imode = GET_MODE (SUBREG_REG (x));
3776       rtx new_rtx;
3777 
3778       /* See if we previously assigned a constant value to this SUBREG.  */
3779       if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
3780 	  || (new_rtx = lookup_as_function (x, CONST_WIDE_INT)) != 0
3781           || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
3782           || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
3783         return new_rtx;
3784 
3785       /* If we didn't and if doing so makes sense, see if we previously
3786 	 assigned a constant value to the enclosing word mode SUBREG.  */
3787       if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode)
3788 	  && GET_MODE_SIZE (word_mode) < GET_MODE_SIZE (imode))
3789 	{
3790 	  int byte = SUBREG_BYTE (x) - subreg_lowpart_offset (mode, word_mode);
3791 	  if (byte >= 0 && (byte % UNITS_PER_WORD) == 0)
3792 	    {
3793 	      rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
3794 	      new_rtx = lookup_as_function (y, CONST_INT);
3795 	      if (new_rtx)
3796 		return gen_lowpart (mode, new_rtx);
3797 	    }
3798 	}
3799 
3800       /* Otherwise see if we already have a constant for the inner REG,
3801 	 and if that is enough to calculate an equivalent constant for
3802 	 the subreg.  Note that the upper bits of paradoxical subregs
3803 	 are undefined, so they cannot be said to equal anything.  */
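      /* E.g. if (reg:SI R) is known to equal (const_int 0x12345678),
	 a lowpart (subreg:HI (reg:SI R) 0) folds via simplify_subreg
	 to (const_int 0x5678) on a little-endian target.  */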
3804       if (REG_P (SUBREG_REG (x))
3805 	  && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (imode)
3806 	  && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
3807         return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));
3808 
3809       return 0;
3810     }
3811 
3812   /* If X is a MEM, see if it is a constant-pool reference, or look it up in
3813      the hash table in case its value was seen before.  */
3814 
3815   if (MEM_P (x))
3816     {
3817       struct table_elt *elt;
3818 
3819       x = avoid_constant_pool_reference (x);
3820       if (CONSTANT_P (x))
3821 	return x;
3822 
3823       elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
3824       if (elt == 0)
3825 	return 0;
3826 
3827       for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3828 	if (elt->is_const && CONSTANT_P (elt->exp))
3829 	  return elt->exp;
3830     }
3831 
3832   return 0;
3833 }
3834 
3835 /* Given INSN, a jump insn, TAKEN indicates whether we are following the
3836    "taken" branch.
3837 
3838    In certain cases, this can cause us to add an equivalence.  For example,
3839    if we are following the taken case of
3840 	if (i == 2)
3841 	we can add the fact that `i' and `2' are now equivalent.
3842 
3843    In any case, we can record that this comparison was passed.  If the same
3844    comparison is seen later, we will know its value.  */
3845 
3846 static void
3847 record_jump_equiv (rtx_insn *insn, bool taken)
3848 {
3849   int cond_known_true;
3850   rtx op0, op1;
3851   rtx set;
3852   machine_mode mode, mode0, mode1;
3853   int reversed_nonequality = 0;
3854   enum rtx_code code;
3855 
3856   /* Ensure this is the right kind of insn.  */
3857   gcc_assert (any_condjump_p (insn));
3858 
3859   set = pc_set (insn);
3860 
3861   /* See if this jump condition is known true or false.  */
3862   if (taken)
3863     cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
3864   else
3865     cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
3866 
3867   /* Get the type of comparison being done and the operands being compared.
3868      If we had to reverse a non-equality condition, record that fact so we
3869      know that it isn't valid for floating-point.  */
3870   code = GET_CODE (XEXP (SET_SRC (set), 0));
3871   op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
3872   op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
3873 
3874   /* On a cc0 target the cc0-setter and cc0-user may end up in different
3875      blocks.  When that happens the tracking of the cc0-setter via
3876      PREV_INSN_CC0 is spoiled.  That means that fold_rtx may return
3877      NULL_RTX.  In those cases, there's nothing to record.  */
3878   if (op0 == NULL_RTX || op1 == NULL_RTX)
3879     return;
3880 
3881   code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
3882   if (! cond_known_true)
3883     {
3884       code = reversed_comparison_code_parts (code, op0, op1, insn);
3885 
3886       /* Don't remember if we can't find the inverse.  */
3887       if (code == UNKNOWN)
3888 	return;
3889     }
3890 
3891   /* The mode is the mode of the non-constant.  */
3892   mode = mode0;
3893   if (mode1 != VOIDmode)
3894     mode = mode1;
3895 
3896   record_jump_cond (code, mode, op0, op1, reversed_nonequality);
3897 }
3898 
3899 /* Yet another form of subreg creation.  In this case, we want something in
3900    MODE, and we should assume OP has MODE iff it is naturally modeless.  */
3901 
3902 static rtx
3903 record_jump_cond_subreg (machine_mode mode, rtx op)
3904 {
3905   machine_mode op_mode = GET_MODE (op);
3906   if (op_mode == mode || op_mode == VOIDmode)
3907     return op;
3908   return lowpart_subreg (mode, op, op_mode);
3909 }
3910 
3911 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
3912    REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
3913    Make any useful entries we can with that information.  Called from
3914    above function and called recursively.  */
3915 
3916 static void
3917 record_jump_cond (enum rtx_code code, machine_mode mode, rtx op0,
3918 		  rtx op1, int reversed_nonequality)
3919 {
3920   unsigned op0_hash, op1_hash;
3921   int op0_in_memory, op1_in_memory;
3922   struct table_elt *op0_elt, *op1_elt;
3923 
3924   /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
3925      we know that they are also equal in the smaller mode (this is also
3926      true for all smaller modes whether or not there is a SUBREG, but
3927      is not worth testing for with no SUBREG).  */
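  /* E.g. if a paradoxical (subreg:DI (reg:SI R) 0) is known equal to
     some DImode value Y, then (reg:SI R) also equals the SImode
     lowpart of Y; the recursive calls below record that fact.  */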
3928 
3929   /* Note that GET_MODE (op0) may not equal MODE.  */
3930   if (code == EQ && paradoxical_subreg_p (op0))
3931     {
3932       machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3933       rtx tem = record_jump_cond_subreg (inner_mode, op1);
3934       if (tem)
3935 	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3936 			  reversed_nonequality);
3937     }
3938 
3939   if (code == EQ && paradoxical_subreg_p (op1))
3940     {
3941       machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3942       rtx tem = record_jump_cond_subreg (inner_mode, op0);
3943       if (tem)
3944 	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3945 			  reversed_nonequality);
3946     }
3947 
3948   /* Similarly, if this is an NE comparison, and either is a SUBREG
3949      making a smaller mode, we know the whole thing is also NE.  */
3950 
3951   /* Note that GET_MODE (op0) may not equal MODE;
3952      if we test MODE instead, we can get an infinite recursion
3953      alternating between two modes each wider than MODE.  */
3954 
3955   if (code == NE && GET_CODE (op0) == SUBREG
3956       && subreg_lowpart_p (op0)
3957       && (GET_MODE_SIZE (GET_MODE (op0))
3958 	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
3959     {
3960       machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3961       rtx tem = record_jump_cond_subreg (inner_mode, op1);
3962       if (tem)
3963 	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3964 			  reversed_nonequality);
3965     }
3966 
3967   if (code == NE && GET_CODE (op1) == SUBREG
3968       && subreg_lowpart_p (op1)
3969       && (GET_MODE_SIZE (GET_MODE (op1))
3970 	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
3971     {
3972       machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3973       rtx tem = record_jump_cond_subreg (inner_mode, op0);
3974       if (tem)
3975 	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3976 			  reversed_nonequality);
3977     }
3978 
3979   /* Hash both operands.  */
3980 
3981   do_not_record = 0;
3982   hash_arg_in_memory = 0;
3983   op0_hash = HASH (op0, mode);
3984   op0_in_memory = hash_arg_in_memory;
3985 
3986   if (do_not_record)
3987     return;
3988 
3989   do_not_record = 0;
3990   hash_arg_in_memory = 0;
3991   op1_hash = HASH (op1, mode);
3992   op1_in_memory = hash_arg_in_memory;
3993 
3994   if (do_not_record)
3995     return;
3996 
3997   /* Look up both operands.  */
3998   op0_elt = lookup (op0, op0_hash, mode);
3999   op1_elt = lookup (op1, op1_hash, mode);
4000 
4001   /* If both operands are already equivalent or if they are not in the
4002      table but are identical, do nothing.  */
4003   if ((op0_elt != 0 && op1_elt != 0
4004        && op0_elt->first_same_value == op1_elt->first_same_value)
4005       || op0 == op1 || rtx_equal_p (op0, op1))
4006     return;
4007 
4008   /* If we aren't setting two things equal, all we can do is save this
4009      comparison.  Similarly if this is floating-point.  In the latter
4010      case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4011      If we record the equality, we might inadvertently delete code
4012      whose intent was to change -0 to +0.  */
4013 
4014   if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4015     {
4016       struct qty_table_elem *ent;
4017       int qty;
4018 
4019       /* If we reversed a floating-point comparison, if OP0 is not a
4020 	 register, or if OP1 is neither a register nor a constant, we can't
4021 	 do anything.  */
4022 
4023       if (!REG_P (op1))
4024 	op1 = equiv_constant (op1);
4025 
4026       if ((reversed_nonequality && FLOAT_MODE_P (mode))
4027 	  || !REG_P (op0) || op1 == 0)
4028 	return;
4029 
4030       /* Put OP0 in the hash table if it isn't already.  This gives it a
4031 	 new quantity number.  */
4032       if (op0_elt == 0)
4033 	{
4034 	  if (insert_regs (op0, NULL, 0))
4035 	    {
4036 	      rehash_using_reg (op0);
4037 	      op0_hash = HASH (op0, mode);
4038 
4039 	      /* If OP0 is contained in OP1, this changes its hash code
4040 		 as well.  Faster to rehash than to check, except
4041 		 for the simple case of a constant.  */
4042 	      if (! CONSTANT_P (op1))
4043 		op1_hash = HASH (op1, mode);
4044 	    }
4045 
4046 	  op0_elt = insert (op0, NULL, op0_hash, mode);
4047 	  op0_elt->in_memory = op0_in_memory;
4048 	}
4049 
4050       qty = REG_QTY (REGNO (op0));
4051       ent = &qty_table[qty];
4052 
4053       ent->comparison_code = code;
4054       if (REG_P (op1))
4055 	{
4056 	  /* Look it up again--in case op0 and op1 are the same.  */
4057 	  op1_elt = lookup (op1, op1_hash, mode);
4058 
4059 	  /* Put OP1 in the hash table so it gets a new quantity number.  */
4060 	  if (op1_elt == 0)
4061 	    {
4062 	      if (insert_regs (op1, NULL, 0))
4063 		{
4064 		  rehash_using_reg (op1);
4065 		  op1_hash = HASH (op1, mode);
4066 		}
4067 
4068 	      op1_elt = insert (op1, NULL, op1_hash, mode);
4069 	      op1_elt->in_memory = op1_in_memory;
4070 	    }
4071 
4072 	  ent->comparison_const = NULL_RTX;
4073 	  ent->comparison_qty = REG_QTY (REGNO (op1));
4074 	}
4075       else
4076 	{
4077 	  ent->comparison_const = op1;
4078 	  ent->comparison_qty = -1;
4079 	}
4080 
4081       return;
4082     }
4083 
4084   /* If either side is still missing an equivalence, make it now,
4085      then merge the equivalences.  */
4086 
4087   if (op0_elt == 0)
4088     {
4089       if (insert_regs (op0, NULL, 0))
4090 	{
4091 	  rehash_using_reg (op0);
4092 	  op0_hash = HASH (op0, mode);
4093 	}
4094 
4095       op0_elt = insert (op0, NULL, op0_hash, mode);
4096       op0_elt->in_memory = op0_in_memory;
4097     }
4098 
4099   if (op1_elt == 0)
4100     {
4101       if (insert_regs (op1, NULL, 0))
4102 	{
4103 	  rehash_using_reg (op1);
4104 	  op1_hash = HASH (op1, mode);
4105 	}
4106 
4107       op1_elt = insert (op1, NULL, op1_hash, mode);
4108       op1_elt->in_memory = op1_in_memory;
4109     }
4110 
4111   merge_equiv_classes (op0_elt, op1_elt);
4112 }
4113 
4114 /* CSE processing for one instruction.
4115 
4116    Most "true" common subexpressions are optimized away in GIMPLE,
4117    but the few that "leak through" are cleaned up by cse_insn, and complex
4118    addressing modes are often formed here.
4119 
4120    The main function is cse_insn, and between here and that function
4121    a couple of helper functions are defined to keep the size of cse_insn
4122    within reasonable proportions.
4123 
4124    Data is shared between the main and helper functions via STRUCT SET,
4125    which contains all data related to every set in the instruction that
4126    is being processed.
4127 
4128    Note that cse_insn processes all sets in the instruction.  Most
4129    passes in GCC only process simple SET insns or single_set insns, but
4130    CSE processes insns with multiple sets as well.  */
4131 
4132 /* Data on one SET contained in the instruction.  */
4133 
4134 struct set
4135 {
4136   /* The SET rtx itself.  */
4137   rtx rtl;
4138   /* The SET_SRC of the rtx (the original value, if it is changing).  */
4139   rtx src;
4140   /* The hash-table element for the SET_SRC of the SET.  */
4141   struct table_elt *src_elt;
4142   /* Hash value for the SET_SRC.  */
4143   unsigned src_hash;
4144   /* Hash value for the SET_DEST.  */
4145   unsigned dest_hash;
4146   /* The SET_DEST, with SUBREG, etc., stripped.  */
4147   rtx inner_dest;
4148   /* Nonzero if the SET_SRC is in memory.  */
4149   char src_in_memory;
4150   /* Nonzero if the SET_SRC contains something
4151      whose value cannot be predicted and understood.  */
4152   char src_volatile;
4153   /* Original machine mode, in case it becomes a CONST_INT.
4154      The size of this field should match the size of the mode
4155      field of struct rtx_def (see rtl.h).  */
4156   ENUM_BITFIELD(machine_mode) mode : 8;
4157   /* Hash value of constant equivalent for SET_SRC.  */
4158   unsigned src_const_hash;
4159   /* A constant equivalent for SET_SRC, if any.  */
4160   rtx src_const;
4161   /* Table entry for constant equivalent for SET_SRC, if any.  */
4162   struct table_elt *src_const_elt;
4163   /* Table entry for the destination address.  */
4164   struct table_elt *dest_addr_elt;
4165 };
4166 
4167 /* Special handling for (set REG0 REG1) where REG0 is the
4168    "cheapest", cheaper than REG1.  After cse, REG1 will probably not
4169    be used in the sequel, so (if easily done) change this insn to
4170    (set REG1 REG0) and replace REG1 with REG0 in the previous insn
4171    that computed their value.  Then REG1 will become a dead store
4172    and won't cloud the situation for later optimizations.
4173 
4174    Do not make this change if REG1 is a hard register, because it will
4175    then be used in the sequel and we may be changing a two-operand insn
4176    into a three-operand insn.
4177 
4178    This is the last transformation that cse_insn will try to do.  */
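/* For example (register numbers illustrative):

	(set (reg 200) (plus ...))
	(set (reg 100) (reg 200))

   becomes

	(set (reg 100) (plus ...))
	(set (reg 200) (reg 100))

   after which the copy into (reg 200) is a dead store if it has no
   later uses.  */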
4179 
4180 static void
4181 try_back_substitute_reg (rtx set, rtx_insn *insn)
4182 {
4183   rtx dest = SET_DEST (set);
4184   rtx src = SET_SRC (set);
4185 
4186   if (REG_P (dest)
4187       && REG_P (src) && ! HARD_REGISTER_P (src)
4188       && REGNO_QTY_VALID_P (REGNO (src)))
4189     {
4190       int src_q = REG_QTY (REGNO (src));
4191       struct qty_table_elem *src_ent = &qty_table[src_q];
4192 
4193       if (src_ent->first_reg == REGNO (dest))
4194 	{
4195 	  /* Scan for the previous nonnote insn, but stop at a basic
4196 	     block boundary.  */
4197 	  rtx_insn *prev = insn;
4198 	  rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
4199 	  do
4200 	    {
4201 	      prev = PREV_INSN (prev);
4202 	    }
4203 	  while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));
4204 
4205 	  /* Do not swap the registers around if the previous instruction
4206 	     attaches a REG_EQUIV note to REG1.
4207 
4208 	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
4209 	     from the pseudo that originally shadowed an incoming argument
4210 	     to another register.  Some uses of REG_EQUIV might rely on it
4211 	     being attached to REG1 rather than REG2.
4212 
4213 	     This section previously turned the REG_EQUIV into a REG_EQUAL
4214 	     note.  We cannot do that because REG_EQUIV may provide an
4215 	     uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
4216 	  if (NONJUMP_INSN_P (prev)
4217 	      && GET_CODE (PATTERN (prev)) == SET
4218 	      && SET_DEST (PATTERN (prev)) == src
4219 	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
4220 	    {
4221 	      rtx note;
4222 
4223 	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
4224 	      validate_change (insn, &SET_DEST (set), src, 1);
4225 	      validate_change (insn, &SET_SRC (set), dest, 1);
4226 	      apply_change_group ();
4227 
4228 	      /* If INSN has a REG_EQUAL note, and this note mentions
4229 		 REG0, then we must delete it, because the value in
4230 		 REG0 has changed.  If the note's value is REG1, we must
4231 		 also delete it because that is now this insn's dest.  */
4232 	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
4233 	      if (note != 0
4234 		  && (reg_mentioned_p (dest, XEXP (note, 0))
4235 		      || rtx_equal_p (src, XEXP (note, 0))))
4236 		remove_note (insn, note);
4237 	    }
4238 	}
4239     }
4240 }
4241 
4242 /* Record all the SETs in this instruction into SETS_PTR,
4243    and return the number of recorded sets.  */
4244 static int
4245 find_sets_in_insn (rtx_insn *insn, struct set **psets)
4246 {
4247   struct set *sets = *psets;
4248   int n_sets = 0;
4249   rtx x = PATTERN (insn);
4250 
4251   if (GET_CODE (x) == SET)
4252     {
4253       /* Ignore SETs that are unconditional jumps.
4254 	 They never need cse processing, so this does not hurt.
4255 	 The reason is not efficiency but rather
4256 	 so that we can test at the end for instructions
4257 	 that have been simplified to unconditional jumps
4258 	 and not be misled by unchanged instructions
4259 	 that were unconditional jumps to begin with.  */
4260       if (SET_DEST (x) == pc_rtx
4261 	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
4262 	;
4263       /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4264 	 The hard function value register is used only once, to copy to
4265 	 someplace else, so it isn't worth cse'ing.  */
4266       else if (GET_CODE (SET_SRC (x)) == CALL)
4267 	;
4268       else
4269 	sets[n_sets++].rtl = x;
4270     }
4271   else if (GET_CODE (x) == PARALLEL)
4272     {
4273       int i, lim = XVECLEN (x, 0);
4274 
4275       /* Go over the expressions of the PARALLEL in forward order, to
4276 	 put them in the same order in the SETS array.  */
4277       for (i = 0; i < lim; i++)
4278 	{
4279 	  rtx y = XVECEXP (x, 0, i);
4280 	  if (GET_CODE (y) == SET)
4281 	    {
4282 	      /* As above, we ignore unconditional jumps and call-insns and
4283 		 ignore the result of apply_change_group.  */
4284 	      if (SET_DEST (y) == pc_rtx
4285 		  && GET_CODE (SET_SRC (y)) == LABEL_REF)
4286 		;
4287 	      else if (GET_CODE (SET_SRC (y)) == CALL)
4288 		;
4289 	      else
4290 		sets[n_sets++].rtl = y;
4291 	    }
4292 	}
4293     }
4294 
4295   return n_sets;
4296 }
4297 
4298 /* Subroutine of canonicalize_insn.  X is an ASM_OPERANDS in INSN.  */
4299 
4300 static void
4301 canon_asm_operands (rtx x, rtx_insn *insn)
4302 {
4303   for (int i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
4304     {
4305       rtx input = ASM_OPERANDS_INPUT (x, i);
4306       if (!(REG_P (input) && HARD_REGISTER_P (input)))
4307 	{
4308 	  input = canon_reg (input, insn);
4309 	  validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
4310 	}
4311     }
4312 }
4313 
4314 /* Where possible, substitute every register reference in the N_SETS
4315    sets of INSN with the canonical register.
4316 
4317    Register canonicalization propagates the earliest register (i.e.
4318    one that is set before INSN) with the same value.  This is a very
4319    useful, simple form of CSE, to clean up warts from expanding GIMPLE
4320    to RTL.  For instance, a CONST for an address is usually expanded
4321    multiple times to loads into different registers, thus creating many
4322    subexpressions of the form:
4323 
4324    (set (reg1) (some_const))
4325    (set (mem (... reg1 ...)) (thing))
4326    (set (reg2) (some_const))
4327    (set (mem (... reg2 ...)) (thing))
4328 
4329    After canonicalizing, the code takes the following form:
4330 
4331    (set (reg1) (some_const))
4332    (set (mem (... reg1 ...)) (thing))
4333    (set (reg2) (some_const))
4334    (set (mem (... reg1 ...)) (thing))
4335 
4336    The set to reg2 is now trivially dead, and the memory reference (or
4337    address, or whatever) may be a candidate for further CSEing.
4338 
4339    In this function, the result of apply_change_group can be ignored;
4340    see canon_reg.  */
4341 
4342 static void
4343 canonicalize_insn (rtx_insn *insn, struct set **psets, int n_sets)
4344 {
4345   struct set *sets = *psets;
4346   rtx tem;
4347   rtx x = PATTERN (insn);
4348   int i;
4349 
4350   if (CALL_P (insn))
4351     {
4352       for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4353 	if (GET_CODE (XEXP (tem, 0)) != SET)
4354 	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4355     }
4356 
4357   if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
4358     {
4359       canon_reg (SET_SRC (x), insn);
4360       apply_change_group ();
4361       fold_rtx (SET_SRC (x), insn);
4362     }
4363   else if (GET_CODE (x) == CLOBBER)
4364     {
4365       /* If we clobber memory, canon the address.
4366 	 This does nothing when a register is clobbered
4367 	 because we have already invalidated the reg.  */
4368       if (MEM_P (XEXP (x, 0)))
4369 	canon_reg (XEXP (x, 0), insn);
4370     }
4371   else if (GET_CODE (x) == USE
4372 	   && ! (REG_P (XEXP (x, 0))
4373 		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4374     /* Canonicalize a USE of a pseudo register or memory location.  */
4375     canon_reg (x, insn);
4376   else if (GET_CODE (x) == ASM_OPERANDS)
4377     canon_asm_operands (x, insn);
4378   else if (GET_CODE (x) == CALL)
4379     {
4380       canon_reg (x, insn);
4381       apply_change_group ();
4382       fold_rtx (x, insn);
4383     }
4384   else if (DEBUG_INSN_P (insn))
4385     canon_reg (PATTERN (insn), insn);
4386   else if (GET_CODE (x) == PARALLEL)
4387     {
4388       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4389 	{
4390 	  rtx y = XVECEXP (x, 0, i);
4391 	  if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
4392 	    {
4393 	      canon_reg (SET_SRC (y), insn);
4394 	      apply_change_group ();
4395 	      fold_rtx (SET_SRC (y), insn);
4396 	    }
4397 	  else if (GET_CODE (y) == CLOBBER)
4398 	    {
4399 	      if (MEM_P (XEXP (y, 0)))
4400 		canon_reg (XEXP (y, 0), insn);
4401 	    }
4402 	  else if (GET_CODE (y) == USE
4403 		   && ! (REG_P (XEXP (y, 0))
4404 			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4405 	    canon_reg (y, insn);
4406 	  else if (GET_CODE (y) == ASM_OPERANDS)
4407 	    canon_asm_operands (y, insn);
4408 	  else if (GET_CODE (y) == CALL)
4409 	    {
4410 	      canon_reg (y, insn);
4411 	      apply_change_group ();
4412 	      fold_rtx (y, insn);
4413 	    }
4414 	}
4415     }
4416 
4417   if (n_sets == 1 && REG_NOTES (insn) != 0
4418       && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
4419     {
4420       /* We potentially will process this insn many times.  Therefore,
4421 	 drop the REG_EQUAL note if it is equal to the SET_SRC of the
4422 	 unique set in INSN.
4423 
4424 	 Do not do so if the REG_EQUAL note is for a STRICT_LOW_PART,
4425 	 because cse_insn handles those specially.  */
4426       if (GET_CODE (SET_DEST (sets[0].rtl)) != STRICT_LOW_PART
4427 	  && rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
4428 	remove_note (insn, tem);
4429       else
4430 	{
4431 	  canon_reg (XEXP (tem, 0), insn);
4432 	  apply_change_group ();
4433 	  XEXP (tem, 0) = fold_rtx (XEXP (tem, 0), insn);
4434 	  df_notes_rescan (insn);
4435 	}
4436     }
4437 
4438   /* Canonicalize sources and addresses of destinations.
4439      We do this in a separate pass to avoid problems when a MATCH_DUP is
4440      present in the insn pattern.  In that case, we want to ensure that
4441      we don't break the duplicate nature of the pattern.  So we will replace
4442      both operands at the same time.  Otherwise, we would fail to find an
4443      equivalent substitution in the loop calling validate_change below.
4444 
4445      We used to suppress canonicalization of DEST if it appears in SRC,
4446      but we don't do this any more.  */
4447 
4448   for (i = 0; i < n_sets; i++)
4449     {
4450       rtx dest = SET_DEST (sets[i].rtl);
4451       rtx src = SET_SRC (sets[i].rtl);
4452       rtx new_rtx = canon_reg (src, insn);
4453 
4454       validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
4455 
4456       if (GET_CODE (dest) == ZERO_EXTRACT)
4457 	{
4458 	  validate_change (insn, &XEXP (dest, 1),
4459 			   canon_reg (XEXP (dest, 1), insn), 1);
4460 	  validate_change (insn, &XEXP (dest, 2),
4461 			   canon_reg (XEXP (dest, 2), insn), 1);
4462 	}
4463 
4464       while (GET_CODE (dest) == SUBREG
4465 	     || GET_CODE (dest) == ZERO_EXTRACT
4466 	     || GET_CODE (dest) == STRICT_LOW_PART)
4467 	dest = XEXP (dest, 0);
4468 
4469       if (MEM_P (dest))
4470 	canon_reg (dest, insn);
4471     }
4472 
4473   /* Now that we have done all the replacements, we can apply the change
4474      group and see if they all work.  Note that this will cause some
4475      canonicalizations that would have worked individually not to be applied
4476      because some other canonicalization didn't work, but this should not
4477      occur often.
4478 
4479      The result of apply_change_group can be ignored; see canon_reg.  */
4480 
4481   apply_change_group ();
4482 }
4483 
4484 /* Main function of CSE.
4485    First simplify sources and addresses of all assignments
4486    in the instruction, using previously-computed equivalent values.
4487    Then install the new sources and destinations in the table
4488    of available values.  */
4489 
4490 static void
4491 cse_insn (rtx_insn *insn)
4492 {
4493   rtx x = PATTERN (insn);
4494   int i;
4495   rtx tem;
4496   int n_sets = 0;
4497 
4498   rtx src_eqv = 0;
4499   struct table_elt *src_eqv_elt = 0;
4500   int src_eqv_volatile = 0;
4501   int src_eqv_in_memory = 0;
4502   unsigned src_eqv_hash = 0;
4503 
4504   struct set *sets = (struct set *) 0;
4505 
4506   if (GET_CODE (x) == SET)
4507     sets = XALLOCA (struct set);
4508   else if (GET_CODE (x) == PARALLEL)
4509     sets = XALLOCAVEC (struct set, XVECLEN (x, 0));
4510 
4511   this_insn = insn;
4512   /* Records what this insn does to set CC0.  */
4513   this_insn_cc0 = 0;
4514   this_insn_cc0_mode = VOIDmode;
4515 
4516   /* Find all regs explicitly clobbered in this insn,
4517      to ensure they are not replaced with any other regs
4518      elsewhere in this insn.  */
4519   invalidate_from_sets_and_clobbers (insn);
4520 
4521   /* Record all the SETs in this instruction.  */
4522   n_sets = find_sets_in_insn (insn, &sets);
4523 
4524   /* Substitute the canonical register where possible.  */
4525   canonicalize_insn (insn, &sets, n_sets);
4526 
4527   /* If this insn has a REG_EQUAL note, store the equivalent value in SRC_EQV,
4528      if different, or if the DEST is a STRICT_LOW_PART/ZERO_EXTRACT.  The
4529      latter condition is necessary because SRC_EQV is handled specially for
4530      this case, and if it isn't set, then there will be no equivalence
4531      for the destination.  */
4532   if (n_sets == 1 && REG_NOTES (insn) != 0
4533       && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
4534     {
4535 
4536       if (GET_CODE (SET_DEST (sets[0].rtl)) != ZERO_EXTRACT
4537 	  && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4538 	      || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4539 	src_eqv = copy_rtx (XEXP (tem, 0));
4540       /* If DEST is of the form ZERO_EXTRACT, as in:
4541 	 (set (zero_extract:SI (reg:SI 119)
4542 		  (const_int 16 [0x10])
4543 		  (const_int 16 [0x10]))
4544 	      (const_int 51154 [0xc7d2]))
4545 	 REG_EQUAL note will specify the value of register (reg:SI 119) at this
4546 	 point.  Note that this is different from SRC_EQV. We can however
4547 	 calculate SRC_EQV with the position and width of ZERO_EXTRACT.  */
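      /* E.g. for the insn above with !BITS_BIG_ENDIAN, if the note
	 gives (reg:SI 119) the value V, the code below computes
	 SRC_EQV = (V >> 16) & 0xffff.  */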
4548       else if (GET_CODE (SET_DEST (sets[0].rtl)) == ZERO_EXTRACT
4549 	       && CONST_INT_P (XEXP (tem, 0))
4550 	       && CONST_INT_P (XEXP (SET_DEST (sets[0].rtl), 1))
4551 	       && CONST_INT_P (XEXP (SET_DEST (sets[0].rtl), 2)))
4552 	{
4553 	  rtx dest_reg = XEXP (SET_DEST (sets[0].rtl), 0);
4554 	  rtx width = XEXP (SET_DEST (sets[0].rtl), 1);
4555 	  rtx pos = XEXP (SET_DEST (sets[0].rtl), 2);
4556 	  HOST_WIDE_INT val = INTVAL (XEXP (tem, 0));
4557 	  HOST_WIDE_INT mask;
4558 	  unsigned int shift;
4559 	  if (BITS_BIG_ENDIAN)
4560 	    shift = GET_MODE_PRECISION (GET_MODE (dest_reg))
4561 	      - INTVAL (pos) - INTVAL (width);
4562 	  else
4563 	    shift = INTVAL (pos);
4564 	  if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
4565 	    mask = HOST_WIDE_INT_M1;
4566 	  else
4567 	    mask = (HOST_WIDE_INT_1 << INTVAL (width)) - 1;
4568 	  val = (val >> shift) & mask;
4569 	  src_eqv = GEN_INT (val);
4570 	}
4571     }
4572 
4573   /* Set sets[i].src_elt to the class each source belongs to.
4574      Detect assignments from or to volatile things
4575      and set sets[i] to zero so they will be ignored
4576      in the rest of this function.
4577 
4578      Nothing in this loop changes the hash table or the register chains.  */
4579 
4580   for (i = 0; i < n_sets; i++)
4581     {
4582       bool repeat = false;
4583       bool mem_noop_insn = false;
4584       rtx src, dest;
4585       rtx src_folded;
4586       struct table_elt *elt = 0, *p;
4587       machine_mode mode;
4588       rtx src_eqv_here;
4589       rtx src_const = 0;
4590       rtx src_related = 0;
4591       bool src_related_is_const_anchor = false;
4592       struct table_elt *src_const_elt = 0;
4593       int src_cost = MAX_COST;
4594       int src_eqv_cost = MAX_COST;
4595       int src_folded_cost = MAX_COST;
4596       int src_related_cost = MAX_COST;
4597       int src_elt_cost = MAX_COST;
4598       int src_regcost = MAX_COST;
4599       int src_eqv_regcost = MAX_COST;
4600       int src_folded_regcost = MAX_COST;
4601       int src_related_regcost = MAX_COST;
4602       int src_elt_regcost = MAX_COST;
4603       /* Set nonzero if we need to call force_const_mem on the
4604 	 contents of src_folded before using it.  */
4605       int src_folded_force_flag = 0;
4606 
4607       dest = SET_DEST (sets[i].rtl);
4608       src = SET_SRC (sets[i].rtl);
4609 
4610       /* If SRC is a constant that has no machine mode,
4611 	 hash it with the destination's machine mode.
4612 	 This way we can keep different modes separate.  */
4613 
4614       mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4615       sets[i].mode = mode;
4616 
4617       if (src_eqv)
4618 	{
4619 	  machine_mode eqvmode = mode;
4620 	  if (GET_CODE (dest) == STRICT_LOW_PART)
4621 	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4622 	  do_not_record = 0;
4623 	  hash_arg_in_memory = 0;
4624 	  src_eqv_hash = HASH (src_eqv, eqvmode);
4625 
4626 	  /* Find the equivalence class for the equivalent expression.  */
4627 
4628 	  if (!do_not_record)
4629 	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4630 
4631 	  src_eqv_volatile = do_not_record;
4632 	  src_eqv_in_memory = hash_arg_in_memory;
4633 	}
4634 
4635       /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4636 	 value of the INNER register, not the destination.  So it is not
4637 	 a valid substitution for the source.  But save it for later.  */
4638       if (GET_CODE (dest) == STRICT_LOW_PART)
4639 	src_eqv_here = 0;
4640       else
4641 	src_eqv_here = src_eqv;
4642 
4643       /* Simplify any foldable subexpressions in SRC.  Then get the fully-
4644 	 simplified result, which may not necessarily be valid.  */
4645       src_folded = fold_rtx (src, NULL);
4646 
4647 #if 0
4648       /* ??? This caused bad code to be generated for the m68k port with -O2.
4649 	 Suppose src is (CONST_INT -1), and that after truncation src_folded
4650 	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
4651 	 At the end we will add src and src_const to the same equivalence
4652 	 class.  We now have 3 and -1 on the same equivalence class.  This
4653 	 causes later instructions to be mis-optimized.  */
4654       /* If storing a constant in a bitfield, pre-truncate the constant
4655 	 so we will be able to record it later.  */
4656       if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4657 	{
4658 	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4659 
4660 	  if (CONST_INT_P (src)
4661 	      && CONST_INT_P (width)
4662 	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4663 	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4664 	    src_folded
4665 	      = GEN_INT (INTVAL (src) & ((HOST_WIDE_INT_1
4666 					  << INTVAL (width)) - 1));
4667 	}
4668 #endif
4669 
4670       /* Compute SRC's hash code, and also notice if it
4671 	 should not be recorded at all.  In that case,
4672 	 prevent any further processing of this assignment.  */
4673       do_not_record = 0;
4674       hash_arg_in_memory = 0;
4675 
4676       sets[i].src = src;
4677       sets[i].src_hash = HASH (src, mode);
4678       sets[i].src_volatile = do_not_record;
4679       sets[i].src_in_memory = hash_arg_in_memory;
4680 
4681       /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4682 	 a pseudo, do not record SRC.  Using SRC as a replacement for
4683 	 anything else will be incorrect in that situation.  Note that
4684 	 this usually occurs only for stack slots, in which case all the
4685 	 RTL would be referring to SRC, so we don't lose any optimization
4686 	 opportunities by not having SRC in the hash table.  */
4687 
4688       if (MEM_P (src)
4689 	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4690 	  && REG_P (dest)
4691 	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4692 	sets[i].src_volatile = 1;
4693 
4694       else if (GET_CODE (src) == ASM_OPERANDS
4695 	       && GET_CODE (x) == PARALLEL)
4696 	{
4697 	  /* Do not record result of a non-volatile inline asm with
4698 	     more than one result.  */
4699 	  if (n_sets > 1)
4700 	    sets[i].src_volatile = 1;
4701 
4702 	  int j, lim = XVECLEN (x, 0);
4703 	  for (j = 0; j < lim; j++)
4704 	    {
4705 	      rtx y = XVECEXP (x, 0, j);
4706 	      /* And do not record result of a non-volatile inline asm
4707 		 with "memory" clobber.  */
4708 	      if (GET_CODE (y) == CLOBBER && MEM_P (XEXP (y, 0)))
4709 		{
4710 		  sets[i].src_volatile = 1;
4711 		  break;
4712 		}
4713 	    }
4714 	}
4715 
4716 #if 0
4717       /* It is no longer clear why we used to do this, but it doesn't
4718 	 appear to still be needed.  So let's try without it since this
4719 	 code hurts cse'ing widened ops.  */
4720       /* If source is a paradoxical subreg (such as QI treated as an SI),
4721 	 treat it as volatile.  It may do the work of an SI in one context
4722 	 where the extra bits are not being used, but cannot replace an SI
4723 	 in general.  */
4724       if (paradoxical_subreg_p (src))
4725 	sets[i].src_volatile = 1;
4726 #endif
4727 
4728       /* Locate all possible equivalent forms for SRC.  Try to replace
4729          SRC in the insn with each cheaper equivalent.
4730 
4731          We have the following types of equivalents: SRC itself, a folded
4732          version, a value given in a REG_EQUAL note, or a value related
4733 	 to a constant.
4734 
4735          Each of these equivalents may be part of an additional class
4736          of equivalents (if more than one is in the table, they must be in
4737          the same class; we check for this).
4738 
4739 	 If the source is volatile, we don't do any table lookups.
4740 
4741          We note any constant equivalent for possible later use in a
4742          REG_NOTE.  */
4743 
4744       if (!sets[i].src_volatile)
4745 	elt = lookup (src, sets[i].src_hash, mode);
4746 
4747       sets[i].src_elt = elt;
4748 
4749       if (elt && src_eqv_here && src_eqv_elt)
4750 	{
4751 	  if (elt->first_same_value != src_eqv_elt->first_same_value)
4752 	    {
4753 	      /* The REG_EQUAL is indicating that two formerly distinct
4754 		 classes are now equivalent.  So merge them.  */
4755 	      merge_equiv_classes (elt, src_eqv_elt);
4756 	      src_eqv_hash = HASH (src_eqv, elt->mode);
4757 	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4758 	    }
4759 
4760 	  src_eqv_here = 0;
4761 	}
4762 
4763       else if (src_eqv_elt)
4764 	elt = src_eqv_elt;
4765 
4766       /* Try to find a constant somewhere and record it in `src_const'.
4767 	 Record its table element, if any, in `src_const_elt'.  Look in
4768 	 any known equivalences first.  (If the constant is not in the
4769 	 table, also set `sets[i].src_const_hash').  */
4770       if (elt)
4771 	for (p = elt->first_same_value; p; p = p->next_same_value)
4772 	  if (p->is_const)
4773 	    {
4774 	      src_const = p->exp;
4775 	      src_const_elt = elt;
4776 	      break;
4777 	    }
4778 
4779       if (src_const == 0
4780 	  && (CONSTANT_P (src_folded)
4781 	      /* Consider (minus (label_ref L1) (label_ref L2)) as
4782 		 "constant" here so we will record it. This allows us
4783 		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
4784 	      || (GET_CODE (src_folded) == MINUS
4785 		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4786 		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4787 	src_const = src_folded, src_const_elt = elt;
4788       else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4789 	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4790 
4791       /* If we don't know if the constant is in the table, get its
4792 	 hash code and look it up.  */
4793       if (src_const && src_const_elt == 0)
4794 	{
4795 	  sets[i].src_const_hash = HASH (src_const, mode);
4796 	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4797 	}
4798 
4799       sets[i].src_const = src_const;
4800       sets[i].src_const_elt = src_const_elt;
4801 
4802       /* If the constant and our source are both in the table, mark them as
4803 	 equivalent.  Otherwise, if a constant is in the table but the source
4804 	 isn't, set ELT to it.  */
4805       if (src_const_elt && elt
4806 	  && src_const_elt->first_same_value != elt->first_same_value)
4807 	merge_equiv_classes (elt, src_const_elt);
4808       else if (src_const_elt && elt == 0)
4809 	elt = src_const_elt;
4810 
4811       /* See if there is a register linearly related to a constant
4812          equivalent of SRC.  */
4813       if (src_const
4814 	  && (GET_CODE (src_const) == CONST
4815 	      || (src_const_elt && src_const_elt->related_value != 0)))
4816 	{
4817 	  src_related = use_related_value (src_const, src_const_elt);
4818 	  if (src_related)
4819 	    {
4820 	      struct table_elt *src_related_elt
4821 		= lookup (src_related, HASH (src_related, mode), mode);
4822 	      if (src_related_elt && elt)
4823 		{
4824 		  if (elt->first_same_value
4825 		      != src_related_elt->first_same_value)
4826 		    /* This can occur when we previously saw a CONST
4827 		       involving a SYMBOL_REF and then see the SYMBOL_REF
4828 		       twice.  Merge the involved classes.  */
4829 		    merge_equiv_classes (elt, src_related_elt);
4830 
4831 		  src_related = 0;
4832 		  src_related_elt = 0;
4833 		}
4834 	      else if (src_related_elt && elt == 0)
4835 		elt = src_related_elt;
4836 	    }
4837 	}
4838 
4839       /* See if we have a CONST_INT that is already in a register in a
4840 	 wider mode.  */
4841 
4842       if (src_const && src_related == 0 && CONST_INT_P (src_const)
4843 	  && GET_MODE_CLASS (mode) == MODE_INT
4844 	  && GET_MODE_PRECISION (mode) < BITS_PER_WORD)
4845 	{
4846 	  machine_mode wider_mode;
4847 
4848 	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
4849 	       wider_mode != VOIDmode
4850 	       && GET_MODE_PRECISION (wider_mode) <= BITS_PER_WORD
4851 	       && src_related == 0;
4852 	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4853 	    {
4854 	      struct table_elt *const_elt
4855 		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4856 
4857 	      if (const_elt == 0)
4858 		continue;
4859 
4860 	      for (const_elt = const_elt->first_same_value;
4861 		   const_elt; const_elt = const_elt->next_same_value)
4862 		if (REG_P (const_elt->exp))
4863 		  {
4864 		    src_related = gen_lowpart (mode, const_elt->exp);
4865 		    break;
4866 		  }
4867 	    }
4868 	}
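      /* Worked example (pseudo 100 hypothetical): if an earlier insn was
	 (set (reg:SI 100) (const_int 33)) and this set needs (const_int 33)
	 in QImode, the lookup above finds the SImode class containing the
	 register and proposes

	   (subreg:QI (reg:SI 100) 0)

	 as src_related (gen_lowpart picks the SUBREG_BYTE appropriate to
	 the target's endianness).  */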
4869 
4870       /* Another possibility is that we have an AND with a constant in
4871 	 a mode narrower than a word.  If so, it might have been generated
4872 	 as part of an "if" which would narrow the AND.  If we already
4873 	 have done the AND in a wider mode, we can use a SUBREG of that
4874 	 value.  */
4875 
4876       if (flag_expensive_optimizations && ! src_related
4877 	  && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
4878 	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4879 	{
4880 	  machine_mode tmode;
4881 	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4882 
4883 	  for (tmode = GET_MODE_WIDER_MODE (mode);
4884 	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4885 	       tmode = GET_MODE_WIDER_MODE (tmode))
4886 	    {
4887 	      rtx inner = gen_lowpart (tmode, XEXP (src, 0));
4888 	      struct table_elt *larger_elt;
4889 
4890 	      if (inner)
4891 		{
4892 		  PUT_MODE (new_and, tmode);
4893 		  XEXP (new_and, 0) = inner;
4894 		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4895 		  if (larger_elt == 0)
4896 		    continue;
4897 
4898 		  for (larger_elt = larger_elt->first_same_value;
4899 		       larger_elt; larger_elt = larger_elt->next_same_value)
4900 		    if (REG_P (larger_elt->exp))
4901 		      {
4902 			src_related
4903 			  = gen_lowpart (mode, larger_elt->exp);
4904 			break;
4905 		      }
4906 
4907 		  if (src_related)
4908 		    break;
4909 		}
4910 	    }
4911 	}
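      /* Illustrative sketch (pseudos hypothetical): having earlier seen
	 (set (reg:SI 101) (and:SI (reg:SI 102) (const_int 255))), a new
	 source (and:QI (subreg:QI (reg:SI 102) 0) (const_int 255)) is
	 rebuilt above as the equivalent SImode AND, reg 101 is found in
	 that expression's class, and (subreg:QI (reg:SI 101) 0) becomes
	 src_related.  */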
4912 
4913       /* See if a MEM has already been loaded with a widening operation;
4914 	 if it has, we can use a subreg of that.  Many CISC machines
4915 	 also have such widening loads, but this is only likely to be
4916 	 beneficial on machines whose loads implicitly extend.  */
4917 
4918       rtx_code extend_op;
4919       if (flag_expensive_optimizations && src_related == 0
4920 	  && MEM_P (src) && ! do_not_record
4921 	  && (extend_op = load_extend_op (mode)) != UNKNOWN)
4922 	{
4923 	  struct rtx_def memory_extend_buf;
4924 	  rtx memory_extend_rtx = &memory_extend_buf;
4925 	  machine_mode tmode;
4926 
4927 	  /* Set what we are trying to extend and the operation it might
4928 	     have been extended with.  */
4929 	  memset (memory_extend_rtx, 0, sizeof (*memory_extend_rtx));
4930 	  PUT_CODE (memory_extend_rtx, extend_op);
4931 	  XEXP (memory_extend_rtx, 0) = src;
4932 
4933 	  for (tmode = GET_MODE_WIDER_MODE (mode);
4934 	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4935 	       tmode = GET_MODE_WIDER_MODE (tmode))
4936 	    {
4937 	      struct table_elt *larger_elt;
4938 
4939 	      PUT_MODE (memory_extend_rtx, tmode);
4940 	      larger_elt = lookup (memory_extend_rtx,
4941 				   HASH (memory_extend_rtx, tmode), tmode);
4942 	      if (larger_elt == 0)
4943 		continue;
4944 
4945 	      for (larger_elt = larger_elt->first_same_value;
4946 		   larger_elt; larger_elt = larger_elt->next_same_value)
4947 		if (REG_P (larger_elt->exp))
4948 		  {
4949 		    src_related = gen_lowpart (mode, larger_elt->exp);
4950 		    break;
4951 		  }
4952 
4953 	      if (src_related)
4954 		break;
4955 	    }
4956 	}
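      /* Illustrative sketch: on a target whose byte loads zero-extend
	 (i.e. load_extend_op (QImode) == ZERO_EXTEND), if we already have

	   (set (reg:SI 103) (zero_extend:SI (mem:QI addr)))

	 then a later source (mem:QI addr) matches the lookup above and
	 (subreg:QI (reg:SI 103) 0) is proposed instead, saving a second
	 load.  ADDR stands in for whatever address rtx the MEM uses.  */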
4957 
4958       /* Try to express the constant using a register+offset expression
4959 	 derived from a constant anchor.  */
4960 
4961       if (targetm.const_anchor
4962 	  && !src_related
4963 	  && src_const
4964 	  && GET_CODE (src_const) == CONST_INT)
4965 	{
4966 	  src_related = try_const_anchors (src_const, mode);
4967 	  src_related_is_const_anchor = src_related != NULL_RTX;
4968 	}
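      /* Illustrative sketch (anchor granularity is target-specific): if
	 the target anchors constants at multiples of 0x8000 and
	 (reg:SI 104) is known to hold (const_int 0x8000), a needed
	 (const_int 0x8004) may come back from try_const_anchors as

	   (plus:SI (reg:SI 104) (const_int 4))

	 which is often cheaper than materializing the constant anew.  */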
4969 
4970 
4971       if (src == src_folded)
4972 	src_folded = 0;
4973 
4974       /* At this point, ELT, if nonzero, points to a class of expressions
4975          equivalent to the source of this SET, while SRC, SRC_EQV, SRC_FOLDED,
4976 	 and SRC_RELATED, if nonzero, each contain additional equivalent
4977 	 expressions.  Prune these latter expressions by deleting any that are
4978 	 already in the equivalence class.
4979 
4980 	 Check for an equivalent identical to the destination.  If found,
4981 	 this is the preferred equivalent since it will likely lead to
4982 	 elimination of the insn.  Indicate this by placing it in
4983 	 `src_related'.  */
4984 
4985       if (elt)
4986 	elt = elt->first_same_value;
4987       for (p = elt; p; p = p->next_same_value)
4988 	{
4989 	  enum rtx_code code = GET_CODE (p->exp);
4990 
4991 	  /* If the expression is not valid, ignore it.  Then we do not
4992 	     have to check for validity below.  In most cases, we can use
4993 	     `rtx_equal_p', since canonicalization has already been done.  */
4994 	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
4995 	    continue;
4996 
4997 	  /* Also skip paradoxical subregs, unless that's what we're
4998 	     looking for.  */
4999 	  if (paradoxical_subreg_p (p->exp)
5000 	      && ! (src != 0
5001 		    && GET_CODE (src) == SUBREG
5002 		    && GET_MODE (src) == GET_MODE (p->exp)
5003 		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5004 			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5005 	    continue;
5006 
5007 	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5008 	    src = 0;
5009 	  else if (src_folded && GET_CODE (src_folded) == code
5010 		   && rtx_equal_p (src_folded, p->exp))
5011 	    src_folded = 0;
5012 	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5013 		   && rtx_equal_p (src_eqv_here, p->exp))
5014 	    src_eqv_here = 0;
5015 	  else if (src_related && GET_CODE (src_related) == code
5016 		   && rtx_equal_p (src_related, p->exp))
5017 	    src_related = 0;
5018 
5019 	  /* If this is the same as the destination of the insn, we want
5020 	     to prefer it.  Copy it to src_related.  The code below will
5021 	     then give it a negative cost.  */
5022 	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5023 	    src_related = dest;
5024 	}
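      /* Illustrative sketch: for (set (reg:SI 105) (plus:SI ...)) where
	 (reg:SI 105) itself is already in the source's equivalence class,
	 the loop above copies it to src_related; the costing below then
	 treats it as cheapest (cost -1), the insn becomes the self-copy
	 (set (reg:SI 105) (reg:SI 105)), and it can later be removed as
	 trivially dead.  */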
5025 
5026       /* Find the cheapest valid equivalent, trying all the available
5027          possibilities.  Prefer items not in the hash table to ones
5028          that are when they are equal cost.  Note that we can never
5029          worsen an insn as the current contents will also succeed.
5030 	 If we find an equivalent identical to the destination, use it as best,
5031 	 since this insn will probably be eliminated in that case.  */
5032       if (src)
5033 	{
5034 	  if (rtx_equal_p (src, dest))
5035 	    src_cost = src_regcost = -1;
5036 	  else
5037 	    {
5038 	      src_cost = COST (src, mode);
5039 	      src_regcost = approx_reg_cost (src);
5040 	    }
5041 	}
5042 
5043       if (src_eqv_here)
5044 	{
5045 	  if (rtx_equal_p (src_eqv_here, dest))
5046 	    src_eqv_cost = src_eqv_regcost = -1;
5047 	  else
5048 	    {
5049 	      src_eqv_cost = COST (src_eqv_here, mode);
5050 	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
5051 	    }
5052 	}
5053 
5054       if (src_folded)
5055 	{
5056 	  if (rtx_equal_p (src_folded, dest))
5057 	    src_folded_cost = src_folded_regcost = -1;
5058 	  else
5059 	    {
5060 	      src_folded_cost = COST (src_folded, mode);
5061 	      src_folded_regcost = approx_reg_cost (src_folded);
5062 	    }
5063 	}
5064 
5065       if (src_related)
5066 	{
5067 	  if (rtx_equal_p (src_related, dest))
5068 	    src_related_cost = src_related_regcost = -1;
5069 	  else
5070 	    {
5071 	      src_related_cost = COST (src_related, mode);
5072 	      src_related_regcost = approx_reg_cost (src_related);
5073 
5074 	      /* If a const-anchor is used to synthesize a constant that
5075 		 normally requires multiple instructions then slightly prefer
5076 		 it over the original sequence.  These instructions are likely
5077 		 to become redundant now.  We can't compare against the cost
5078 		 of src_eqv_here because, on MIPS for example, multi-insn
5079 		 constants have zero cost; they are assumed to be hoisted from
5080 		 loops.  */
5081 	      if (src_related_is_const_anchor
5082 		  && src_related_cost == src_cost
5083 		  && src_eqv_here)
5084 		src_related_cost--;
5085 	    }
5086 	}
5087 
5088       /* If this was an indirect jump insn, a known label will really be
5089 	 cheaper even though it looks more expensive.  */
5090       if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5091 	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5092 
5093       /* Terminate the loop when a replacement is made.  It must terminate
5094          since the current contents will be tested and will always be valid.  */
5095       while (1)
5096 	{
5097 	  rtx trial;
5098 
5099 	  /* Skip invalid entries.  */
5100 	  while (elt && !REG_P (elt->exp)
5101 		 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5102 	    elt = elt->next_same_value;
5103 
5104 	  /* A paradoxical subreg would be bad here: it'll be the right
5105 	     size, but later may be adjusted so that the upper bits aren't
5106 	     what we want.  So reject it.  */
5107 	  if (elt != 0
5108 	      && paradoxical_subreg_p (elt->exp)
5109 	      /* It is okay, though, if the rtx we're trying to match
5110 		 will ignore any of the bits we can't predict.  */
5111 	      && ! (src != 0
5112 		    && GET_CODE (src) == SUBREG
5113 		    && GET_MODE (src) == GET_MODE (elt->exp)
5114 		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5115 			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5116 	    {
5117 	      elt = elt->next_same_value;
5118 	      continue;
5119 	    }
5120 
5121 	  if (elt)
5122 	    {
5123 	      src_elt_cost = elt->cost;
5124 	      src_elt_regcost = elt->regcost;
5125 	    }
5126 
5127 	  /* Find the cheapest one and skip it for the next time.  For items
5128 	     of equal cost, use this order:
5129 	     src_folded, src, src_eqv, src_related and hash table entry.  */
5130 	  if (src_folded
5131 	      && preferable (src_folded_cost, src_folded_regcost,
5132 			     src_cost, src_regcost) <= 0
5133 	      && preferable (src_folded_cost, src_folded_regcost,
5134 			     src_eqv_cost, src_eqv_regcost) <= 0
5135 	      && preferable (src_folded_cost, src_folded_regcost,
5136 			     src_related_cost, src_related_regcost) <= 0
5137 	      && preferable (src_folded_cost, src_folded_regcost,
5138 			     src_elt_cost, src_elt_regcost) <= 0)
5139 	    {
5140 	      trial = src_folded, src_folded_cost = MAX_COST;
5141 	      if (src_folded_force_flag)
5142 		{
5143 		  rtx forced = force_const_mem (mode, trial);
5144 		  if (forced)
5145 		    trial = forced;
5146 		}
5147 	    }
5148 	  else if (src
5149 		   && preferable (src_cost, src_regcost,
5150 				  src_eqv_cost, src_eqv_regcost) <= 0
5151 		   && preferable (src_cost, src_regcost,
5152 				  src_related_cost, src_related_regcost) <= 0
5153 		   && preferable (src_cost, src_regcost,
5154 				  src_elt_cost, src_elt_regcost) <= 0)
5155 	    trial = src, src_cost = MAX_COST;
5156 	  else if (src_eqv_here
5157 		   && preferable (src_eqv_cost, src_eqv_regcost,
5158 				  src_related_cost, src_related_regcost) <= 0
5159 		   && preferable (src_eqv_cost, src_eqv_regcost,
5160 				  src_elt_cost, src_elt_regcost) <= 0)
5161 	    trial = src_eqv_here, src_eqv_cost = MAX_COST;
5162 	  else if (src_related
5163 		   && preferable (src_related_cost, src_related_regcost,
5164 				  src_elt_cost, src_elt_regcost) <= 0)
5165 	    trial = src_related, src_related_cost = MAX_COST;
5166 	  else
5167 	    {
5168 	      trial = elt->exp;
5169 	      elt = elt->next_same_value;
5170 	      src_elt_cost = MAX_COST;
5171 	    }
5172 
5173 	  /* Avoid creation of overlapping memory moves.  */
5174 	  if (MEM_P (trial) && MEM_P (dest) && !rtx_equal_p (trial, dest))
5175 	    {
5176 	      rtx src, dest;
5177 
5178 	      /* BLKmode moves are not handled by cse anyway.  */
5179 	      if (GET_MODE (trial) == BLKmode)
5180 		break;
5181 
5182 	      src = canon_rtx (trial);
5183 	      dest = canon_rtx (SET_DEST (sets[i].rtl));
5184 
5185 	      if (!MEM_P (src) || !MEM_P (dest)
5186 		  || !nonoverlapping_memrefs_p (src, dest, false))
5187 		break;
5188 	    }
5189 
5190 	  /* Try to optimize
5191 	     (set (reg:M N) (const_int A))
5192 	     (set (reg:M2 O) (const_int B))
5193 	     (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
5194 		  (reg:M2 O)).  */
5195 	  if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5196 	      && CONST_INT_P (trial)
5197 	      && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
5198 	      && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
5199 	      && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
5200 	      && (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl)))
5201 		  >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)))
5202 	      && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
5203 		  + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
5204 		  <= HOST_BITS_PER_WIDE_INT))
5205 	    {
5206 	      rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
5207 	      rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5208 	      rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
5209 	      unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
5210 	      struct table_elt *dest_elt
5211 		= lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
5212 	      rtx dest_cst = NULL;
5213 
5214 	      if (dest_elt)
5215 		for (p = dest_elt->first_same_value; p; p = p->next_same_value)
5216 		  if (p->is_const && CONST_INT_P (p->exp))
5217 		    {
5218 		      dest_cst = p->exp;
5219 		      break;
5220 		    }
5221 	      if (dest_cst)
5222 		{
5223 		  HOST_WIDE_INT val = INTVAL (dest_cst);
5224 		  HOST_WIDE_INT mask;
5225 		  unsigned int shift;
5226 		  if (BITS_BIG_ENDIAN)
5227 		    shift = GET_MODE_PRECISION (GET_MODE (dest_reg))
5228 			    - INTVAL (pos) - INTVAL (width);
5229 		  else
5230 		    shift = INTVAL (pos);
5231 		  if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
5232 		    mask = HOST_WIDE_INT_M1;
5233 		  else
5234 		    mask = (HOST_WIDE_INT_1 << INTVAL (width)) - 1;
5235 		  val &= ~(mask << shift);
5236 		  val |= (INTVAL (trial) & mask) << shift;
5237 		  val = trunc_int_for_mode (val, GET_MODE (dest_reg));
5238 		  validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
5239 					   dest_reg, 1);
5240 		  validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5241 					   GEN_INT (val), 1);
5242 		  if (apply_change_group ())
5243 		    {
5244 		      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5245 		      if (note)
5246 			{
5247 			  remove_note (insn, note);
5248 			  df_notes_rescan (insn);
5249 			}
5250 		      src_eqv = NULL_RTX;
5251 		      src_eqv_elt = NULL;
5252 		      src_eqv_volatile = 0;
5253 		      src_eqv_in_memory = 0;
5254 		      src_eqv_hash = 0;
5255 		      repeat = true;
5256 		      break;
5257 		    }
5258 		}
5259 	    }
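	  /* Worked example (BITS_BIG_ENDIAN taken as 0, values
	     hypothetical): given

	       (set (reg:SI 106) (const_int 0xab00))
	       (set (zero_extract:SI (reg:SI 106) (const_int 8) (const_int 0))
		    (reg:SI 107))

	     where (reg:SI 107) is known equal to (const_int 0xcd), we get
	     shift = 0 and mask = 0xff, so

	       val = (0xab00 & ~0xff) | ((0xcd & 0xff) << 0) = 0xabcd

	     and the extract insn is rewritten as
	     (set (reg:SI 106) (const_int 0xabcd)).  */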
5260 
5261 	  /* We don't normally have an insn matching (set (pc) (pc)), so
5262 	     check for this separately here.  We will delete such an
5263 	     insn below.
5264 
5265 	     For other cases such as a table jump or conditional jump
5266 	     where we know the ultimate target, go ahead and replace the
5267 	     operand.  While that may not make a valid insn, we will
5268 	     reemit the jump below (and also insert any necessary
5269 	     barriers).  */
5270 	  if (n_sets == 1 && dest == pc_rtx
5271 	      && (trial == pc_rtx
5272 		  || (GET_CODE (trial) == LABEL_REF
5273 		      && ! condjump_p (insn))))
5274 	    {
5275 	      /* Don't substitute non-local labels, as this confuses the CFG.  */
5276 	      if (GET_CODE (trial) == LABEL_REF
5277 		  && LABEL_REF_NONLOCAL_P (trial))
5278 		continue;
5279 
5280 	      SET_SRC (sets[i].rtl) = trial;
5281 	      cse_jumps_altered = true;
5282 	      break;
5283 	    }
5284 
5285 	  /* Similarly, lots of targets don't allow no-op
5286 	     (set (mem x) (mem x)) moves.  */
5287 	  else if (n_sets == 1
5288 		   && MEM_P (trial)
5289 		   && MEM_P (dest)
5290 		   && rtx_equal_p (trial, dest)
5291 		   && !side_effects_p (dest)
5292 		   && (cfun->can_delete_dead_exceptions
5293 		       || insn_nothrow_p (insn)))
5294 	    {
5295 	      SET_SRC (sets[i].rtl) = trial;
5296 	      mem_noop_insn = true;
5297 	      break;
5298 	    }
5299 
5300 	  /* Reject certain invalid forms of CONST that we create.  */
5301 	  else if (CONSTANT_P (trial)
5302 		   && GET_CODE (trial) == CONST
5303 		   /* Reject cases that will cause decode_rtx_const to
5304 		      die.  On the alpha when simplifying a switch, we
5305 		      get (const (truncate (minus (label_ref)
5306 		      (label_ref)))).  */
5307 		   && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5308 		       /* Likewise on IA-64, except without the
5309 			  truncate.  */
5310 		       || (GET_CODE (XEXP (trial, 0)) == MINUS
5311 			   && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5312 			   && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5313 	    /* Do nothing for this case.  */
5314 	    ;
5315 
5316 	  /* Look for a substitution that makes a valid insn.  */
5317 	  else if (validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5318 					    trial, 0))
5319 	    {
5320 	      rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);
5321 
5322 	      /* The result of apply_change_group can be ignored; see
5323 		 canon_reg.  */
5324 
5325 	      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
5326 	      apply_change_group ();
5327 
5328 	      break;
5329 	    }
5330 
5331 	  /* If we previously found constant pool entries for
5332 	     constants and this is a constant, try making a
5333 	     pool entry.  Put it in src_folded unless we have already done
5334 	     so, since that is where it likely came from.  */
5335 
5336 	  else if (constant_pool_entries_cost
5337 		   && CONSTANT_P (trial)
5338 		   && (src_folded == 0
5339 		       || (!MEM_P (src_folded)
5340 			   && ! src_folded_force_flag))
5341 		   && GET_MODE_CLASS (mode) != MODE_CC
5342 		   && mode != VOIDmode)
5343 	    {
5344 	      src_folded_force_flag = 1;
5345 	      src_folded = trial;
5346 	      src_folded_cost = constant_pool_entries_cost;
5347 	      src_folded_regcost = constant_pool_entries_regcost;
5348 	    }
5349 	}
5350 
5351       /* If we changed the insn too much, handle this set from scratch.  */
5352       if (repeat)
5353 	{
5354 	  i--;
5355 	  continue;
5356 	}
5357 
5358       src = SET_SRC (sets[i].rtl);
5359 
5360       /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5361 	 However, there is an important exception:  If both are registers
5362 	 that are not the head of their equivalence class, replace SET_SRC
5363 	 with the head of the class.  If we do not do this, we will have
5364 	 both registers live over a portion of the basic block.  This way,
5365 	 their lifetimes will likely abut instead of overlapping.  */
5366       if (REG_P (dest)
5367 	  && REGNO_QTY_VALID_P (REGNO (dest)))
5368 	{
5369 	  int dest_q = REG_QTY (REGNO (dest));
5370 	  struct qty_table_elem *dest_ent = &qty_table[dest_q];
5371 
5372 	  if (dest_ent->mode == GET_MODE (dest)
5373 	      && dest_ent->first_reg != REGNO (dest)
5374 	      && REG_P (src) && REGNO (src) == REGNO (dest)
5375 	      /* Don't do this if the original insn had a hard reg as
5376 		 SET_SRC or SET_DEST.  */
5377 	      && (!REG_P (sets[i].src)
5378 		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5379 	      && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5380 	    /* We can't call canon_reg here because it won't do anything if
5381 	       SRC is a hard register.  */
5382 	    {
5383 	      int src_q = REG_QTY (REGNO (src));
5384 	      struct qty_table_elem *src_ent = &qty_table[src_q];
5385 	      int first = src_ent->first_reg;
5386 	      rtx new_src
5387 		= (first >= FIRST_PSEUDO_REGISTER
5388 		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5389 
5390 	      /* We must use validate-change even for this, because this
5391 		 might be a special no-op instruction, suitable only to
5392 		 tag notes onto.  */
5393 	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5394 		{
5395 		  src = new_src;
5396 		  /* If we had a constant that is cheaper than what we are now
5397 		     setting SRC to, use that constant.  We ignored it when we
5398 		     thought we could make this into a no-op.  */
5399 		  if (src_const && COST (src_const, mode) < COST (src, mode)
5400 		      && validate_change (insn, &SET_SRC (sets[i].rtl),
5401 					  src_const, 0))
5402 		    src = src_const;
5403 		}
5404 	    }
5405 	}
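      /* Illustrative sketch: if pseudos 108 and 109 share a quantity whose
	 first_reg is 108, and substitution has turned this insn into
	 (set (reg:SI 109) (reg:SI 109)), it is rewritten above as
	 (set (reg:SI 109) (reg:SI 108)); reg 109 then dies here rather
	 than staying live alongside reg 108 for the rest of the block.  */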
5406 
5407       /* If we made a change, recompute SRC values.  */
5408       if (src != sets[i].src)
5409 	{
5410 	  do_not_record = 0;
5411 	  hash_arg_in_memory = 0;
5412 	  sets[i].src = src;
5413 	  sets[i].src_hash = HASH (src, mode);
5414 	  sets[i].src_volatile = do_not_record;
5415 	  sets[i].src_in_memory = hash_arg_in_memory;
5416 	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5417 	}
5418 
5419       /* If this is a single SET, we are setting a register, and we have an
5420 	 equivalent constant, we want to add a REG_EQUAL note if the constant
5421 	 is different from the source.  We don't want to do it for a constant
5422 	 pseudo since verifying that this pseudo hasn't been eliminated is a
5423 	 pain; moreover such a note won't help anything.
5424 
5425 	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5426 	 which can be created for a reference to a compile time computable
5427 	 entry in a jump table.  */
5428       if (n_sets == 1
5429 	  && REG_P (dest)
5430 	  && src_const
5431 	  && !REG_P (src_const)
5432 	  && !(GET_CODE (src_const) == SUBREG
5433 	       && REG_P (SUBREG_REG (src_const)))
5434 	  && !(GET_CODE (src_const) == CONST
5435 	       && GET_CODE (XEXP (src_const, 0)) == MINUS
5436 	       && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5437 	       && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF)
5438 	  && !rtx_equal_p (src, src_const))
5439 	{
5440 	  /* Make sure that the rtx is not shared.  */
5441 	  src_const = copy_rtx (src_const);
5442 
5443 	  /* Record the actual constant value in a REG_EQUAL note,
5444 	     making a new one if one does not already exist.  */
5445 	  set_unique_reg_note (insn, REG_EQUAL, src_const);
5446 	  df_notes_rescan (insn);
5447 	}
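      /* Illustrative sketch: for (set (reg:SI 110) (plus:SI (reg:SI 111)
	 (const_int 1))) where reg 111 is known to hold (const_int 6),
	 src_const is (const_int 7), so the insn gains a note

	   (expr_list:REG_EQUAL (const_int 7) ...)

	 that records the value even after reg 111's contents change.  */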
5448 
5449       /* Now deal with the destination.  */
5450       do_not_record = 0;
5451 
5452       /* Look through SUBREG, ZERO_EXTRACT and STRICT_LOW_PART to the MEM or REG within.  */
5453       while (GET_CODE (dest) == SUBREG
5454 	     || GET_CODE (dest) == ZERO_EXTRACT
5455 	     || GET_CODE (dest) == STRICT_LOW_PART)
5456 	dest = XEXP (dest, 0);
5457 
5458       sets[i].inner_dest = dest;
5459 
5460       if (MEM_P (dest))
5461 	{
5462 #ifdef PUSH_ROUNDING
5463 	  /* Stack pushes invalidate the stack pointer.  */
5464 	  rtx addr = XEXP (dest, 0);
5465 	  if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5466 	      && XEXP (addr, 0) == stack_pointer_rtx)
5467 	    invalidate (stack_pointer_rtx, VOIDmode);
5468 #endif
5469 	  dest = fold_rtx (dest, insn);
5470 	}
5471 
5472       /* Compute the hash code of the destination now,
5473 	 before the effects of this instruction are recorded,
5474 	 since the register values used in the address computation
5475 	 are those before this instruction.  */
5476       sets[i].dest_hash = HASH (dest, mode);
5477 
5478       /* Don't enter a bit-field in the hash table
5479 	 because the value in it after the store
5480 	 may not equal what was stored, due to truncation.  */
5481 
5482       if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5483 	{
5484 	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5485 
5486 	  if (src_const != 0 && CONST_INT_P (src_const)
5487 	      && CONST_INT_P (width)
5488 	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5489 	      && ! (INTVAL (src_const)
5490 		    & (HOST_WIDE_INT_M1U << INTVAL (width))))
5491 	    /* Exception: if the value is constant,
5492 	       and it won't be truncated, record it.  */
5493 	    ;
5494 	  else
5495 	    {
5496 	      /* This is chosen so that the destination will be invalidated
5497 		 but no new value will be recorded.
5498 		 We must invalidate because sometimes constant
5499 		 values can be recorded for bitfields.  */
5500 	      sets[i].src_elt = 0;
5501 	      sets[i].src_volatile = 1;
5502 	      src_eqv = 0;
5503 	      src_eqv_elt = 0;
5504 	    }
5505 	}
5506 
5507       /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5508 	 the insn.  */
5509       else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5510 	{
5511 	  /* One less use of the label this insn used to jump to.  */
5512 	  cse_cfg_altered |= delete_insn_and_edges (insn);
5513 	  cse_jumps_altered = true;
5514 	  /* No more processing for this set.  */
5515 	  sets[i].rtl = 0;
5516 	}
5517 
5518       /* Similarly for no-op MEM moves.  */
5519       else if (mem_noop_insn)
5520 	{
5521 	  if (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
5522 	    cse_cfg_altered = true;
5523 	  cse_cfg_altered |= delete_insn_and_edges (insn);
5524 	  /* No more processing for this set.  */
5525 	  sets[i].rtl = 0;
5526 	}
5527 
5528       /* If this SET is now setting PC to a label, we know it used to
5529 	 be a conditional or computed branch.  */
5530       else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5531 	       && !LABEL_REF_NONLOCAL_P (src))
5532 	{
5533 	  /* We reemit the jump in as many cases as possible just in
5534 	     case the form of an unconditional jump is significantly
5535 	     different from that of a computed jump or conditional jump.
5536 
5537 	     If this insn has multiple sets, then reemitting the
5538 	     jump is nontrivial.  So instead we just force rerecognition
5539 	     and hope for the best.  */
5540 	  if (n_sets == 1)
5541 	    {
5542 	      rtx_jump_insn *new_rtx;
5543 	      rtx note;
5544 
5545 	      rtx_insn *seq = targetm.gen_jump (XEXP (src, 0));
5546 	      new_rtx = emit_jump_insn_before (seq, insn);
5547 	      JUMP_LABEL (new_rtx) = XEXP (src, 0);
5548 	      LABEL_NUSES (XEXP (src, 0))++;
5549 
5550 	      /* Make sure to copy over REG_NON_LOCAL_GOTO.  */
5551 	      note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5552 	      if (note)
5553 		{
5554 		  XEXP (note, 1) = NULL_RTX;
5555 		  REG_NOTES (new_rtx) = note;
5556 		}
5557 
5558 	      cse_cfg_altered |= delete_insn_and_edges (insn);
5559 	      insn = new_rtx;
5560 	    }
5561 	  else
5562 	    INSN_CODE (insn) = -1;
5563 
5564 	  /* Do not bother deleting any unreachable code, let jump do it.  */
5565 	  cse_jumps_altered = true;
5566 	  sets[i].rtl = 0;
5567 	}
5568 
5569       /* If destination is volatile, invalidate it and then do no further
5570 	 processing for this assignment.  */
5571 
5572       else if (do_not_record)
5573 	{
5574 	  invalidate_dest (dest);
5575 	  sets[i].rtl = 0;
5576 	}
5577 
5578       if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5579 	{
5580 	  do_not_record = 0;
5581 	  sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5582 	  if (do_not_record)
5583 	    {
5584 	      invalidate_dest (SET_DEST (sets[i].rtl));
5585 	      sets[i].rtl = 0;
5586 	    }
5587 	}
5588 
5589       /* If setting CC0, record what it was set to, or a constant, if it
5590 	 is equivalent to a constant.  If it is being set to a floating-point
5591 	 value, make a COMPARE with the appropriate constant of 0.  If we
5592 	 don't do this, later code can interpret this as a test against
5593 	 const0_rtx, which can cause problems if we try to put it into an
5594 	 insn as a floating-point operand.  */
5595       if (dest == cc0_rtx)
5596 	{
5597 	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5598 	  this_insn_cc0_mode = mode;
5599 	  if (FLOAT_MODE_P (mode))
5600 	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5601 					     CONST0_RTX (mode));
5602 	}
5603     }
5604 
5605   /* Now enter all non-volatile source expressions in the hash table
5606      if they are not already present.
5607      Record their equivalence classes in src_elt.
5608      This way we can insert the corresponding destinations into
5609      the same classes even if the actual sources are no longer in them
5610      (having been invalidated).  */
5611 
5612   if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5613       && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5614     {
5615       struct table_elt *elt;
5616       struct table_elt *classp = sets[0].src_elt;
5617       rtx dest = SET_DEST (sets[0].rtl);
5618       machine_mode eqvmode = GET_MODE (dest);
5619 
5620       if (GET_CODE (dest) == STRICT_LOW_PART)
5621 	{
5622 	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5623 	  classp = 0;
5624 	}
5625       if (insert_regs (src_eqv, classp, 0))
5626 	{
5627 	  rehash_using_reg (src_eqv);
5628 	  src_eqv_hash = HASH (src_eqv, eqvmode);
5629 	}
5630       elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5631       elt->in_memory = src_eqv_in_memory;
5632       src_eqv_elt = elt;
5633 
5634       /* Check to see if src_eqv_elt is the same as a set source which
5635 	 does not yet have an elt, and if so set the elt of the set source
5636 	 to src_eqv_elt.  */
5637       for (i = 0; i < n_sets; i++)
5638 	if (sets[i].rtl && sets[i].src_elt == 0
5639 	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5640 	  sets[i].src_elt = src_eqv_elt;
5641     }
5642 
5643   for (i = 0; i < n_sets; i++)
5644     if (sets[i].rtl && ! sets[i].src_volatile
5645 	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5646       {
5647 	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5648 	  {
5649 	    /* REG_EQUAL in setting a STRICT_LOW_PART
5650 	       gives an equivalent for the entire destination register,
5651 	       not just for the subreg being stored in now.
5652 	       This is a more interesting equivalence, so we arrange later
5653 	       to treat the entire reg as the destination.  */
5654 	    sets[i].src_elt = src_eqv_elt;
5655 	    sets[i].src_hash = src_eqv_hash;
5656 	  }
5657 	else
5658 	  {
5659 	    /* Insert source and constant equivalent into hash table, if not
5660 	       already present.  */
5661 	    struct table_elt *classp = src_eqv_elt;
5662 	    rtx src = sets[i].src;
5663 	    rtx dest = SET_DEST (sets[i].rtl);
5664 	    machine_mode mode
5665 	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5666 
5667 	    /* It's possible that we have a source value known to be
5668 	       constant but don't have a REG_EQUAL note on the insn.
5669 	       Lack of a note will mean src_eqv_elt will be NULL.  This
5670 	       can happen where we've generated a SUBREG to access a
5671 	       CONST_INT that is already in a register in a wider mode.
5672 	       Ensure that the source expression is put in the proper
5673 	       constant class.  */
5674 	    if (!classp)
5675 	      classp = sets[i].src_const_elt;
5676 
5677 	    if (sets[i].src_elt == 0)
5678 	      {
5679 		struct table_elt *elt;
5680 
5681 		/* Note that these insert_regs calls cannot remove
5682 		   any of the src_elt's, because they would have failed to
5683 		   match if not still valid.  */
5684 		if (insert_regs (src, classp, 0))
5685 		  {
5686 		    rehash_using_reg (src);
5687 		    sets[i].src_hash = HASH (src, mode);
5688 		  }
5689 		elt = insert (src, classp, sets[i].src_hash, mode);
5690 		elt->in_memory = sets[i].src_in_memory;
5691 		/* If inline asm has any clobbers, ensure we only reuse
5692 		   existing inline asms and never try to put the ASM_OPERANDS
5693 		   into an insn that isn't inline asm.  */
5694 		if (GET_CODE (src) == ASM_OPERANDS
5695 		    && GET_CODE (x) == PARALLEL)
5696 		  elt->cost = MAX_COST;
5697 		sets[i].src_elt = classp = elt;
5698 	      }
5699 	    if (sets[i].src_const && sets[i].src_const_elt == 0
5700 		&& src != sets[i].src_const
5701 		&& ! rtx_equal_p (sets[i].src_const, src))
5702 	      sets[i].src_elt = insert (sets[i].src_const, classp,
5703 					sets[i].src_const_hash, mode);
5704 	  }
5705       }
5706     else if (sets[i].src_elt == 0)
5707       /* If we did not insert the source into the hash table (e.g., it was
5708 	 volatile), note the equivalence class for the REG_EQUAL value, if any,
5709 	 so that the destination goes into that class.  */
5710       sets[i].src_elt = src_eqv_elt;
5711 
5712   /* Record destination addresses in the hash table.  This allows us to
5713      check if they are invalidated by other sets.  */
5714   for (i = 0; i < n_sets; i++)
5715     {
5716       if (sets[i].rtl)
5717 	{
5718 	  rtx x = sets[i].inner_dest;
5719 	  struct table_elt *elt;
5720 	  machine_mode mode;
5721 	  unsigned hash;
5722 
5723 	  if (MEM_P (x))
5724 	    {
5725 	      x = XEXP (x, 0);
5726 	      mode = GET_MODE (x);
5727 	      hash = HASH (x, mode);
5728 	      elt = lookup (x, hash, mode);
5729 	      if (!elt)
5730 		{
5731 		  if (insert_regs (x, NULL, 0))
5732 		    {
5733 		      rtx dest = SET_DEST (sets[i].rtl);
5734 
5735 		      rehash_using_reg (x);
5736 		      hash = HASH (x, mode);
5737 		      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5738 		    }
5739 		  elt = insert (x, NULL, hash, mode);
5740 		}
5741 
5742 	      sets[i].dest_addr_elt = elt;
5743 	    }
5744 	  else
5745 	    sets[i].dest_addr_elt = NULL;
5746 	}
5747     }
5748 
5749   invalidate_from_clobbers (insn);
5750 
5751   /* Some registers are invalidated by subroutine calls.  Memory is
5752      invalidated by non-constant calls.  */
5753 
5754   if (CALL_P (insn))
5755     {
5756       if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
5757 	invalidate_memory ();
5758       else
5759 	/* For const/pure calls, invalidate any argument slots, because
5760 	   those are owned by the callee.  */
5761 	for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5762 	  if (GET_CODE (XEXP (tem, 0)) == USE
5763 	      && MEM_P (XEXP (XEXP (tem, 0), 0)))
5764 	    invalidate (XEXP (XEXP (tem, 0), 0), VOIDmode);
5765       invalidate_for_call ();
5766     }
5767 
5768   /* Now invalidate everything set by this instruction.
5769      If a SUBREG or other funny destination is being set,
5770      sets[i].rtl is still nonzero, so here we invalidate the reg
5771      a part of which is being set.  */
5772 
5773   for (i = 0; i < n_sets; i++)
5774     if (sets[i].rtl)
5775       {
5776 	/* We can't use the inner dest, because the mode associated with
5777 	   a ZERO_EXTRACT is significant.  */
5778 	rtx dest = SET_DEST (sets[i].rtl);
5779 
5780 	/* Needed for registers to remove the register from its
5781 	   previous quantity's chain.
5782 	   Needed for memory if this is a nonvarying address, unless
5783 	   we have just done an invalidate_memory that covers even those.  */
5784 	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5785 	  invalidate (dest, VOIDmode);
5786 	else if (MEM_P (dest))
5787 	  invalidate (dest, VOIDmode);
5788 	else if (GET_CODE (dest) == STRICT_LOW_PART
5789 		 || GET_CODE (dest) == ZERO_EXTRACT)
5790 	  invalidate (XEXP (dest, 0), GET_MODE (dest));
5791       }
5792 
5793   /* Don't cse over a call to setjmp; on some machines (eg VAX)
5794      the regs restored by the longjmp come from a later time
5795      than the setjmp.  */
5796   if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
5797     {
5798       flush_hash_table ();
5799       goto done;
5800     }
5801 
5802   /* Make sure registers mentioned in destinations
5803      are safe for use in an expression to be inserted.
5804      This removes from the hash table
5805      any invalid entry that refers to one of these registers.
5806 
5807      We don't care about the return value from mention_regs because
5808      we are going to hash the SET_DEST values unconditionally.  */
5809 
5810   for (i = 0; i < n_sets; i++)
5811     {
5812       if (sets[i].rtl)
5813 	{
5814 	  rtx x = SET_DEST (sets[i].rtl);
5815 
5816 	  if (!REG_P (x))
5817 	    mention_regs (x);
5818 	  else
5819 	    {
5820 	      /* We used to rely on all references to a register becoming
5821 		 inaccessible when a register changes to a new quantity,
5822 		 since that changes the hash code.  However, that is not
5823 		 safe, since after HASH_SIZE new quantities we get a
5824 		 hash 'collision' of a register with its own invalid
5825 		 entries.  And since SUBREGs have been changed not to
5826 		 change their hash code with the hash code of the register,
5827 		 it wouldn't work any longer at all.  So we have to check
5828 		 for any invalid references lying around now.
5829 		 This code is similar to the REG case in mention_regs,
5830 		 but it knows that reg_tick has been incremented, and
5831 		 it leaves reg_in_table as -1.  */
5832 	      unsigned int regno = REGNO (x);
5833 	      unsigned int endregno = END_REGNO (x);
5834 	      unsigned int i;
5835 
5836 	      for (i = regno; i < endregno; i++)
5837 		{
5838 		  if (REG_IN_TABLE (i) >= 0)
5839 		    {
5840 		      remove_invalid_refs (i);
5841 		      REG_IN_TABLE (i) = -1;
5842 		    }
5843 		}
5844 	    }
5845 	}
5846     }
5847 
5848   /* We may have just removed some of the src_elt's from the hash table.
5849      So replace each one with the current head of the same class.
5850      Also check if destination addresses have been removed.  */
5851 
5852   for (i = 0; i < n_sets; i++)
5853     if (sets[i].rtl)
5854       {
5855 	if (sets[i].dest_addr_elt
5856 	    && sets[i].dest_addr_elt->first_same_value == 0)
5857 	  {
5858 	    /* The elt was removed, which means this destination is not
5859 	       valid after this instruction.  */
5860 	    sets[i].rtl = NULL_RTX;
5861 	  }
5862 	else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5863 	  /* If elt was removed, find current head of same class,
5864 	     or 0 if nothing remains of that class.  */
5865 	  {
5866 	    struct table_elt *elt = sets[i].src_elt;
5867 
5868 	    while (elt && elt->prev_same_value)
5869 	      elt = elt->prev_same_value;
5870 
5871 	    while (elt && elt->first_same_value == 0)
5872 	      elt = elt->next_same_value;
5873 	    sets[i].src_elt = elt ? elt->first_same_value : 0;
5874 	  }
5875       }
5876 
5877   /* Now insert the destinations into their equivalence classes.  */
5878 
5879   for (i = 0; i < n_sets; i++)
5880     if (sets[i].rtl)
5881       {
5882 	rtx dest = SET_DEST (sets[i].rtl);
5883 	struct table_elt *elt;
5884 
5885 	/* Don't record value if we are not supposed to risk allocating
5886 	   floating-point values in registers that might be wider than
5887 	   memory.  */
5888 	if ((flag_float_store
5889 	     && MEM_P (dest)
5890 	     && FLOAT_MODE_P (GET_MODE (dest)))
5891 	    /* Don't record BLKmode values, because we don't know the
5892 	       size of it, and can't be sure that other BLKmode values
5893 	       have the same or smaller size.  */
5894 	    || GET_MODE (dest) == BLKmode
5895 	    /* If we didn't put a REG_EQUAL value or a source into the hash
5896 	       table, there is no point in recording DEST.  */
5897 	    || sets[i].src_elt == 0)
5898 	  continue;
5899 
5900 	/* STRICT_LOW_PART isn't part of the value BEING set,
5901 	   and neither is the SUBREG inside it.
5902 	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
5903 	if (GET_CODE (dest) == STRICT_LOW_PART)
5904 	  dest = SUBREG_REG (XEXP (dest, 0));
5905 
5906 	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5907 	  /* Registers must also be inserted into chains for quantities.  */
5908 	  if (insert_regs (dest, sets[i].src_elt, 1))
5909 	    {
5910 	      /* If `insert_regs' changes something, the hash code must be
5911 		 recalculated.  */
5912 	      rehash_using_reg (dest);
5913 	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5914 	    }
5915 
5916 	/* If DEST is a paradoxical SUBREG, don't record DEST since the bits
5917 	   outside the mode of GET_MODE (SUBREG_REG (dest)) are undefined.  */
5918 	if (paradoxical_subreg_p (dest))
5919 	  continue;
5920 
5921 	elt = insert (dest, sets[i].src_elt,
5922 		      sets[i].dest_hash, GET_MODE (dest));
5923 
5924 	/* If this is a constant, insert the constant anchors with the
5925 	   equivalent register-offset expressions using register DEST.  */
5926 	if (targetm.const_anchor
5927 	    && REG_P (dest)
5928 	    && SCALAR_INT_MODE_P (GET_MODE (dest))
5929 	    && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
5930 	  insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));
5931 
5932 	elt->in_memory = (MEM_P (sets[i].inner_dest)
5933 			  && !MEM_READONLY_P (sets[i].inner_dest));
5934 
5935 	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5936 	   narrower than M2, and both M1 and M2 are the same number of words,
5937 	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5938 	   make that equivalence as well.
5939 
5940 	   However, BAR may have equivalences for which gen_lowpart
5941 	   will produce a simpler value than gen_lowpart applied to
5942 	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5943 	   BAR's equivalences.  If we don't get a simplified form, make
5944 	   the SUBREG.  It will not be used in an equivalence, but will
5945 	   cause two similar assignments to be detected.
5946 
5947 	   Note the loop below will find SUBREG_REG (DEST) since we have
5948 	   already entered SRC and DEST of the SET in the table.  */
5949 
5950 	if (GET_CODE (dest) == SUBREG
5951 	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5952 		 / UNITS_PER_WORD)
5953 		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
5954 	    && (GET_MODE_SIZE (GET_MODE (dest))
5955 		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5956 	    && sets[i].src_elt != 0)
5957 	  {
5958 	    machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5959 	    struct table_elt *elt, *classp = 0;
5960 
5961 	    for (elt = sets[i].src_elt->first_same_value; elt;
5962 		 elt = elt->next_same_value)
5963 	      {
5964 		rtx new_src = 0;
5965 		unsigned src_hash;
5966 		struct table_elt *src_elt;
5967 		int byte = 0;
5968 
5969 		/* Ignore invalid entries.  */
5970 		if (!REG_P (elt->exp)
5971 		    && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5972 		  continue;
5973 
5974 		/* We may have already been playing subreg games.  If the
5975 		   mode is already correct for the destination, use it.  */
5976 		if (GET_MODE (elt->exp) == new_mode)
5977 		  new_src = elt->exp;
5978 		else
5979 		  {
5980 		    /* Calculate big endian correction for the SUBREG_BYTE.
5981 		       We have already checked that M1 (GET_MODE (dest))
5982 		       is not narrower than M2 (new_mode).  */
5983 		    if (BYTES_BIG_ENDIAN)
5984 		      byte = (GET_MODE_SIZE (GET_MODE (dest))
5985 			      - GET_MODE_SIZE (new_mode));
5986 
5987 		    new_src = simplify_gen_subreg (new_mode, elt->exp,
5988 					           GET_MODE (dest), byte);
5989 		  }
5990 
5991 		/* The call to simplify_gen_subreg fails if the value
5992 		   is VOIDmode and no simplification is possible, e.g.
5993 		   for EXPR_LISTs denoting function call results.
5994 		   It is invalid to construct a SUBREG with a VOIDmode
5995 		   SUBREG_REG, hence a zero new_src means we can't do
5996 		   this substitution.  */
5997 		if (! new_src)
5998 		  continue;
5999 
6000 		src_hash = HASH (new_src, new_mode);
6001 		src_elt = lookup (new_src, src_hash, new_mode);
6002 
6003 		/* Put the new source in the hash table if it isn't
6004 		   there already.  */
6005 		if (src_elt == 0)
6006 		  {
6007 		    if (insert_regs (new_src, classp, 0))
6008 		      {
6009 			rehash_using_reg (new_src);
6010 			src_hash = HASH (new_src, new_mode);
6011 		      }
6012 		    src_elt = insert (new_src, classp, src_hash, new_mode);
6013 		    src_elt->in_memory = elt->in_memory;
6014 		    if (GET_CODE (new_src) == ASM_OPERANDS
6015 			&& elt->cost == MAX_COST)
6016 		      src_elt->cost = MAX_COST;
6017 		  }
6018 		else if (classp && classp != src_elt->first_same_value)
6019 		  /* Show that two things that we've seen before are
6020 		     actually the same.  */
6021 		  merge_equiv_classes (src_elt, classp);
6022 
6023 		classp = src_elt->first_same_value;
6024 		/* Ignore invalid entries.  */
6025 		while (classp
6026 		       && !REG_P (classp->exp)
6027 		       && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6028 		  classp = classp->next_same_value;
6029 	      }
6030 	  }
6031       }
6032 
6033   /* Special handling for (set REG0 REG1) where REG0 is the
6034      "cheapest", cheaper than REG1.  After cse, REG1 will probably not
6035      be used in the sequel, so (if easily done) change this insn to
6036      (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6037      that computed their value.  Then REG1 will become a dead store
6038      and won't cloud the situation for later optimizations.
6039 
6040      Do not make this change if REG1 is a hard register, because it will
6041      then be used in the sequel and we may be changing a two-operand insn
6042      into a three-operand insn.
6043 
6044      Also do not do this if we are operating on a copy of INSN.  */
6045 
6046   if (n_sets == 1 && sets[0].rtl)
6047     try_back_substitute_reg (sets[0].rtl, insn);
6048 
6049 done:;
6050 }
6051 
6052 /* Remove from the hash table all expressions that reference memory.  */
6053 
6054 static void
6055 invalidate_memory (void)
6056 {
6057   int i;
6058   struct table_elt *p, *next;
6059 
6060   for (i = 0; i < HASH_SIZE; i++)
6061     for (p = table[i]; p; p = next)
6062       {
6063 	next = p->next_same_hash;
6064 	if (p->in_memory)
6065 	  remove_from_table (p, i);
6066       }
6067 }
6068 
6069 /* Perform invalidation on the basis of everything about INSN,
6070    except for invalidating the actual places that are SET in it.
6071    This includes the places CLOBBERed, and anything that might
6072    alias with something that is SET or CLOBBERed.  */
6073 
6074 static void
6075 invalidate_from_clobbers (rtx_insn *insn)
6076 {
6077   rtx x = PATTERN (insn);
6078 
6079   if (GET_CODE (x) == CLOBBER)
6080     {
6081       rtx ref = XEXP (x, 0);
6082       if (ref)
6083 	{
6084 	  if (REG_P (ref) || GET_CODE (ref) == SUBREG
6085 	      || MEM_P (ref))
6086 	    invalidate (ref, VOIDmode);
6087 	  else if (GET_CODE (ref) == STRICT_LOW_PART
6088 		   || GET_CODE (ref) == ZERO_EXTRACT)
6089 	    invalidate (XEXP (ref, 0), GET_MODE (ref));
6090 	}
6091     }
6092   else if (GET_CODE (x) == PARALLEL)
6093     {
6094       int i;
6095       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6096 	{
6097 	  rtx y = XVECEXP (x, 0, i);
6098 	  if (GET_CODE (y) == CLOBBER)
6099 	    {
6100 	      rtx ref = XEXP (y, 0);
6101 	      if (REG_P (ref) || GET_CODE (ref) == SUBREG
6102 		  || MEM_P (ref))
6103 		invalidate (ref, VOIDmode);
6104 	      else if (GET_CODE (ref) == STRICT_LOW_PART
6105 		       || GET_CODE (ref) == ZERO_EXTRACT)
6106 		invalidate (XEXP (ref, 0), GET_MODE (ref));
6107 	    }
6108 	}
6109     }
6110 }
6111 
6112 /* Perform invalidation on the basis of everything about INSN.
6113    This includes the places CLOBBERed, and anything that might
6114    alias with something that is SET or CLOBBERed.  */
6115 
6116 static void
6117 invalidate_from_sets_and_clobbers (rtx_insn *insn)
6118 {
6119   rtx tem;
6120   rtx x = PATTERN (insn);
6121 
6122   if (CALL_P (insn))
6123     {
6124       for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6125 	if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6126 	  invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6127     }
6128 
6129   /* Ensure we invalidate the destination register of a CALL insn.
6130      This is necessary for machines where this register is a fixed_reg,
6131      because no other code would invalidate it.  */
6132   if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
6133     invalidate (SET_DEST (x), VOIDmode);
6134 
6135   else if (GET_CODE (x) == PARALLEL)
6136     {
6137       int i;
6138 
6139       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6140 	{
6141 	  rtx y = XVECEXP (x, 0, i);
6142 	  if (GET_CODE (y) == CLOBBER)
6143 	    {
6144 	      rtx clobbered = XEXP (y, 0);
6145 
6146 	      if (REG_P (clobbered)
6147 		  || GET_CODE (clobbered) == SUBREG)
6148 		invalidate (clobbered, VOIDmode);
6149 	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
6150 		       || GET_CODE (clobbered) == ZERO_EXTRACT)
6151 		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6152 	    }
6153 	  else if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
6154 	    invalidate (SET_DEST (y), VOIDmode);
6155 	}
6156     }
6157 }
6158 
6159 /* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
6160    and replace any registers in them with either an equivalent constant
6161    or the canonical form of the register.  If we are inside an address,
6162    only do this if the address remains valid.
6163 
6164    OBJECT is 0 except when within a MEM in which case it is the MEM.
6165 
6166    Return the replacement for X.  */
6167 
6168 static rtx
6169 cse_process_notes_1 (rtx x, rtx object, bool *changed)
6170 {
6171   enum rtx_code code = GET_CODE (x);
6172   const char *fmt = GET_RTX_FORMAT (code);
6173   int i;
6174 
6175   switch (code)
6176     {
6177     case CONST:
6178     case SYMBOL_REF:
6179     case LABEL_REF:
6180     CASE_CONST_ANY:
6181     case PC:
6182     case CC0:
6183     case LO_SUM:
6184       return x;
6185 
6186     case MEM:
6187       validate_change (x, &XEXP (x, 0),
6188 		       cse_process_notes (XEXP (x, 0), x, changed), 0);
6189       return x;
6190 
6191     case EXPR_LIST:
6192       if (REG_NOTE_KIND (x) == REG_EQUAL)
6193 	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
6194       /* Fall through.  */
6195 
6196     case INSN_LIST:
6197     case INT_LIST:
6198       if (XEXP (x, 1))
6199 	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
6200       return x;
6201 
6202     case SIGN_EXTEND:
6203     case ZERO_EXTEND:
6204     case SUBREG:
6205       {
6206 	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
6207 	/* We don't substitute VOIDmode constants into these rtx,
6208 	   since they would impede folding.  */
6209 	if (GET_MODE (new_rtx) != VOIDmode)
6210 	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
6211 	return x;
6212       }
6213 
6214     case UNSIGNED_FLOAT:
6215       {
6216 	rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
6217 	/* We don't substitute negative VOIDmode constants into these rtx,
6218 	   since they would impede folding.  */
6219 	if (GET_MODE (new_rtx) != VOIDmode
6220 	    || (CONST_INT_P (new_rtx) && INTVAL (new_rtx) >= 0)
6221 	    || (CONST_DOUBLE_P (new_rtx) && CONST_DOUBLE_HIGH (new_rtx) >= 0))
6222 	  validate_change (object, &XEXP (x, 0), new_rtx, 0);
6223 	return x;
6224       }
6225 
6226     case REG:
6227       i = REG_QTY (REGNO (x));
6228 
6229       /* Return a constant or a constant register.  */
6230       if (REGNO_QTY_VALID_P (REGNO (x)))
6231 	{
6232 	  struct qty_table_elem *ent = &qty_table[i];
6233 
6234 	  if (ent->const_rtx != NULL_RTX
6235 	      && (CONSTANT_P (ent->const_rtx)
6236 		  || REG_P (ent->const_rtx)))
6237 	    {
6238 	      rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
6239 	      if (new_rtx)
6240 		return copy_rtx (new_rtx);
6241 	    }
6242 	}
6243 
6244       /* Otherwise, canonicalize this register.  */
6245       return canon_reg (x, NULL);
6246 
6247     default:
6248       break;
6249     }
6250 
6251   for (i = 0; i < GET_RTX_LENGTH (code); i++)
6252     if (fmt[i] == 'e')
6253       validate_change (object, &XEXP (x, i),
6254 		       cse_process_notes (XEXP (x, i), object, changed), 0);
6255 
6256   return x;
6257 }
6258 
6259 static rtx
6260 cse_process_notes (rtx x, rtx object, bool *changed)
6261 {
6262   rtx new_rtx = cse_process_notes_1 (x, object, changed);
6263   if (new_rtx != x)
6264     *changed = true;
6265   return new_rtx;
6266 }
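
/* Illustrative sketch for the two functions above (register number and
   values hypothetical): given a note
     (expr_list:REG_EQUAL (plus:SI (reg:SI 112) (const_int 4)) ...)
   where reg 112's quantity records (const_int 8), the REG case
   substitutes the constant, leaving (plus:SI (const_int 8) (const_int 4))
   in the note, which later folding can collapse to (const_int 12);
   *CHANGED is set so the caller knows the notes were rewritten.  */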
6267 
6268 
6269 /* Find a path in the CFG, starting with FIRST_BB to perform CSE on.
6270 
6271    DATA is a pointer to a struct cse_basic_block_data, which is used to
6272    describe the path.
6273    It is filled with a queue of basic blocks, starting with FIRST_BB
6274    and following a trace through the CFG.
6275 
6276    If all paths starting at FIRST_BB have been followed, or no new path
6277    starting at FIRST_BB can be constructed, this function returns FALSE.
6278    Otherwise, DATA->path is filled and the function returns TRUE indicating
6279    that a path to follow was found.
6280 
6281    If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
6282    block in the path will be FIRST_BB.  */
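
/* Illustrative sketch: for a diamond-shaped CFG

	 BB1
	/   \
      BB2   BB3
	\   /
	 BB4

   where BB1 ends in a conditional jump whose branch edge goes to BB2,
   the first call returns the path [BB1, BB2]; the next call flips that
   branch to the fallthru edge and returns [BB1, BB3].  BB4 is never
   appended to either path because it has two predecessors.  */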
6283 
6284 static bool
6285 cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
6286 	       int follow_jumps)
6287 {
6288   basic_block bb;
6289   edge e;
6290   int path_size;
6291 
6292   bitmap_set_bit (cse_visited_basic_blocks, first_bb->index);
6293 
6294   /* See if there is a previous path.  */
6295   path_size = data->path_size;
6296 
6297   /* There is a previous path.  Make sure it started with FIRST_BB.  */
6298   if (path_size)
6299     gcc_assert (data->path[0].bb == first_bb);
6300 
6301   /* There was only one basic block in the last path.  Clear the path and
6302      return, so that paths starting at another basic block can be tried.  */
6303   if (path_size == 1)
6304     {
6305       path_size = 0;
6306       goto done;
6307     }
6308 
6309   /* If the path was empty from the beginning, construct a new path.  */
6310   if (path_size == 0)
6311     data->path[path_size++].bb = first_bb;
6312   else
6313     {
6314       /* Otherwise, path_size must be equal to or greater than 2, because
6315 	 a previous path exists that is at least two basic blocks long.
6316 
6317 	 Update the previous branch path, if any.  If the last branch was
6318 	 previously along the branch edge, take the fallthrough edge now.  */
6319       while (path_size >= 2)
6320 	{
6321 	  basic_block last_bb_in_path, previous_bb_in_path;
6322 	  edge e;
6323 
6324 	  --path_size;
6325 	  last_bb_in_path = data->path[path_size].bb;
6326 	  previous_bb_in_path = data->path[path_size - 1].bb;
6327 
6328 	  /* If we previously followed a path along the branch edge, try
6329 	     the fallthru edge now.  */
6330 	  if (EDGE_COUNT (previous_bb_in_path->succs) == 2
6331 	      && any_condjump_p (BB_END (previous_bb_in_path))
6332 	      && (e = find_edge (previous_bb_in_path, last_bb_in_path))
6333 	      && e == BRANCH_EDGE (previous_bb_in_path))
6334 	    {
6335 	      bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
6336 	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
6337 		  && single_pred_p (bb)
6338 		  /* We used to assert here that we would only see blocks
6339 		     that we have not visited yet.  But we may end up
6340 		     visiting basic blocks twice if the CFG has changed
6341 		     in this run of cse_main, because when the CFG changes
6342 		     the topological sort of the CFG also changes.  A basic
6343 		     block that previously had multiple predecessors may now
6344 		     have a single predecessor, and become part of a path
6345 		     that starts at another basic block.
6346 
6347 		     We still want to visit each basic block only once, so
6348 		     halt the path here if we have already visited BB.  */
6349 		  && !bitmap_bit_p (cse_visited_basic_blocks, bb->index))
6350 		{
6351 		  bitmap_set_bit (cse_visited_basic_blocks, bb->index);
6352 		  data->path[path_size++].bb = bb;
6353 		  break;
6354 		}
6355 	    }
6356 
6357 	  data->path[path_size].bb = NULL;
6358 	}
6359 
6360       /* If only one block remains in the path, bail.  */
6361       if (path_size == 1)
6362 	{
6363 	  path_size = 0;
6364 	  goto done;
6365 	}
6366     }
6367 
6368   /* Extend the path if possible.  */
6369   if (follow_jumps)
6370     {
6371       bb = data->path[path_size - 1].bb;
6372       while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
6373 	{
6374 	  if (single_succ_p (bb))
6375 	    e = single_succ_edge (bb);
6376 	  else if (EDGE_COUNT (bb->succs) == 2
6377 		   && any_condjump_p (BB_END (bb)))
6378 	    {
6379 	      /* First try to follow the branch.  If that doesn't lead
6380 		 to a useful path, follow the fallthru edge.  */
6381 	      e = BRANCH_EDGE (bb);
6382 	      if (!single_pred_p (e->dest))
6383 		e = FALLTHRU_EDGE (bb);
6384 	    }
6385 	  else
6386 	    e = NULL;
6387 
6388 	  if (e
6389 	      && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
6390 	      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
6391 	      && single_pred_p (e->dest)
6392 	      /* Avoid visiting basic blocks twice.  The large comment
6393 		 above explains why this can happen.  */
6394 	      && !bitmap_bit_p (cse_visited_basic_blocks, e->dest->index))
6395 	    {
6396 	      basic_block bb2 = e->dest;
6397 	      bitmap_set_bit (cse_visited_basic_blocks, bb2->index);
6398 	      data->path[path_size++].bb = bb2;
6399 	      bb = bb2;
6400 	    }
6401 	  else
6402 	    bb = NULL;
6403 	}
6404     }
6405 
6406 done:
6407   data->path_size = path_size;
6408   return path_size != 0;
6409 }
6410 
6411 /* Dump the path in DATA to file F.  NSETS is the number of sets
6412    in the path.  */
6413 
6414 static void
6415 cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
6416 {
6417   int path_entry;
6418 
6419   fprintf (f, ";; Following path with %d sets: ", nsets);
6420   for (path_entry = 0; path_entry < data->path_size; path_entry++)
6421     fprintf (f, "%d ", (data->path[path_entry].bb)->index);
6422   fputc ('\n', f);
6423   fflush (f);
6424 }
6425 
6426 
6427 /* Return true if BB has exception handling successor edges.  */
6428 
6429 static bool
6430 have_eh_succ_edges (basic_block bb)
6431 {
6432   edge e;
6433   edge_iterator ei;
6434 
6435   FOR_EACH_EDGE (e, ei, bb->succs)
6436     if (e->flags & EDGE_EH)
6437       return true;
6438 
6439   return false;
6440 }
6441 
6442 
6443 /* Scan to the end of the path described by DATA.  Store an estimate of
6444    the total number of SETs of all insns in the path in DATA->nsets.  */
6445 
6446 static void
6447 cse_prescan_path (struct cse_basic_block_data *data)
6448 {
6449   int nsets = 0;
6450   int path_size = data->path_size;
6451   int path_entry;
6452 
6453   /* Scan to end of each basic block in the path.  */
6454   for (path_entry = 0; path_entry < path_size; path_entry++)
6455     {
6456       basic_block bb;
6457       rtx_insn *insn;
6458 
6459       bb = data->path[path_entry].bb;
6460 
6461       FOR_BB_INSNS (bb, insn)
6462 	{
6463 	  if (!INSN_P (insn))
6464 	    continue;
6465 
6466 	  /* A PARALLEL can have lots of SETs in it,
6467 	     especially if it is really an ASM_OPERANDS.  */
6468 	  if (GET_CODE (PATTERN (insn)) == PARALLEL)
6469 	    nsets += XVECLEN (PATTERN (insn), 0);
6470 	  else
6471 	    nsets += 1;
6472 	}
6473     }
6474 
6475   data->nsets = nsets;
6476 }
6477 
6478 /* Return true if the pattern of INSN uses a LABEL_REF for which
6479    there isn't a REG_LABEL_OPERAND note.  */
6480 
6481 static bool
6482 check_for_label_ref (rtx_insn *insn)
6483 {
6484   /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
6485      note for it, we must rerun jump since it needs to place the note.  If
6486      this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
6487      don't do this since no REG_LABEL_OPERAND will be added.  */
6488   subrtx_iterator::array_type array;
6489   FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
6490     {
6491       const_rtx x = *iter;
6492       if (GET_CODE (x) == LABEL_REF
6493 	  && !LABEL_REF_NONLOCAL_P (x)
6494 	  && (!JUMP_P (insn)
6495 	      || !label_is_jump_target_p (label_ref_label (x), insn))
6496 	  && LABEL_P (label_ref_label (x))
6497 	  && INSN_UID (label_ref_label (x)) != 0
6498 	  && !find_reg_note (insn, REG_LABEL_OPERAND, label_ref_label (x)))
6499 	return true;
6500     }
6501   return false;
6502 }
6503 
6504 /* Process a single extended basic block described by EBB_DATA.  */
6505 
6506 static void
6507 cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
6508 {
6509   int path_size = ebb_data->path_size;
6510   int path_entry;
6511   int num_insns = 0;
6512 
6513   /* Allocate the space needed by qty_table.  */
6514   qty_table = XNEWVEC (struct qty_table_elem, max_qty);
6515 
6516   new_basic_block ();
6517   cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
6518   cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
6519   for (path_entry = 0; path_entry < path_size; path_entry++)
6520     {
6521       basic_block bb;
6522       rtx_insn *insn;
6523 
6524       bb = ebb_data->path[path_entry].bb;
6525 
6526       /* Invalidate recorded information for eh regs if there is an EH
6527 	 edge pointing to that bb.  */
6528       if (bb_has_eh_pred (bb))
6529 	{
6530 	  df_ref def;
6531 
6532 	  FOR_EACH_ARTIFICIAL_DEF (def, bb->index)
6533 	    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
6534 	      invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
6535 	}
6536 
6537       optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
6538       FOR_BB_INSNS (bb, insn)
6539 	{
6540 	  /* If we have processed PARAM_MAX_CSE_INSNS insns, flush the hash
6541 	     table to avoid extreme quadratic behavior.  We must not include NOTEs
6542 	     in the count since there may be more of them when generating
6543 	     debugging information.  If we clear the table at different
6544 	     times, code generated with -g -O might be different than code
6545 	     generated with -O but not -g.
6546 
6547 	     FIXME: This is a real kludge and needs to be done some other
6548 		    way.  */
6549 	  if (NONDEBUG_INSN_P (insn)
6550 	      && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
6551 	    {
6552 	      flush_hash_table ();
6553 	      num_insns = 0;
6554 	    }
6555 
6556 	  if (INSN_P (insn))
6557 	    {
6558 	      /* Process notes first so we have all notes in canonical forms
6559 		 when looking for duplicate operations.  */
6560 	      if (REG_NOTES (insn))
6561 		{
6562 		  bool changed = false;
6563 		  REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
6564 						        NULL_RTX, &changed);
6565 		  if (changed)
6566 		    df_notes_rescan (insn);
6567 		}
6568 
6569 	      cse_insn (insn);
6570 
6571 	      /* If we haven't already found an insn where we added a LABEL_REF,
6572 		 check this one.  */
6573 	      if (!recorded_label_ref
6574 		  && check_for_label_ref (insn))
6575 		recorded_label_ref = true;
6576 
6577 	      if (HAVE_cc0 && NONDEBUG_INSN_P (insn))
6578 		{
6579 		  /* If the previous insn sets CC0 and this insn no
6580 		     longer references CC0, delete the previous insn.
6581 		     Here we use the fact that nothing expects CC0 to be
6582 		     valid over an insn, which is true until the final
6583 		     pass.  */
6584 		  rtx_insn *prev_insn;
6585 		  rtx tem;
6586 
6587 		  prev_insn = prev_nonnote_nondebug_insn (insn);
6588 		  if (prev_insn && NONJUMP_INSN_P (prev_insn)
6589 		      && (tem = single_set (prev_insn)) != NULL_RTX
6590 		      && SET_DEST (tem) == cc0_rtx
6591 		      && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
6592 		    delete_insn (prev_insn);
6593 
6594 		  /* If this insn is not the last insn in the basic
6595 		     block, it will be PREV_INSN(insn) in the next
6596 		     iteration.  If we recorded any CC0-related
6597 		     information for this insn, remember it.  */
6598 		  if (insn != BB_END (bb))
6599 		    {
6600 		      prev_insn_cc0 = this_insn_cc0;
6601 		      prev_insn_cc0_mode = this_insn_cc0_mode;
6602 		    }
6603 		}
6604 	    }
6605 	}
6606 
6607       /* With non-call exceptions, we are not always able to update
6608 	 the CFG properly inside cse_insn.  So clean up possibly
6609 	 redundant EH edges here.  */
6610       if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
6611 	cse_cfg_altered |= purge_dead_edges (bb);
6612 
6613       /* If we changed a conditional jump, we may have terminated
6614 	 the path we are following.  Check that by verifying that
6615 	 the edge we would take still exists.  If the edge does
6616 	 not exist anymore, purge the remainder of the path.
6617 	 Note that this will cause us to return to the caller.  */
6618       if (path_entry < path_size - 1)
6619 	{
6620 	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6621 	  if (!find_edge (bb, next_bb))
6622 	    {
6623 	      do
6624 		{
6625 		  path_size--;
6626 
6627 		  /* If we truncate the path, we must also reset the
6628 		     visited bit on the remaining blocks in the path,
6629 		     or we will never visit them at all.  */
6630 		  bitmap_clear_bit (cse_visited_basic_blocks,
6631 			     ebb_data->path[path_size].bb->index);
6632 		  ebb_data->path[path_size].bb = NULL;
6633 		}
6634 	      while (path_size - 1 != path_entry);
6635 	      ebb_data->path_size = path_size;
6636 	    }
6637 	}
6638 
6639       /* If this is a conditional jump insn, record any known
6640 	 equivalences due to the condition being tested.  */
6641       insn = BB_END (bb);
6642       if (path_entry < path_size - 1
6643 	  && JUMP_P (insn)
6644 	  && single_set (insn)
6645 	  && any_condjump_p (insn))
6646 	{
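	  /* E.g. if the jump tests (eq (reg:SI a) (const_int 0)) and the
	     path follows the taken edge, reg a is known to be zero in the
	     blocks that follow (register name illustrative).  */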
6647 	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6648 	  bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
6649 	  record_jump_equiv (insn, taken);
6650 	}
6651 
6652       /* Clear the recorded CC0 state; it can't provide useful
6653 	 information across basic block boundaries.  */
6654       prev_insn_cc0 = 0;
6655     }
6656 
6657   gcc_assert (next_qty <= max_qty);
6658 
6659   free (qty_table);
6660 }
6661 
6662 
6663 /* Perform cse on the instructions of a function.
6664    F is the first instruction.
6665    NREGS is one plus the highest pseudo-reg number used in the function.
6666 
6667    Return 2 if jump optimizations should be redone due to simplifications
6668    in conditional jump instructions.
6669    Return 1 if the CFG should be cleaned up because it has been modified.
6670    Return 0 otherwise.  */
6671 
6672 static int
6673 cse_main (rtx_insn *f ATTRIBUTE_UNUSED, int nregs)
6674 {
6675   struct cse_basic_block_data ebb_data;
6676   basic_block bb;
6677   int *rc_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
6678   int i, n_blocks;
6679 
6680   /* CSE doesn't use dominance info but can invalidate it in different ways.
6681      For simplicity, free dominance info here.  */
6682   free_dominance_info (CDI_DOMINATORS);
6683 
6684   df_set_flags (DF_LR_RUN_DCE);
6685   df_note_add_problem ();
6686   df_analyze ();
6687   df_set_flags (DF_DEFER_INSN_RESCAN);
6688 
6689   reg_scan (get_insns (), max_reg_num ());
6690   init_cse_reg_info (nregs);
6691 
6692   ebb_data.path = XNEWVEC (struct branch_path,
6693 			   PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6694 
6695   cse_cfg_altered = false;
6696   cse_jumps_altered = false;
6697   recorded_label_ref = false;
6698   constant_pool_entries_cost = 0;
6699   constant_pool_entries_regcost = 0;
6700   ebb_data.path_size = 0;
6701   ebb_data.nsets = 0;
6702   rtl_hooks = cse_rtl_hooks;
6703 
6704   init_recog ();
6705   init_alias_analysis ();
6706 
6707   reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
6708 
6709   /* Set up the table of already visited basic blocks.  */
6710   cse_visited_basic_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
6711   bitmap_clear (cse_visited_basic_blocks);
6712 
6713   /* Loop over basic blocks in reverse post-order (RPO),
6714      excluding the ENTRY and EXIT blocks.  */
6715   n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
6716   i = 0;
6717   while (i < n_blocks)
6718     {
6719       /* Find the first block in the RPO queue that we have not
6720 	 processed before.  */
6721       do
6722 	{
6723 	  bb = BASIC_BLOCK_FOR_FN (cfun, rc_order[i++]);
6724 	}
6725       while (bitmap_bit_p (cse_visited_basic_blocks, bb->index)
6726 	     && i < n_blocks);
6727 
6728       /* Find all paths starting with BB, and process them.  */
6729       while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
6730 	{
6731 	  /* Pre-scan the path.  */
6732 	  cse_prescan_path (&ebb_data);
6733 
6734 	  /* If this basic block has no sets, skip it.  */
6735 	  if (ebb_data.nsets == 0)
6736 	    continue;
6737 
6738 	  /* Get a reasonable estimate for the maximum number of qty's
6739 	     needed for this path.  For this, we take the number of sets
6740 	     and multiply that by MAX_RECOG_OPERANDS.  */
6741 	  max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;
6742 
6743 	  /* Dump the path we're about to process.  */
6744 	  if (dump_file)
6745 	    cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);
6746 
6747 	  cse_extended_basic_block (&ebb_data);
6748 	}
6749     }
6750 
6751   /* Clean up.  */
6752   end_alias_analysis ();
6753   free (reg_eqv_table);
6754   free (ebb_data.path);
6755   sbitmap_free (cse_visited_basic_blocks);
6756   free (rc_order);
6757   rtl_hooks = general_rtl_hooks;
6758 
6759   if (cse_jumps_altered || recorded_label_ref)
6760     return 2;
6761   else if (cse_cfg_altered)
6762     return 1;
6763   else
6764     return 0;
6765 }
6766 
6767 /* Count the number of times registers are used (not set) in X.
6768    COUNTS is an array in which we accumulate the count, INCR is how much
6769    we count each register usage.
6770 
6771    Don't count a usage of DEST, which is the SET_DEST of a SET which
6772    contains X in its SET_SRC.  This is because such a SET does not
6773    modify the liveness of DEST.
6774    DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
6775    We must then count uses of a SET_DEST regardless, because the insn can't be
6776    deleted here.  */
6777 
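/* For instance, given (set (reg 5) (plus (reg 5) (reg 6))), only the use
   of register 6 is counted: the occurrence of register 5 in the source is
   skipped via the DEST pointer check (pseudo REG rtxes being shared), so
   a set whose result is otherwise unused remains deletable.  The register
   numbers here are illustrative.  */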
6778 static void
6779 count_reg_usage (rtx x, int *counts, rtx dest, int incr)
6780 {
6781   enum rtx_code code;
6782   rtx note;
6783   const char *fmt;
6784   int i, j;
6785 
6786   if (x == 0)
6787     return;
6788 
6789   switch (code = GET_CODE (x))
6790     {
6791     case REG:
6792       if (x != dest)
6793 	counts[REGNO (x)] += incr;
6794       return;
6795 
6796     case PC:
6797     case CC0:
6798     case CONST:
6799     CASE_CONST_ANY:
6800     case SYMBOL_REF:
6801     case LABEL_REF:
6802       return;
6803 
6804     case CLOBBER:
6805       /* If we are clobbering a MEM, mark any registers inside the address
6806          as being used.  */
6807       if (MEM_P (XEXP (x, 0)))
6808 	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
6809       return;
6810 
6811     case SET:
6812       /* Unless we are setting a REG, count everything in SET_DEST.  */
6813       if (!REG_P (SET_DEST (x)))
6814 	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
6815       count_reg_usage (SET_SRC (x), counts,
6816 		       dest ? dest : SET_DEST (x),
6817 		       incr);
6818       return;
6819 
6820     case DEBUG_INSN:
6821       return;
6822 
6823     case CALL_INSN:
6824     case INSN:
6825     case JUMP_INSN:
6826       /* We expect dest to be NULL_RTX here.  If the insn may throw,
6827 	 or if it cannot be deleted due to side-effects, mark this fact
6828 	 by setting DEST to pc_rtx.  */
6829       if ((!cfun->can_delete_dead_exceptions && !insn_nothrow_p (x))
6830 	  || side_effects_p (PATTERN (x)))
6831 	dest = pc_rtx;
6832       if (code == CALL_INSN)
6833 	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
6834       count_reg_usage (PATTERN (x), counts, dest, incr);
6835 
6836       /* Things used in a REG_EQUAL note aren't dead, since the loop
6837 	 optimizer may try to use them.  */
6838 
6839       note = find_reg_equal_equiv_note (x);
6840       if (note)
6841 	{
6842 	  rtx eqv = XEXP (note, 0);
6843 
6844 	  if (GET_CODE (eqv) == EXPR_LIST)
6845 	    /* This REG_EQUAL note describes the result of a function call.
6846 	       Process all the arguments.  */
6847 	    do
6848 	      {
6849 		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
6850 		eqv = XEXP (eqv, 1);
6851 	      }
6852 	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
6853 	  else
6854 	    count_reg_usage (eqv, counts, dest, incr);
6855 	}
6856       return;
6857 
6858     case EXPR_LIST:
6859       if (REG_NOTE_KIND (x) == REG_EQUAL
6860 	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
6861 	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
6862 	     involving registers in the address.  */
6863 	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
6864 	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
6865 
6866       count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
6867       return;
6868 
6869     case ASM_OPERANDS:
6870       /* Iterate over just the inputs, not the constraints as well.  */
6871       for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
6872 	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
6873       return;
6874 
6875     case INSN_LIST:
6876     case INT_LIST:
6877       gcc_unreachable ();
6878 
6879     default:
6880       break;
6881     }
6882 
6883   fmt = GET_RTX_FORMAT (code);
6884   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6885     {
6886       if (fmt[i] == 'e')
6887 	count_reg_usage (XEXP (x, i), counts, dest, incr);
6888       else if (fmt[i] == 'E')
6889 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6890 	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
6891     }
6892 }
6893 
6894 /* Return true if X is a dead register.  */
6895 
6896 static inline int
6897 is_dead_reg (const_rtx x, int *counts)
6898 {
6899   return (REG_P (x)
6900 	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
6901 	  && counts[REGNO (x)] == 0);
6902 }
6903 
6904 /* Return true if SET is live.  */
6905 static bool
6906 set_live_p (rtx set, rtx_insn *insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
6907 	    int *counts)
6908 {
6909   rtx_insn *tem;
6910 
6911   if (set_noop_p (set))
6912     ;
6913 
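  /* A side-effect-free set of cc0 is dead unless the next real insn
     (cc0 is only valid until then) exists and references cc0.  */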
6914   else if (GET_CODE (SET_DEST (set)) == CC0
6915 	   && !side_effects_p (SET_SRC (set))
6916 	   && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
6917 	       || !INSN_P (tem)
6918 	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
6919     return false;
6920   else if (!is_dead_reg (SET_DEST (set), counts)
6921 	   || side_effects_p (SET_SRC (set)))
6922     return true;
6923   return false;
6924 }
6925 
6926 /* Return true if insn is live.  */
6927 
6928 static bool
6929 insn_live_p (rtx_insn *insn, int *counts)
6930 {
6931   int i;
6932   if (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
6933     return true;
6934   else if (GET_CODE (PATTERN (insn)) == SET)
6935     return set_live_p (PATTERN (insn), insn, counts);
6936   else if (GET_CODE (PATTERN (insn)) == PARALLEL)
6937     {
6938       for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6939 	{
6940 	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
6941 
6942 	  if (GET_CODE (elt) == SET)
6943 	    {
6944 	      if (set_live_p (elt, insn, counts))
6945 		return true;
6946 	    }
6947 	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
6948 	    return true;
6949 	}
6950       return false;
6951     }
6952   else if (DEBUG_INSN_P (insn))
6953     {
6954       rtx_insn *next;
6955 
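      /* A debug bind is dead if another bind of the same variable follows
	 before any non-debug insn: the old binding would be superseded
	 before any real insn could observe it.  */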
6956       for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
6957 	if (NOTE_P (next))
6958 	  continue;
6959 	else if (!DEBUG_INSN_P (next))
6960 	  return true;
6961 	else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
6962 	  return false;
6963 
6964       return true;
6965     }
6966   else
6967     return true;
6968 }
6969 
6970 /* Count the number of stores into pseudo.  Callback for note_stores.  */
6971 
6972 static void
6973 count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
6974 {
6975   int *counts = (int *) data;
6976   if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
6977     counts[REGNO (x)]++;
6978 }
6979 
6980 /* Return true if DEBUG_INSN pattern PAT needs to be reset because some dead
6981    pseudo doesn't have a replacement.  COUNTS[X] is zero if register X
6982    is dead and REPLACEMENTS[X] is null if it has no replacement.
6983    Set *SEEN_REPL to true if we see a dead register that does have
6984    a replacement.  */
6985 
6986 static bool
6987 is_dead_debug_insn (const_rtx pat, int *counts, rtx *replacements,
6988 		    bool *seen_repl)
6989 {
6990   subrtx_iterator::array_type array;
6991   FOR_EACH_SUBRTX (iter, array, pat, NONCONST)
6992     {
6993       const_rtx x = *iter;
6994       if (is_dead_reg (x, counts))
6995 	{
6996 	  if (replacements && replacements[REGNO (x)] != NULL_RTX)
6997 	    *seen_repl = true;
6998 	  else
6999 	    return true;
7000 	}
7001     }
7002   return false;
7003 }
7004 
7005 /* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
7006    Callback for simplify_replace_fn_rtx.  */
7007 
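/* For illustration: if pseudo 42 (SImode) is dead and REPLACEMENTS[42]
   is a DImode DEBUG_EXPR D#1, a reference to (reg:SI 42) is rewritten to
   the lowpart subreg of D#1; a same-mode replacement is returned as-is.
   The numbers here are hypothetical.  */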
7008 static rtx
7009 replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
7010 {
7011   rtx *replacements = (rtx *) data;
7012 
7013   if (REG_P (x)
7014       && REGNO (x) >= FIRST_PSEUDO_REGISTER
7015       && replacements[REGNO (x)] != NULL_RTX)
7016     {
7017       if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
7018 	return replacements[REGNO (x)];
7019       return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
7020 			     GET_MODE (replacements[REGNO (x)]));
7021     }
7022   return NULL_RTX;
7023 }
7024 
7025 /* Scan all the insns and delete any that are dead; i.e., they store a register
7026    that is never used or they copy a register to itself.
7027 
7028    This is used to remove insns made obviously dead by cse, loop or other
7029    optimizations.  It improves the heuristics in the loop optimizer, since
7030    it won't try to move dead invariants out of loops or make givs for dead
7031    quantities.  The remaining passes of the compilation are also sped up.  */
7032 
7033 int
7034 delete_trivially_dead_insns (rtx_insn *insns, int nreg)
7035 {
7036   int *counts;
7037   rtx_insn *insn, *prev;
7038   rtx *replacements = NULL;
7039   int ndead = 0;
7040 
7041   timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7042   /* First count the number of times each register is used.  */
7043   if (MAY_HAVE_DEBUG_INSNS)
7044     {
7045       counts = XCNEWVEC (int, nreg * 3);
7046       for (insn = insns; insn; insn = NEXT_INSN (insn))
7047 	if (DEBUG_INSN_P (insn))
7048 	  count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
7049 			   NULL_RTX, 1);
7050 	else if (INSN_P (insn))
7051 	  {
7052 	    count_reg_usage (insn, counts, NULL_RTX, 1);
7053 	    note_stores (PATTERN (insn), count_stores, counts + nreg * 2);
7054 	  }
7055       /* If there can be debug insns, COUNTS is three consecutive arrays.
7056 	 The first counts how many times each pseudo is used outside
7057 	 of debug insns, second counts how many times each pseudo is
7058 	 used in debug insns and third counts how many times a pseudo
7059 	 is stored.  */
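      /* E.g. with NREG == 100, non-debug uses of pseudo 42 are counted in
	 counts[42], its uses within debug insns in counts[142], and its
	 stores in counts[242].  (The NREG value is illustrative.)  */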
7060     }
7061   else
7062     {
7063       counts = XCNEWVEC (int, nreg);
7064       for (insn = insns; insn; insn = NEXT_INSN (insn))
7065 	if (INSN_P (insn))
7066 	  count_reg_usage (insn, counts, NULL_RTX, 1);
7067       /* If no debug insns can be present, COUNTS is just an array
7068 	 which counts how many times each pseudo is used.  */
7069     }
7070   /* The pseudo PIC register should be considered used, since new
7071      uses of it may still be generated.  */
7072   if (!reload_completed
7073       && pic_offset_table_rtx
7074       && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
7075     counts[REGNO (pic_offset_table_rtx)]++;
7076   /* Go from the last insn to the first and delete insns that only set unused
7077      registers or copy a register to itself.  As we delete an insn, remove
7078      usage counts for registers it uses.
7079 
7080      The first jump optimization pass may leave a real insn as the last
7081      insn in the function.  We must not skip that insn or we may end
7082      up deleting code that is not really dead.
7083 
7084      If some otherwise unused register is only used in DEBUG_INSNs,
7085      try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
7086      the setter.  Then go through DEBUG_INSNs and if a DEBUG_EXPR
7087      has been created for the unused register, replace it with
7088      the DEBUG_EXPR, otherwise reset the DEBUG_INSN.  */
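  /* A sketch of that rewrite, with illustrative pseudo and variable names:

       (insn (set (reg:SI 42) (plus:SI (reg:SI 40) (const_int 8))))
       (debug_insn (var_location i (reg:SI 42)))

     becomes, once the set of pseudo 42 is otherwise dead,

       (debug_insn (var_location D#1 (plus:SI (reg:SI 40) (const_int 8))))
       (debug_insn (var_location i D#1))  */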
7089   for (insn = get_last_insn (); insn; insn = prev)
7090     {
7091       int live_insn = 0;
7092 
7093       prev = PREV_INSN (insn);
7094       if (!INSN_P (insn))
7095 	continue;
7096 
7097       live_insn = insn_live_p (insn, counts);
7098 
7099       /* If this is a dead insn, delete it and show that the registers
7100 	 in it aren't being used.  */
7101 
7102       if (! live_insn && dbg_cnt (delete_trivial_dead))
7103 	{
7104 	  if (DEBUG_INSN_P (insn))
7105 	    count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
7106 			     NULL_RTX, -1);
7107 	  else
7108 	    {
7109 	      rtx set;
7110 	      if (MAY_HAVE_DEBUG_INSNS
7111 		  && (set = single_set (insn)) != NULL_RTX
7112 		  && is_dead_reg (SET_DEST (set), counts)
7113 		  /* Used at least once in some DEBUG_INSN.  */
7114 		  && counts[REGNO (SET_DEST (set)) + nreg] > 0
7115 		  /* And set exactly once.  */
7116 		  && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
7117 		  && !side_effects_p (SET_SRC (set))
7118 		  && asm_noperands (PATTERN (insn)) < 0)
7119 		{
7120 		  rtx dval, bind_var_loc;
7121 		  rtx_insn *bind;
7122 
7123 		  /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
7124 		  dval = make_debug_expr_from_rtl (SET_DEST (set));
7125 
7126 		  /* Emit a debug bind insn before the insn in which
7127 		     reg dies.  */
7128 		  bind_var_loc =
7129 		    gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
7130 					  DEBUG_EXPR_TREE_DECL (dval),
7131 					  SET_SRC (set),
7132 					  VAR_INIT_STATUS_INITIALIZED);
7133 		  count_reg_usage (bind_var_loc, counts + nreg, NULL_RTX, 1);
7134 
7135 		  bind = emit_debug_insn_before (bind_var_loc, insn);
7136 		  df_insn_rescan (bind);
7137 
7138 		  if (replacements == NULL)
7139 		    replacements = XCNEWVEC (rtx, nreg);
7140 		  replacements[REGNO (SET_DEST (set))] = dval;
7141 		}
7142 
7143 	      count_reg_usage (insn, counts, NULL_RTX, -1);
7144 	      ndead++;
7145 	    }
7146 	  cse_cfg_altered |= delete_insn_and_edges (insn);
7147 	}
7148     }
7149 
7150   if (MAY_HAVE_DEBUG_INSNS)
7151     {
7152       for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
7153 	if (DEBUG_INSN_P (insn))
7154 	  {
7155 	    /* If this debug insn references a dead register that wasn't replaced
7156 	       with a DEBUG_EXPR, reset the DEBUG_INSN.  */
7157 	    bool seen_repl = false;
7158 	    if (is_dead_debug_insn (INSN_VAR_LOCATION_LOC (insn),
7159 				    counts, replacements, &seen_repl))
7160 	      {
7161 		INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
7162 		df_insn_rescan (insn);
7163 	      }
7164 	    else if (seen_repl)
7165 	      {
7166 		INSN_VAR_LOCATION_LOC (insn)
7167 		  = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
7168 					     NULL_RTX, replace_dead_reg,
7169 					     replacements);
7170 		df_insn_rescan (insn);
7171 	      }
7172 	  }
7173       free (replacements);
7174     }
7175 
7176   if (dump_file && ndead)
7177     fprintf (dump_file, "Deleted %i trivially dead insns\n",
7178 	     ndead);
7179   /* Clean up.  */
7180   free (counts);
7181   timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7182   return ndead;
7183 }
7184 
7185 /* If LOC contains references to NEWREG in a different mode, change them
7186    to use NEWREG instead.  */
7187 
7188 static void
7189 cse_change_cc_mode (subrtx_ptr_iterator::array_type &array,
7190 		    rtx *loc, rtx_insn *insn, rtx newreg)
7191 {
7192   FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
7193     {
7194       rtx *loc = *iter;
7195       rtx x = *loc;
7196       if (x
7197 	  && REG_P (x)
7198 	  && REGNO (x) == REGNO (newreg)
7199 	  && GET_MODE (x) != GET_MODE (newreg))
7200 	{
7201 	  validate_change (insn, loc, newreg, 1);
7202 	  iter.skip_subrtxes ();
7203 	}
7204     }
7205 }
7206 
7207 /* Change the mode of any reference to the register REGNO (NEWREG) to
7208    GET_MODE (NEWREG) in INSN.  */
7209 
7210 static void
7211 cse_change_cc_mode_insn (rtx_insn *insn, rtx newreg)
7212 {
7213   int success;
7214 
7215   if (!INSN_P (insn))
7216     return;
7217 
7218   subrtx_ptr_iterator::array_type array;
7219   cse_change_cc_mode (array, &PATTERN (insn), insn, newreg);
7220   cse_change_cc_mode (array, &REG_NOTES (insn), insn, newreg);
7221 
7222   /* If the following assertion is triggered, there is most probably
7223      something wrong with the cc_modes_compatible back end function.
7224      CC modes can only be considered compatible if the insn, with the mode
7225      replaced by any of the compatible modes, can still be recognized.  */
7226   success = apply_change_group ();
7227   gcc_assert (success);
7228 }
7229 
7230 /* Change the mode of any reference to the register REGNO (NEWREG) to
7231    GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
7232    any instruction which modifies NEWREG.  */
7233 
7234 static void
7235 cse_change_cc_mode_insns (rtx_insn *start, rtx_insn *end, rtx newreg)
7236 {
7237   rtx_insn *insn;
7238 
7239   for (insn = start; insn != end; insn = NEXT_INSN (insn))
7240     {
7241       if (! INSN_P (insn))
7242 	continue;
7243 
7244       if (reg_set_p (newreg, insn))
7245 	return;
7246 
7247       cse_change_cc_mode_insn (insn, newreg);
7248     }
7249 }
7250 
7251 /* BB is a basic block which finishes with CC_REG as a condition code
7252    register which is set to CC_SRC.  Look through the successors of BB
7253    to find blocks which have a single predecessor (i.e., this one),
7254    and look through those blocks for an assignment to CC_REG which is
7255    equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
7256    permitted to change the mode of CC_SRC to a compatible mode.  This
7257    returns VOIDmode if no equivalent assignments were found.
7258    Otherwise it returns the mode which CC_SRC should wind up with.
7259    ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
7260    but is passed unmodified down to recursive calls in order to prevent
7261    endless recursion.
7262 
7263    The main complexity in this function is handling the mode issues.
7264    We may have more than one duplicate which we can eliminate, and we
7265    try to find a mode which will work for multiple duplicates.  */
7266 
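/* An illustrative scenario: BB ends with

     (set (reg:CCZ flags) (compare:CCZ (reg:SI a) (reg:SI b)))

   and a conditional jump, and a successor block having BB as its only
   predecessor computes the same comparison, perhaps in a different CC
   mode, before using it.  That second computation can be deleted once a
   mode compatible with both users is chosen.  The names and modes here
   are hypothetical.  */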
7267 static machine_mode
7268 cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
7269 	      bool can_change_mode)
7270 {
7271   bool found_equiv;
7272   machine_mode mode;
7273   unsigned int insn_count;
7274   edge e;
7275   rtx_insn *insns[2];
7276   machine_mode modes[2];
7277   rtx_insn *last_insns[2];
7278   unsigned int i;
7279   rtx newreg;
7280   edge_iterator ei;
7281 
7282   /* We expect to have two successors.  Look at both before picking
7283      the final mode for the comparison.  If we have more successors
7284      (i.e., some sort of table jump, although that seems unlikely),
7285      then we require all beyond the first two to use the same
7286      mode.  */
7287 
7288   found_equiv = false;
7289   mode = GET_MODE (cc_src);
7290   insn_count = 0;
7291   FOR_EACH_EDGE (e, ei, bb->succs)
7292     {
7293       rtx_insn *insn;
7294       rtx_insn *end;
7295 
7296       if (e->flags & EDGE_COMPLEX)
7297 	continue;
7298 
7299       if (EDGE_COUNT (e->dest->preds) != 1
7300 	  || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
7301 	  /* Avoid endless recursion on unreachable blocks.  */
7302 	  || e->dest == orig_bb)
7303 	continue;
7304 
7305       end = NEXT_INSN (BB_END (e->dest));
7306       for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7307 	{
7308 	  rtx set;
7309 
7310 	  if (! INSN_P (insn))
7311 	    continue;
7312 
7313 	  /* If CC_SRC is modified, we have to stop looking for
7314 	     something which uses it.  */
7315 	  if (modified_in_p (cc_src, insn))
7316 	    break;
7317 
7318 	  /* Check whether INSN sets CC_REG to CC_SRC.  */
7319 	  set = single_set (insn);
7320 	  if (set
7321 	      && REG_P (SET_DEST (set))
7322 	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7323 	    {
7324 	      bool found;
7325 	      machine_mode set_mode;
7326 	      machine_mode comp_mode;
7327 
7328 	      found = false;
7329 	      set_mode = GET_MODE (SET_SRC (set));
7330 	      comp_mode = set_mode;
7331 	      if (rtx_equal_p (cc_src, SET_SRC (set)))
7332 		found = true;
7333 	      else if (GET_CODE (cc_src) == COMPARE
7334 		       && GET_CODE (SET_SRC (set)) == COMPARE
7335 		       && mode != set_mode
7336 		       && rtx_equal_p (XEXP (cc_src, 0),
7337 				       XEXP (SET_SRC (set), 0))
7338 		       && rtx_equal_p (XEXP (cc_src, 1),
7339 				       XEXP (SET_SRC (set), 1)))
7341 		{
7342 		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7343 		  if (comp_mode != VOIDmode
7344 		      && (can_change_mode || comp_mode == mode))
7345 		    found = true;
7346 		}
7347 
7348 	      if (found)
7349 		{
7350 		  found_equiv = true;
7351 		  if (insn_count < ARRAY_SIZE (insns))
7352 		    {
7353 		      insns[insn_count] = insn;
7354 		      modes[insn_count] = set_mode;
7355 		      last_insns[insn_count] = end;
7356 		      ++insn_count;
7357 
7358 		      if (mode != comp_mode)
7359 			{
7360 			  gcc_assert (can_change_mode);
7361 			  mode = comp_mode;
7362 
7363 			  /* The modified insn will be re-recognized later.  */
7364 			  PUT_MODE (cc_src, mode);
7365 			}
7366 		    }
7367 		  else
7368 		    {
7369 		      if (set_mode != mode)
7370 			{
7371 			  /* We found a matching expression in the
7372 			     wrong mode, but we don't have room to
7373 			     store it in the array.  Punt.  This case
7374 			     should be rare.  */
7375 			  break;
7376 			}
7377 		      /* INSN sets CC_REG to a value equal to CC_SRC
7378 			 with the right mode.  We can simply delete
7379 			 it.  */
7380 		      delete_insn (insn);
7381 		    }
7382 
7383 		  /* We found an instruction to delete.  Keep looking,
7384 		     in the hopes of finding a three-way jump.  */
7385 		  continue;
7386 		}
7387 
7388 	      /* We found an instruction which sets the condition
7389 		 code, so don't look any farther.  */
7390 	      break;
7391 	    }
7392 
7393 	  /* If INSN sets CC_REG in some other way, don't look any
7394 	     farther.  */
7395 	  if (reg_set_p (cc_reg, insn))
7396 	    break;
7397 	}
7398 
7399       /* If we fell off the bottom of the block, we can keep looking
7400 	 through successors.  We pass CAN_CHANGE_MODE as false because
7401 	 we aren't prepared to handle compatibility between the
7402 	 further blocks and this block.  */
7403       if (insn == end)
7404 	{
7405 	  machine_mode submode;
7406 
7407 	  submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
7408 	  if (submode != VOIDmode)
7409 	    {
7410 	      gcc_assert (submode == mode);
7411 	      found_equiv = true;
7412 	      can_change_mode = false;
7413 	    }
7414 	}
7415     }
7416 
7417   if (! found_equiv)
7418     return VOIDmode;
7419 
7420   /* Now INSN_COUNT is the number of instructions we found which set
7421      CC_REG to a value equivalent to CC_SRC.  The instructions are in
7422      INSNS.  The modes used by those instructions are in MODES.  */
7423 
7424   newreg = NULL_RTX;
7425   for (i = 0; i < insn_count; ++i)
7426     {
7427       if (modes[i] != mode)
7428 	{
7429 	  /* We need to change the mode of CC_REG in INSNS[i] and
7430 	     subsequent instructions.  */
7431 	  if (! newreg)
7432 	    {
7433 	      if (GET_MODE (cc_reg) == mode)
7434 		newreg = cc_reg;
7435 	      else
7436 		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7437 	    }
7438 	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7439 				    newreg);
7440 	}
7441 
7442       cse_cfg_altered |= delete_insn_and_edges (insns[i]);
7443     }
7444 
7445   return mode;
7446 }
7447 
7448 /* If we have a fixed condition code register (or two), walk through
7449    the instructions and try to eliminate duplicate assignments.  */
7450 
7451 static void
7452 cse_condition_code_reg (void)
7453 {
7454   unsigned int cc_regno_1;
7455   unsigned int cc_regno_2;
7456   rtx cc_reg_1;
7457   rtx cc_reg_2;
7458   basic_block bb;
7459 
7460   if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7461     return;
7462 
7463   cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7464   if (cc_regno_2 != INVALID_REGNUM)
7465     cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7466   else
7467     cc_reg_2 = NULL_RTX;
7468 
7469   FOR_EACH_BB_FN (bb, cfun)
7470     {
7471       rtx_insn *last_insn;
7472       rtx cc_reg;
7473       rtx_insn *insn;
7474       rtx_insn *cc_src_insn;
7475       rtx cc_src;
7476       machine_mode mode;
7477       machine_mode orig_mode;
7478 
7479       /* Look for blocks which end with a conditional jump based on a
7480 	 condition code register.  Then look for the instruction which
7481 	 sets the condition code register.  Then look through the
7482 	 successor blocks for instructions which set the condition
7483 	 code register to the same value.  There are other possible
7484 	 uses of the condition code register, but these are by far the
7485 	 most common and the ones which we are most likely to be able
7486 	 to optimize.  */
7487 
7488       last_insn = BB_END (bb);
7489       if (!JUMP_P (last_insn))
7490 	continue;
7491 
7492       if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7493 	cc_reg = cc_reg_1;
7494       else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7495 	cc_reg = cc_reg_2;
7496       else
7497 	continue;
7498 
7499       cc_src_insn = NULL;
7500       cc_src = NULL_RTX;
7501       for (insn = PREV_INSN (last_insn);
7502 	   insn && insn != PREV_INSN (BB_HEAD (bb));
7503 	   insn = PREV_INSN (insn))
7504 	{
7505 	  rtx set;
7506 
7507 	  if (! INSN_P (insn))
7508 	    continue;
7509 	  set = single_set (insn);
7510 	  if (set
7511 	      && REG_P (SET_DEST (set))
7512 	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7513 	    {
7514 	      cc_src_insn = insn;
7515 	      cc_src = SET_SRC (set);
7516 	      break;
7517 	    }
7518 	  else if (reg_set_p (cc_reg, insn))
7519 	    break;
7520 	}
7521 
7522       if (! cc_src_insn)
7523 	continue;
7524 
7525       if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7526 	continue;
7527 
7528       /* Now CC_REG is a condition code register used for a
7529 	 conditional jump at the end of the block, and CC_SRC, in
7530 	 CC_SRC_INSN, is the value to which that condition code
7531 	 register is set, and CC_SRC is still meaningful at the end of
7532 	 the basic block.  */
7533 
7534       orig_mode = GET_MODE (cc_src);
7535       mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
7536       if (mode != VOIDmode)
7537 	{
7538 	  gcc_assert (mode == GET_MODE (cc_src));
7539 	  if (mode != orig_mode)
7540 	    {
7541 	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7542 
7543 	      cse_change_cc_mode_insn (cc_src_insn, newreg);
7544 
7545 	      /* Do the same in the following insns that use the
7546 		 current value of CC_REG within BB.  */
7547 	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7548 					NEXT_INSN (last_insn),
7549 					newreg);
7550 	    }
7551 	}
7552     }
7553 }
7554 
7555 
7556 /* Perform common subexpression elimination.  A value of 2 from
7557    `cse_main' means that jumps were simplified and some code may now
7558    be unreachable, so do jump optimization again.  */
7559 static unsigned int
7560 rest_of_handle_cse (void)
7561 {
7562   int tem;
7563 
7564   if (dump_file)
7565     dump_flow_info (dump_file, dump_flags);
7566 
7567   tem = cse_main (get_insns (), max_reg_num ());
7568 
7569   /* If we are not running more CSE passes, then we are no longer
7570      expecting CSE to be run.  But always rerun it in a cheap mode.  */
7571   cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7572 
7573   if (tem == 2)
7574     {
7575       timevar_push (TV_JUMP);
7576       rebuild_jump_labels (get_insns ());
7577       cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
7578       timevar_pop (TV_JUMP);
7579     }
7580   else if (tem == 1 || optimize > 1)
7581     cse_cfg_altered |= cleanup_cfg (0);
7582 
7583   return 0;
7584 }
7585 
7586 namespace {
7587 
7588 const pass_data pass_data_cse =
7589 {
7590   RTL_PASS, /* type */
7591   "cse1", /* name */
7592   OPTGROUP_NONE, /* optinfo_flags */
7593   TV_CSE, /* tv_id */
7594   0, /* properties_required */
7595   0, /* properties_provided */
7596   0, /* properties_destroyed */
7597   0, /* todo_flags_start */
7598   TODO_df_finish, /* todo_flags_finish */
7599 };
7600 
7601 class pass_cse : public rtl_opt_pass
7602 {
7603 public:
7604   pass_cse (gcc::context *ctxt)
7605     : rtl_opt_pass (pass_data_cse, ctxt)
7606   {}
7607 
7608   /* opt_pass methods: */
7609   virtual bool gate (function *) { return optimize > 0; }
7610   virtual unsigned int execute (function *) { return rest_of_handle_cse (); }
7611 
7612 }; // class pass_cse
7613 
7614 } // anon namespace
7615 
7616 rtl_opt_pass *
7617 make_pass_cse (gcc::context *ctxt)
7618 {
7619   return new pass_cse (ctxt);
7620 }
7621 
7622 
7623 /* Run second CSE pass after loop optimizations.  */
7624 static unsigned int
7625 rest_of_handle_cse2 (void)
7626 {
7627   int tem;
7628 
7629   if (dump_file)
7630     dump_flow_info (dump_file, dump_flags);
7631 
7632   tem = cse_main (get_insns (), max_reg_num ());
7633 
7634   /* Run a pass to eliminate duplicated assignments to condition code
7635      registers.  We have to run this after bypass_jumps, because it
7636      makes it harder for that pass to determine whether a jump can be
7637      bypassed safely.  */
7638   cse_condition_code_reg ();
7639 
7640   delete_trivially_dead_insns (get_insns (), max_reg_num ());
7641 
7642   if (tem == 2)
7643     {
7644       timevar_push (TV_JUMP);
7645       rebuild_jump_labels (get_insns ());
7646       cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
7647       timevar_pop (TV_JUMP);
7648     }
7649   else if (tem == 1)
7650     cse_cfg_altered |= cleanup_cfg (0);
7651 
7652   cse_not_expected = 1;
7653   return 0;
7654 }
7655 
7656 
7657 namespace {
7658 
7659 const pass_data pass_data_cse2 =
7660 {
7661   RTL_PASS, /* type */
7662   "cse2", /* name */
7663   OPTGROUP_NONE, /* optinfo_flags */
7664   TV_CSE2, /* tv_id */
7665   0, /* properties_required */
7666   0, /* properties_provided */
7667   0, /* properties_destroyed */
7668   0, /* todo_flags_start */
7669   TODO_df_finish, /* todo_flags_finish */
7670 };
7671 
7672 class pass_cse2 : public rtl_opt_pass
7673 {
7674 public:
7675   pass_cse2 (gcc::context *ctxt)
7676     : rtl_opt_pass (pass_data_cse2, ctxt)
7677   {}
7678 
7679   /* opt_pass methods: */
7680   virtual bool gate (function *)
7681     {
7682       return optimize > 0 && flag_rerun_cse_after_loop;
7683     }
7684 
7685   virtual unsigned int execute (function *) { return rest_of_handle_cse2 (); }
7686 
7687 }; // class pass_cse2
7688 
7689 } // anon namespace
7690 
7691 rtl_opt_pass *
7692 make_pass_cse2 (gcc::context *ctxt)
7693 {
7694   return new pass_cse2 (ctxt);
7695 }
7696 
7697 /* Run a local CSE pass after the global optimizations (GCSE etc.).  */
7698 static unsigned int
7699 rest_of_handle_cse_after_global_opts (void)
7700 {
7701   int save_cfj;
7702   int tem;
7703 
7704   /* We only want to do local CSE, so don't follow jumps.  */
7705   save_cfj = flag_cse_follow_jumps;
7706   flag_cse_follow_jumps = 0;
7707 
7708   rebuild_jump_labels (get_insns ());
7709   tem = cse_main (get_insns (), max_reg_num ());
7710   cse_cfg_altered |= purge_all_dead_edges ();
7711   delete_trivially_dead_insns (get_insns (), max_reg_num ());
7712 
7713   cse_not_expected = !flag_rerun_cse_after_loop;
7714 
7715   /* If cse altered any jumps, rerun jump opts to clean things up.  */
7716   if (tem == 2)
7717     {
7718       timevar_push (TV_JUMP);
7719       rebuild_jump_labels (get_insns ());
7720       cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
7721       timevar_pop (TV_JUMP);
7722     }
7723   else if (tem == 1)
7724     cse_cfg_altered |= cleanup_cfg (0);
7725 
7726   flag_cse_follow_jumps = save_cfj;
7727   return 0;
7728 }
7729 
7730 namespace {
7731 
7732 const pass_data pass_data_cse_after_global_opts =
7733 {
7734   RTL_PASS, /* type */
7735   "cse_local", /* name */
7736   OPTGROUP_NONE, /* optinfo_flags */
7737   TV_CSE, /* tv_id */
7738   0, /* properties_required */
7739   0, /* properties_provided */
7740   0, /* properties_destroyed */
7741   0, /* todo_flags_start */
7742   TODO_df_finish, /* todo_flags_finish */
7743 };
7744 
7745 class pass_cse_after_global_opts : public rtl_opt_pass
7746 {
7747 public:
7748   pass_cse_after_global_opts (gcc::context *ctxt)
7749     : rtl_opt_pass (pass_data_cse_after_global_opts, ctxt)
7750   {}
7751 
7752   /* opt_pass methods: */
7753   virtual bool gate (function *)
7754     {
7755       return optimize > 0 && flag_rerun_cse_after_global_opts;
7756     }
7757 
7758   virtual unsigned int execute (function *)
7759     {
7760       return rest_of_handle_cse_after_global_opts ();
7761     }
7762 
7763 }; // class pass_cse_after_global_opts
7764 
7765 } // anon namespace
7766 
7767 rtl_opt_pass *
7768 make_pass_cse_after_global_opts (gcc::context *ctxt)
7769 {
7770   return new pass_cse_after_global_opts (ctxt);
7771 }
7772