xref: /openbsd-src/gnu/gcc/gcc/cse.c (revision 404b540a9034ac75a6199ad1a32d1bbc7a0d4210)
1 /* Common subexpression elimination for GNU compiler.
2    Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3    1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA.  */
21 
22 #include "config.h"
23 /* stdio.h must precede rtl.h for FFS.  */
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "regs.h"
31 #include "basic-block.h"
32 #include "flags.h"
33 #include "real.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "function.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "output.h"
40 #include "ggc.h"
41 #include "timevar.h"
42 #include "except.h"
43 #include "target.h"
44 #include "params.h"
45 #include "rtlhooks-def.h"
46 #include "tree-pass.h"
47 
48 /* The basic idea of common subexpression elimination is to go
49    through the code, keeping a record of expressions that would
50    have the same value at the current scan point, and replacing
51    expressions encountered with the cheapest equivalent expression.
52 
53    It is too complicated to keep track of the different possibilities
54    when control paths merge in this code; so, at each label, we forget all
55    that is known and start fresh.  This can be described as processing each
56    extended basic block separately.  We have a separate pass to perform
57    global CSE.
58 
59    Note CSE can turn a conditional or computed jump into a nop or
60    an unconditional jump.  When this occurs we arrange to run the jump
61    optimizer after CSE to delete the unreachable code.
62 
63    We use two data structures to record the equivalent expressions:
64    a hash table for most expressions, and a vector of "quantity
65    numbers" to record equivalent (pseudo) registers.
66 
67    The use of the special data structure for registers is desirable
68    because it is faster.  It is possible because registers references
69    contain a fairly small number, the register number, taken from
70    a contiguously allocated series, and two register references are
71    identical if they have the same number.  General expressions
72    do not have any such thing, so the only way to retrieve the
73    information recorded on an expression other than a register
74    is to keep it in a hash table.
75 
76 Registers and "quantity numbers":
77 
78    At the start of each basic block, all of the (hardware and pseudo)
79    registers used in the function are given distinct quantity
80    numbers to indicate their contents.  During scan, when the code
81    copies one register into another, we copy the quantity number.
82    When a register is loaded in any other way, we allocate a new
83    quantity number to describe the value generated by this operation.
84    `REG_QTY (N)' records what quantity register N is currently thought
85    of as containing.
86 
87    All real quantity numbers are greater than or equal to zero.
88    If register N has not been assigned a quantity, `REG_QTY (N)' will
89    equal -N - 1, which is always negative.
90 
91    Quantity numbers below zero do not exist and none of the `qty_table'
92    entries should be referenced with a negative index.
93 
94    We also maintain a bidirectional chain of registers for each
95    quantity number.  The `qty_table` members `first_reg' and `last_reg',
96    and `reg_eqv_table' members `next' and `prev' hold these chains.
97 
98    The first register in a chain is the one whose lifespan is least local.
99    Among equals, it is the one that was seen first.
100    We replace any equivalent register with that one.
101 
102    If two registers have the same quantity number, it must be true that
103    REG expressions with qty_table `mode' must be in the hash table for both
104    registers and must be in the same class.
105 
106    The converse is not true.  Since hard registers may be referenced in
107    any mode, two REG expressions might be equivalent in the hash table
108    but not have the same quantity number if the quantity number of one
109    of the registers is not the same mode as those expressions.
110 
111 Constants and quantity numbers
112 
113    When a quantity has a known constant value, that value is stored
114    in the appropriate qty_table `const_rtx'.  This is in addition to
115    putting the constant in the hash table as is usual for non-regs.
116 
117    Whether a reg or a constant is preferred is determined by the configuration
118    macro CONST_COSTS and will often depend on the constant value.  In any
119    event, expressions containing constants can be simplified, by fold_rtx.
120 
121    When a quantity has a known nearly constant value (such as an address
122    of a stack slot), that value is stored in the appropriate qty_table
123    `const_rtx'.
124 
125    Integer constants don't have a machine mode.  However, cse
126    determines the intended machine mode from the destination
127    of the instruction that moves the constant.  The machine mode
128    is recorded in the hash table along with the actual RTL
129    constant expression so that different modes are kept separate.
130 
131 Other expressions:
132 
133    To record known equivalences among expressions in general
134    we use a hash table called `table'.  It has a fixed number of buckets
135    that contain chains of `struct table_elt' elements for expressions.
136    These chains connect the elements whose expressions have the same
137    hash codes.
138 
139    Other chains through the same elements connect the elements which
140    currently have equivalent values.
141 
142    Register references in an expression are canonicalized before hashing
143    the expression.  This is done using `reg_qty' and qty_table `first_reg'.
144    The hash code of a register reference is computed using the quantity
145    number, not the register number.
146 
147    When the value of an expression changes, it is necessary to remove from the
148    hash table not just that expression but all expressions whose values
149    could be different as a result.
150 
151      1. If the value changing is in memory, except in special cases
152      ANYTHING referring to memory could be changed.  That is because
153      nobody knows where a pointer does not point.
154      The function `invalidate_memory' removes what is necessary.
155 
156      The special cases are when the address is constant or is
157      a constant plus a fixed register such as the frame pointer
158      or a static chain pointer.  When such addresses are stored in,
159      we can tell exactly which other such addresses must be invalidated
160      due to overlap.  `invalidate' does this.
161      All expressions that refer to non-constant
162      memory addresses are also invalidated.  `invalidate_memory' does this.
163 
164      2. If the value changing is a register, all expressions
165      containing references to that register, and only those,
166      must be removed.
167 
168    Because searching the entire hash table for expressions that contain
169    a register is very slow, we try to figure out when it isn't necessary.
170    Precisely, this is necessary only when expressions have been
171    entered in the hash table using this register, and then the value has
172    changed, and then another expression wants to be added to refer to
173    the register's new value.  This sequence of circumstances is rare
174    within any one basic block.
175 
176    `REG_TICK' and `REG_IN_TABLE', accessors for members of
177    cse_reg_info, are used to detect this case.  REG_TICK (i) is
178    incremented whenever a value is stored in register i.
179    REG_IN_TABLE (i) holds -1 if no references to register i have been
180    entered in the table; otherwise, it contains the value REG_TICK (i)
181    had when the references were entered.  If we want to enter a
182    reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
183    remove old references.  Until we want to enter a new entry, the
184    mere fact that the two vectors don't match makes the entries be
185    ignored if anyone tries to match them.
186 
187    Registers themselves are entered in the hash table as well as in
188    the equivalent-register chains.  However, `REG_TICK' and
189    `REG_IN_TABLE' do not apply to expressions which are simple
190    register references.  These expressions are removed from the table
191    immediately when they become invalid, and this can be done even if
192    we do not immediately search for all the expressions that refer to
193    the register.
194 
195    A CLOBBER rtx in an instruction invalidates its operand for further
196    reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
197    invalidates everything that resides in memory.
198 
199 Related expressions:
200 
201    Constant expressions that differ only by an additive integer
202    are called related.  When a constant expression is put in
203    the table, the related expression with no constant term
204    is also entered.  These are made to point at each other
205    so that it is possible to find out if there exists any
206    register equivalent to an expression related to a given expression.  */
207 
208 /* Length of qty_table vector.  We know in advance we will not need
209    a quantity number this big.  */
210 
211 static int max_qty;
212 
213 /* Next quantity number to be allocated.
214    This is 1 + the largest number needed so far.  */
215 
216 static int next_qty;
217 
218 /* Per-qty information tracking.
219 
220    `first_reg' and `last_reg' track the head and tail of the
221    chain of registers which currently contain this quantity.
222 
223    `mode' contains the machine mode of this quantity.
224 
225    `const_rtx' holds the rtx of the constant value of this
226    quantity, if known.  A summations of the frame/arg pointer
227    and a constant can also be entered here.  When this holds
228    a known value, `const_insn' is the insn which stored the
229    constant value.
230 
231    `comparison_{code,const,qty}' are used to track when a
232    comparison between a quantity and some constant or register has
233    been passed.  In such a case, we know the results of the comparison
234    in case we see it again.  These members record a comparison that
235    is known to be true.  `comparison_code' holds the rtx code of such
236    a comparison, else it is set to UNKNOWN and the other two
237    comparison members are undefined.  `comparison_const' holds
238    the constant being compared against, or zero if the comparison
239    is not against a constant.  `comparison_qty' holds the quantity
240    being compared against when the result is known.  If the comparison
241    is not with a register, `comparison_qty' is -1.  */
242 
243 struct qty_table_elem
244 {
245   rtx const_rtx;
246   rtx const_insn;
247   rtx comparison_const;
248   int comparison_qty;
249   unsigned int first_reg, last_reg;
250   /* The sizes of these fields should match the sizes of the
251      code and mode fields of struct rtx_def (see rtl.h).  */
252   ENUM_BITFIELD(rtx_code) comparison_code : 16;
253   ENUM_BITFIELD(machine_mode) mode : 8;
254 };
255 
256 /* The table of all qtys, indexed by qty number.  */
257 static struct qty_table_elem *qty_table;
258 
259 /* Structure used to pass arguments via for_each_rtx to function
260    cse_change_cc_mode.  */
261 struct change_cc_mode_args
262 {
263   rtx insn;
264   rtx newreg;
265 };
266 
267 #ifdef HAVE_cc0
268 /* For machines that have a CC0, we do not record its value in the hash
269    table since its use is guaranteed to be the insn immediately following
270    its definition and any other insn is presumed to invalidate it.
271 
272    Instead, we store below the value last assigned to CC0.  If it should
273    happen to be a constant, it is stored in preference to the actual
274    assigned value.  In case it is a constant, we store the mode in which
275    the constant should be interpreted.  */
276 
277 static rtx prev_insn_cc0;
278 static enum machine_mode prev_insn_cc0_mode;
279 
280 /* Previous actual insn.  0 if at first insn of basic block.  */
281 
282 static rtx prev_insn;
283 #endif
284 
285 /* Insn being scanned.  */
286 
287 static rtx this_insn;
288 
289 /* Index by register number, gives the number of the next (or
290    previous) register in the chain of registers sharing the same
291    value.
292 
293    Or -1 if this register is at the end of the chain.
294 
295    If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */
296 
297 /* Per-register equivalence chain.  */
298 struct reg_eqv_elem
299 {
300   int next, prev;
301 };
302 
303 /* The table of all register equivalence chains.  */
304 static struct reg_eqv_elem *reg_eqv_table;
305 
306 struct cse_reg_info
307 {
308   /* The timestamp at which this register is initialized.  */
309   unsigned int timestamp;
310 
311   /* The quantity number of the register's current contents.  */
312   int reg_qty;
313 
314   /* The number of times the register has been altered in the current
315      basic block.  */
316   int reg_tick;
317 
318   /* The REG_TICK value at which rtx's containing this register are
319      valid in the hash table.  If this does not equal the current
320      reg_tick value, such expressions existing in the hash table are
321      invalid.  */
322   int reg_in_table;
323 
324   /* The SUBREG that was set when REG_TICK was last incremented.  Set
325      to -1 if the last store was to the whole register, not a subreg.  */
326   unsigned int subreg_ticked;
327 };
328 
329 /* A table of cse_reg_info indexed by register numbers.  */
330 static struct cse_reg_info *cse_reg_info_table;
331 
332 /* The size of the above table.  */
333 static unsigned int cse_reg_info_table_size;
334 
335 /* The index of the first entry that has not been initialized.  */
336 static unsigned int cse_reg_info_table_first_uninitialized;
337 
338 /* The timestamp at the beginning of the current run of
339    cse_basic_block.  We increment this variable at the beginning of
340    the current run of cse_basic_block.  The timestamp field of a
341    cse_reg_info entry matches the value of this variable if and only
342    if the entry has been initialized during the current run of
343    cse_basic_block.  */
344 static unsigned int cse_reg_info_timestamp;
345 
346 /* A HARD_REG_SET containing all the hard registers for which there is
347    currently a REG expression in the hash table.  Note the difference
348    from the above variables, which indicate if the REG is mentioned in some
349    expression in the table.  */
350 
351 static HARD_REG_SET hard_regs_in_table;
352 
353 /* CUID of insn that starts the basic block currently being cse-processed.  */
354 
355 static int cse_basic_block_start;
356 
357 /* CUID of insn that ends the basic block currently being cse-processed.  */
358 
359 static int cse_basic_block_end;
360 
361 /* Vector mapping INSN_UIDs to cuids.
362    The cuids are like uids but increase monotonically always.
363    We use them to see whether a reg is used outside a given basic block.  */
364 
365 static int *uid_cuid;
366 
367 /* Highest UID in UID_CUID.  */
368 static int max_uid;
369 
370 /* Get the cuid of an insn.  */
371 
372 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
373 
374 /* Nonzero if this pass has made changes, and therefore it's
375    worthwhile to run the garbage collector.  */
376 
377 static int cse_altered;
378 
379 /* Nonzero if cse has altered conditional jump insns
380    in such a way that jump optimization should be redone.  */
381 
382 static int cse_jumps_altered;
383 
384 /* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
385    REG_LABEL, we have to rerun jump after CSE to put in the note.  */
386 static int recorded_label_ref;
387 
388 /* canon_hash stores 1 in do_not_record
389    if it notices a reference to CC0, PC, or some other volatile
390    subexpression.  */
391 
392 static int do_not_record;
393 
394 /* canon_hash stores 1 in hash_arg_in_memory
395    if it notices a reference to memory within the expression being hashed.  */
396 
397 static int hash_arg_in_memory;
398 
399 /* The hash table contains buckets which are chains of `struct table_elt's,
400    each recording one expression's information.
401    That expression is in the `exp' field.
402 
403    The canon_exp field contains a canonical (from the point of view of
404    alias analysis) version of the `exp' field.
405 
406    Those elements with the same hash code are chained in both directions
407    through the `next_same_hash' and `prev_same_hash' fields.
408 
409    Each set of expressions with equivalent values
410    are on a two-way chain through the `next_same_value'
411    and `prev_same_value' fields, and all point with
412    the `first_same_value' field at the first element in
413    that chain.  The chain is in order of increasing cost.
414    Each element's cost value is in its `cost' field.
415 
416    The `in_memory' field is nonzero for elements that
417    involve any reference to memory.  These elements are removed
418    whenever a write is done to an unidentified location in memory.
419    To be safe, we assume that a memory address is unidentified unless
420    the address is either a symbol constant or a constant plus
421    the frame pointer or argument pointer.
422 
423    The `related_value' field is used to connect related expressions
424    (that differ by adding an integer).
425    The related expressions are chained in a circular fashion.
426    `related_value' is zero for expressions for which this
427    chain is not useful.
428 
429    The `cost' field stores the cost of this element's expression.
430    The `regcost' field stores the value returned by approx_reg_cost for
431    this element's expression.
432 
433    The `is_const' flag is set if the element is a constant (including
434    a fixed address).
435 
436    The `flag' field is used as a temporary during some search routines.
437 
438    The `mode' field is usually the same as GET_MODE (`exp'), but
439    if `exp' is a CONST_INT and has no machine mode then the `mode'
440    field is the mode it was being used as.  Each constant is
441    recorded separately for each mode it is used with.  */
442 
443 struct table_elt
444 {
445   rtx exp;
446   rtx canon_exp;
447   struct table_elt *next_same_hash;
448   struct table_elt *prev_same_hash;
449   struct table_elt *next_same_value;
450   struct table_elt *prev_same_value;
451   struct table_elt *first_same_value;
452   struct table_elt *related_value;
453   int cost;
454   int regcost;
455   /* The size of this field should match the size
456      of the mode field of struct rtx_def (see rtl.h).  */
457   ENUM_BITFIELD(machine_mode) mode : 8;
458   char in_memory;
459   char is_const;
460   char flag;
461 };
462 
463 /* We don't want a lot of buckets, because we rarely have very many
464    things stored in the hash table, and a lot of buckets slows
465    down a lot of loops that happen frequently.  */
466 #define HASH_SHIFT	5
467 #define HASH_SIZE	(1 << HASH_SHIFT)
468 #define HASH_MASK	(HASH_SIZE - 1)
469 
470 /* Compute hash code of X in mode M.  Special-case case where X is a pseudo
471    register (hard registers may require `do_not_record' to be set).  */
472 
473 #define HASH(X, M)	\
474  ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
475   ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
476   : canon_hash (X, M)) & HASH_MASK)
477 
478 /* Like HASH, but without side-effects.  */
479 #define SAFE_HASH(X, M)	\
480  ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
481   ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
482   : safe_hash (X, M)) & HASH_MASK)
483 
484 /* Determine whether register number N is considered a fixed register for the
485    purpose of approximating register costs.
486    It is desirable to replace other regs with fixed regs, to reduce need for
487    non-fixed hard regs.
488    A reg wins if it is either the frame pointer or designated as fixed.  */
489 #define FIXED_REGNO_P(N)  \
490   ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
491    || fixed_regs[N] || global_regs[N])
492 
493 /* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
494    hard registers and pointers into the frame are the cheapest with a cost
495    of 0.  Next come pseudos with a cost of one and other hard registers with
496    a cost of 2.  Aside from these special cases, call `rtx_cost'.  */
497 
498 #define CHEAP_REGNO(N)							\
499   (REGNO_PTR_FRAME_P(N)							\
500    || (HARD_REGISTER_NUM_P (N)						\
501        && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
502 
503 #define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
504 #define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
505 
506 /* Get the number of times this register has been updated in this
507    basic block.  */
508 
509 #define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
510 
511 /* Get the point at which REG was recorded in the table.  */
512 
513 #define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
514 
515 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
516    SUBREG).  */
517 
518 #define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
519 
520 /* Get the quantity number for REG.  */
521 
522 #define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
523 
524 /* Determine if the quantity number for register X represents a valid index
525    into the qty_table.  */
526 
527 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
528 
529 static struct table_elt *table[HASH_SIZE];
530 
531 /* Number of elements in the hash table.  */
532 
533 static unsigned int table_size;
534 
535 /* Chain of `struct table_elt's made so far for this function
536    but currently removed from the table.  */
537 
538 static struct table_elt *free_element_chain;
539 
540 /* Set to the cost of a constant pool reference if one was found for a
541    symbolic constant.  If this was found, it means we should try to
542    convert constants into constant pool entries if they don't fit in
543    the insn.  */
544 
545 static int constant_pool_entries_cost;
546 static int constant_pool_entries_regcost;
547 
548 /* This data describes a block that will be processed by cse_basic_block.  */
549 
550 struct cse_basic_block_data
551 {
552   /* Lowest CUID value of insns in block.  */
553   int low_cuid;
554   /* Highest CUID value of insns in block.  */
555   int high_cuid;
556   /* Total number of SETs in block.  */
557   int nsets;
558   /* Last insn in the block.  */
559   rtx last;
560   /* Size of current branch path, if any.  */
561   int path_size;
562   /* Current branch path, indicating which branches will be taken.  */
563   struct branch_path
564     {
565       /* The branch insn.  */
566       rtx branch;
567       /* Whether it should be taken or not.  AROUND is the same as taken
568 	 except that it is used when the destination label is not preceded
569        by a BARRIER.  */
570       enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
571     } *path;
572 };
573 
574 static bool fixed_base_plus_p (rtx x);
575 static int notreg_cost (rtx, enum rtx_code);
576 static int approx_reg_cost_1 (rtx *, void *);
577 static int approx_reg_cost (rtx);
578 static int preferable (int, int, int, int);
579 static void new_basic_block (void);
580 static void make_new_qty (unsigned int, enum machine_mode);
581 static void make_regs_eqv (unsigned int, unsigned int);
582 static void delete_reg_equiv (unsigned int);
583 static int mention_regs (rtx);
584 static int insert_regs (rtx, struct table_elt *, int);
585 static void remove_from_table (struct table_elt *, unsigned);
586 static struct table_elt *lookup	(rtx, unsigned, enum machine_mode);
587 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
588 static rtx lookup_as_function (rtx, enum rtx_code);
589 static struct table_elt *insert (rtx, struct table_elt *, unsigned,
590 				 enum machine_mode);
591 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
592 static void invalidate (rtx, enum machine_mode);
593 static int cse_rtx_varies_p (rtx, int);
594 static void remove_invalid_refs (unsigned int);
595 static void remove_invalid_subreg_refs (unsigned int, unsigned int,
596 					enum machine_mode);
597 static void rehash_using_reg (rtx);
598 static void invalidate_memory (void);
599 static void invalidate_for_call (void);
600 static rtx use_related_value (rtx, struct table_elt *);
601 
602 static inline unsigned canon_hash (rtx, enum machine_mode);
603 static inline unsigned safe_hash (rtx, enum machine_mode);
604 static unsigned hash_rtx_string (const char *);
605 
606 static rtx canon_reg (rtx, rtx);
607 static void find_best_addr (rtx, rtx *, enum machine_mode);
608 static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
609 					   enum machine_mode *,
610 					   enum machine_mode *);
611 static rtx fold_rtx (rtx, rtx);
612 static rtx equiv_constant (rtx);
613 static void record_jump_equiv (rtx, int);
614 static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
615 			      int);
616 static void cse_insn (rtx, rtx);
617 static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
618 				    int, int);
619 static int addr_affects_sp_p (rtx);
620 static void invalidate_from_clobbers (rtx);
621 static rtx cse_process_notes (rtx, rtx);
622 static void invalidate_skipped_set (rtx, rtx, void *);
623 static void invalidate_skipped_block (rtx);
624 static rtx cse_basic_block (rtx, rtx, struct branch_path *);
625 static void count_reg_usage (rtx, int *, rtx, int);
626 static int check_for_label_ref (rtx *, void *);
627 extern void dump_class (struct table_elt*);
628 static void get_cse_reg_info_1 (unsigned int regno);
629 static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
630 static int check_dependence (rtx *, void *);
631 
632 static void flush_hash_table (void);
633 static bool insn_live_p (rtx, int *);
634 static bool set_live_p (rtx, rtx, int *);
635 static bool dead_libcall_p (rtx, int *);
636 static int cse_change_cc_mode (rtx *, void *);
637 static void cse_change_cc_mode_insn (rtx, rtx);
638 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
639 static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
640 
641 
642 #undef RTL_HOOKS_GEN_LOWPART
643 #define RTL_HOOKS_GEN_LOWPART		gen_lowpart_if_possible
644 
645 static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
646 
647 /* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
648    virtual regs here because the simplify_*_operation routines are called
649    by integrate.c, which is called before virtual register instantiation.  */
650 
651 static bool
fixed_base_plus_p(rtx x)652 fixed_base_plus_p (rtx x)
653 {
654   switch (GET_CODE (x))
655     {
656     case REG:
657       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
658 	return true;
659       if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
660 	return true;
661       if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
662 	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
663 	return true;
664       return false;
665 
666     case PLUS:
667       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
668 	return false;
669       return fixed_base_plus_p (XEXP (x, 0));
670 
671     default:
672       return false;
673     }
674 }
675 
676 /* Dump the expressions in the equivalence class indicated by CLASSP.
677    This function is used only for debugging.  */
678 void
dump_class(struct table_elt * classp)679 dump_class (struct table_elt *classp)
680 {
681   struct table_elt *elt;
682 
683   fprintf (stderr, "Equivalence chain for ");
684   print_rtl (stderr, classp->exp);
685   fprintf (stderr, ": \n");
686 
687   for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
688     {
689       print_rtl (stderr, elt->exp);
690       fprintf (stderr, "\n");
691     }
692 }
693 
694 /* Subroutine of approx_reg_cost; called through for_each_rtx.  */
695 
696 static int
approx_reg_cost_1(rtx * xp,void * data)697 approx_reg_cost_1 (rtx *xp, void *data)
698 {
699   rtx x = *xp;
700   int *cost_p = data;
701 
702   if (x && REG_P (x))
703     {
704       unsigned int regno = REGNO (x);
705 
706       if (! CHEAP_REGNO (regno))
707 	{
708 	  if (regno < FIRST_PSEUDO_REGISTER)
709 	    {
710 	      if (SMALL_REGISTER_CLASSES)
711 		return 1;
712 	      *cost_p += 2;
713 	    }
714 	  else
715 	    *cost_p += 1;
716 	}
717     }
718 
719   return 0;
720 }
721 
722 /* Return an estimate of the cost of the registers used in an rtx.
723    This is mostly the number of different REG expressions in the rtx;
724    however for some exceptions like fixed registers we use a cost of
725    0.  If any other hard register reference occurs, return MAX_COST.  */
726 
727 static int
approx_reg_cost(rtx x)728 approx_reg_cost (rtx x)
729 {
730   int cost = 0;
731 
732   if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
733     return MAX_COST;
734 
735   return cost;
736 }
737 
738 /* Returns a canonical version of X for the address, from the point of view,
739    that all multiplications are represented as MULT instead of the multiply
740    by a power of 2 being represented as ASHIFT.  */
741 
742 static rtx
canon_for_address(rtx x)743 canon_for_address (rtx x)
744 {
745   enum rtx_code code;
746   enum machine_mode mode;
747   rtx new = 0;
748   int i;
749   const char *fmt;
750 
751   if (!x)
752     return x;
753 
754   code = GET_CODE (x);
755   mode = GET_MODE (x);
756 
757   switch (code)
758     {
759     case ASHIFT:
760       if (GET_CODE (XEXP (x, 1)) == CONST_INT
761 	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)
762 	  && INTVAL (XEXP (x, 1)) >= 0)
763         {
764 	  new = canon_for_address (XEXP (x, 0));
765 	  new = gen_rtx_MULT (mode, new,
766 			      gen_int_mode ((HOST_WIDE_INT) 1
767 				            << INTVAL (XEXP (x, 1)),
768 					    mode));
769 	}
770       break;
771     default:
772       break;
773 
774     }
775   if (new)
776     return new;
777 
778   /* Now recursively process each operand of this operation.  */
779   fmt = GET_RTX_FORMAT (code);
780   for (i = 0; i < GET_RTX_LENGTH (code); i++)
781     if (fmt[i] == 'e')
782       {
783 	new = canon_for_address (XEXP (x, i));
784 	XEXP (x, i) = new;
785       }
786   return x;
787 }
788 
789 /* Return a negative value if an rtx A, whose costs are given by COST_A
790    and REGCOST_A, is more desirable than an rtx B.
791    Return a positive value if A is less desirable, or 0 if the two are
792    equally good.  */
793 static int
preferable(int cost_a,int regcost_a,int cost_b,int regcost_b)794 preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
795 {
796   /* First, get rid of cases involving expressions that are entirely
797      unwanted.  */
798   if (cost_a != cost_b)
799     {
800       if (cost_a == MAX_COST)
801 	return 1;
802       if (cost_b == MAX_COST)
803 	return -1;
804     }
805 
806   /* Avoid extending lifetimes of hardregs.  */
807   if (regcost_a != regcost_b)
808     {
809       if (regcost_a == MAX_COST)
810 	return 1;
811       if (regcost_b == MAX_COST)
812 	return -1;
813     }
814 
815   /* Normal operation costs take precedence.  */
816   if (cost_a != cost_b)
817     return cost_a - cost_b;
818   /* Only if these are identical consider effects on register pressure.  */
819   if (regcost_a != regcost_b)
820     return regcost_a - regcost_b;
821   return 0;
822 }
823 
824 /* Internal function, to compute cost when X is not a register; called
825    from COST macro to keep it simple.  */
826 
827 static int
notreg_cost(rtx x,enum rtx_code outer)828 notreg_cost (rtx x, enum rtx_code outer)
829 {
830   return ((GET_CODE (x) == SUBREG
831 	   && REG_P (SUBREG_REG (x))
832 	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
833 	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
834 	   && (GET_MODE_SIZE (GET_MODE (x))
835 	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
836 	   && subreg_lowpart_p (x)
837 	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
838 				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
839 	  ? 0
840 	  : rtx_cost (x, outer) * 2);
841 }
842 
843 
844 /* Initialize CSE_REG_INFO_TABLE.  */
845 
846 static void
init_cse_reg_info(unsigned int nregs)847 init_cse_reg_info (unsigned int nregs)
848 {
849   /* Do we need to grow the table?  */
850   if (nregs > cse_reg_info_table_size)
851     {
852       unsigned int new_size;
853 
854       if (cse_reg_info_table_size < 2048)
855 	{
856 	  /* Compute a new size that is a power of 2 and no smaller
857 	     than the large of NREGS and 64.  */
858 	  new_size = (cse_reg_info_table_size
859 		      ? cse_reg_info_table_size : 64);
860 
861 	  while (new_size < nregs)
862 	    new_size *= 2;
863 	}
864       else
865 	{
866 	  /* If we need a big table, allocate just enough to hold
867 	     NREGS registers.  */
868 	  new_size = nregs;
869 	}
870 
871       /* Reallocate the table with NEW_SIZE entries.  */
872       if (cse_reg_info_table)
873 	free (cse_reg_info_table);
874       cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
875       cse_reg_info_table_size = new_size;
876       cse_reg_info_table_first_uninitialized = 0;
877     }
878 
879   /* Do we have all of the first NREGS entries initialized?  */
880   if (cse_reg_info_table_first_uninitialized < nregs)
881     {
882       unsigned int old_timestamp = cse_reg_info_timestamp - 1;
883       unsigned int i;
884 
885       /* Put the old timestamp on newly allocated entries so that they
886 	 will all be considered out of date.  We do not touch those
887 	 entries beyond the first NREGS entries to be nice to the
888 	 virtual memory.  */
889       for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
890 	cse_reg_info_table[i].timestamp = old_timestamp;
891 
892       cse_reg_info_table_first_uninitialized = nregs;
893     }
894 }
895 
896 /* Given REGNO, initialize the cse_reg_info entry for REGNO.  */
897 
898 static void
get_cse_reg_info_1(unsigned int regno)899 get_cse_reg_info_1 (unsigned int regno)
900 {
901   /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
902      entry will be considered to have been initialized.  */
903   cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
904 
905   /* Initialize the rest of the entry.  */
906   cse_reg_info_table[regno].reg_tick = 1;
907   cse_reg_info_table[regno].reg_in_table = -1;
908   cse_reg_info_table[regno].subreg_ticked = -1;
909   cse_reg_info_table[regno].reg_qty = -regno - 1;
910 }
911 
912 /* Find a cse_reg_info entry for REGNO.  */
913 
914 static inline struct cse_reg_info *
get_cse_reg_info(unsigned int regno)915 get_cse_reg_info (unsigned int regno)
916 {
917   struct cse_reg_info *p = &cse_reg_info_table[regno];
918 
919   /* If this entry has not been initialized, go ahead and initialize
920      it.  */
921   if (p->timestamp != cse_reg_info_timestamp)
922     get_cse_reg_info_1 (regno);
923 
924   return p;
925 }
926 
927 /* Clear the hash table and initialize each register with its own quantity,
928    for a new basic block.  */
929 
930 static void
new_basic_block(void)931 new_basic_block (void)
932 {
933   int i;
934 
935   next_qty = 0;
936 
937   /* Invalidate cse_reg_info_table.  */
938   cse_reg_info_timestamp++;
939 
940   /* Clear out hash table state for this pass.  */
941   CLEAR_HARD_REG_SET (hard_regs_in_table);
942 
943   /* The per-quantity values used to be initialized here, but it is
944      much faster to initialize each as it is made in `make_new_qty'.  */
945 
946   for (i = 0; i < HASH_SIZE; i++)
947     {
948       struct table_elt *first;
949 
950       first = table[i];
951       if (first != NULL)
952 	{
953 	  struct table_elt *last = first;
954 
955 	  table[i] = NULL;
956 
957 	  while (last->next_same_hash != NULL)
958 	    last = last->next_same_hash;
959 
960 	  /* Now relink this hash entire chain into
961 	     the free element list.  */
962 
963 	  last->next_same_hash = free_element_chain;
964 	  free_element_chain = first;
965 	}
966     }
967 
968   table_size = 0;
969 
970 #ifdef HAVE_cc0
971   prev_insn = 0;
972   prev_insn_cc0 = 0;
973 #endif
974 }
975 
976 /* Say that register REG contains a quantity in mode MODE not in any
977    register before and initialize that quantity.  */
978 
979 static void
make_new_qty(unsigned int reg,enum machine_mode mode)980 make_new_qty (unsigned int reg, enum machine_mode mode)
981 {
982   int q;
983   struct qty_table_elem *ent;
984   struct reg_eqv_elem *eqv;
985 
986   gcc_assert (next_qty < max_qty);
987 
988   q = REG_QTY (reg) = next_qty++;
989   ent = &qty_table[q];
990   ent->first_reg = reg;
991   ent->last_reg = reg;
992   ent->mode = mode;
993   ent->const_rtx = ent->const_insn = NULL_RTX;
994   ent->comparison_code = UNKNOWN;
995 
996   eqv = &reg_eqv_table[reg];
997   eqv->next = eqv->prev = -1;
998 }
999 
1000 /* Make reg NEW equivalent to reg OLD.
1001    OLD is not changing; NEW is.  */
1002 
1003 static void
make_regs_eqv(unsigned int new,unsigned int old)1004 make_regs_eqv (unsigned int new, unsigned int old)
1005 {
1006   unsigned int lastr, firstr;
1007   int q = REG_QTY (old);
1008   struct qty_table_elem *ent;
1009 
1010   ent = &qty_table[q];
1011 
1012   /* Nothing should become eqv until it has a "non-invalid" qty number.  */
1013   gcc_assert (REGNO_QTY_VALID_P (old));
1014 
1015   REG_QTY (new) = q;
1016   firstr = ent->first_reg;
1017   lastr = ent->last_reg;
1018 
1019   /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
1020      hard regs.  Among pseudos, if NEW will live longer than any other reg
1021      of the same qty, and that is beyond the current basic block,
1022      make it the new canonical replacement for this qty.  */
1023   if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1024       /* Certain fixed registers might be of the class NO_REGS.  This means
1025 	 that not only can they not be allocated by the compiler, but
1026 	 they cannot be used in substitutions or canonicalizations
1027 	 either.  */
1028       && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1029       && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1030 	  || (new >= FIRST_PSEUDO_REGISTER
1031 	      && (firstr < FIRST_PSEUDO_REGISTER
1032 		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1033 		       || (uid_cuid[REGNO_FIRST_UID (new)]
1034 			   < cse_basic_block_start))
1035 		      && (uid_cuid[REGNO_LAST_UID (new)]
1036 			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1037     {
1038       reg_eqv_table[firstr].prev = new;
1039       reg_eqv_table[new].next = firstr;
1040       reg_eqv_table[new].prev = -1;
1041       ent->first_reg = new;
1042     }
1043   else
1044     {
1045       /* If NEW is a hard reg (known to be non-fixed), insert at end.
1046 	 Otherwise, insert before any non-fixed hard regs that are at the
1047 	 end.  Registers of class NO_REGS cannot be used as an
1048 	 equivalent for anything.  */
1049       while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1050 	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1051 	     && new >= FIRST_PSEUDO_REGISTER)
1052 	lastr = reg_eqv_table[lastr].prev;
1053       reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1054       if (reg_eqv_table[lastr].next >= 0)
1055 	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1056       else
1057 	qty_table[q].last_reg = new;
1058       reg_eqv_table[lastr].next = new;
1059       reg_eqv_table[new].prev = lastr;
1060     }
1061 }
1062 
1063 /* Remove REG from its equivalence class.  */
1064 
1065 static void
delete_reg_equiv(unsigned int reg)1066 delete_reg_equiv (unsigned int reg)
1067 {
1068   struct qty_table_elem *ent;
1069   int q = REG_QTY (reg);
1070   int p, n;
1071 
1072   /* If invalid, do nothing.  */
1073   if (! REGNO_QTY_VALID_P (reg))
1074     return;
1075 
1076   ent = &qty_table[q];
1077 
1078   p = reg_eqv_table[reg].prev;
1079   n = reg_eqv_table[reg].next;
1080 
1081   if (n != -1)
1082     reg_eqv_table[n].prev = p;
1083   else
1084     ent->last_reg = p;
1085   if (p != -1)
1086     reg_eqv_table[p].next = n;
1087   else
1088     ent->first_reg = n;
1089 
1090   REG_QTY (reg) = -reg - 1;
1091 }
1092 
1093 /* Remove any invalid expressions from the hash table
1094    that refer to any of the registers contained in expression X.
1095 
1096    Make sure that newly inserted references to those registers
1097    as subexpressions will be considered valid.
1098 
1099    mention_regs is not called when a register itself
1100    is being stored in the table.
1101 
1102    Return 1 if we have done something that may have changed the hash code
1103    of X.  */
1104 
1105 static int
mention_regs(rtx x)1106 mention_regs (rtx x)
1107 {
1108   enum rtx_code code;
1109   int i, j;
1110   const char *fmt;
1111   int changed = 0;
1112 
1113   if (x == 0)
1114     return 0;
1115 
1116   code = GET_CODE (x);
1117   if (code == REG)
1118     {
1119       unsigned int regno = REGNO (x);
1120       unsigned int endregno
1121 	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1122 		   : hard_regno_nregs[regno][GET_MODE (x)]);
1123       unsigned int i;
1124 
1125       for (i = regno; i < endregno; i++)
1126 	{
1127 	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1128 	    remove_invalid_refs (i);
1129 
1130 	  REG_IN_TABLE (i) = REG_TICK (i);
1131 	  SUBREG_TICKED (i) = -1;
1132 	}
1133 
1134       return 0;
1135     }
1136 
1137   /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1138      pseudo if they don't use overlapping words.  We handle only pseudos
1139      here for simplicity.  */
1140   if (code == SUBREG && REG_P (SUBREG_REG (x))
1141       && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1142     {
1143       unsigned int i = REGNO (SUBREG_REG (x));
1144 
1145       if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1146 	{
1147 	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1148 	     the last store to this register really stored into this
1149 	     subreg, then remove the memory of this subreg.
1150 	     Otherwise, remove any memory of the entire register and
1151 	     all its subregs from the table.  */
1152 	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1153 	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1154 	    remove_invalid_refs (i);
1155 	  else
1156 	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1157 	}
1158 
1159       REG_IN_TABLE (i) = REG_TICK (i);
1160       SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1161       return 0;
1162     }
1163 
1164   /* If X is a comparison or a COMPARE and either operand is a register
1165      that does not have a quantity, give it one.  This is so that a later
1166      call to record_jump_equiv won't cause X to be assigned a different
1167      hash code and not found in the table after that call.
1168 
1169      It is not necessary to do this here, since rehash_using_reg can
1170      fix up the table later, but doing this here eliminates the need to
1171      call that expensive function in the most common case where the only
1172      use of the register is in the comparison.  */
1173 
1174   if (code == COMPARE || COMPARISON_P (x))
1175     {
1176       if (REG_P (XEXP (x, 0))
1177 	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1178 	if (insert_regs (XEXP (x, 0), NULL, 0))
1179 	  {
1180 	    rehash_using_reg (XEXP (x, 0));
1181 	    changed = 1;
1182 	  }
1183 
1184       if (REG_P (XEXP (x, 1))
1185 	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1186 	if (insert_regs (XEXP (x, 1), NULL, 0))
1187 	  {
1188 	    rehash_using_reg (XEXP (x, 1));
1189 	    changed = 1;
1190 	  }
1191     }
1192 
1193   fmt = GET_RTX_FORMAT (code);
1194   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1195     if (fmt[i] == 'e')
1196       changed |= mention_regs (XEXP (x, i));
1197     else if (fmt[i] == 'E')
1198       for (j = 0; j < XVECLEN (x, i); j++)
1199 	changed |= mention_regs (XVECEXP (x, i, j));
1200 
1201   return changed;
1202 }
1203 
1204 /* Update the register quantities for inserting X into the hash table
1205    with a value equivalent to CLASSP.
1206    (If the class does not contain a REG, it is irrelevant.)
1207    If MODIFIED is nonzero, X is a destination; it is being modified.
1208    Note that delete_reg_equiv should be called on a register
1209    before insert_regs is done on that register with MODIFIED != 0.
1210 
1211    Nonzero value means that elements of reg_qty have changed
1212    so X's hash code may be different.  */
1213 
1214 static int
insert_regs(rtx x,struct table_elt * classp,int modified)1215 insert_regs (rtx x, struct table_elt *classp, int modified)
1216 {
1217   if (REG_P (x))
1218     {
1219       unsigned int regno = REGNO (x);
1220       int qty_valid;
1221 
1222       /* If REGNO is in the equivalence table already but is of the
1223 	 wrong mode for that equivalence, don't do anything here.  */
1224 
1225       qty_valid = REGNO_QTY_VALID_P (regno);
1226       if (qty_valid)
1227 	{
1228 	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1229 
1230 	  if (ent->mode != GET_MODE (x))
1231 	    return 0;
1232 	}
1233 
1234       if (modified || ! qty_valid)
1235 	{
1236 	  if (classp)
1237 	    for (classp = classp->first_same_value;
1238 		 classp != 0;
1239 		 classp = classp->next_same_value)
1240 	      if (REG_P (classp->exp)
1241 		  && GET_MODE (classp->exp) == GET_MODE (x))
1242 		{
1243 		  unsigned c_regno = REGNO (classp->exp);
1244 
1245 		  gcc_assert (REGNO_QTY_VALID_P (c_regno));
1246 
1247 		  /* Suppose that 5 is hard reg and 100 and 101 are
1248 		     pseudos.  Consider
1249 
1250 		     (set (reg:si 100) (reg:si 5))
1251 		     (set (reg:si 5) (reg:si 100))
1252 		     (set (reg:di 101) (reg:di 5))
1253 
1254 		     We would now set REG_QTY (101) = REG_QTY (5), but the
1255 		     entry for 5 is in SImode.  When we use this later in
1256 		     copy propagation, we get the register in wrong mode.  */
1257 		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
1258 		    continue;
1259 
1260 		  make_regs_eqv (regno, c_regno);
1261 		  return 1;
1262 		}
1263 
1264 	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1265 	     than REG_IN_TABLE to find out if there was only a single preceding
1266 	     invalidation - for the SUBREG - or another one, which would be
1267 	     for the full register.  However, if we find here that REG_TICK
1268 	     indicates that the register is invalid, it means that it has
1269 	     been invalidated in a separate operation.  The SUBREG might be used
1270 	     now (then this is a recursive call), or we might use the full REG
1271 	     now and a SUBREG of it later.  So bump up REG_TICK so that
1272 	     mention_regs will do the right thing.  */
1273 	  if (! modified
1274 	      && REG_IN_TABLE (regno) >= 0
1275 	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1276 	    REG_TICK (regno)++;
1277 	  make_new_qty (regno, GET_MODE (x));
1278 	  return 1;
1279 	}
1280 
1281       return 0;
1282     }
1283 
1284   /* If X is a SUBREG, we will likely be inserting the inner register in the
1285      table.  If that register doesn't have an assigned quantity number at
1286      this point but does later, the insertion that we will be doing now will
1287      not be accessible because its hash code will have changed.  So assign
1288      a quantity number now.  */
1289 
1290   else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
1291 	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1292     {
1293       insert_regs (SUBREG_REG (x), NULL, 0);
1294       mention_regs (x);
1295       return 1;
1296     }
1297   else
1298     return mention_regs (x);
1299 }
1300 
1301 /* Look in or update the hash table.  */
1302 
1303 /* Remove table element ELT from use in the table.
1304    HASH is its hash code, made using the HASH macro.
1305    It's an argument because often that is known in advance
1306    and we save much time not recomputing it.  */
1307 
1308 static void
remove_from_table(struct table_elt * elt,unsigned int hash)1309 remove_from_table (struct table_elt *elt, unsigned int hash)
1310 {
1311   if (elt == 0)
1312     return;
1313 
1314   /* Mark this element as removed.  See cse_insn.  */
1315   elt->first_same_value = 0;
1316 
1317   /* Remove the table element from its equivalence class.  */
1318 
1319   {
1320     struct table_elt *prev = elt->prev_same_value;
1321     struct table_elt *next = elt->next_same_value;
1322 
1323     if (next)
1324       next->prev_same_value = prev;
1325 
1326     if (prev)
1327       prev->next_same_value = next;
1328     else
1329       {
1330 	struct table_elt *newfirst = next;
1331 	while (next)
1332 	  {
1333 	    next->first_same_value = newfirst;
1334 	    next = next->next_same_value;
1335 	  }
1336       }
1337   }
1338 
1339   /* Remove the table element from its hash bucket.  */
1340 
1341   {
1342     struct table_elt *prev = elt->prev_same_hash;
1343     struct table_elt *next = elt->next_same_hash;
1344 
1345     if (next)
1346       next->prev_same_hash = prev;
1347 
1348     if (prev)
1349       prev->next_same_hash = next;
1350     else if (table[hash] == elt)
1351       table[hash] = next;
1352     else
1353       {
1354 	/* This entry is not in the proper hash bucket.  This can happen
1355 	   when two classes were merged by `merge_equiv_classes'.  Search
1356 	   for the hash bucket that it heads.  This happens only very
1357 	   rarely, so the cost is acceptable.  */
1358 	for (hash = 0; hash < HASH_SIZE; hash++)
1359 	  if (table[hash] == elt)
1360 	    table[hash] = next;
1361       }
1362   }
1363 
1364   /* Remove the table element from its related-value circular chain.  */
1365 
1366   if (elt->related_value != 0 && elt->related_value != elt)
1367     {
1368       struct table_elt *p = elt->related_value;
1369 
1370       while (p->related_value != elt)
1371 	p = p->related_value;
1372       p->related_value = elt->related_value;
1373       if (p->related_value == p)
1374 	p->related_value = 0;
1375     }
1376 
1377   /* Now add it to the free element chain.  */
1378   elt->next_same_hash = free_element_chain;
1379   free_element_chain = elt;
1380 
1381   table_size--;
1382 }
1383 
1384 /* Look up X in the hash table and return its table element,
1385    or 0 if X is not in the table.
1386 
1387    MODE is the machine-mode of X, or if X is an integer constant
1388    with VOIDmode then MODE is the mode with which X will be used.
1389 
1390    Here we are satisfied to find an expression whose tree structure
1391    looks like X.  */
1392 
1393 static struct table_elt *
lookup(rtx x,unsigned int hash,enum machine_mode mode)1394 lookup (rtx x, unsigned int hash, enum machine_mode mode)
1395 {
1396   struct table_elt *p;
1397 
1398   for (p = table[hash]; p; p = p->next_same_hash)
1399     if (mode == p->mode && ((x == p->exp && REG_P (x))
1400 			    || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1401       return p;
1402 
1403   return 0;
1404 }
1405 
1406 /* Like `lookup' but don't care whether the table element uses invalid regs.
1407    Also ignore discrepancies in the machine mode of a register.  */
1408 
1409 static struct table_elt *
lookup_for_remove(rtx x,unsigned int hash,enum machine_mode mode)1410 lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1411 {
1412   struct table_elt *p;
1413 
1414   if (REG_P (x))
1415     {
1416       unsigned int regno = REGNO (x);
1417 
1418       /* Don't check the machine mode when comparing registers;
1419 	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
1420       for (p = table[hash]; p; p = p->next_same_hash)
1421 	if (REG_P (p->exp)
1422 	    && REGNO (p->exp) == regno)
1423 	  return p;
1424     }
1425   else
1426     {
1427       for (p = table[hash]; p; p = p->next_same_hash)
1428 	if (mode == p->mode
1429 	    && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1430 	  return p;
1431     }
1432 
1433   return 0;
1434 }
1435 
1436 /* Look for an expression equivalent to X and with code CODE.
1437    If one is found, return that expression.  */
1438 
1439 static rtx
lookup_as_function(rtx x,enum rtx_code code)1440 lookup_as_function (rtx x, enum rtx_code code)
1441 {
1442   struct table_elt *p
1443     = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1444 
1445   /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1446      long as we are narrowing.  So if we looked in vain for a mode narrower
1447      than word_mode before, look for word_mode now.  */
1448   if (p == 0 && code == CONST_INT
1449       && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1450     {
1451       x = copy_rtx (x);
1452       PUT_MODE (x, word_mode);
1453       p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
1454     }
1455 
1456   if (p == 0)
1457     return 0;
1458 
1459   for (p = p->first_same_value; p; p = p->next_same_value)
1460     if (GET_CODE (p->exp) == code
1461 	/* Make sure this is a valid entry in the table.  */
1462 	&& exp_equiv_p (p->exp, p->exp, 1, false))
1463       return p->exp;
1464 
1465   return 0;
1466 }
1467 
1468 /* Insert X in the hash table, assuming HASH is its hash code
1469    and CLASSP is an element of the class it should go in
1470    (or 0 if a new class should be made).
1471    It is inserted at the proper position to keep the class in
1472    the order cheapest first.
1473 
1474    MODE is the machine-mode of X, or if X is an integer constant
1475    with VOIDmode then MODE is the mode with which X will be used.
1476 
1477    For elements of equal cheapness, the most recent one
1478    goes in front, except that the first element in the list
1479    remains first unless a cheaper element is added.  The order of
1480    pseudo-registers does not matter, as canon_reg will be called to
1481    find the cheapest when a register is retrieved from the table.
1482 
1483    The in_memory field in the hash table element is set to 0.
1484    The caller must set it nonzero if appropriate.
1485 
1486    You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1487    and if insert_regs returns a nonzero value
1488    you must then recompute its hash code before calling here.
1489 
1490    If necessary, update table showing constant values of quantities.  */
1491 
1492 #define CHEAPER(X, Y) \
1493  (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1494 
1495 static struct table_elt *
insert(rtx x,struct table_elt * classp,unsigned int hash,enum machine_mode mode)1496 insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
1497 {
1498   struct table_elt *elt;
1499 
1500   /* If X is a register and we haven't made a quantity for it,
1501      something is wrong.  */
1502   gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1503 
1504   /* If X is a hard register, show it is being put in the table.  */
1505   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1506     {
1507       unsigned int regno = REGNO (x);
1508       unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
1509       unsigned int i;
1510 
1511       for (i = regno; i < endregno; i++)
1512 	SET_HARD_REG_BIT (hard_regs_in_table, i);
1513     }
1514 
1515   /* Put an element for X into the right hash bucket.  */
1516 
1517   elt = free_element_chain;
1518   if (elt)
1519     free_element_chain = elt->next_same_hash;
1520   else
1521     elt = XNEW (struct table_elt);
1522 
1523   elt->exp = x;
1524   elt->canon_exp = NULL_RTX;
1525   elt->cost = COST (x);
1526   elt->regcost = approx_reg_cost (x);
1527   elt->next_same_value = 0;
1528   elt->prev_same_value = 0;
1529   elt->next_same_hash = table[hash];
1530   elt->prev_same_hash = 0;
1531   elt->related_value = 0;
1532   elt->in_memory = 0;
1533   elt->mode = mode;
1534   elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1535 
1536   if (table[hash])
1537     table[hash]->prev_same_hash = elt;
1538   table[hash] = elt;
1539 
1540   /* Put it into the proper value-class.  */
1541   if (classp)
1542     {
1543       classp = classp->first_same_value;
1544       if (CHEAPER (elt, classp))
1545 	/* Insert at the head of the class.  */
1546 	{
1547 	  struct table_elt *p;
1548 	  elt->next_same_value = classp;
1549 	  classp->prev_same_value = elt;
1550 	  elt->first_same_value = elt;
1551 
1552 	  for (p = classp; p; p = p->next_same_value)
1553 	    p->first_same_value = elt;
1554 	}
1555       else
1556 	{
1557 	  /* Insert not at head of the class.  */
1558 	  /* Put it after the last element cheaper than X.  */
1559 	  struct table_elt *p, *next;
1560 
1561 	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1562 	       p = next);
1563 
1564 	  /* Put it after P and before NEXT.  */
1565 	  elt->next_same_value = next;
1566 	  if (next)
1567 	    next->prev_same_value = elt;
1568 
1569 	  elt->prev_same_value = p;
1570 	  p->next_same_value = elt;
1571 	  elt->first_same_value = classp;
1572 	}
1573     }
1574   else
1575     elt->first_same_value = elt;
1576 
1577   /* If this is a constant being set equivalent to a register or a register
1578      being set equivalent to a constant, note the constant equivalence.
1579 
1580      If this is a constant, it cannot be equivalent to a different constant,
1581      and a constant is the only thing that can be cheaper than a register.  So
1582      we know the register is the head of the class (before the constant was
1583      inserted).
1584 
1585      If this is a register that is not already known equivalent to a
1586      constant, we must check the entire class.
1587 
1588      If this is a register that is already known equivalent to an insn,
1589      update the qtys `const_insn' to show that `this_insn' is the latest
1590      insn making that quantity equivalent to the constant.  */
1591 
1592   if (elt->is_const && classp && REG_P (classp->exp)
1593       && !REG_P (x))
1594     {
1595       int exp_q = REG_QTY (REGNO (classp->exp));
1596       struct qty_table_elem *exp_ent = &qty_table[exp_q];
1597 
1598       exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1599       exp_ent->const_insn = this_insn;
1600     }
1601 
1602   else if (REG_P (x)
1603 	   && classp
1604 	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1605 	   && ! elt->is_const)
1606     {
1607       struct table_elt *p;
1608 
1609       for (p = classp; p != 0; p = p->next_same_value)
1610 	{
1611 	  if (p->is_const && !REG_P (p->exp))
1612 	    {
1613 	      int x_q = REG_QTY (REGNO (x));
1614 	      struct qty_table_elem *x_ent = &qty_table[x_q];
1615 
1616 	      x_ent->const_rtx
1617 		= gen_lowpart (GET_MODE (x), p->exp);
1618 	      x_ent->const_insn = this_insn;
1619 	      break;
1620 	    }
1621 	}
1622     }
1623 
1624   else if (REG_P (x)
1625 	   && qty_table[REG_QTY (REGNO (x))].const_rtx
1626 	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1627     qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1628 
1629   /* If this is a constant with symbolic value,
1630      and it has a term with an explicit integer value,
1631      link it up with related expressions.  */
1632   if (GET_CODE (x) == CONST)
1633     {
1634       rtx subexp = get_related_value (x);
1635       unsigned subhash;
1636       struct table_elt *subelt, *subelt_prev;
1637 
1638       if (subexp != 0)
1639 	{
1640 	  /* Get the integer-free subexpression in the hash table.  */
1641 	  subhash = SAFE_HASH (subexp, mode);
1642 	  subelt = lookup (subexp, subhash, mode);
1643 	  if (subelt == 0)
1644 	    subelt = insert (subexp, NULL, subhash, mode);
1645 	  /* Initialize SUBELT's circular chain if it has none.  */
1646 	  if (subelt->related_value == 0)
1647 	    subelt->related_value = subelt;
1648 	  /* Find the element in the circular chain that precedes SUBELT.  */
1649 	  subelt_prev = subelt;
1650 	  while (subelt_prev->related_value != subelt)
1651 	    subelt_prev = subelt_prev->related_value;
1652 	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1653 	     This way the element that follows SUBELT is the oldest one.  */
1654 	  elt->related_value = subelt_prev->related_value;
1655 	  subelt_prev->related_value = elt;
1656 	}
1657     }
1658 
1659   table_size++;
1660 
1661   return elt;
1662 }
1663 
1664 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1665    CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1666    the two classes equivalent.
1667 
1668    CLASS1 will be the surviving class; CLASS2 should not be used after this
1669    call.
1670 
1671    Any invalid entries in CLASS2 will not be copied.  */
1672 
1673 static void
merge_equiv_classes(struct table_elt * class1,struct table_elt * class2)1674 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1675 {
1676   struct table_elt *elt, *next, *new;
1677 
1678   /* Ensure we start with the head of the classes.  */
1679   class1 = class1->first_same_value;
1680   class2 = class2->first_same_value;
1681 
1682   /* If they were already equal, forget it.  */
1683   if (class1 == class2)
1684     return;
1685 
1686   for (elt = class2; elt; elt = next)
1687     {
1688       unsigned int hash;
1689       rtx exp = elt->exp;
1690       enum machine_mode mode = elt->mode;
1691 
1692       next = elt->next_same_value;
1693 
1694       /* Remove old entry, make a new one in CLASS1's class.
1695 	 Don't do this for invalid entries as we cannot find their
1696 	 hash code (it also isn't necessary).  */
1697       if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1698 	{
1699 	  bool need_rehash = false;
1700 
1701 	  hash_arg_in_memory = 0;
1702 	  hash = HASH (exp, mode);
1703 
1704 	  if (REG_P (exp))
1705 	    {
1706 	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1707 	      delete_reg_equiv (REGNO (exp));
1708 	    }
1709 
1710 	  remove_from_table (elt, hash);
1711 
1712 	  if (insert_regs (exp, class1, 0) || need_rehash)
1713 	    {
1714 	      rehash_using_reg (exp);
1715 	      hash = HASH (exp, mode);
1716 	    }
1717 	  new = insert (exp, class1, hash, mode);
1718 	  new->in_memory = hash_arg_in_memory;
1719 	}
1720     }
1721 }
1722 
1723 /* Flush the entire hash table.  */
1724 
1725 static void
flush_hash_table(void)1726 flush_hash_table (void)
1727 {
1728   int i;
1729   struct table_elt *p;
1730 
1731   for (i = 0; i < HASH_SIZE; i++)
1732     for (p = table[i]; p; p = table[i])
1733       {
1734 	/* Note that invalidate can remove elements
1735 	   after P in the current hash chain.  */
1736 	if (REG_P (p->exp))
1737 	  invalidate (p->exp, VOIDmode);
1738 	else
1739 	  remove_from_table (p, i);
1740       }
1741 }
1742 
1743 /* Function called for each rtx to check whether true dependence exist.  */
1744 struct check_dependence_data
1745 {
1746   enum machine_mode mode;
1747   rtx exp;
1748   rtx addr;
1749 };
1750 
1751 static int
check_dependence(rtx * x,void * data)1752 check_dependence (rtx *x, void *data)
1753 {
1754   struct check_dependence_data *d = (struct check_dependence_data *) data;
1755   if (*x && MEM_P (*x))
1756     return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1757 		    		  cse_rtx_varies_p);
1758   else
1759     return 0;
1760 }
1761 
1762 /* Remove from the hash table, or mark as invalid, all expressions whose
1763    values could be altered by storing in X.  X is a register, a subreg, or
1764    a memory reference with nonvarying address (because, when a memory
1765    reference with a varying address is stored in, all memory references are
1766    removed by invalidate_memory so specific invalidation is superfluous).
1767    FULL_MODE, if not VOIDmode, indicates that this much should be
1768    invalidated instead of just the amount indicated by the mode of X.  This
1769    is only used for bitfield stores into memory.
1770 
1771    A nonvarying address may be just a register or just a symbol reference,
1772    or it may be either of those plus a numeric offset.  */
1773 
1774 static void
invalidate(rtx x,enum machine_mode full_mode)1775 invalidate (rtx x, enum machine_mode full_mode)
1776 {
1777   int i;
1778   struct table_elt *p;
1779   rtx addr;
1780 
1781   switch (GET_CODE (x))
1782     {
1783     case REG:
1784       {
1785 	/* If X is a register, dependencies on its contents are recorded
1786 	   through the qty number mechanism.  Just change the qty number of
1787 	   the register, mark it as invalid for expressions that refer to it,
1788 	   and remove it itself.  */
1789 	unsigned int regno = REGNO (x);
1790 	unsigned int hash = HASH (x, GET_MODE (x));
1791 
1792 	/* Remove REGNO from any quantity list it might be on and indicate
1793 	   that its value might have changed.  If it is a pseudo, remove its
1794 	   entry from the hash table.
1795 
1796 	   For a hard register, we do the first two actions above for any
1797 	   additional hard registers corresponding to X.  Then, if any of these
1798 	   registers are in the table, we must remove any REG entries that
1799 	   overlap these registers.  */
1800 
1801 	delete_reg_equiv (regno);
1802 	REG_TICK (regno)++;
1803 	SUBREG_TICKED (regno) = -1;
1804 
1805 	if (regno >= FIRST_PSEUDO_REGISTER)
1806 	  {
1807 	    /* Because a register can be referenced in more than one mode,
1808 	       we might have to remove more than one table entry.  */
1809 	    struct table_elt *elt;
1810 
1811 	    while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1812 	      remove_from_table (elt, hash);
1813 	  }
1814 	else
1815 	  {
1816 	    HOST_WIDE_INT in_table
1817 	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1818 	    unsigned int endregno
1819 	      = regno + hard_regno_nregs[regno][GET_MODE (x)];
1820 	    unsigned int tregno, tendregno, rn;
1821 	    struct table_elt *p, *next;
1822 
1823 	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1824 
1825 	    for (rn = regno + 1; rn < endregno; rn++)
1826 	      {
1827 		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1828 		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1829 		delete_reg_equiv (rn);
1830 		REG_TICK (rn)++;
1831 		SUBREG_TICKED (rn) = -1;
1832 	      }
1833 
1834 	    if (in_table)
1835 	      for (hash = 0; hash < HASH_SIZE; hash++)
1836 		for (p = table[hash]; p; p = next)
1837 		  {
1838 		    next = p->next_same_hash;
1839 
1840 		    if (!REG_P (p->exp)
1841 			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1842 		      continue;
1843 
1844 		    tregno = REGNO (p->exp);
1845 		    tendregno
1846 		      = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
1847 		    if (tendregno > regno && tregno < endregno)
1848 		      remove_from_table (p, hash);
1849 		  }
1850 	  }
1851       }
1852       return;
1853 
1854     case SUBREG:
1855       invalidate (SUBREG_REG (x), VOIDmode);
1856       return;
1857 
1858     case PARALLEL:
1859       for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1860 	invalidate (XVECEXP (x, 0, i), VOIDmode);
1861       return;
1862 
1863     case EXPR_LIST:
1864       /* This is part of a disjoint return value; extract the location in
1865 	 question ignoring the offset.  */
1866       invalidate (XEXP (x, 0), VOIDmode);
1867       return;
1868 
1869     case MEM:
1870       addr = canon_rtx (get_addr (XEXP (x, 0)));
1871       /* Calculate the canonical version of X here so that
1872 	 true_dependence doesn't generate new RTL for X on each call.  */
1873       x = canon_rtx (x);
1874 
1875       /* Remove all hash table elements that refer to overlapping pieces of
1876 	 memory.  */
1877       if (full_mode == VOIDmode)
1878 	full_mode = GET_MODE (x);
1879 
1880       for (i = 0; i < HASH_SIZE; i++)
1881 	{
1882 	  struct table_elt *next;
1883 
1884 	  for (p = table[i]; p; p = next)
1885 	    {
1886 	      next = p->next_same_hash;
1887 	      if (p->in_memory)
1888 		{
1889 		  struct check_dependence_data d;
1890 
1891 		  /* Just canonicalize the expression once;
1892 		     otherwise each time we call invalidate
1893 		     true_dependence will canonicalize the
1894 		     expression again.  */
1895 		  if (!p->canon_exp)
1896 		    p->canon_exp = canon_rtx (p->exp);
1897 		  d.exp = x;
1898 		  d.addr = addr;
1899 		  d.mode = full_mode;
1900 		  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1901 		    remove_from_table (p, i);
1902 		}
1903 	    }
1904 	}
1905       return;
1906 
1907     default:
1908       gcc_unreachable ();
1909     }
1910 }
1911 
1912 /* Remove all expressions that refer to register REGNO,
1913    since they are already invalid, and we are about to
1914    mark that register valid again and don't want the old
1915    expressions to reappear as valid.  */
1916 
1917 static void
remove_invalid_refs(unsigned int regno)1918 remove_invalid_refs (unsigned int regno)
1919 {
1920   unsigned int i;
1921   struct table_elt *p, *next;
1922 
1923   for (i = 0; i < HASH_SIZE; i++)
1924     for (p = table[i]; p; p = next)
1925       {
1926 	next = p->next_same_hash;
1927 	if (!REG_P (p->exp)
1928 	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1929 	  remove_from_table (p, i);
1930       }
1931 }
1932 
1933 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1934    and mode MODE.  */
1935 static void
remove_invalid_subreg_refs(unsigned int regno,unsigned int offset,enum machine_mode mode)1936 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1937 			    enum machine_mode mode)
1938 {
1939   unsigned int i;
1940   struct table_elt *p, *next;
1941   unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1942 
1943   for (i = 0; i < HASH_SIZE; i++)
1944     for (p = table[i]; p; p = next)
1945       {
1946 	rtx exp = p->exp;
1947 	next = p->next_same_hash;
1948 
1949 	if (!REG_P (exp)
1950 	    && (GET_CODE (exp) != SUBREG
1951 		|| !REG_P (SUBREG_REG (exp))
1952 		|| REGNO (SUBREG_REG (exp)) != regno
1953 		|| (((SUBREG_BYTE (exp)
1954 		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1955 		    && SUBREG_BYTE (exp) <= end))
1956 	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1957 	  remove_from_table (p, i);
1958       }
1959 }
1960 
1961 /* Recompute the hash codes of any valid entries in the hash table that
1962    reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1963 
1964    This is called when we make a jump equivalence.  */
1965 
1966 static void
rehash_using_reg(rtx x)1967 rehash_using_reg (rtx x)
1968 {
1969   unsigned int i;
1970   struct table_elt *p, *next;
1971   unsigned hash;
1972 
1973   if (GET_CODE (x) == SUBREG)
1974     x = SUBREG_REG (x);
1975 
1976   /* If X is not a register or if the register is known not to be in any
1977      valid entries in the table, we have no work to do.  */
1978 
1979   if (!REG_P (x)
1980       || REG_IN_TABLE (REGNO (x)) < 0
1981       || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1982     return;
1983 
1984   /* Scan all hash chains looking for valid entries that mention X.
1985      If we find one and it is in the wrong hash chain, move it.  */
1986 
1987   for (i = 0; i < HASH_SIZE; i++)
1988     for (p = table[i]; p; p = next)
1989       {
1990 	next = p->next_same_hash;
1991 	if (reg_mentioned_p (x, p->exp)
1992 	    && exp_equiv_p (p->exp, p->exp, 1, false)
1993 	    && i != (hash = SAFE_HASH (p->exp, p->mode)))
1994 	  {
1995 	    if (p->next_same_hash)
1996 	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
1997 
1998 	    if (p->prev_same_hash)
1999 	      p->prev_same_hash->next_same_hash = p->next_same_hash;
2000 	    else
2001 	      table[i] = p->next_same_hash;
2002 
2003 	    p->next_same_hash = table[hash];
2004 	    p->prev_same_hash = 0;
2005 	    if (table[hash])
2006 	      table[hash]->prev_same_hash = p;
2007 	    table[hash] = p;
2008 	  }
2009       }
2010 }
2011 
2012 /* Remove from the hash table any expression that is a call-clobbered
2013    register.  Also update their TICK values.  */
2014 
2015 static void
invalidate_for_call(void)2016 invalidate_for_call (void)
2017 {
2018   unsigned int regno, endregno;
2019   unsigned int i;
2020   unsigned hash;
2021   struct table_elt *p, *next;
2022   int in_table = 0;
2023 
2024   /* Go through all the hard registers.  For each that is clobbered in
2025      a CALL_INSN, remove the register from quantity chains and update
2026      reg_tick if defined.  Also see if any of these registers is currently
2027      in the table.  */
2028 
2029   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2030     if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2031       {
2032 	delete_reg_equiv (regno);
2033 	if (REG_TICK (regno) >= 0)
2034 	  {
2035 	    REG_TICK (regno)++;
2036 	    SUBREG_TICKED (regno) = -1;
2037 	  }
2038 
2039 	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2040       }
2041 
2042   /* In the case where we have no call-clobbered hard registers in the
2043      table, we are done.  Otherwise, scan the table and remove any
2044      entry that overlaps a call-clobbered register.  */
2045 
2046   if (in_table)
2047     for (hash = 0; hash < HASH_SIZE; hash++)
2048       for (p = table[hash]; p; p = next)
2049 	{
2050 	  next = p->next_same_hash;
2051 
2052 	  if (!REG_P (p->exp)
2053 	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2054 	    continue;
2055 
2056 	  regno = REGNO (p->exp);
2057 	  endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
2058 
2059 	  for (i = regno; i < endregno; i++)
2060 	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2061 	      {
2062 		remove_from_table (p, hash);
2063 		break;
2064 	      }
2065 	}
2066 }
2067 
2068 /* Given an expression X of type CONST,
2069    and ELT which is its table entry (or 0 if it
2070    is not in the hash table),
2071    return an alternate expression for X as a register plus integer.
2072    If none can be found, return 0.  */
2073 
2074 static rtx
use_related_value(rtx x,struct table_elt * elt)2075 use_related_value (rtx x, struct table_elt *elt)
2076 {
2077   struct table_elt *relt = 0;
2078   struct table_elt *p, *q;
2079   HOST_WIDE_INT offset;
2080 
2081   /* First, is there anything related known?
2082      If we have a table element, we can tell from that.
2083      Otherwise, must look it up.  */
2084 
2085   if (elt != 0 && elt->related_value != 0)
2086     relt = elt;
2087   else if (elt == 0 && GET_CODE (x) == CONST)
2088     {
2089       rtx subexp = get_related_value (x);
2090       if (subexp != 0)
2091 	relt = lookup (subexp,
2092 		       SAFE_HASH (subexp, GET_MODE (subexp)),
2093 		       GET_MODE (subexp));
2094     }
2095 
2096   if (relt == 0)
2097     return 0;
2098 
2099   /* Search all related table entries for one that has an
2100      equivalent register.  */
2101 
2102   p = relt;
2103   while (1)
2104     {
2105       /* This loop is strange in that it is executed in two different cases.
2106 	 The first is when X is already in the table.  Then it is searching
2107 	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2108 	 X is not in the table.  Then RELT points to a class for the related
2109 	 value.
2110 
2111 	 Ensure that, whatever case we are in, that we ignore classes that have
2112 	 the same value as X.  */
2113 
2114       if (rtx_equal_p (x, p->exp))
2115 	q = 0;
2116       else
2117 	for (q = p->first_same_value; q; q = q->next_same_value)
2118 	  if (REG_P (q->exp))
2119 	    break;
2120 
2121       if (q)
2122 	break;
2123 
2124       p = p->related_value;
2125 
2126       /* We went all the way around, so there is nothing to be found.
2127 	 Alternatively, perhaps RELT was in the table for some other reason
2128 	 and it has no related values recorded.  */
2129       if (p == relt || p == 0)
2130 	break;
2131     }
2132 
2133   if (q == 0)
2134     return 0;
2135 
2136   offset = (get_integer_term (x) - get_integer_term (p->exp));
2137   /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity.  */
2138   return plus_constant (q->exp, offset);
2139 }
2140 
2141 /* Hash a string.  Just add its bytes up.  */
2142 static inline unsigned
hash_rtx_string(const char * ps)2143 hash_rtx_string (const char *ps)
2144 {
2145   unsigned hash = 0;
2146   const unsigned char *p = (const unsigned char *) ps;
2147 
2148   if (p)
2149     while (*p)
2150       hash += *p++;
2151 
2152   return hash;
2153 }
2154 
2155 /* Hash an rtx.  We are careful to make sure the value is never negative.
2156    Equivalent registers hash identically.
2157    MODE is used in hashing for CONST_INTs only;
2158    otherwise the mode of X is used.
2159 
2160    Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2161 
2162    If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2163    a MEM rtx which does not have the RTX_UNCHANGING_P bit set.
2164 
2165    Note that cse_insn knows that the hash code of a MEM expression
2166    is just (int) MEM plus the hash code of the address.  */
2167 
2168 unsigned
hash_rtx(rtx x,enum machine_mode mode,int * do_not_record_p,int * hash_arg_in_memory_p,bool have_reg_qty)2169 hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
2170 	  int *hash_arg_in_memory_p, bool have_reg_qty)
2171 {
2172   int i, j;
2173   unsigned hash = 0;
2174   enum rtx_code code;
2175   const char *fmt;
2176 
2177   /* Used to turn recursion into iteration.  We can't rely on GCC's
2178      tail-recursion elimination since we need to keep accumulating values
2179      in HASH.  */
2180  repeat:
2181   if (x == 0)
2182     return hash;
2183 
2184   code = GET_CODE (x);
2185   switch (code)
2186     {
2187     case REG:
2188       {
2189 	unsigned int regno = REGNO (x);
2190 
2191 	if (!reload_completed)
2192 	  {
2193 	    /* On some machines, we can't record any non-fixed hard register,
2194 	       because extending its life will cause reload problems.  We
2195 	       consider ap, fp, sp, gp to be fixed for this purpose.
2196 
2197 	       We also consider CCmode registers to be fixed for this purpose;
2198 	       failure to do so leads to failure to simplify 0<100 type of
2199 	       conditionals.
2200 
2201 	       On all machines, we can't record any global registers.
2202 	       Nor should we record any register that is in a small
2203 	       class, as defined by CLASS_LIKELY_SPILLED_P.  */
2204 	    bool record;
2205 
2206 	    if (regno >= FIRST_PSEUDO_REGISTER)
2207 	      record = true;
2208 	    else if (x == frame_pointer_rtx
2209 		     || x == hard_frame_pointer_rtx
2210 		     || x == arg_pointer_rtx
2211 		     || x == stack_pointer_rtx
2212 		     || x == pic_offset_table_rtx)
2213 	      record = true;
2214 	    else if (global_regs[regno])
2215 	      record = false;
2216 	    else if (fixed_regs[regno])
2217 	      record = true;
2218 	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2219 	      record = true;
2220 	    else if (SMALL_REGISTER_CLASSES)
2221 	      record = false;
2222 	    else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2223 	      record = false;
2224 	    else
2225 	      record = true;
2226 
2227 	    if (!record)
2228 	      {
2229 		*do_not_record_p = 1;
2230 		return 0;
2231 	      }
2232 	  }
2233 
2234 	hash += ((unsigned int) REG << 7);
2235         hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2236 	return hash;
2237       }
2238 
2239     /* We handle SUBREG of a REG specially because the underlying
2240        reg changes its hash value with every value change; we don't
2241        want to have to forget unrelated subregs when one subreg changes.  */
2242     case SUBREG:
2243       {
2244 	if (REG_P (SUBREG_REG (x)))
2245 	  {
2246 	    hash += (((unsigned int) SUBREG << 7)
2247 		     + REGNO (SUBREG_REG (x))
2248 		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2249 	    return hash;
2250 	  }
2251 	break;
2252       }
2253 
2254     case CONST_INT:
2255       hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2256                + (unsigned int) INTVAL (x));
2257       return hash;
2258 
2259     case CONST_DOUBLE:
2260       /* This is like the general case, except that it only counts
2261 	 the integers representing the constant.  */
2262       hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2263       if (GET_MODE (x) != VOIDmode)
2264 	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2265       else
2266 	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2267 		 + (unsigned int) CONST_DOUBLE_HIGH (x));
2268       return hash;
2269 
2270     case CONST_VECTOR:
2271       {
2272 	int units;
2273 	rtx elt;
2274 
2275 	units = CONST_VECTOR_NUNITS (x);
2276 
2277 	for (i = 0; i < units; ++i)
2278 	  {
2279 	    elt = CONST_VECTOR_ELT (x, i);
2280 	    hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
2281 			      hash_arg_in_memory_p, have_reg_qty);
2282 	  }
2283 
2284 	return hash;
2285       }
2286 
2287       /* Assume there is only one rtx object for any given label.  */
2288     case LABEL_REF:
2289       /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2290 	 differences and differences between each stage's debugging dumps.  */
2291 	 hash += (((unsigned int) LABEL_REF << 7)
2292 		  + CODE_LABEL_NUMBER (XEXP (x, 0)));
2293       return hash;
2294 
2295     case SYMBOL_REF:
2296       {
2297 	/* Don't hash on the symbol's address to avoid bootstrap differences.
2298 	   Different hash values may cause expressions to be recorded in
2299 	   different orders and thus different registers to be used in the
2300 	   final assembler.  This also avoids differences in the dump files
2301 	   between various stages.  */
2302 	unsigned int h = 0;
2303 	const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2304 
2305 	while (*p)
2306 	  h += (h << 7) + *p++; /* ??? revisit */
2307 
2308 	hash += ((unsigned int) SYMBOL_REF << 7) + h;
2309 	return hash;
2310       }
2311 
2312     case MEM:
2313       /* We don't record if marked volatile or if BLKmode since we don't
2314 	 know the size of the move.  */
2315       if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2316 	{
2317 	  *do_not_record_p = 1;
2318 	  return 0;
2319 	}
2320       if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2321 	*hash_arg_in_memory_p = 1;
2322 
2323       /* Now that we have already found this special case,
2324 	 might as well speed it up as much as possible.  */
2325       hash += (unsigned) MEM;
2326       x = XEXP (x, 0);
2327       goto repeat;
2328 
2329     case USE:
2330       /* A USE that mentions non-volatile memory needs special
2331 	 handling since the MEM may be BLKmode which normally
2332 	 prevents an entry from being made.  Pure calls are
2333 	 marked by a USE which mentions BLKmode memory.
2334 	 See calls.c:emit_call_1.  */
2335       if (MEM_P (XEXP (x, 0))
2336 	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
2337 	{
2338 	  hash += (unsigned) USE;
2339 	  x = XEXP (x, 0);
2340 
2341 	  if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2342 	    *hash_arg_in_memory_p = 1;
2343 
2344 	  /* Now that we have already found this special case,
2345 	     might as well speed it up as much as possible.  */
2346 	  hash += (unsigned) MEM;
2347 	  x = XEXP (x, 0);
2348 	  goto repeat;
2349 	}
2350       break;
2351 
2352     case PRE_DEC:
2353     case PRE_INC:
2354     case POST_DEC:
2355     case POST_INC:
2356     case PRE_MODIFY:
2357     case POST_MODIFY:
2358     case PC:
2359     case CC0:
2360     case CALL:
2361     case UNSPEC_VOLATILE:
2362       *do_not_record_p = 1;
2363       return 0;
2364 
2365     case ASM_OPERANDS:
2366       if (MEM_VOLATILE_P (x))
2367 	{
2368 	  *do_not_record_p = 1;
2369 	  return 0;
2370 	}
2371       else
2372 	{
2373 	  /* We don't want to take the filename and line into account.  */
2374 	  hash += (unsigned) code + (unsigned) GET_MODE (x)
2375 	    + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2376 	    + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2377 	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2378 
2379 	  if (ASM_OPERANDS_INPUT_LENGTH (x))
2380 	    {
2381 	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2382 		{
2383 		  hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
2384 				     GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2385 				     do_not_record_p, hash_arg_in_memory_p,
2386 				     have_reg_qty)
2387 			   + hash_rtx_string
2388 				(ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2389 		}
2390 
2391 	      hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2392 	      x = ASM_OPERANDS_INPUT (x, 0);
2393 	      mode = GET_MODE (x);
2394 	      goto repeat;
2395 	    }
2396 
2397 	  return hash;
2398 	}
2399       break;
2400 
2401     default:
2402       break;
2403     }
2404 
2405   i = GET_RTX_LENGTH (code) - 1;
2406   hash += (unsigned) code + (unsigned) GET_MODE (x);
2407   fmt = GET_RTX_FORMAT (code);
2408   for (; i >= 0; i--)
2409     {
2410       switch (fmt[i])
2411 	{
2412 	case 'e':
2413 	  /* If we are about to do the last recursive call
2414 	     needed at this level, change it into iteration.
2415 	     This function  is called enough to be worth it.  */
2416 	  if (i == 0)
2417 	    {
2418 	      x = XEXP (x, i);
2419 	      goto repeat;
2420 	    }
2421 
2422 	  hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
2423 			    hash_arg_in_memory_p, have_reg_qty);
2424 	  break;
2425 
2426 	case 'E':
2427 	  for (j = 0; j < XVECLEN (x, i); j++)
2428 	    hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
2429 			      hash_arg_in_memory_p, have_reg_qty);
2430 	  break;
2431 
2432 	case 's':
2433 	  hash += hash_rtx_string (XSTR (x, i));
2434 	  break;
2435 
2436 	case 'i':
2437 	  hash += (unsigned int) XINT (x, i);
2438 	  break;
2439 
2440 	case '0': case 't':
2441 	  /* Unused.  */
2442 	  break;
2443 
2444 	default:
2445 	  gcc_unreachable ();
2446 	}
2447     }
2448 
2449   return hash;
2450 }
2451 
2452 /* Hash an rtx X for cse via hash_rtx.
2453    Stores 1 in do_not_record if any subexpression is volatile.
2454    Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2455    does not have the RTX_UNCHANGING_P bit set.  */
2456 
2457 static inline unsigned
canon_hash(rtx x,enum machine_mode mode)2458 canon_hash (rtx x, enum machine_mode mode)
2459 {
2460   return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2461 }
2462 
2463 /* Like canon_hash but with no side effects, i.e. do_not_record
2464    and hash_arg_in_memory are not changed.  */
2465 
2466 static inline unsigned
safe_hash(rtx x,enum machine_mode mode)2467 safe_hash (rtx x, enum machine_mode mode)
2468 {
2469   int dummy_do_not_record;
2470   return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2471 }
2472 
2473 /* Return 1 iff X and Y would canonicalize into the same thing,
2474    without actually constructing the canonicalization of either one.
2475    If VALIDATE is nonzero,
2476    we assume X is an expression being processed from the rtl
2477    and Y was found in the hash table.  We check register refs
2478    in Y for being marked as valid.
2479 
2480    If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */
2481 
2482 int
exp_equiv_p(rtx x,rtx y,int validate,bool for_gcse)2483 exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
2484 {
2485   int i, j;
2486   enum rtx_code code;
2487   const char *fmt;
2488 
2489   /* Note: it is incorrect to assume an expression is equivalent to itself
2490      if VALIDATE is nonzero.  */
2491   if (x == y && !validate)
2492     return 1;
2493 
2494   if (x == 0 || y == 0)
2495     return x == y;
2496 
2497   code = GET_CODE (x);
2498   if (code != GET_CODE (y))
2499     return 0;
2500 
2501   /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2502   if (GET_MODE (x) != GET_MODE (y))
2503     return 0;
2504 
2505   switch (code)
2506     {
2507     case PC:
2508     case CC0:
2509     case CONST_INT:
2510     case CONST_DOUBLE:
2511       return x == y;
2512 
2513     case LABEL_REF:
2514       return XEXP (x, 0) == XEXP (y, 0);
2515 
2516     case SYMBOL_REF:
2517       return XSTR (x, 0) == XSTR (y, 0);
2518 
2519     case REG:
2520       if (for_gcse)
2521 	return REGNO (x) == REGNO (y);
2522       else
2523 	{
2524 	  unsigned int regno = REGNO (y);
2525 	  unsigned int i;
2526 	  unsigned int endregno
2527 	    = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2528 		       : hard_regno_nregs[regno][GET_MODE (y)]);
2529 
2530 	  /* If the quantities are not the same, the expressions are not
2531 	     equivalent.  If there are and we are not to validate, they
2532 	     are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2533 
2534 	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2535 	    return 0;
2536 
2537 	  if (! validate)
2538 	    return 1;
2539 
2540 	  for (i = regno; i < endregno; i++)
2541 	    if (REG_IN_TABLE (i) != REG_TICK (i))
2542 	      return 0;
2543 
2544 	  return 1;
2545 	}
2546 
2547     case MEM:
2548       if (for_gcse)
2549 	{
2550 	  /* A volatile mem should not be considered equivalent to any
2551 	     other.  */
2552 	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2553 	    return 0;
2554 
2555 	  /* Can't merge two expressions in different alias sets, since we
2556 	     can decide that the expression is transparent in a block when
2557 	     it isn't, due to it being set with the different alias set.
2558 
2559 	     Also, can't merge two expressions with different MEM_ATTRS.
2560 	     They could e.g. be two different entities allocated into the
2561 	     same space on the stack (see e.g. PR25130).  In that case, the
2562 	     MEM addresses can be the same, even though the two MEMs are
2563 	     absolutely not equivalent.
2564 
2565 	     But because really all MEM attributes should be the same for
2566 	     equivalent MEMs, we just use the invariant that MEMs that have
2567 	     the same attributes share the same mem_attrs data structure.  */
2568 	  if (MEM_ATTRS (x) != MEM_ATTRS (y))
2569 	    return 0;
2570 	}
2571       break;
2572 
2573     /*  For commutative operations, check both orders.  */
2574     case PLUS:
2575     case MULT:
2576     case AND:
2577     case IOR:
2578     case XOR:
2579     case NE:
2580     case EQ:
2581       return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2582 			     validate, for_gcse)
2583 	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2584 				validate, for_gcse))
2585 	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2586 				validate, for_gcse)
2587 		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2588 				   validate, for_gcse)));
2589 
2590     case ASM_OPERANDS:
2591       /* We don't use the generic code below because we want to
2592 	 disregard filename and line numbers.  */
2593 
2594       /* A volatile asm isn't equivalent to any other.  */
2595       if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2596 	return 0;
2597 
2598       if (GET_MODE (x) != GET_MODE (y)
2599 	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2600 	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2601 		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2602 	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2603 	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2604 	return 0;
2605 
2606       if (ASM_OPERANDS_INPUT_LENGTH (x))
2607 	{
2608 	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2609 	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2610 			       ASM_OPERANDS_INPUT (y, i),
2611 			       validate, for_gcse)
2612 		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2613 			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2614 	      return 0;
2615 	}
2616 
2617       return 1;
2618 
2619     default:
2620       break;
2621     }
2622 
2623   /* Compare the elements.  If any pair of corresponding elements
2624      fail to match, return 0 for the whole thing.  */
2625 
2626   fmt = GET_RTX_FORMAT (code);
2627   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2628     {
2629       switch (fmt[i])
2630 	{
2631 	case 'e':
2632 	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2633 			      validate, for_gcse))
2634 	    return 0;
2635 	  break;
2636 
2637 	case 'E':
2638 	  if (XVECLEN (x, i) != XVECLEN (y, i))
2639 	    return 0;
2640 	  for (j = 0; j < XVECLEN (x, i); j++)
2641 	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2642 				validate, for_gcse))
2643 	      return 0;
2644 	  break;
2645 
2646 	case 's':
2647 	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2648 	    return 0;
2649 	  break;
2650 
2651 	case 'i':
2652 	  if (XINT (x, i) != XINT (y, i))
2653 	    return 0;
2654 	  break;
2655 
2656 	case 'w':
2657 	  if (XWINT (x, i) != XWINT (y, i))
2658 	    return 0;
2659 	  break;
2660 
2661 	case '0':
2662 	case 't':
2663 	  break;
2664 
2665 	default:
2666 	  gcc_unreachable ();
2667 	}
2668     }
2669 
2670   return 1;
2671 }
2672 
2673 /* Return 1 if X has a value that can vary even between two
2674    executions of the program.  0 means X can be compared reliably
2675    against certain constants or near-constants.  */
2676 
2677 static int
cse_rtx_varies_p(rtx x,int from_alias)2678 cse_rtx_varies_p (rtx x, int from_alias)
2679 {
2680   /* We need not check for X and the equivalence class being of the same
2681      mode because if X is equivalent to a constant in some mode, it
2682      doesn't vary in any mode.  */
2683 
2684   if (REG_P (x)
2685       && REGNO_QTY_VALID_P (REGNO (x)))
2686     {
2687       int x_q = REG_QTY (REGNO (x));
2688       struct qty_table_elem *x_ent = &qty_table[x_q];
2689 
2690       if (GET_MODE (x) == x_ent->mode
2691 	  && x_ent->const_rtx != NULL_RTX)
2692 	return 0;
2693     }
2694 
2695   if (GET_CODE (x) == PLUS
2696       && GET_CODE (XEXP (x, 1)) == CONST_INT
2697       && REG_P (XEXP (x, 0))
2698       && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2699     {
2700       int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2701       struct qty_table_elem *x0_ent = &qty_table[x0_q];
2702 
2703       if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2704 	  && x0_ent->const_rtx != NULL_RTX)
2705 	return 0;
2706     }
2707 
2708   /* This can happen as the result of virtual register instantiation, if
2709      the initial constant is too large to be a valid address.  This gives
2710      us a three instruction sequence, load large offset into a register,
2711      load fp minus a constant into a register, then a MEM which is the
2712      sum of the two `constant' registers.  */
2713   if (GET_CODE (x) == PLUS
2714       && REG_P (XEXP (x, 0))
2715       && REG_P (XEXP (x, 1))
2716       && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2717       && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2718     {
2719       int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2720       int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2721       struct qty_table_elem *x0_ent = &qty_table[x0_q];
2722       struct qty_table_elem *x1_ent = &qty_table[x1_q];
2723 
2724       if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2725 	  && x0_ent->const_rtx != NULL_RTX
2726 	  && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2727 	  && x1_ent->const_rtx != NULL_RTX)
2728 	return 0;
2729     }
2730 
2731   return rtx_varies_p (x, from_alias);
2732 }
2733 
2734 /* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
2735    the result if necessary.  INSN is as for canon_reg.  */
2736 
2737 static void
validate_canon_reg(rtx * xloc,rtx insn)2738 validate_canon_reg (rtx *xloc, rtx insn)
2739 {
2740   rtx new = canon_reg (*xloc, insn);
2741 
2742   /* If replacing pseudo with hard reg or vice versa, ensure the
2743      insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2744   if (insn != 0 && new != 0)
2745     validate_change (insn, xloc, new, 1);
2746   else
2747     *xloc = new;
2748 }
2749 
2750 /* Canonicalize an expression:
2751    replace each register reference inside it
2752    with the "oldest" equivalent register.
2753 
2754    If INSN is nonzero validate_change is used to ensure that INSN remains valid
2755    after we make our substitution.  The calls are made with IN_GROUP nonzero
2756    so apply_change_group must be called upon the outermost return from this
2757    function (unless INSN is zero).  The result of apply_change_group can
2758    generally be discarded since the changes we are making are optional.  */
2759 
2760 static rtx
canon_reg(rtx x,rtx insn)2761 canon_reg (rtx x, rtx insn)
2762 {
2763   int i;
2764   enum rtx_code code;
2765   const char *fmt;
2766 
2767   if (x == 0)
2768     return x;
2769 
2770   code = GET_CODE (x);
2771   switch (code)
2772     {
2773     case PC:
2774     case CC0:
2775     case CONST:
2776     case CONST_INT:
2777     case CONST_DOUBLE:
2778     case CONST_VECTOR:
2779     case SYMBOL_REF:
2780     case LABEL_REF:
2781     case ADDR_VEC:
2782     case ADDR_DIFF_VEC:
2783       return x;
2784 
2785     case REG:
2786       {
2787 	int first;
2788 	int q;
2789 	struct qty_table_elem *ent;
2790 
2791 	/* Never replace a hard reg, because hard regs can appear
2792 	   in more than one machine mode, and we must preserve the mode
2793 	   of each occurrence.  Also, some hard regs appear in
2794 	   MEMs that are shared and mustn't be altered.  Don't try to
2795 	   replace any reg that maps to a reg of class NO_REGS.  */
2796 	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2797 	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2798 	  return x;
2799 
2800 	q = REG_QTY (REGNO (x));
2801 	ent = &qty_table[q];
2802 	first = ent->first_reg;
2803 	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2804 		: REGNO_REG_CLASS (first) == NO_REGS ? x
2805 		: gen_rtx_REG (ent->mode, first));
2806       }
2807 
2808     default:
2809       break;
2810     }
2811 
2812   fmt = GET_RTX_FORMAT (code);
2813   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2814     {
2815       int j;
2816 
2817       if (fmt[i] == 'e')
2818 	validate_canon_reg (&XEXP (x, i), insn);
2819       else if (fmt[i] == 'E')
2820 	for (j = 0; j < XVECLEN (x, i); j++)
2821 	  validate_canon_reg (&XVECEXP (x, i, j), insn);
2822     }
2823 
2824   return x;
2825 }
2826 
2827 /* LOC is a location within INSN that is an operand address (the contents of
2828    a MEM).  Find the best equivalent address to use that is valid for this
2829    insn.
2830 
2831    On most CISC machines, complicated address modes are costly, and rtx_cost
2832    is a good approximation for that cost.  However, most RISC machines have
2833    only a few (usually only one) memory reference formats.  If an address is
2834    valid at all, it is often just as cheap as any other address.  Hence, for
2835    RISC machines, we use `address_cost' to compare the costs of various
2836    addresses.  For two addresses of equal cost, choose the one with the
2837    highest `rtx_cost' value as that has the potential of eliminating the
2838    most insns.  For equal costs, we choose the first in the equivalence
2839    class.  Note that we ignore the fact that pseudo registers are cheaper than
2840    hard registers here because we would also prefer the pseudo registers.  */
2841 
2842 static void
find_best_addr(rtx insn,rtx * loc,enum machine_mode mode)2843 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2844 {
2845   struct table_elt *elt;
2846   rtx addr = *loc;
2847   struct table_elt *p;
2848   int found_better = 1;
2849   int save_do_not_record = do_not_record;
2850   int save_hash_arg_in_memory = hash_arg_in_memory;
2851   int addr_volatile;
2852   int regno;
2853   unsigned hash;
2854 
2855   /* Do not try to replace constant addresses or addresses of local and
2856      argument slots.  These MEM expressions are made only once and inserted
2857      in many instructions, as well as being used to control symbol table
2858      output.  It is not safe to clobber them.
2859 
2860      There are some uncommon cases where the address is already in a register
2861      for some reason, but we cannot take advantage of that because we have
2862      no easy way to unshare the MEM.  In addition, looking up all stack
2863      addresses is costly.  */
2864   if ((GET_CODE (addr) == PLUS
2865        && REG_P (XEXP (addr, 0))
2866        && GET_CODE (XEXP (addr, 1)) == CONST_INT
2867        && (regno = REGNO (XEXP (addr, 0)),
2868 	   regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2869 	   || regno == ARG_POINTER_REGNUM))
2870       || (REG_P (addr)
2871 	  && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2872 	      || regno == HARD_FRAME_POINTER_REGNUM
2873 	      || regno == ARG_POINTER_REGNUM))
2874       || CONSTANT_ADDRESS_P (addr))
2875     return;
2876 
2877   /* If this address is not simply a register, try to fold it.  This will
2878      sometimes simplify the expression.  Many simplifications
2879      will not be valid, but some, usually applying the associative rule, will
2880      be valid and produce better code.  */
2881   if (!REG_P (addr))
2882     {
2883       rtx folded = canon_for_address (fold_rtx (addr, NULL_RTX));
2884 
2885       if (folded != addr)
2886 	{
2887 	  int addr_folded_cost = address_cost (folded, mode);
2888 	  int addr_cost = address_cost (addr, mode);
2889 
2890 	  if ((addr_folded_cost < addr_cost
2891 	       || (addr_folded_cost == addr_cost
2892 		   /* ??? The rtx_cost comparison is left over from an older
2893 		      version of this code.  It is probably no longer helpful.*/
2894 		   && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2895 		       || approx_reg_cost (folded) < approx_reg_cost (addr))))
2896 	      && validate_change (insn, loc, folded, 0))
2897 	    addr = folded;
2898 	}
2899     }
2900 
2901   /* If this address is not in the hash table, we can't look for equivalences
2902      of the whole address.  Also, ignore if volatile.  */
2903 
2904   do_not_record = 0;
2905   hash = HASH (addr, Pmode);
2906   addr_volatile = do_not_record;
2907   do_not_record = save_do_not_record;
2908   hash_arg_in_memory = save_hash_arg_in_memory;
2909 
2910   if (addr_volatile)
2911     return;
2912 
2913   elt = lookup (addr, hash, Pmode);
2914 
2915   if (elt)
2916     {
2917       /* We need to find the best (under the criteria documented above) entry
2918 	 in the class that is valid.  We use the `flag' field to indicate
2919 	 choices that were invalid and iterate until we can't find a better
2920 	 one that hasn't already been tried.  */
2921 
2922       for (p = elt->first_same_value; p; p = p->next_same_value)
2923 	p->flag = 0;
2924 
2925       while (found_better)
2926 	{
2927 	  int best_addr_cost = address_cost (*loc, mode);
2928 	  int best_rtx_cost = (elt->cost + 1) >> 1;
2929 	  int exp_cost;
2930 	  struct table_elt *best_elt = elt;
2931 
2932 	  found_better = 0;
2933 	  for (p = elt->first_same_value; p; p = p->next_same_value)
2934 	    if (! p->flag)
2935 	      {
2936 		if ((REG_P (p->exp)
2937 		     || exp_equiv_p (p->exp, p->exp, 1, false))
2938 		    && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2939 			|| (exp_cost == best_addr_cost
2940 			    && ((p->cost + 1) >> 1) > best_rtx_cost)))
2941 		  {
2942 		    found_better = 1;
2943 		    best_addr_cost = exp_cost;
2944 		    best_rtx_cost = (p->cost + 1) >> 1;
2945 		    best_elt = p;
2946 		  }
2947 	      }
2948 
2949 	  if (found_better)
2950 	    {
2951 	      if (validate_change (insn, loc,
2952 				   canon_reg (copy_rtx (best_elt->exp),
2953 					      NULL_RTX), 0))
2954 		return;
2955 	      else
2956 		best_elt->flag = 1;
2957 	    }
2958 	}
2959     }
2960 
2961   /* If the address is a binary operation with the first operand a register
2962      and the second a constant, do the same as above, but looking for
2963      equivalences of the register.  Then try to simplify before checking for
2964      the best address to use.  This catches a few cases:  First is when we
2965      have REG+const and the register is another REG+const.  We can often merge
2966      the constants and eliminate one insn and one register.  It may also be
2967      that a machine has a cheap REG+REG+const.  Finally, this improves the
2968      code on the Alpha for unaligned byte stores.  */
2969 
2970   if (flag_expensive_optimizations
2971       && ARITHMETIC_P (*loc)
2972       && REG_P (XEXP (*loc, 0)))
2973     {
2974       rtx op1 = XEXP (*loc, 1);
2975 
2976       do_not_record = 0;
2977       hash = HASH (XEXP (*loc, 0), Pmode);
2978       do_not_record = save_do_not_record;
2979       hash_arg_in_memory = save_hash_arg_in_memory;
2980 
2981       elt = lookup (XEXP (*loc, 0), hash, Pmode);
2982       if (elt == 0)
2983 	return;
2984 
2985       /* We need to find the best (under the criteria documented above) entry
2986 	 in the class that is valid.  We use the `flag' field to indicate
2987 	 choices that were invalid and iterate until we can't find a better
2988 	 one that hasn't already been tried.  */
2989 
2990       for (p = elt->first_same_value; p; p = p->next_same_value)
2991 	p->flag = 0;
2992 
2993       while (found_better)
2994 	{
2995 	  int best_addr_cost = address_cost (*loc, mode);
2996 	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
2997 	  struct table_elt *best_elt = elt;
2998 	  rtx best_rtx = *loc;
2999 	  int count;
3000 
3001 	  /* This is at worst case an O(n^2) algorithm, so limit our search
3002 	     to the first 32 elements on the list.  This avoids trouble
3003 	     compiling code with very long basic blocks that can easily
3004 	     call simplify_gen_binary so many times that we run out of
3005 	     memory.  */
3006 
3007 	  found_better = 0;
3008 	  for (p = elt->first_same_value, count = 0;
3009 	       p && count < 32;
3010 	       p = p->next_same_value, count++)
3011 	    if (! p->flag
3012 		&& (REG_P (p->exp)
3013 		    || (GET_CODE (p->exp) != EXPR_LIST
3014 			&& exp_equiv_p (p->exp, p->exp, 1, false))))
3015 
3016 	      {
3017 		rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3018 					       p->exp, op1);
3019 		int new_cost;
3020 
3021 		/* Get the canonical version of the address so we can accept
3022 		   more.  */
3023 		new = canon_for_address (new);
3024 
3025 		new_cost = address_cost (new, mode);
3026 
3027 		if (new_cost < best_addr_cost
3028 		    || (new_cost == best_addr_cost
3029 			&& (COST (new) + 1) >> 1 > best_rtx_cost))
3030 		  {
3031 		    found_better = 1;
3032 		    best_addr_cost = new_cost;
3033 		    best_rtx_cost = (COST (new) + 1) >> 1;
3034 		    best_elt = p;
3035 		    best_rtx = new;
3036 		  }
3037 	      }
3038 
3039 	  if (found_better)
3040 	    {
3041 	      if (validate_change (insn, loc,
3042 				   canon_reg (copy_rtx (best_rtx),
3043 					      NULL_RTX), 0))
3044 		return;
3045 	      else
3046 		best_elt->flag = 1;
3047 	    }
3048 	}
3049     }
3050 }
3051 
3052 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3053    operation (EQ, NE, GT, etc.), follow it back through the hash table and
3054    what values are being compared.
3055 
3056    *PARG1 and *PARG2 are updated to contain the rtx representing the values
3057    actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
3058    was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3059    compared to produce cc0.
3060 
3061    The return value is the comparison operator and is either the code of
3062    A or the code corresponding to the inverse of the comparison.  */
3063 
3064 static enum rtx_code
find_comparison_args(enum rtx_code code,rtx * parg1,rtx * parg2,enum machine_mode * pmode1,enum machine_mode * pmode2)3065 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3066 		      enum machine_mode *pmode1, enum machine_mode *pmode2)
3067 {
3068   rtx arg1, arg2;
3069 
3070   arg1 = *parg1, arg2 = *parg2;
3071 
3072   /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
3073 
3074   while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3075     {
3076       /* Set nonzero when we find something of interest.  */
3077       rtx x = 0;
3078       int reverse_code = 0;
3079       struct table_elt *p = 0;
3080 
3081       /* If arg1 is a COMPARE, extract the comparison arguments from it.
3082 	 On machines with CC0, this is the only case that can occur, since
3083 	 fold_rtx will return the COMPARE or item being compared with zero
3084 	 when given CC0.  */
3085 
3086       if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3087 	x = arg1;
3088 
3089       /* If ARG1 is a comparison operator and CODE is testing for
3090 	 STORE_FLAG_VALUE, get the inner arguments.  */
3091 
3092       else if (COMPARISON_P (arg1))
3093 	{
3094 #ifdef FLOAT_STORE_FLAG_VALUE
3095 	  REAL_VALUE_TYPE fsfv;
3096 #endif
3097 
3098 	  if (code == NE
3099 	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3100 		  && code == LT && STORE_FLAG_VALUE == -1)
3101 #ifdef FLOAT_STORE_FLAG_VALUE
3102 	      || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3103 		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3104 		      REAL_VALUE_NEGATIVE (fsfv)))
3105 #endif
3106 	      )
3107 	    x = arg1;
3108 	  else if (code == EQ
3109 		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3110 		       && code == GE && STORE_FLAG_VALUE == -1)
3111 #ifdef FLOAT_STORE_FLAG_VALUE
3112 		   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3113 		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3114 			   REAL_VALUE_NEGATIVE (fsfv)))
3115 #endif
3116 		   )
3117 	    x = arg1, reverse_code = 1;
3118 	}
3119 
3120       /* ??? We could also check for
3121 
3122 	 (ne (and (eq (...) (const_int 1))) (const_int 0))
3123 
3124 	 and related forms, but let's wait until we see them occurring.  */
3125 
3126       if (x == 0)
3127 	/* Look up ARG1 in the hash table and see if it has an equivalence
3128 	   that lets us see what is being compared.  */
3129 	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3130       if (p)
3131 	{
3132 	  p = p->first_same_value;
3133 
3134 	  /* If what we compare is already known to be constant, that is as
3135 	     good as it gets.
3136 	     We need to break the loop in this case, because otherwise we
3137 	     can have an infinite loop when looking at a reg that is known
3138 	     to be a constant which is the same as a comparison of a reg
3139 	     against zero which appears later in the insn stream, which in
3140 	     turn is constant and the same as the comparison of the first reg
3141 	     against zero...  */
3142 	  if (p->is_const)
3143 	    break;
3144 	}
3145 
3146       for (; p; p = p->next_same_value)
3147 	{
3148 	  enum machine_mode inner_mode = GET_MODE (p->exp);
3149 #ifdef FLOAT_STORE_FLAG_VALUE
3150 	  REAL_VALUE_TYPE fsfv;
3151 #endif
3152 
3153 	  /* If the entry isn't valid, skip it.  */
3154 	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
3155 	    continue;
3156 
3157 	  if (GET_CODE (p->exp) == COMPARE
3158 	      /* Another possibility is that this machine has a compare insn
3159 		 that includes the comparison code.  In that case, ARG1 would
3160 		 be equivalent to a comparison operation that would set ARG1 to
3161 		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
3162 		 ORIG_CODE is the actual comparison being done; if it is an EQ,
3163 		 we must reverse ORIG_CODE.  On machine with a negative value
3164 		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3165 	      || ((code == NE
3166 		   || (code == LT
3167 		       && GET_MODE_CLASS (inner_mode) == MODE_INT
3168 		       && (GET_MODE_BITSIZE (inner_mode)
3169 			   <= HOST_BITS_PER_WIDE_INT)
3170 		       && (STORE_FLAG_VALUE
3171 			   & ((HOST_WIDE_INT) 1
3172 			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
3173 #ifdef FLOAT_STORE_FLAG_VALUE
3174 		   || (code == LT
3175 		       && SCALAR_FLOAT_MODE_P (inner_mode)
3176 		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3177 			   REAL_VALUE_NEGATIVE (fsfv)))
3178 #endif
3179 		   )
3180 		  && COMPARISON_P (p->exp)))
3181 	    {
3182 	      x = p->exp;
3183 	      break;
3184 	    }
3185 	  else if ((code == EQ
3186 		    || (code == GE
3187 			&& GET_MODE_CLASS (inner_mode) == MODE_INT
3188 			&& (GET_MODE_BITSIZE (inner_mode)
3189 			    <= HOST_BITS_PER_WIDE_INT)
3190 			&& (STORE_FLAG_VALUE
3191 			    & ((HOST_WIDE_INT) 1
3192 			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
3193 #ifdef FLOAT_STORE_FLAG_VALUE
3194 		    || (code == GE
3195 			&& SCALAR_FLOAT_MODE_P (inner_mode)
3196 			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3197 			    REAL_VALUE_NEGATIVE (fsfv)))
3198 #endif
3199 		    )
3200 		   && COMPARISON_P (p->exp))
3201 	    {
3202 	      reverse_code = 1;
3203 	      x = p->exp;
3204 	      break;
3205 	    }
3206 
3207 	  /* If this non-trapping address, e.g. fp + constant, the
3208 	     equivalent is a better operand since it may let us predict
3209 	     the value of the comparison.  */
3210 	  else if (!rtx_addr_can_trap_p (p->exp))
3211 	    {
3212 	      arg1 = p->exp;
3213 	      continue;
3214 	    }
3215 	}
3216 
3217       /* If we didn't find a useful equivalence for ARG1, we are done.
3218 	 Otherwise, set up for the next iteration.  */
3219       if (x == 0)
3220 	break;
3221 
3222       /* If we need to reverse the comparison, make sure that that is
3223 	 possible -- we can't necessarily infer the value of GE from LT
3224 	 with floating-point operands.  */
3225       if (reverse_code)
3226 	{
3227 	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3228 	  if (reversed == UNKNOWN)
3229 	    break;
3230 	  else
3231 	    code = reversed;
3232 	}
3233       else if (COMPARISON_P (x))
3234 	code = GET_CODE (x);
3235       arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3236     }
3237 
3238   /* Return our results.  Return the modes from before fold_rtx
3239      because fold_rtx might produce const_int, and then it's too late.  */
3240   *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3241   *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3242 
3243   return code;
3244 }
3245 
3246 /* Fold SUBREG.  */
3247 
3248 static rtx
fold_rtx_subreg(rtx x,rtx insn)3249 fold_rtx_subreg (rtx x, rtx insn)
3250 {
3251   enum machine_mode mode = GET_MODE (x);
3252   rtx folded_arg0;
3253   rtx const_arg0;
3254   rtx new;
3255 
3256   /* See if we previously assigned a constant value to this SUBREG.  */
3257   if ((new = lookup_as_function (x, CONST_INT)) != 0
3258       || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3259     return new;
3260 
3261   /* If this is a paradoxical SUBREG, we have no idea what value the
3262      extra bits would have.  However, if the operand is equivalent to
3263      a SUBREG whose operand is the same as our mode, and all the modes
3264      are within a word, we can just use the inner operand because
3265      these SUBREGs just say how to treat the register.
3266 
3267      Similarly if we find an integer constant.  */
3268 
3269   if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3270     {
3271       enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3272       struct table_elt *elt;
3273 
3274       if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3275 	  && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3276 	  && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3277 			    imode)) != 0)
3278 	for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3279 	  {
3280 	    if (CONSTANT_P (elt->exp)
3281 		&& GET_MODE (elt->exp) == VOIDmode)
3282 	      return elt->exp;
3283 
3284 	    if (GET_CODE (elt->exp) == SUBREG
3285 		&& GET_MODE (SUBREG_REG (elt->exp)) == mode
3286 		&& exp_equiv_p (elt->exp, elt->exp, 1, false))
3287 	      return copy_rtx (SUBREG_REG (elt->exp));
3288 	  }
3289 
3290       return x;
3291     }
3292 
3293   /* Fold SUBREG_REG.  If it changed, see if we can simplify the
3294      SUBREG.  We might be able to if the SUBREG is extracting a single
3295      word in an integral mode or extracting the low part.  */
3296 
3297   folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3298   const_arg0 = equiv_constant (folded_arg0);
3299   if (const_arg0)
3300     folded_arg0 = const_arg0;
3301 
3302   if (folded_arg0 != SUBREG_REG (x))
3303     {
3304       new = simplify_subreg (mode, folded_arg0,
3305 			     GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3306       if (new)
3307 	return new;
3308     }
3309 
3310   if (REG_P (folded_arg0)
3311       && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3312     {
3313       struct table_elt *elt;
3314 
3315       elt = lookup (folded_arg0,
3316 		    HASH (folded_arg0, GET_MODE (folded_arg0)),
3317 		    GET_MODE (folded_arg0));
3318 
3319       if (elt)
3320 	elt = elt->first_same_value;
3321 
3322       if (subreg_lowpart_p (x))
3323 	/* If this is a narrowing SUBREG and our operand is a REG, see
3324 	   if we can find an equivalence for REG that is an arithmetic
3325 	   operation in a wider mode where both operands are
3326 	   paradoxical SUBREGs from objects of our result mode.  In
3327 	   that case, we couldn-t report an equivalent value for that
3328 	   operation, since we don't know what the extra bits will be.
3329 	   But we can find an equivalence for this SUBREG by folding
3330 	   that operation in the narrow mode.  This allows us to fold
3331 	   arithmetic in narrow modes when the machine only supports
3332 	   word-sized arithmetic.
3333 
3334 	   Also look for a case where we have a SUBREG whose operand
3335 	   is the same as our result.  If both modes are smaller than
3336 	   a word, we are simply interpreting a register in different
3337 	   modes and we can use the inner value.  */
3338 
3339 	for (; elt; elt = elt->next_same_value)
3340 	  {
3341 	    enum rtx_code eltcode = GET_CODE (elt->exp);
3342 
3343 	    /* Just check for unary and binary operations.  */
3344 	    if (UNARY_P (elt->exp)
3345 		&& eltcode != SIGN_EXTEND
3346 		&& eltcode != ZERO_EXTEND
3347 		&& GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3348 		&& GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3349 		&& (GET_MODE_CLASS (mode)
3350 		    == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3351 	      {
3352 		rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3353 
3354 		if (!REG_P (op0) && ! CONSTANT_P (op0))
3355 		  op0 = fold_rtx (op0, NULL_RTX);
3356 
3357 		op0 = equiv_constant (op0);
3358 		if (op0)
3359 		  new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3360 						  op0, mode);
3361 	      }
3362 	    else if (ARITHMETIC_P (elt->exp)
3363 		     && eltcode != DIV && eltcode != MOD
3364 		     && eltcode != UDIV && eltcode != UMOD
3365 		     && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3366 		     && eltcode != ROTATE && eltcode != ROTATERT
3367 		     && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3368 			  && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3369 			      == mode))
3370 			 || CONSTANT_P (XEXP (elt->exp, 0)))
3371 		     && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3372 			  && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3373 			      == mode))
3374 			 || CONSTANT_P (XEXP (elt->exp, 1))))
3375 	      {
3376 		rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3377 		rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3378 
3379 		if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
3380 		  op0 = fold_rtx (op0, NULL_RTX);
3381 
3382 		if (op0)
3383 		  op0 = equiv_constant (op0);
3384 
3385 		if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
3386 		  op1 = fold_rtx (op1, NULL_RTX);
3387 
3388 		if (op1)
3389 		  op1 = equiv_constant (op1);
3390 
3391 		/* If we are looking for the low SImode part of
3392 		   (ashift:DI c (const_int 32)), it doesn't work to
3393 		   compute that in SImode, because a 32-bit shift in
3394 		   SImode is unpredictable.  We know the value is
3395 		   0.  */
3396 		if (op0 && op1
3397 		    && GET_CODE (elt->exp) == ASHIFT
3398 		    && GET_CODE (op1) == CONST_INT
3399 		    && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3400 		  {
3401 		    if (INTVAL (op1)
3402 			< GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3403 		      /* If the count fits in the inner mode's width,
3404 			 but exceeds the outer mode's width, the value
3405 			 will get truncated to 0 by the subreg.  */
3406 		      new = CONST0_RTX (mode);
3407 		    else
3408 		      /* If the count exceeds even the inner mode's width,
3409 			 don't fold this expression.  */
3410 		      new = 0;
3411 		  }
3412 		else if (op0 && op1)
3413 		  new = simplify_binary_operation (GET_CODE (elt->exp),
3414 						   mode, op0, op1);
3415 	      }
3416 
3417 	    else if (GET_CODE (elt->exp) == SUBREG
3418 		     && GET_MODE (SUBREG_REG (elt->exp)) == mode
3419 		     && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3420 			 <= UNITS_PER_WORD)
3421 		     && exp_equiv_p (elt->exp, elt->exp, 1, false))
3422 	      new = copy_rtx (SUBREG_REG (elt->exp));
3423 
3424 	    if (new)
3425 	      return new;
3426 	  }
3427       else
3428 	/* A SUBREG resulting from a zero extension may fold to zero
3429 	   if it extracts higher bits than the ZERO_EXTEND's source
3430 	   bits.  FIXME: if combine tried to, er, combine these
3431 	   instructions, this transformation may be moved to
3432 	   simplify_subreg.  */
3433 	for (; elt; elt = elt->next_same_value)
3434 	  {
3435 	    if (GET_CODE (elt->exp) == ZERO_EXTEND
3436 		&& subreg_lsb (x)
3437 		>= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3438 	      return CONST0_RTX (mode);
3439 	  }
3440     }
3441 
3442   return x;
3443 }
3444 
3445 /* Fold MEM.  Not to be called directly, see fold_rtx_mem instead.  */
3446 
3447 static rtx
fold_rtx_mem_1(rtx x,rtx insn)3448 fold_rtx_mem_1 (rtx x, rtx insn)
3449 {
3450   enum machine_mode mode = GET_MODE (x);
3451   rtx new;
3452 
3453   /* If we are not actually processing an insn, don't try to find the
3454      best address.  Not only don't we care, but we could modify the
3455      MEM in an invalid way since we have no insn to validate
3456      against.  */
3457   if (insn != 0)
3458     find_best_addr (insn, &XEXP (x, 0), mode);
3459 
3460   {
3461     /* Even if we don't fold in the insn itself, we can safely do so
3462        here, in hopes of getting a constant.  */
3463     rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3464     rtx base = 0;
3465     HOST_WIDE_INT offset = 0;
3466 
3467     if (REG_P (addr)
3468 	&& REGNO_QTY_VALID_P (REGNO (addr)))
3469       {
3470 	int addr_q = REG_QTY (REGNO (addr));
3471 	struct qty_table_elem *addr_ent = &qty_table[addr_q];
3472 
3473 	if (GET_MODE (addr) == addr_ent->mode
3474 	    && addr_ent->const_rtx != NULL_RTX)
3475 	  addr = addr_ent->const_rtx;
3476       }
3477 
3478     /* Call target hook to avoid the effects of -fpic etc....  */
3479     addr = targetm.delegitimize_address (addr);
3480 
3481     /* If address is constant, split it into a base and integer
3482        offset.  */
3483     if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3484       base = addr;
3485     else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3486 	     && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3487       {
3488 	base = XEXP (XEXP (addr, 0), 0);
3489 	offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3490       }
3491     else if (GET_CODE (addr) == LO_SUM
3492 	     && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3493       base = XEXP (addr, 1);
3494 
3495     /* If this is a constant pool reference, we can fold it into its
3496        constant to allow better value tracking.  */
3497     if (base && GET_CODE (base) == SYMBOL_REF
3498 	&& CONSTANT_POOL_ADDRESS_P (base))
3499       {
3500 	rtx constant = get_pool_constant (base);
3501 	enum machine_mode const_mode = get_pool_mode (base);
3502 	rtx new;
3503 
3504 	if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3505 	  {
3506 	    constant_pool_entries_cost = COST (constant);
3507 	    constant_pool_entries_regcost = approx_reg_cost (constant);
3508 	  }
3509 
3510 	/* If we are loading the full constant, we have an
3511 	   equivalence.  */
3512 	if (offset == 0 && mode == const_mode)
3513 	  return constant;
3514 
3515 	/* If this actually isn't a constant (weird!), we can't do
3516 	   anything.  Otherwise, handle the two most common cases:
3517 	   extracting a word from a multi-word constant, and
3518 	   extracting the low-order bits.  Other cases don't seem
3519 	   common enough to worry about.  */
3520 	if (! CONSTANT_P (constant))
3521 	  return x;
3522 
3523 	if (GET_MODE_CLASS (mode) == MODE_INT
3524 	    && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3525 	    && offset % UNITS_PER_WORD == 0
3526 	    && (new = operand_subword (constant,
3527 				       offset / UNITS_PER_WORD,
3528 				       0, const_mode)) != 0)
3529 	  return new;
3530 
3531 	if (((BYTES_BIG_ENDIAN
3532 	      && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3533 	     || (! BYTES_BIG_ENDIAN && offset == 0))
3534 	    && (new = gen_lowpart (mode, constant)) != 0)
3535 	  return new;
3536       }
3537 
3538     /* If this is a reference to a label at a known position in a jump
3539        table, we also know its value.  */
3540     if (base && GET_CODE (base) == LABEL_REF)
3541       {
3542 	rtx label = XEXP (base, 0);
3543 	rtx table_insn = NEXT_INSN (label);
3544 
3545 	if (table_insn && JUMP_P (table_insn)
3546 	    && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3547 	  {
3548 	    rtx table = PATTERN (table_insn);
3549 
3550 	    if (offset >= 0
3551 		&& (offset / GET_MODE_SIZE (GET_MODE (table))
3552 		    < XVECLEN (table, 0)))
3553 	      {
3554 		rtx label = XVECEXP
3555 		  (table, 0, offset / GET_MODE_SIZE (GET_MODE (table)));
3556 		rtx set;
3557 
3558 		/* If we have an insn that loads the label from the
3559 		   jumptable into a reg, we don't want to set the reg
3560 		   to the label, because this may cause a reference to
3561 		   the label to remain after the label is removed in
3562 		   some very obscure cases (PR middle-end/18628).  */
3563 		if (!insn)
3564 		  return label;
3565 
3566 		set = single_set (insn);
3567 
3568 		if (! set || SET_SRC (set) != x)
3569 		  return x;
3570 
3571 		/* If it's a jump, it's safe to reference the label.  */
3572 		if (SET_DEST (set) == pc_rtx)
3573 		  return label;
3574 
3575 		return x;
3576 	      }
3577 	  }
3578 	if (table_insn && JUMP_P (table_insn)
3579 	    && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3580 	  {
3581 	    rtx table = PATTERN (table_insn);
3582 
3583 	    if (offset >= 0
3584 		&& (offset / GET_MODE_SIZE (GET_MODE (table))
3585 		    < XVECLEN (table, 1)))
3586 	      {
3587 		offset /= GET_MODE_SIZE (GET_MODE (table));
3588 		new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3589 				     XEXP (table, 0));
3590 
3591 		if (GET_MODE (table) != Pmode)
3592 		  new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3593 
3594 		/* Indicate this is a constant.  This isn't a valid
3595 		   form of CONST, but it will only be used to fold the
3596 		   next insns and then discarded, so it should be
3597 		   safe.
3598 
3599 		   Note this expression must be explicitly discarded,
3600 		   by cse_insn, else it may end up in a REG_EQUAL note
3601 		   and "escape" to cause problems elsewhere.  */
3602 		return gen_rtx_CONST (GET_MODE (new), new);
3603 	      }
3604 	  }
3605       }
3606 
3607     return x;
3608   }
3609 }
3610 
3611 /* Fold MEM.  */
3612 
3613 static rtx
fold_rtx_mem(rtx x,rtx insn)3614 fold_rtx_mem (rtx x, rtx insn)
3615 {
3616   /* To avoid infinite oscillations between fold_rtx and fold_rtx_mem,
3617      refuse to allow recursion of the latter past n levels.  This can
3618      happen because fold_rtx_mem will try to fold the address of the
3619      memory reference it is passed, i.e. conceptually throwing away
3620      the MEM and reinjecting the bare address into fold_rtx.  As a
3621      result, patterns like
3622 
3623        set (reg1)
3624 	   (plus (reg)
3625 		 (mem (plus (reg2) (const_int))))
3626 
3627        set (reg2)
3628 	   (plus (reg)
3629 		 (mem (plus (reg1) (const_int))))
3630 
3631      will defeat any "first-order" short-circuit put in either
3632      function to prevent these infinite oscillations.
3633 
3634      The heuristics for determining n is as follows: since each time
3635      it is invoked fold_rtx_mem throws away a MEM, and since MEMs
3636      are generically not nested, we assume that each invocation of
3637      fold_rtx_mem corresponds to a new "top-level" operand, i.e.
3638      the source or the destination of a SET.  So fold_rtx_mem is
3639      bound to stop or cycle before n recursions, n being the number
3640      of expressions recorded in the hash table.  We also leave some
3641      play to account for the initial steps.  */
3642 
3643   static unsigned int depth;
3644   rtx ret;
3645 
3646   if (depth > 3 + table_size)
3647     return x;
3648 
3649   depth++;
3650   ret = fold_rtx_mem_1 (x, insn);
3651   depth--;
3652 
3653   return ret;
3654 }
3655 
3656 /* If X is a nontrivial arithmetic operation on an argument
3657    for which a constant value can be determined, return
3658    the result of operating on that value, as a constant.
3659    Otherwise, return X, possibly with one or more operands
3660    modified by recursive calls to this function.
3661 
3662    If X is a register whose contents are known, we do NOT
3663    return those contents here.  equiv_constant is called to
3664    perform that task.
3665 
3666    INSN is the insn that we may be modifying.  If it is 0, make a copy
3667    of X before modifying it.  */
3668 
3669 static rtx
fold_rtx(rtx x,rtx insn)3670 fold_rtx (rtx x, rtx insn)
3671 {
3672   enum rtx_code code;
3673   enum machine_mode mode;
3674   const char *fmt;
3675   int i;
3676   rtx new = 0;
3677   int copied = 0;
3678   int must_swap = 0;
3679 
3680   /* Folded equivalents of first two operands of X.  */
3681   rtx folded_arg0;
3682   rtx folded_arg1;
3683 
3684   /* Constant equivalents of first three operands of X;
3685      0 when no such equivalent is known.  */
3686   rtx const_arg0;
3687   rtx const_arg1;
3688   rtx const_arg2;
3689 
3690   /* The mode of the first operand of X.  We need this for sign and zero
3691      extends.  */
3692   enum machine_mode mode_arg0;
3693 
3694   if (x == 0)
3695     return x;
3696 
3697   mode = GET_MODE (x);
3698   code = GET_CODE (x);
3699   switch (code)
3700     {
3701     case CONST:
3702     case CONST_INT:
3703     case CONST_DOUBLE:
3704     case CONST_VECTOR:
3705     case SYMBOL_REF:
3706     case LABEL_REF:
3707     case REG:
3708     case PC:
3709       /* No use simplifying an EXPR_LIST
3710 	 since they are used only for lists of args
3711 	 in a function call's REG_EQUAL note.  */
3712     case EXPR_LIST:
3713       return x;
3714 
3715 #ifdef HAVE_cc0
3716     case CC0:
3717       return prev_insn_cc0;
3718 #endif
3719 
3720     case SUBREG:
3721       return fold_rtx_subreg (x, insn);
3722 
3723     case NOT:
3724     case NEG:
3725       /* If we have (NOT Y), see if Y is known to be (NOT Z).
3726 	 If so, (NOT Y) simplifies to Z.  Similarly for NEG.  */
3727       new = lookup_as_function (XEXP (x, 0), code);
3728       if (new)
3729 	return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3730       break;
3731 
3732     case MEM:
3733       return fold_rtx_mem (x, insn);
3734 
3735 #ifdef NO_FUNCTION_CSE
3736     case CALL:
3737       if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3738 	return x;
3739       break;
3740 #endif
3741 
3742     case ASM_OPERANDS:
3743       if (insn)
3744 	{
3745 	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3746 	    validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3747 			     fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3748 	}
3749       break;
3750 
3751     default:
3752       break;
3753     }
3754 
3755   const_arg0 = 0;
3756   const_arg1 = 0;
3757   const_arg2 = 0;
3758   mode_arg0 = VOIDmode;
3759 
3760   /* Try folding our operands.
3761      Then see which ones have constant values known.  */
3762 
3763   fmt = GET_RTX_FORMAT (code);
3764   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3765     if (fmt[i] == 'e')
3766       {
3767 	rtx arg = XEXP (x, i);
3768 	rtx folded_arg = arg, const_arg = 0;
3769 	enum machine_mode mode_arg = GET_MODE (arg);
3770 	rtx cheap_arg, expensive_arg;
3771 	rtx replacements[2];
3772 	int j;
3773 	int old_cost = COST_IN (XEXP (x, i), code);
3774 
3775 	/* Most arguments are cheap, so handle them specially.  */
3776 	switch (GET_CODE (arg))
3777 	  {
3778 	  case REG:
3779 	    /* This is the same as calling equiv_constant; it is duplicated
3780 	       here for speed.  */
3781 	    if (REGNO_QTY_VALID_P (REGNO (arg)))
3782 	      {
3783 		int arg_q = REG_QTY (REGNO (arg));
3784 		struct qty_table_elem *arg_ent = &qty_table[arg_q];
3785 
3786 		if (arg_ent->const_rtx != NULL_RTX
3787 		    && !REG_P (arg_ent->const_rtx)
3788 		    && GET_CODE (arg_ent->const_rtx) != PLUS)
3789 		  const_arg
3790 		    = gen_lowpart (GET_MODE (arg),
3791 					       arg_ent->const_rtx);
3792 	      }
3793 	    break;
3794 
3795 	  case CONST:
3796 	  case CONST_INT:
3797 	  case SYMBOL_REF:
3798 	  case LABEL_REF:
3799 	  case CONST_DOUBLE:
3800 	  case CONST_VECTOR:
3801 	    const_arg = arg;
3802 	    break;
3803 
3804 #ifdef HAVE_cc0
3805 	  case CC0:
3806 	    folded_arg = prev_insn_cc0;
3807 	    mode_arg = prev_insn_cc0_mode;
3808 	    const_arg = equiv_constant (folded_arg);
3809 	    break;
3810 #endif
3811 
3812 	  default:
3813 	    folded_arg = fold_rtx (arg, insn);
3814 	    const_arg = equiv_constant (folded_arg);
3815 	  }
3816 
3817 	/* For the first three operands, see if the operand
3818 	   is constant or equivalent to a constant.  */
3819 	switch (i)
3820 	  {
3821 	  case 0:
3822 	    folded_arg0 = folded_arg;
3823 	    const_arg0 = const_arg;
3824 	    mode_arg0 = mode_arg;
3825 	    break;
3826 	  case 1:
3827 	    folded_arg1 = folded_arg;
3828 	    const_arg1 = const_arg;
3829 	    break;
3830 	  case 2:
3831 	    const_arg2 = const_arg;
3832 	    break;
3833 	  }
3834 
3835 	/* Pick the least expensive of the folded argument and an
3836 	   equivalent constant argument.  */
3837 	if (const_arg == 0 || const_arg == folded_arg
3838 	    || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3839 	  cheap_arg = folded_arg, expensive_arg = const_arg;
3840 	else
3841 	  cheap_arg = const_arg, expensive_arg = folded_arg;
3842 
3843 	/* Try to replace the operand with the cheapest of the two
3844 	   possibilities.  If it doesn't work and this is either of the first
3845 	   two operands of a commutative operation, try swapping them.
3846 	   If THAT fails, try the more expensive, provided it is cheaper
3847 	   than what is already there.  */
3848 
3849 	if (cheap_arg == XEXP (x, i))
3850 	  continue;
3851 
3852 	if (insn == 0 && ! copied)
3853 	  {
3854 	    x = copy_rtx (x);
3855 	    copied = 1;
3856 	  }
3857 
3858 	/* Order the replacements from cheapest to most expensive.  */
3859 	replacements[0] = cheap_arg;
3860 	replacements[1] = expensive_arg;
3861 
3862 	for (j = 0; j < 2 && replacements[j]; j++)
3863 	  {
3864 	    int new_cost = COST_IN (replacements[j], code);
3865 
3866 	    /* Stop if what existed before was cheaper.  Prefer constants
3867 	       in the case of a tie.  */
3868 	    if (new_cost > old_cost
3869 		|| (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3870 	      break;
3871 
3872 	    /* It's not safe to substitute the operand of a conversion
3873 	       operator with a constant, as the conversion's identity
3874 	       depends upon the mode of its operand.  This optimization
3875 	       is handled by the call to simplify_unary_operation.  */
3876 	    if (GET_RTX_CLASS (code) == RTX_UNARY
3877 		&& GET_MODE (replacements[j]) != mode_arg0
3878 		&& (code == ZERO_EXTEND
3879 		    || code == SIGN_EXTEND
3880 		    || code == TRUNCATE
3881 		    || code == FLOAT_TRUNCATE
3882 		    || code == FLOAT_EXTEND
3883 		    || code == FLOAT
3884 		    || code == FIX
3885 		    || code == UNSIGNED_FLOAT
3886 		    || code == UNSIGNED_FIX))
3887 	      continue;
3888 
3889 	    if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3890 	      break;
3891 
3892 	    if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3893 		|| GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3894 	      {
3895 		validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3896 		validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3897 
3898 		if (apply_change_group ())
3899 		  {
3900 		    /* Swap them back to be invalid so that this loop can
3901 		       continue and flag them to be swapped back later.  */
3902 		    rtx tem;
3903 
3904 		    tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3905 				       XEXP (x, 1) = tem;
3906 		    must_swap = 1;
3907 		    break;
3908 		  }
3909 	      }
3910 	  }
3911       }
3912 
3913     else
3914       {
3915 	if (fmt[i] == 'E')
3916 	  /* Don't try to fold inside of a vector of expressions.
3917 	     Doing nothing is harmless.  */
3918 	  {;}
3919       }
3920 
3921   /* If a commutative operation, place a constant integer as the second
3922      operand unless the first operand is also a constant integer.  Otherwise,
3923      place any constant second unless the first operand is also a constant.  */
3924 
3925   if (COMMUTATIVE_P (x))
3926     {
3927       if (must_swap
3928 	  || swap_commutative_operands_p (const_arg0 ? const_arg0
3929 						     : XEXP (x, 0),
3930 					  const_arg1 ? const_arg1
3931 						     : XEXP (x, 1)))
3932 	{
3933 	  rtx tem = XEXP (x, 0);
3934 
3935 	  if (insn == 0 && ! copied)
3936 	    {
3937 	      x = copy_rtx (x);
3938 	      copied = 1;
3939 	    }
3940 
3941 	  validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3942 	  validate_change (insn, &XEXP (x, 1), tem, 1);
3943 	  if (apply_change_group ())
3944 	    {
3945 	      tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3946 	      tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3947 	    }
3948 	}
3949     }
3950 
3951   /* If X is an arithmetic operation, see if we can simplify it.  */
3952 
3953   switch (GET_RTX_CLASS (code))
3954     {
3955     case RTX_UNARY:
3956       {
3957 	int is_const = 0;
3958 
3959 	/* We can't simplify extension ops unless we know the
3960 	   original mode.  */
3961 	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3962 	    && mode_arg0 == VOIDmode)
3963 	  break;
3964 
3965 	/* If we had a CONST, strip it off and put it back later if we
3966 	   fold.  */
3967 	if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3968 	  is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3969 
3970 	new = simplify_unary_operation (code, mode,
3971 					const_arg0 ? const_arg0 : folded_arg0,
3972 					mode_arg0);
3973 	/* NEG of PLUS could be converted into MINUS, but that causes
3974 	   expressions of the form
3975 	   (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
3976 	   which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
3977 	   FIXME: those ports should be fixed.  */
3978 	if (new != 0 && is_const
3979 	    && GET_CODE (new) == PLUS
3980 	    && (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
3981 		|| GET_CODE (XEXP (new, 0)) == LABEL_REF)
3982 	    && GET_CODE (XEXP (new, 1)) == CONST_INT)
3983 	  new = gen_rtx_CONST (mode, new);
3984       }
3985       break;
3986 
3987     case RTX_COMPARE:
3988     case RTX_COMM_COMPARE:
3989       /* See what items are actually being compared and set FOLDED_ARG[01]
3990 	 to those values and CODE to the actual comparison code.  If any are
3991 	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
3992 	 do anything if both operands are already known to be constant.  */
3993 
3994       /* ??? Vector mode comparisons are not supported yet.  */
3995       if (VECTOR_MODE_P (mode))
3996 	break;
3997 
3998       if (const_arg0 == 0 || const_arg1 == 0)
3999 	{
4000 	  struct table_elt *p0, *p1;
4001 	  rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4002 	  enum machine_mode mode_arg1;
4003 
4004 #ifdef FLOAT_STORE_FLAG_VALUE
4005 	  if (SCALAR_FLOAT_MODE_P (mode))
4006 	    {
4007 	      true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4008 			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
4009 	      false_rtx = CONST0_RTX (mode);
4010 	    }
4011 #endif
4012 
4013 	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
4014 				       &mode_arg0, &mode_arg1);
4015 
4016 	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
4017 	     what kinds of things are being compared, so we can't do
4018 	     anything with this comparison.  */
4019 
4020 	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
4021 	    break;
4022 
4023 	  const_arg0 = equiv_constant (folded_arg0);
4024 	  const_arg1 = equiv_constant (folded_arg1);
4025 
4026 	  /* If we do not now have two constants being compared, see
4027 	     if we can nevertheless deduce some things about the
4028 	     comparison.  */
4029 	  if (const_arg0 == 0 || const_arg1 == 0)
4030 	    {
4031 	      if (const_arg1 != NULL)
4032 		{
4033 		  rtx cheapest_simplification;
4034 		  int cheapest_cost;
4035 		  rtx simp_result;
4036 		  struct table_elt *p;
4037 
4038 		  /* See if we can find an equivalent of folded_arg0
4039 		     that gets us a cheaper expression, possibly a
4040 		     constant through simplifications.  */
4041 		  p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
4042 			      mode_arg0);
4043 
4044 		  if (p != NULL)
4045 		    {
4046 		      cheapest_simplification = x;
4047 		      cheapest_cost = COST (x);
4048 
4049 		      for (p = p->first_same_value; p != NULL; p = p->next_same_value)
4050 			{
4051 			  int cost;
4052 
4053 			  /* If the entry isn't valid, skip it.  */
4054 			  if (! exp_equiv_p (p->exp, p->exp, 1, false))
4055 			    continue;
4056 
4057 			  /* Try to simplify using this equivalence.  */
4058 			  simp_result
4059 			    = simplify_relational_operation (code, mode,
4060 							     mode_arg0,
4061 							     p->exp,
4062 							     const_arg1);
4063 
4064 			  if (simp_result == NULL)
4065 			    continue;
4066 
4067 			  cost = COST (simp_result);
4068 			  if (cost < cheapest_cost)
4069 			    {
4070 			      cheapest_cost = cost;
4071 			      cheapest_simplification = simp_result;
4072 			    }
4073 			}
4074 
4075 		      /* If we have a cheaper expression now, use that
4076 			 and try folding it further, from the top.  */
4077 		      if (cheapest_simplification != x)
4078 			return fold_rtx (cheapest_simplification, insn);
4079 		    }
4080 		}
4081 
4082 	      /* Some addresses are known to be nonzero.  We don't know
4083 		 their sign, but equality comparisons are known.  */
4084 	      if (const_arg1 == const0_rtx
4085 		  && nonzero_address_p (folded_arg0))
4086 		{
4087 		  if (code == EQ)
4088 		    return false_rtx;
4089 		  else if (code == NE)
4090 		    return true_rtx;
4091 		}
4092 
4093 	      /* See if the two operands are the same.  */
4094 
4095 	      if (folded_arg0 == folded_arg1
4096 		  || (REG_P (folded_arg0)
4097 		      && REG_P (folded_arg1)
4098 		      && (REG_QTY (REGNO (folded_arg0))
4099 			  == REG_QTY (REGNO (folded_arg1))))
4100 		  || ((p0 = lookup (folded_arg0,
4101 				    SAFE_HASH (folded_arg0, mode_arg0),
4102 				    mode_arg0))
4103 		      && (p1 = lookup (folded_arg1,
4104 				       SAFE_HASH (folded_arg1, mode_arg0),
4105 				       mode_arg0))
4106 		      && p0->first_same_value == p1->first_same_value))
4107 		{
4108 		  /* Sadly two equal NaNs are not equivalent.  */
4109 		  if (!HONOR_NANS (mode_arg0))
4110 		    return ((code == EQ || code == LE || code == GE
4111 			     || code == LEU || code == GEU || code == UNEQ
4112 			     || code == UNLE || code == UNGE
4113 			     || code == ORDERED)
4114 			    ? true_rtx : false_rtx);
4115 		  /* Take care for the FP compares we can resolve.  */
4116 		  if (code == UNEQ || code == UNLE || code == UNGE)
4117 		    return true_rtx;
4118 		  if (code == LTGT || code == LT || code == GT)
4119 		    return false_rtx;
4120 		}
4121 
4122 	      /* If FOLDED_ARG0 is a register, see if the comparison we are
4123 		 doing now is either the same as we did before or the reverse
4124 		 (we only check the reverse if not floating-point).  */
4125 	      else if (REG_P (folded_arg0))
4126 		{
4127 		  int qty = REG_QTY (REGNO (folded_arg0));
4128 
4129 		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
4130 		    {
4131 		      struct qty_table_elem *ent = &qty_table[qty];
4132 
4133 		      if ((comparison_dominates_p (ent->comparison_code, code)
4134 			   || (! FLOAT_MODE_P (mode_arg0)
4135 			       && comparison_dominates_p (ent->comparison_code,
4136 						          reverse_condition (code))))
4137 			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
4138 			      || (const_arg1
4139 				  && rtx_equal_p (ent->comparison_const,
4140 						  const_arg1))
4141 			      || (REG_P (folded_arg1)
4142 				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4143 			return (comparison_dominates_p (ent->comparison_code, code)
4144 				? true_rtx : false_rtx);
4145 		    }
4146 		}
4147 	    }
4148 	}
4149 
4150       /* If we are comparing against zero, see if the first operand is
4151 	 equivalent to an IOR with a constant.  If so, we may be able to
4152 	 determine the result of this comparison.  */
4153 
4154       if (const_arg1 == const0_rtx)
4155 	{
4156 	  rtx y = lookup_as_function (folded_arg0, IOR);
4157 	  rtx inner_const;
4158 
4159 	  if (y != 0
4160 	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4161 	      && GET_CODE (inner_const) == CONST_INT
4162 	      && INTVAL (inner_const) != 0)
4163 	    {
4164 	      int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4165 	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4166 			      && (INTVAL (inner_const)
4167 				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4168 	      rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4169 
4170 #ifdef FLOAT_STORE_FLAG_VALUE
4171 	      if (SCALAR_FLOAT_MODE_P (mode))
4172 		{
4173 		  true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4174 			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
4175 		  false_rtx = CONST0_RTX (mode);
4176 		}
4177 #endif
4178 
4179 	      switch (code)
4180 		{
4181 		case EQ:
4182 		  return false_rtx;
4183 		case NE:
4184 		  return true_rtx;
4185 		case LT:  case LE:
4186 		  if (has_sign)
4187 		    return true_rtx;
4188 		  break;
4189 		case GT:  case GE:
4190 		  if (has_sign)
4191 		    return false_rtx;
4192 		  break;
4193 		default:
4194 		  break;
4195 		}
4196 	    }
4197 	}
4198 
4199       {
4200 	rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
4201 	rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
4202         new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
4203       }
4204       break;
4205 
4206     case RTX_BIN_ARITH:
4207     case RTX_COMM_ARITH:
4208       switch (code)
4209 	{
4210 	case PLUS:
4211 	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
4212 	     with that LABEL_REF as its second operand.  If so, the result is
4213 	     the first operand of that MINUS.  This handles switches with an
4214 	     ADDR_DIFF_VEC table.  */
4215 	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4216 	    {
4217 	      rtx y
4218 		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
4219 		: lookup_as_function (folded_arg0, MINUS);
4220 
4221 	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4222 		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4223 		return XEXP (y, 0);
4224 
4225 	      /* Now try for a CONST of a MINUS like the above.  */
4226 	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4227 			: lookup_as_function (folded_arg0, CONST))) != 0
4228 		  && GET_CODE (XEXP (y, 0)) == MINUS
4229 		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4230 		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4231 		return XEXP (XEXP (y, 0), 0);
4232 	    }
4233 
4234 	  /* Likewise if the operands are in the other order.  */
4235 	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4236 	    {
4237 	      rtx y
4238 		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
4239 		: lookup_as_function (folded_arg1, MINUS);
4240 
4241 	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4242 		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4243 		return XEXP (y, 0);
4244 
4245 	      /* Now try for a CONST of a MINUS like the above.  */
4246 	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4247 			: lookup_as_function (folded_arg1, CONST))) != 0
4248 		  && GET_CODE (XEXP (y, 0)) == MINUS
4249 		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4250 		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4251 		return XEXP (XEXP (y, 0), 0);
4252 	    }
4253 
4254 	  /* If second operand is a register equivalent to a negative
4255 	     CONST_INT, see if we can find a register equivalent to the
4256 	     positive constant.  Make a MINUS if so.  Don't do this for
4257 	     a non-negative constant since we might then alternate between
4258 	     choosing positive and negative constants.  Having the positive
4259 	     constant previously-used is the more common case.  Be sure
4260 	     the resulting constant is non-negative; if const_arg1 were
4261 	     the smallest negative number this would overflow: depending
4262 	     on the mode, this would either just be the same value (and
4263 	     hence not save anything) or be incorrect.  */
4264 	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4265 	      && INTVAL (const_arg1) < 0
4266 	      /* This used to test
4267 
4268 	         -INTVAL (const_arg1) >= 0
4269 
4270 		 But The Sun V5.0 compilers mis-compiled that test.  So
4271 		 instead we test for the problematic value in a more direct
4272 		 manner and hope the Sun compilers get it correct.  */
4273 	      && INTVAL (const_arg1) !=
4274 	        ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4275 	      && REG_P (folded_arg1))
4276 	    {
4277 	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
4278 	      struct table_elt *p
4279 		= lookup (new_const, SAFE_HASH (new_const, mode), mode);
4280 
4281 	      if (p)
4282 		for (p = p->first_same_value; p; p = p->next_same_value)
4283 		  if (REG_P (p->exp))
4284 		    return simplify_gen_binary (MINUS, mode, folded_arg0,
4285 						canon_reg (p->exp, NULL_RTX));
4286 	    }
4287 	  goto from_plus;
4288 
4289 	case MINUS:
4290 	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4291 	     If so, produce (PLUS Z C2-C).  */
4292 	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4293 	    {
4294 	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4295 	      if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4296 		return fold_rtx (plus_constant (copy_rtx (y),
4297 						-INTVAL (const_arg1)),
4298 				 NULL_RTX);
4299 	    }
4300 
4301 	  /* Fall through.  */
4302 
4303 	from_plus:
4304 	case SMIN:    case SMAX:      case UMIN:    case UMAX:
4305 	case IOR:     case AND:       case XOR:
4306 	case MULT:
4307 	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
4308 	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4309 	     is known to be of similar form, we may be able to replace the
4310 	     operation with a combined operation.  This may eliminate the
4311 	     intermediate operation if every use is simplified in this way.
4312 	     Note that the similar optimization done by combine.c only works
4313 	     if the intermediate operation's result has only one reference.  */
4314 
4315 	  if (REG_P (folded_arg0)
4316 	      && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4317 	    {
4318 	      int is_shift
4319 		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4320 	      rtx y, inner_const, new_const;
4321 	      enum rtx_code associate_code;
4322 
4323 	      if (is_shift
4324 		  && (INTVAL (const_arg1) >= GET_MODE_BITSIZE (mode)
4325 		      || INTVAL (const_arg1) < 0))
4326 		{
4327 		  if (SHIFT_COUNT_TRUNCATED)
4328 		    const_arg1 = GEN_INT (INTVAL (const_arg1)
4329 					  & (GET_MODE_BITSIZE (mode) - 1));
4330 		  else
4331 		    break;
4332 		}
4333 
4334 	      y = lookup_as_function (folded_arg0, code);
4335 	      if (y == 0)
4336 		break;
4337 
4338 	      /* If we have compiled a statement like
4339 		 "if (x == (x & mask1))", and now are looking at
4340 		 "x & mask2", we will have a case where the first operand
4341 		 of Y is the same as our first operand.  Unless we detect
4342 		 this case, an infinite loop will result.  */
4343 	      if (XEXP (y, 0) == folded_arg0)
4344 		break;
4345 
4346 	      inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
4347 	      if (!inner_const || GET_CODE (inner_const) != CONST_INT)
4348 		break;
4349 
4350 	      /* Don't associate these operations if they are a PLUS with the
4351 		 same constant and it is a power of two.  These might be doable
4352 		 with a pre- or post-increment.  Similarly for two subtracts of
4353 		 identical powers of two with post decrement.  */
4354 
4355 	      if (code == PLUS && const_arg1 == inner_const
4356 		  && ((HAVE_PRE_INCREMENT
4357 			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4358 		      || (HAVE_POST_INCREMENT
4359 			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4360 		      || (HAVE_PRE_DECREMENT
4361 			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
4362 		      || (HAVE_POST_DECREMENT
4363 			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4364 		break;
4365 
4366 	      if (is_shift
4367 		  && (INTVAL (inner_const) >= GET_MODE_BITSIZE (mode)
4368 		      || INTVAL (inner_const) < 0))
4369 		{
4370 		  if (SHIFT_COUNT_TRUNCATED)
4371 		    inner_const = GEN_INT (INTVAL (inner_const)
4372 					   & (GET_MODE_BITSIZE (mode) - 1));
4373 		  else
4374 		    break;
4375 		}
4376 
4377 	      /* Compute the code used to compose the constants.  For example,
4378 		 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */
4379 
4380 	      associate_code = (is_shift || code == MINUS ? PLUS : code);
4381 
4382 	      new_const = simplify_binary_operation (associate_code, mode,
4383 						     const_arg1, inner_const);
4384 
4385 	      if (new_const == 0)
4386 		break;
4387 
4388 	      /* If we are associating shift operations, don't let this
4389 		 produce a shift of the size of the object or larger.
4390 		 This could occur when we follow a sign-extend by a right
4391 		 shift on a machine that does a sign-extend as a pair
4392 		 of shifts.  */
4393 
4394 	      if (is_shift
4395 		  && GET_CODE (new_const) == CONST_INT
4396 		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4397 		{
4398 		  /* As an exception, we can turn an ASHIFTRT of this
4399 		     form into a shift of the number of bits - 1.  */
4400 		  if (code == ASHIFTRT)
4401 		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4402 		  else if (!side_effects_p (XEXP (y, 0)))
4403 		    return CONST0_RTX (mode);
4404 		  else
4405 		    break;
4406 		}
4407 
4408 	      y = copy_rtx (XEXP (y, 0));
4409 
4410 	      /* If Y contains our first operand (the most common way this
4411 		 can happen is if Y is a MEM), we would do into an infinite
4412 		 loop if we tried to fold it.  So don't in that case.  */
4413 
4414 	      if (! reg_mentioned_p (folded_arg0, y))
4415 		y = fold_rtx (y, insn);
4416 
4417 	      return simplify_gen_binary (code, mode, y, new_const);
4418 	    }
4419 	  break;
4420 
4421 	case DIV:       case UDIV:
4422 	  /* ??? The associative optimization performed immediately above is
4423 	     also possible for DIV and UDIV using associate_code of MULT.
4424 	     However, we would need extra code to verify that the
4425 	     multiplication does not overflow, that is, there is no overflow
4426 	     in the calculation of new_const.  */
4427 	  break;
4428 
4429 	default:
4430 	  break;
4431 	}
4432 
4433       new = simplify_binary_operation (code, mode,
4434 				       const_arg0 ? const_arg0 : folded_arg0,
4435 				       const_arg1 ? const_arg1 : folded_arg1);
4436       break;
4437 
4438     case RTX_OBJ:
4439       /* (lo_sum (high X) X) is simply X.  */
4440       if (code == LO_SUM && const_arg0 != 0
4441 	  && GET_CODE (const_arg0) == HIGH
4442 	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4443 	return const_arg1;
4444       break;
4445 
4446     case RTX_TERNARY:
4447     case RTX_BITFIELD_OPS:
4448       new = simplify_ternary_operation (code, mode, mode_arg0,
4449 					const_arg0 ? const_arg0 : folded_arg0,
4450 					const_arg1 ? const_arg1 : folded_arg1,
4451 					const_arg2 ? const_arg2 : XEXP (x, 2));
4452       break;
4453 
4454     default:
4455       break;
4456     }
4457 
4458   return new ? new : x;
4459 }
4460 
4461 /* Return a constant value currently equivalent to X.
4462    Return 0 if we don't know one.  */
4463 
4464 static rtx
equiv_constant(rtx x)4465 equiv_constant (rtx x)
4466 {
4467   if (REG_P (x)
4468       && REGNO_QTY_VALID_P (REGNO (x)))
4469     {
4470       int x_q = REG_QTY (REGNO (x));
4471       struct qty_table_elem *x_ent = &qty_table[x_q];
4472 
4473       if (x_ent->const_rtx)
4474 	x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4475     }
4476 
4477   if (x == 0 || CONSTANT_P (x))
4478     return x;
4479 
4480   /* If X is a MEM, try to fold it outside the context of any insn to see if
4481      it might be equivalent to a constant.  That handles the case where it
4482      is a constant-pool reference.  Then try to look it up in the hash table
4483      in case it is something whose value we have seen before.  */
4484 
4485   if (MEM_P (x))
4486     {
4487       struct table_elt *elt;
4488 
4489       x = fold_rtx (x, NULL_RTX);
4490       if (CONSTANT_P (x))
4491 	return x;
4492 
4493       elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
4494       if (elt == 0)
4495 	return 0;
4496 
4497       for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4498 	if (elt->is_const && CONSTANT_P (elt->exp))
4499 	  return elt->exp;
4500     }
4501 
4502   return 0;
4503 }
4504 
4505 /* Given INSN, a jump insn, PATH_TAKEN indicates if we are following the "taken"
4506    branch.  It will be zero if not.
4507 
4508    In certain cases, this can cause us to add an equivalence.  For example,
4509    if we are following the taken case of
4510 	if (i == 2)
4511    we can add the fact that `i' and '2' are now equivalent.
4512 
4513    In any case, we can record that this comparison was passed.  If the same
4514    comparison is seen later, we will know its value.  */
4515 
4516 static void
record_jump_equiv(rtx insn,int taken)4517 record_jump_equiv (rtx insn, int taken)
4518 {
4519   int cond_known_true;
4520   rtx op0, op1;
4521   rtx set;
4522   enum machine_mode mode, mode0, mode1;
4523   int reversed_nonequality = 0;
4524   enum rtx_code code;
4525 
4526   /* Ensure this is the right kind of insn.  */
4527   if (! any_condjump_p (insn))
4528     return;
4529   set = pc_set (insn);
4530 
4531   /* See if this jump condition is known true or false.  */
4532   if (taken)
4533     cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4534   else
4535     cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4536 
4537   /* Get the type of comparison being done and the operands being compared.
4538      If we had to reverse a non-equality condition, record that fact so we
4539      know that it isn't valid for floating-point.  */
4540   code = GET_CODE (XEXP (SET_SRC (set), 0));
4541   op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4542   op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4543 
4544   code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4545 
4546   /* If the mode is a MODE_CC mode, we don't know what kinds of things
4547      are being compared, so we can't do anything with this
4548      comparison.  */
4549 
4550   if (GET_MODE_CLASS (mode0) == MODE_CC)
4551     return;
4552 
4553   if (! cond_known_true)
4554     {
4555       code = reversed_comparison_code_parts (code, op0, op1, insn);
4556 
4557       /* Don't remember if we can't find the inverse.  */
4558       if (code == UNKNOWN)
4559 	return;
4560     }
4561 
4562   /* The mode is the mode of the non-constant.  */
4563   mode = mode0;
4564   if (mode1 != VOIDmode)
4565     mode = mode1;
4566 
4567   record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4568 }
4569 
4570 /* Yet another form of subreg creation.  In this case, we want something in
4571    MODE, and we should assume OP has MODE iff it is naturally modeless.  */
4572 
4573 static rtx
record_jump_cond_subreg(enum machine_mode mode,rtx op)4574 record_jump_cond_subreg (enum machine_mode mode, rtx op)
4575 {
4576   enum machine_mode op_mode = GET_MODE (op);
4577   if (op_mode == mode || op_mode == VOIDmode)
4578     return op;
4579   return lowpart_subreg (mode, op, op_mode);
4580 }
4581 
4582 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4583    REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4584    Make any useful entries we can with that information.  Called from
4585    above function and called recursively.  */
4586 
4587 static void
record_jump_cond(enum rtx_code code,enum machine_mode mode,rtx op0,rtx op1,int reversed_nonequality)4588 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4589 		  rtx op1, int reversed_nonequality)
4590 {
4591   unsigned op0_hash, op1_hash;
4592   int op0_in_memory, op1_in_memory;
4593   struct table_elt *op0_elt, *op1_elt;
4594 
4595   /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4596      we know that they are also equal in the smaller mode (this is also
4597      true for all smaller modes whether or not there is a SUBREG, but
4598      is not worth testing for with no SUBREG).  */
4599 
4600   /* Note that GET_MODE (op0) may not equal MODE.  */
4601   if (code == EQ && GET_CODE (op0) == SUBREG
4602       && (GET_MODE_SIZE (GET_MODE (op0))
4603 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4604     {
4605       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4606       rtx tem = record_jump_cond_subreg (inner_mode, op1);
4607       if (tem)
4608 	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4609 			  reversed_nonequality);
4610     }
4611 
4612   if (code == EQ && GET_CODE (op1) == SUBREG
4613       && (GET_MODE_SIZE (GET_MODE (op1))
4614 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4615     {
4616       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4617       rtx tem = record_jump_cond_subreg (inner_mode, op0);
4618       if (tem)
4619 	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4620 			  reversed_nonequality);
4621     }
4622 
4623   /* Similarly, if this is an NE comparison, and either is a SUBREG
4624      making a smaller mode, we know the whole thing is also NE.  */
4625 
4626   /* Note that GET_MODE (op0) may not equal MODE;
4627      if we test MODE instead, we can get an infinite recursion
4628      alternating between two modes each wider than MODE.  */
4629 
4630   if (code == NE && GET_CODE (op0) == SUBREG
4631       && subreg_lowpart_p (op0)
4632       && (GET_MODE_SIZE (GET_MODE (op0))
4633 	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4634     {
4635       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4636       rtx tem = record_jump_cond_subreg (inner_mode, op1);
4637       if (tem)
4638 	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4639 			  reversed_nonequality);
4640     }
4641 
4642   if (code == NE && GET_CODE (op1) == SUBREG
4643       && subreg_lowpart_p (op1)
4644       && (GET_MODE_SIZE (GET_MODE (op1))
4645 	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4646     {
4647       enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4648       rtx tem = record_jump_cond_subreg (inner_mode, op0);
4649       if (tem)
4650 	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4651 			  reversed_nonequality);
4652     }
4653 
4654   /* Hash both operands.  */
4655 
4656   do_not_record = 0;
4657   hash_arg_in_memory = 0;
4658   op0_hash = HASH (op0, mode);
4659   op0_in_memory = hash_arg_in_memory;
4660 
4661   if (do_not_record)
4662     return;
4663 
4664   do_not_record = 0;
4665   hash_arg_in_memory = 0;
4666   op1_hash = HASH (op1, mode);
4667   op1_in_memory = hash_arg_in_memory;
4668 
4669   if (do_not_record)
4670     return;
4671 
4672   /* Look up both operands.  */
4673   op0_elt = lookup (op0, op0_hash, mode);
4674   op1_elt = lookup (op1, op1_hash, mode);
4675 
4676   /* If both operands are already equivalent or if they are not in the
4677      table but are identical, do nothing.  */
4678   if ((op0_elt != 0 && op1_elt != 0
4679        && op0_elt->first_same_value == op1_elt->first_same_value)
4680       || op0 == op1 || rtx_equal_p (op0, op1))
4681     return;
4682 
4683   /* If we aren't setting two things equal all we can do is save this
4684      comparison.   Similarly if this is floating-point.  In the latter
4685      case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4686      If we record the equality, we might inadvertently delete code
4687      whose intent was to change -0 to +0.  */
4688 
4689   if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4690     {
4691       struct qty_table_elem *ent;
4692       int qty;
4693 
4694       /* If we reversed a floating-point comparison, if OP0 is not a
4695 	 register, or if OP1 is neither a register or constant, we can't
4696 	 do anything.  */
4697 
4698       if (!REG_P (op1))
4699 	op1 = equiv_constant (op1);
4700 
4701       if ((reversed_nonequality && FLOAT_MODE_P (mode))
4702 	  || !REG_P (op0) || op1 == 0)
4703 	return;
4704 
4705       /* Put OP0 in the hash table if it isn't already.  This gives it a
4706 	 new quantity number.  */
4707       if (op0_elt == 0)
4708 	{
4709 	  if (insert_regs (op0, NULL, 0))
4710 	    {
4711 	      rehash_using_reg (op0);
4712 	      op0_hash = HASH (op0, mode);
4713 
4714 	      /* If OP0 is contained in OP1, this changes its hash code
4715 		 as well.  Faster to rehash than to check, except
4716 		 for the simple case of a constant.  */
4717 	      if (! CONSTANT_P (op1))
4718 		op1_hash = HASH (op1,mode);
4719 	    }
4720 
4721 	  op0_elt = insert (op0, NULL, op0_hash, mode);
4722 	  op0_elt->in_memory = op0_in_memory;
4723 	}
4724 
4725       qty = REG_QTY (REGNO (op0));
4726       ent = &qty_table[qty];
4727 
4728       ent->comparison_code = code;
4729       if (REG_P (op1))
4730 	{
4731 	  /* Look it up again--in case op0 and op1 are the same.  */
4732 	  op1_elt = lookup (op1, op1_hash, mode);
4733 
4734 	  /* Put OP1 in the hash table so it gets a new quantity number.  */
4735 	  if (op1_elt == 0)
4736 	    {
4737 	      if (insert_regs (op1, NULL, 0))
4738 		{
4739 		  rehash_using_reg (op1);
4740 		  op1_hash = HASH (op1, mode);
4741 		}
4742 
4743 	      op1_elt = insert (op1, NULL, op1_hash, mode);
4744 	      op1_elt->in_memory = op1_in_memory;
4745 	    }
4746 
4747 	  ent->comparison_const = NULL_RTX;
4748 	  ent->comparison_qty = REG_QTY (REGNO (op1));
4749 	}
4750       else
4751 	{
4752 	  ent->comparison_const = op1;
4753 	  ent->comparison_qty = -1;
4754 	}
4755 
4756       return;
4757     }
4758 
4759   /* If either side is still missing an equivalence, make it now,
4760      then merge the equivalences.  */
4761 
4762   if (op0_elt == 0)
4763     {
4764       if (insert_regs (op0, NULL, 0))
4765 	{
4766 	  rehash_using_reg (op0);
4767 	  op0_hash = HASH (op0, mode);
4768 	}
4769 
4770       op0_elt = insert (op0, NULL, op0_hash, mode);
4771       op0_elt->in_memory = op0_in_memory;
4772     }
4773 
4774   if (op1_elt == 0)
4775     {
4776       if (insert_regs (op1, NULL, 0))
4777 	{
4778 	  rehash_using_reg (op1);
4779 	  op1_hash = HASH (op1, mode);
4780 	}
4781 
4782       op1_elt = insert (op1, NULL, op1_hash, mode);
4783       op1_elt->in_memory = op1_in_memory;
4784     }
4785 
4786   merge_equiv_classes (op0_elt, op1_elt);
4787 }
4788 
4789 /* CSE processing for one instruction.
4790    First simplify sources and addresses of all assignments
4791    in the instruction, using previously-computed equivalents values.
4792    Then install the new sources and destinations in the table
4793    of available values.
4794 
4795    If LIBCALL_INSN is nonzero, don't record any equivalence made in
4796    the insn.  It means that INSN is inside libcall block.  In this
4797    case LIBCALL_INSN is the corresponding insn with REG_LIBCALL.  */
4798 
4799 /* Data on one SET contained in the instruction.  */
4800 
4801 struct set
4802 {
4803   /* The SET rtx itself.  */
4804   rtx rtl;
4805   /* The SET_SRC of the rtx (the original value, if it is changing).  */
4806   rtx src;
4807   /* The hash-table element for the SET_SRC of the SET.  */
4808   struct table_elt *src_elt;
4809   /* Hash value for the SET_SRC.  */
4810   unsigned src_hash;
4811   /* Hash value for the SET_DEST.  */
4812   unsigned dest_hash;
4813   /* The SET_DEST, with SUBREG, etc., stripped.  */
4814   rtx inner_dest;
4815   /* Nonzero if the SET_SRC is in memory.  */
4816   char src_in_memory;
4817   /* Nonzero if the SET_SRC contains something
4818      whose value cannot be predicted and understood.  */
4819   char src_volatile;
4820   /* Original machine mode, in case it becomes a CONST_INT.
4821      The size of this field should match the size of the mode
4822      field of struct rtx_def (see rtl.h).  */
4823   ENUM_BITFIELD(machine_mode) mode : 8;
4824   /* A constant equivalent for SET_SRC, if any.  */
4825   rtx src_const;
4826   /* Original SET_SRC value used for libcall notes.  */
4827   rtx orig_src;
4828   /* Hash value of constant equivalent for SET_SRC.  */
4829   unsigned src_const_hash;
4830   /* Table entry for constant equivalent for SET_SRC, if any.  */
4831   struct table_elt *src_const_elt;
4832   /* Table entry for the destination address.  */
4833   struct table_elt *dest_addr_elt;
4834 };
4835 
4836 static void
cse_insn(rtx insn,rtx libcall_insn)4837 cse_insn (rtx insn, rtx libcall_insn)
4838 {
4839   rtx x = PATTERN (insn);
4840   int i;
4841   rtx tem;
4842   int n_sets = 0;
4843 
4844 #ifdef HAVE_cc0
4845   /* Records what this insn does to set CC0.  */
4846   rtx this_insn_cc0 = 0;
4847   enum machine_mode this_insn_cc0_mode = VOIDmode;
4848 #endif
4849 
4850   rtx src_eqv = 0;
4851   struct table_elt *src_eqv_elt = 0;
4852   int src_eqv_volatile = 0;
4853   int src_eqv_in_memory = 0;
4854   unsigned src_eqv_hash = 0;
4855 
4856   struct set *sets = (struct set *) 0;
4857 
4858   this_insn = insn;
4859 
4860   /* Find all the SETs and CLOBBERs in this instruction.
4861      Record all the SETs in the array `set' and count them.
4862      Also determine whether there is a CLOBBER that invalidates
4863      all memory references, or all references at varying addresses.  */
4864 
4865   if (CALL_P (insn))
4866     {
4867       for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4868 	{
4869 	  if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4870 	    invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4871 	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4872 	}
4873     }
4874 
4875   if (GET_CODE (x) == SET)
4876     {
4877       sets = alloca (sizeof (struct set));
4878       sets[0].rtl = x;
4879 
4880       /* Ignore SETs that are unconditional jumps.
4881 	 They never need cse processing, so this does not hurt.
4882 	 The reason is not efficiency but rather
4883 	 so that we can test at the end for instructions
4884 	 that have been simplified to unconditional jumps
4885 	 and not be misled by unchanged instructions
4886 	 that were unconditional jumps to begin with.  */
4887       if (SET_DEST (x) == pc_rtx
4888 	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
4889 	;
4890 
4891       /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4892 	 The hard function value register is used only once, to copy to
4893 	 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4894 	 Ensure we invalidate the destination register.  On the 80386 no
4895 	 other code would invalidate it since it is a fixed_reg.
4896 	 We need not check the return of apply_change_group; see canon_reg.  */
4897 
4898       else if (GET_CODE (SET_SRC (x)) == CALL)
4899 	{
4900 	  canon_reg (SET_SRC (x), insn);
4901 	  apply_change_group ();
4902 	  fold_rtx (SET_SRC (x), insn);
4903 	  invalidate (SET_DEST (x), VOIDmode);
4904 	}
4905       else
4906 	n_sets = 1;
4907     }
4908   else if (GET_CODE (x) == PARALLEL)
4909     {
4910       int lim = XVECLEN (x, 0);
4911 
4912       sets = alloca (lim * sizeof (struct set));
4913 
4914       /* Find all regs explicitly clobbered in this insn,
4915 	 and ensure they are not replaced with any other regs
4916 	 elsewhere in this insn.
4917 	 When a reg that is clobbered is also used for input,
4918 	 we should presume that that is for a reason,
4919 	 and we should not substitute some other register
4920 	 which is not supposed to be clobbered.
4921 	 Therefore, this loop cannot be merged into the one below
4922 	 because a CALL may precede a CLOBBER and refer to the
4923 	 value clobbered.  We must not let a canonicalization do
4924 	 anything in that case.  */
4925       for (i = 0; i < lim; i++)
4926 	{
4927 	  rtx y = XVECEXP (x, 0, i);
4928 	  if (GET_CODE (y) == CLOBBER)
4929 	    {
4930 	      rtx clobbered = XEXP (y, 0);
4931 
4932 	      if (REG_P (clobbered)
4933 		  || GET_CODE (clobbered) == SUBREG)
4934 		invalidate (clobbered, VOIDmode);
4935 	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
4936 		       || GET_CODE (clobbered) == ZERO_EXTRACT)
4937 		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4938 	    }
4939 	}
4940 
4941       for (i = 0; i < lim; i++)
4942 	{
4943 	  rtx y = XVECEXP (x, 0, i);
4944 	  if (GET_CODE (y) == SET)
4945 	    {
4946 	      /* As above, we ignore unconditional jumps and call-insns and
4947 		 ignore the result of apply_change_group.  */
4948 	      if (GET_CODE (SET_SRC (y)) == CALL)
4949 		{
4950 		  canon_reg (SET_SRC (y), insn);
4951 		  apply_change_group ();
4952 		  fold_rtx (SET_SRC (y), insn);
4953 		  invalidate (SET_DEST (y), VOIDmode);
4954 		}
4955 	      else if (SET_DEST (y) == pc_rtx
4956 		       && GET_CODE (SET_SRC (y)) == LABEL_REF)
4957 		;
4958 	      else
4959 		sets[n_sets++].rtl = y;
4960 	    }
4961 	  else if (GET_CODE (y) == CLOBBER)
4962 	    {
4963 	      /* If we clobber memory, canon the address.
4964 		 This does nothing when a register is clobbered
4965 		 because we have already invalidated the reg.  */
4966 	      if (MEM_P (XEXP (y, 0)))
4967 		canon_reg (XEXP (y, 0), NULL_RTX);
4968 	    }
4969 	  else if (GET_CODE (y) == USE
4970 		   && ! (REG_P (XEXP (y, 0))
4971 			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4972 	    canon_reg (y, NULL_RTX);
4973 	  else if (GET_CODE (y) == CALL)
4974 	    {
4975 	      /* The result of apply_change_group can be ignored; see
4976 		 canon_reg.  */
4977 	      canon_reg (y, insn);
4978 	      apply_change_group ();
4979 	      fold_rtx (y, insn);
4980 	    }
4981 	}
4982     }
4983   else if (GET_CODE (x) == CLOBBER)
4984     {
4985       if (MEM_P (XEXP (x, 0)))
4986 	canon_reg (XEXP (x, 0), NULL_RTX);
4987     }
4988 
4989   /* Canonicalize a USE of a pseudo register or memory location.  */
4990   else if (GET_CODE (x) == USE
4991 	   && ! (REG_P (XEXP (x, 0))
4992 		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4993     canon_reg (XEXP (x, 0), NULL_RTX);
4994   else if (GET_CODE (x) == CALL)
4995     {
4996       /* The result of apply_change_group can be ignored; see canon_reg.  */
4997       canon_reg (x, insn);
4998       apply_change_group ();
4999       fold_rtx (x, insn);
5000     }
5001 
5002   /* Store the equivalent value in SRC_EQV, if different, or if the DEST
5003      is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
5004      is handled specially for this case, and if it isn't set, then there will
5005      be no equivalence for the destination.  */
5006   if (n_sets == 1 && REG_NOTES (insn) != 0
5007       && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
5008       && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
5009 	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
5010     {
5011       src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
5012       XEXP (tem, 0) = src_eqv;
5013     }
5014 
5015   /* Canonicalize sources and addresses of destinations.
5016      We do this in a separate pass to avoid problems when a MATCH_DUP is
5017      present in the insn pattern.  In that case, we want to ensure that
5018      we don't break the duplicate nature of the pattern.  So we will replace
5019      both operands at the same time.  Otherwise, we would fail to find an
5020      equivalent substitution in the loop calling validate_change below.
5021 
5022      We used to suppress canonicalization of DEST if it appears in SRC,
5023      but we don't do this any more.  */
5024 
5025   for (i = 0; i < n_sets; i++)
5026     {
5027       rtx dest = SET_DEST (sets[i].rtl);
5028       rtx src = SET_SRC (sets[i].rtl);
5029       rtx new = canon_reg (src, insn);
5030 
5031       sets[i].orig_src = src;
5032       validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5033 
5034       if (GET_CODE (dest) == ZERO_EXTRACT)
5035 	{
5036 	  validate_change (insn, &XEXP (dest, 1),
5037 			   canon_reg (XEXP (dest, 1), insn), 1);
5038 	  validate_change (insn, &XEXP (dest, 2),
5039 			   canon_reg (XEXP (dest, 2), insn), 1);
5040 	}
5041 
5042       while (GET_CODE (dest) == SUBREG
5043 	     || GET_CODE (dest) == ZERO_EXTRACT
5044 	     || GET_CODE (dest) == STRICT_LOW_PART)
5045 	dest = XEXP (dest, 0);
5046 
5047       if (MEM_P (dest))
5048 	canon_reg (dest, insn);
5049     }
5050 
5051   /* Now that we have done all the replacements, we can apply the change
5052      group and see if they all work.  Note that this will cause some
5053      canonicalizations that would have worked individually not to be applied
5054      because some other canonicalization didn't work, but this should not
5055      occur often.
5056 
5057      The result of apply_change_group can be ignored; see canon_reg.  */
5058 
5059   apply_change_group ();
5060 
5061   /* Set sets[i].src_elt to the class each source belongs to.
5062      Detect assignments from or to volatile things
5063      and set set[i] to zero so they will be ignored
5064      in the rest of this function.
5065 
5066      Nothing in this loop changes the hash table or the register chains.  */
5067 
5068   for (i = 0; i < n_sets; i++)
5069     {
5070       rtx src, dest;
5071       rtx src_folded;
5072       struct table_elt *elt = 0, *p;
5073       enum machine_mode mode;
5074       rtx src_eqv_here;
5075       rtx src_const = 0;
5076       rtx src_related = 0;
5077       struct table_elt *src_const_elt = 0;
5078       int src_cost = MAX_COST;
5079       int src_eqv_cost = MAX_COST;
5080       int src_folded_cost = MAX_COST;
5081       int src_related_cost = MAX_COST;
5082       int src_elt_cost = MAX_COST;
5083       int src_regcost = MAX_COST;
5084       int src_eqv_regcost = MAX_COST;
5085       int src_folded_regcost = MAX_COST;
5086       int src_related_regcost = MAX_COST;
5087       int src_elt_regcost = MAX_COST;
5088       /* Set nonzero if we need to call force_const_mem on with the
5089 	 contents of src_folded before using it.  */
5090       int src_folded_force_flag = 0;
5091 
5092       dest = SET_DEST (sets[i].rtl);
5093       src = SET_SRC (sets[i].rtl);
5094 
5095       /* If SRC is a constant that has no machine mode,
5096 	 hash it with the destination's machine mode.
5097 	 This way we can keep different modes separate.  */
5098 
5099       mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5100       sets[i].mode = mode;
5101 
5102       if (src_eqv)
5103 	{
5104 	  enum machine_mode eqvmode = mode;
5105 	  if (GET_CODE (dest) == STRICT_LOW_PART)
5106 	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5107 	  do_not_record = 0;
5108 	  hash_arg_in_memory = 0;
5109 	  src_eqv_hash = HASH (src_eqv, eqvmode);
5110 
5111 	  /* Find the equivalence class for the equivalent expression.  */
5112 
5113 	  if (!do_not_record)
5114 	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5115 
5116 	  src_eqv_volatile = do_not_record;
5117 	  src_eqv_in_memory = hash_arg_in_memory;
5118 	}
5119 
5120       /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5121 	 value of the INNER register, not the destination.  So it is not
5122 	 a valid substitution for the source.  But save it for later.  */
5123       if (GET_CODE (dest) == STRICT_LOW_PART)
5124 	src_eqv_here = 0;
5125       else
5126 	src_eqv_here = src_eqv;
5127 
5128       /* Simplify and foldable subexpressions in SRC.  Then get the fully-
5129 	 simplified result, which may not necessarily be valid.  */
5130       src_folded = fold_rtx (src, insn);
5131 
5132 #if 0
5133       /* ??? This caused bad code to be generated for the m68k port with -O2.
5134 	 Suppose src is (CONST_INT -1), and that after truncation src_folded
5135 	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
5136 	 At the end we will add src and src_const to the same equivalence
5137 	 class.  We now have 3 and -1 on the same equivalence class.  This
5138 	 causes later instructions to be mis-optimized.  */
5139       /* If storing a constant in a bitfield, pre-truncate the constant
5140 	 so we will be able to record it later.  */
5141       if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5142 	{
5143 	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5144 
5145 	  if (GET_CODE (src) == CONST_INT
5146 	      && GET_CODE (width) == CONST_INT
5147 	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5148 	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5149 	    src_folded
5150 	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5151 					  << INTVAL (width)) - 1));
5152 	}
5153 #endif
5154 
5155       /* Compute SRC's hash code, and also notice if it
5156 	 should not be recorded at all.  In that case,
5157 	 prevent any further processing of this assignment.  */
5158       do_not_record = 0;
5159       hash_arg_in_memory = 0;
5160 
5161       sets[i].src = src;
5162       sets[i].src_hash = HASH (src, mode);
5163       sets[i].src_volatile = do_not_record;
5164       sets[i].src_in_memory = hash_arg_in_memory;
5165 
5166       /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5167 	 a pseudo, do not record SRC.  Using SRC as a replacement for
5168 	 anything else will be incorrect in that situation.  Note that
5169 	 this usually occurs only for stack slots, in which case all the
5170 	 RTL would be referring to SRC, so we don't lose any optimization
5171 	 opportunities by not having SRC in the hash table.  */
5172 
5173       if (MEM_P (src)
5174 	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5175 	  && REG_P (dest)
5176 	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5177 	sets[i].src_volatile = 1;
5178 
5179 #if 0
5180       /* It is no longer clear why we used to do this, but it doesn't
5181 	 appear to still be needed.  So let's try without it since this
5182 	 code hurts cse'ing widened ops.  */
5183       /* If source is a paradoxical subreg (such as QI treated as an SI),
5184 	 treat it as volatile.  It may do the work of an SI in one context
5185 	 where the extra bits are not being used, but cannot replace an SI
5186 	 in general.  */
5187       if (GET_CODE (src) == SUBREG
5188 	  && (GET_MODE_SIZE (GET_MODE (src))
5189 	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5190 	sets[i].src_volatile = 1;
5191 #endif
5192 
5193       /* Locate all possible equivalent forms for SRC.  Try to replace
5194          SRC in the insn with each cheaper equivalent.
5195 
5196          We have the following types of equivalents: SRC itself, a folded
5197          version, a value given in a REG_EQUAL note, or a value related
5198 	 to a constant.
5199 
5200          Each of these equivalents may be part of an additional class
5201          of equivalents (if more than one is in the table, they must be in
5202          the same class; we check for this).
5203 
5204 	 If the source is volatile, we don't do any table lookups.
5205 
5206          We note any constant equivalent for possible later use in a
5207          REG_NOTE.  */
5208 
5209       if (!sets[i].src_volatile)
5210 	elt = lookup (src, sets[i].src_hash, mode);
5211 
5212       sets[i].src_elt = elt;
5213 
5214       if (elt && src_eqv_here && src_eqv_elt)
5215 	{
5216 	  if (elt->first_same_value != src_eqv_elt->first_same_value)
5217 	    {
5218 	      /* The REG_EQUAL is indicating that two formerly distinct
5219 		 classes are now equivalent.  So merge them.  */
5220 	      merge_equiv_classes (elt, src_eqv_elt);
5221 	      src_eqv_hash = HASH (src_eqv, elt->mode);
5222 	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5223 	    }
5224 
5225 	  src_eqv_here = 0;
5226 	}
5227 
5228       else if (src_eqv_elt)
5229 	elt = src_eqv_elt;
5230 
5231       /* Try to find a constant somewhere and record it in `src_const'.
5232 	 Record its table element, if any, in `src_const_elt'.  Look in
5233 	 any known equivalences first.  (If the constant is not in the
5234 	 table, also set `sets[i].src_const_hash').  */
5235       if (elt)
5236 	for (p = elt->first_same_value; p; p = p->next_same_value)
5237 	  if (p->is_const)
5238 	    {
5239 	      src_const = p->exp;
5240 	      src_const_elt = elt;
5241 	      break;
5242 	    }
5243 
5244       if (src_const == 0
5245 	  && (CONSTANT_P (src_folded)
5246 	      /* Consider (minus (label_ref L1) (label_ref L2)) as
5247 		 "constant" here so we will record it. This allows us
5248 		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
5249 	      || (GET_CODE (src_folded) == MINUS
5250 		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5251 		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5252 	src_const = src_folded, src_const_elt = elt;
5253       else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5254 	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5255 
5256       /* If we don't know if the constant is in the table, get its
5257 	 hash code and look it up.  */
5258       if (src_const && src_const_elt == 0)
5259 	{
5260 	  sets[i].src_const_hash = HASH (src_const, mode);
5261 	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5262 	}
5263 
5264       sets[i].src_const = src_const;
5265       sets[i].src_const_elt = src_const_elt;
5266 
5267       /* If the constant and our source are both in the table, mark them as
5268 	 equivalent.  Otherwise, if a constant is in the table but the source
5269 	 isn't, set ELT to it.  */
5270       if (src_const_elt && elt
5271 	  && src_const_elt->first_same_value != elt->first_same_value)
5272 	merge_equiv_classes (elt, src_const_elt);
5273       else if (src_const_elt && elt == 0)
5274 	elt = src_const_elt;
5275 
5276       /* See if there is a register linearly related to a constant
5277          equivalent of SRC.  */
5278       if (src_const
5279 	  && (GET_CODE (src_const) == CONST
5280 	      || (src_const_elt && src_const_elt->related_value != 0)))
5281 	{
5282 	  src_related = use_related_value (src_const, src_const_elt);
5283 	  if (src_related)
5284 	    {
5285 	      struct table_elt *src_related_elt
5286 		= lookup (src_related, HASH (src_related, mode), mode);
5287 	      if (src_related_elt && elt)
5288 		{
5289 		  if (elt->first_same_value
5290 		      != src_related_elt->first_same_value)
5291 		    /* This can occur when we previously saw a CONST
5292 		       involving a SYMBOL_REF and then see the SYMBOL_REF
5293 		       twice.  Merge the involved classes.  */
5294 		    merge_equiv_classes (elt, src_related_elt);
5295 
5296 		  src_related = 0;
5297 		  src_related_elt = 0;
5298 		}
5299 	      else if (src_related_elt && elt == 0)
5300 		elt = src_related_elt;
5301 	    }
5302 	}
5303 
5304       /* See if we have a CONST_INT that is already in a register in a
5305 	 wider mode.  */
5306 
5307       if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5308 	  && GET_MODE_CLASS (mode) == MODE_INT
5309 	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5310 	{
5311 	  enum machine_mode wider_mode;
5312 
5313 	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
5314 	       GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5315 	       && src_related == 0;
5316 	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5317 	    {
5318 	      struct table_elt *const_elt
5319 		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5320 
5321 	      if (const_elt == 0)
5322 		continue;
5323 
5324 	      for (const_elt = const_elt->first_same_value;
5325 		   const_elt; const_elt = const_elt->next_same_value)
5326 		if (REG_P (const_elt->exp))
5327 		  {
5328 		    src_related = gen_lowpart (mode,
5329 							   const_elt->exp);
5330 		    break;
5331 		  }
5332 	    }
5333 	}
5334 
5335       /* Another possibility is that we have an AND with a constant in
5336 	 a mode narrower than a word.  If so, it might have been generated
5337 	 as part of an "if" which would narrow the AND.  If we already
5338 	 have done the AND in a wider mode, we can use a SUBREG of that
5339 	 value.  */
5340 
5341       if (flag_expensive_optimizations && ! src_related
5342 	  && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5343 	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5344 	{
5345 	  enum machine_mode tmode;
5346 	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5347 
5348 	  for (tmode = GET_MODE_WIDER_MODE (mode);
5349 	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5350 	       tmode = GET_MODE_WIDER_MODE (tmode))
5351 	    {
5352 	      rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5353 	      struct table_elt *larger_elt;
5354 
5355 	      if (inner)
5356 		{
5357 		  PUT_MODE (new_and, tmode);
5358 		  XEXP (new_and, 0) = inner;
5359 		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5360 		  if (larger_elt == 0)
5361 		    continue;
5362 
5363 		  for (larger_elt = larger_elt->first_same_value;
5364 		       larger_elt; larger_elt = larger_elt->next_same_value)
5365 		    if (REG_P (larger_elt->exp))
5366 		      {
5367 			src_related
5368 			  = gen_lowpart (mode, larger_elt->exp);
5369 			break;
5370 		      }
5371 
5372 		  if (src_related)
5373 		    break;
5374 		}
5375 	    }
5376 	}
5377 
5378 #ifdef LOAD_EXTEND_OP
5379       /* See if a MEM has already been loaded with a widening operation;
5380 	 if it has, we can use a subreg of that.  Many CISC machines
5381 	 also have such operations, but this is only likely to be
5382 	 beneficial on these machines.  */
5383 
5384       if (flag_expensive_optimizations && src_related == 0
5385 	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5386 	  && GET_MODE_CLASS (mode) == MODE_INT
5387 	  && MEM_P (src) && ! do_not_record
5388 	  && LOAD_EXTEND_OP (mode) != UNKNOWN)
5389 	{
5390 	  struct rtx_def memory_extend_buf;
5391 	  rtx memory_extend_rtx = &memory_extend_buf;
5392 	  enum machine_mode tmode;
5393 
5394 	  /* Set what we are trying to extend and the operation it might
5395 	     have been extended with.  */
5396 	  memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
5397 	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5398 	  XEXP (memory_extend_rtx, 0) = src;
5399 
5400 	  for (tmode = GET_MODE_WIDER_MODE (mode);
5401 	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5402 	       tmode = GET_MODE_WIDER_MODE (tmode))
5403 	    {
5404 	      struct table_elt *larger_elt;
5405 
5406 	      PUT_MODE (memory_extend_rtx, tmode);
5407 	      larger_elt = lookup (memory_extend_rtx,
5408 				   HASH (memory_extend_rtx, tmode), tmode);
5409 	      if (larger_elt == 0)
5410 		continue;
5411 
5412 	      for (larger_elt = larger_elt->first_same_value;
5413 		   larger_elt; larger_elt = larger_elt->next_same_value)
5414 		if (REG_P (larger_elt->exp))
5415 		  {
5416 		    src_related = gen_lowpart (mode,
5417 							   larger_elt->exp);
5418 		    break;
5419 		  }
5420 
5421 	      if (src_related)
5422 		break;
5423 	    }
5424 	}
5425 #endif /* LOAD_EXTEND_OP */
5426 
5427       if (src == src_folded)
5428 	src_folded = 0;
5429 
5430       /* At this point, ELT, if nonzero, points to a class of expressions
5431          equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5432 	 and SRC_RELATED, if nonzero, each contain additional equivalent
5433 	 expressions.  Prune these latter expressions by deleting expressions
5434 	 already in the equivalence class.
5435 
5436 	 Check for an equivalent identical to the destination.  If found,
5437 	 this is the preferred equivalent since it will likely lead to
5438 	 elimination of the insn.  Indicate this by placing it in
5439 	 `src_related'.  */
5440 
5441       if (elt)
5442 	elt = elt->first_same_value;
5443       for (p = elt; p; p = p->next_same_value)
5444 	{
5445 	  enum rtx_code code = GET_CODE (p->exp);
5446 
5447 	  /* If the expression is not valid, ignore it.  Then we do not
5448 	     have to check for validity below.  In most cases, we can use
5449 	     `rtx_equal_p', since canonicalization has already been done.  */
5450 	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5451 	    continue;
5452 
5453 	  /* Also skip paradoxical subregs, unless that's what we're
5454 	     looking for.  */
5455 	  if (code == SUBREG
5456 	      && (GET_MODE_SIZE (GET_MODE (p->exp))
5457 		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5458 	      && ! (src != 0
5459 		    && GET_CODE (src) == SUBREG
5460 		    && GET_MODE (src) == GET_MODE (p->exp)
5461 		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5462 			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5463 	    continue;
5464 
5465 	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5466 	    src = 0;
5467 	  else if (src_folded && GET_CODE (src_folded) == code
5468 		   && rtx_equal_p (src_folded, p->exp))
5469 	    src_folded = 0;
5470 	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5471 		   && rtx_equal_p (src_eqv_here, p->exp))
5472 	    src_eqv_here = 0;
5473 	  else if (src_related && GET_CODE (src_related) == code
5474 		   && rtx_equal_p (src_related, p->exp))
5475 	    src_related = 0;
5476 
5477 	  /* This is the same as the destination of the insns, we want
5478 	     to prefer it.  Copy it to src_related.  The code below will
5479 	     then give it a negative cost.  */
5480 	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5481 	    src_related = dest;
5482 	}
5483 
5484       /* Find the cheapest valid equivalent, trying all the available
5485          possibilities.  Prefer items not in the hash table to ones
5486          that are when they are equal cost.  Note that we can never
5487          worsen an insn as the current contents will also succeed.
5488 	 If we find an equivalent identical to the destination, use it as best,
5489 	 since this insn will probably be eliminated in that case.  */
5490       if (src)
5491 	{
5492 	  if (rtx_equal_p (src, dest))
5493 	    src_cost = src_regcost = -1;
5494 	  else
5495 	    {
5496 	      src_cost = COST (src);
5497 	      src_regcost = approx_reg_cost (src);
5498 	    }
5499 	}
5500 
5501       if (src_eqv_here)
5502 	{
5503 	  if (rtx_equal_p (src_eqv_here, dest))
5504 	    src_eqv_cost = src_eqv_regcost = -1;
5505 	  else
5506 	    {
5507 	      src_eqv_cost = COST (src_eqv_here);
5508 	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
5509 	    }
5510 	}
5511 
5512       if (src_folded)
5513 	{
5514 	  if (rtx_equal_p (src_folded, dest))
5515 	    src_folded_cost = src_folded_regcost = -1;
5516 	  else
5517 	    {
5518 	      src_folded_cost = COST (src_folded);
5519 	      src_folded_regcost = approx_reg_cost (src_folded);
5520 	    }
5521 	}
5522 
5523       if (src_related)
5524 	{
5525 	  if (rtx_equal_p (src_related, dest))
5526 	    src_related_cost = src_related_regcost = -1;
5527 	  else
5528 	    {
5529 	      src_related_cost = COST (src_related);
5530 	      src_related_regcost = approx_reg_cost (src_related);
5531 	    }
5532 	}
5533 
5534       /* If this was an indirect jump insn, a known label will really be
5535 	 cheaper even though it looks more expensive.  */
5536       if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5537 	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5538 
5539       /* Terminate loop when replacement made.  This must terminate since
5540          the current contents will be tested and will always be valid.  */
5541       while (1)
5542 	{
5543 	  rtx trial;
5544 
5545 	  /* Skip invalid entries.  */
5546 	  while (elt && !REG_P (elt->exp)
5547 		 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5548 	    elt = elt->next_same_value;
5549 
5550 	  /* A paradoxical subreg would be bad here: it'll be the right
5551 	     size, but later may be adjusted so that the upper bits aren't
5552 	     what we want.  So reject it.  */
5553 	  if (elt != 0
5554 	      && GET_CODE (elt->exp) == SUBREG
5555 	      && (GET_MODE_SIZE (GET_MODE (elt->exp))
5556 		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5557 	      /* It is okay, though, if the rtx we're trying to match
5558 		 will ignore any of the bits we can't predict.  */
5559 	      && ! (src != 0
5560 		    && GET_CODE (src) == SUBREG
5561 		    && GET_MODE (src) == GET_MODE (elt->exp)
5562 		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5563 			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5564 	    {
5565 	      elt = elt->next_same_value;
5566 	      continue;
5567 	    }
5568 
5569 	  if (elt)
5570 	    {
5571 	      src_elt_cost = elt->cost;
5572 	      src_elt_regcost = elt->regcost;
5573 	    }
5574 
5575 	  /* Find cheapest and skip it for the next time.   For items
5576 	     of equal cost, use this order:
5577 	     src_folded, src, src_eqv, src_related and hash table entry.  */
5578 	  if (src_folded
5579 	      && preferable (src_folded_cost, src_folded_regcost,
5580 			     src_cost, src_regcost) <= 0
5581 	      && preferable (src_folded_cost, src_folded_regcost,
5582 			     src_eqv_cost, src_eqv_regcost) <= 0
5583 	      && preferable (src_folded_cost, src_folded_regcost,
5584 			     src_related_cost, src_related_regcost) <= 0
5585 	      && preferable (src_folded_cost, src_folded_regcost,
5586 			     src_elt_cost, src_elt_regcost) <= 0)
5587 	    {
5588 	      trial = src_folded, src_folded_cost = MAX_COST;
5589 	      if (src_folded_force_flag)
5590 		{
5591 		  rtx forced = force_const_mem (mode, trial);
5592 		  if (forced)
5593 		    trial = forced;
5594 		}
5595 	    }
5596 	  else if (src
5597 		   && preferable (src_cost, src_regcost,
5598 				  src_eqv_cost, src_eqv_regcost) <= 0
5599 		   && preferable (src_cost, src_regcost,
5600 				  src_related_cost, src_related_regcost) <= 0
5601 		   && preferable (src_cost, src_regcost,
5602 				  src_elt_cost, src_elt_regcost) <= 0)
5603 	    trial = src, src_cost = MAX_COST;
5604 	  else if (src_eqv_here
5605 		   && preferable (src_eqv_cost, src_eqv_regcost,
5606 				  src_related_cost, src_related_regcost) <= 0
5607 		   && preferable (src_eqv_cost, src_eqv_regcost,
5608 				  src_elt_cost, src_elt_regcost) <= 0)
5609 	    trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5610 	  else if (src_related
5611 		   && preferable (src_related_cost, src_related_regcost,
5612 				  src_elt_cost, src_elt_regcost) <= 0)
5613 	    trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5614 	  else
5615 	    {
5616 	      trial = copy_rtx (elt->exp);
5617 	      elt = elt->next_same_value;
5618 	      src_elt_cost = MAX_COST;
5619 	    }
5620 
5621 	  /* We don't normally have an insn matching (set (pc) (pc)), so
5622 	     check for this separately here.  We will delete such an
5623 	     insn below.
5624 
5625 	     For other cases such as a table jump or conditional jump
5626 	     where we know the ultimate target, go ahead and replace the
5627 	     operand.  While that may not make a valid insn, we will
5628 	     reemit the jump below (and also insert any necessary
5629 	     barriers).  */
5630 	  if (n_sets == 1 && dest == pc_rtx
5631 	      && (trial == pc_rtx
5632 		  || (GET_CODE (trial) == LABEL_REF
5633 		      && ! condjump_p (insn))))
5634 	    {
5635 	      /* Don't substitute non-local labels, this confuses CFG.  */
5636 	      if (GET_CODE (trial) == LABEL_REF
5637 		  && LABEL_REF_NONLOCAL_P (trial))
5638 		continue;
5639 
5640 	      SET_SRC (sets[i].rtl) = trial;
5641 	      cse_jumps_altered = 1;
5642 	      break;
5643 	    }
5644 
5645 	  /* Reject certain invalid forms of CONST that we create.  */
5646 	  else if (CONSTANT_P (trial)
5647 		   && GET_CODE (trial) == CONST
5648 		   /* Reject cases that will cause decode_rtx_const to
5649 		      die.  On the alpha when simplifying a switch, we
5650 		      get (const (truncate (minus (label_ref)
5651 		      (label_ref)))).  */
5652 		   && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5653 		       /* Likewise on IA-64, except without the
5654 			  truncate.  */
5655 		       || (GET_CODE (XEXP (trial, 0)) == MINUS
5656 			   && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5657 			   && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5658 	    /* Do nothing for this case.  */
5659 	    ;
5660 
5661 	  /* Look for a substitution that makes a valid insn.  */
5662 	  else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5663 	    {
5664 	      rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5665 
5666 	      /* If we just made a substitution inside a libcall, then we
5667 		 need to make the same substitution in any notes attached
5668 		 to the RETVAL insn.  */
5669 	      if (libcall_insn
5670 		  && (REG_P (sets[i].orig_src)
5671 		      || GET_CODE (sets[i].orig_src) == SUBREG
5672 		      || MEM_P (sets[i].orig_src)))
5673 		{
5674 	          rtx note = find_reg_equal_equiv_note (libcall_insn);
5675 		  if (note != 0)
5676 		    XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5677 							   sets[i].orig_src,
5678 							   copy_rtx (new));
5679 		}
5680 
5681 	      /* The result of apply_change_group can be ignored; see
5682 		 canon_reg.  */
5683 
5684 	      validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5685 	      apply_change_group ();
5686 	      break;
5687 	    }
5688 
5689 	  /* If we previously found constant pool entries for
5690 	     constants and this is a constant, try making a
5691 	     pool entry.  Put it in src_folded unless we already have done
5692 	     this since that is where it likely came from.  */
5693 
5694 	  else if (constant_pool_entries_cost
5695 		   && CONSTANT_P (trial)
5696 		   && (src_folded == 0
5697 		       || (!MEM_P (src_folded)
5698 			   && ! src_folded_force_flag))
5699 		   && GET_MODE_CLASS (mode) != MODE_CC
5700 		   && mode != VOIDmode)
5701 	    {
5702 	      src_folded_force_flag = 1;
5703 	      src_folded = trial;
5704 	      src_folded_cost = constant_pool_entries_cost;
5705 	      src_folded_regcost = constant_pool_entries_regcost;
5706 	    }
5707 	}
5708 
5709       src = SET_SRC (sets[i].rtl);
5710 
5711       /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5712 	 However, there is an important exception:  If both are registers
5713 	 that are not the head of their equivalence class, replace SET_SRC
5714 	 with the head of the class.  If we do not do this, we will have
5715 	 both registers live over a portion of the basic block.  This way,
5716 	 their lifetimes will likely abut instead of overlapping.  */
5717       if (REG_P (dest)
5718 	  && REGNO_QTY_VALID_P (REGNO (dest)))
5719 	{
5720 	  int dest_q = REG_QTY (REGNO (dest));
5721 	  struct qty_table_elem *dest_ent = &qty_table[dest_q];
5722 
5723 	  if (dest_ent->mode == GET_MODE (dest)
5724 	      && dest_ent->first_reg != REGNO (dest)
5725 	      && REG_P (src) && REGNO (src) == REGNO (dest)
5726 	      /* Don't do this if the original insn had a hard reg as
5727 		 SET_SRC or SET_DEST.  */
5728 	      && (!REG_P (sets[i].src)
5729 		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5730 	      && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5731 	    /* We can't call canon_reg here because it won't do anything if
5732 	       SRC is a hard register.  */
5733 	    {
5734 	      int src_q = REG_QTY (REGNO (src));
5735 	      struct qty_table_elem *src_ent = &qty_table[src_q];
5736 	      int first = src_ent->first_reg;
5737 	      rtx new_src
5738 		= (first >= FIRST_PSEUDO_REGISTER
5739 		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5740 
5741 	      /* We must use validate-change even for this, because this
5742 		 might be a special no-op instruction, suitable only to
5743 		 tag notes onto.  */
5744 	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5745 		{
5746 		  src = new_src;
5747 		  /* If we had a constant that is cheaper than what we are now
5748 		     setting SRC to, use that constant.  We ignored it when we
5749 		     thought we could make this into a no-op.  */
5750 		  if (src_const && COST (src_const) < COST (src)
5751 		      && validate_change (insn, &SET_SRC (sets[i].rtl),
5752 					  src_const, 0))
5753 		    src = src_const;
5754 		}
5755 	    }
5756 	}
5757 
5758       /* If we made a change, recompute SRC values.  */
5759       if (src != sets[i].src)
5760 	{
5761 	  cse_altered = 1;
5762 	  do_not_record = 0;
5763 	  hash_arg_in_memory = 0;
5764 	  sets[i].src = src;
5765 	  sets[i].src_hash = HASH (src, mode);
5766 	  sets[i].src_volatile = do_not_record;
5767 	  sets[i].src_in_memory = hash_arg_in_memory;
5768 	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5769 	}
5770 
5771       /* If this is a single SET, we are setting a register, and we have an
5772 	 equivalent constant, we want to add a REG_NOTE.   We don't want
5773 	 to write a REG_EQUAL note for a constant pseudo since verifying that
5774 	 that pseudo hasn't been eliminated is a pain.  Such a note also
5775 	 won't help anything.
5776 
5777 	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5778 	 which can be created for a reference to a compile time computable
5779 	 entry in a jump table.  */
5780 
5781       if (n_sets == 1 && src_const && REG_P (dest)
5782 	  && !REG_P (src_const)
5783 	  && ! (GET_CODE (src_const) == CONST
5784 		&& GET_CODE (XEXP (src_const, 0)) == MINUS
5785 		&& GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5786 		&& GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5787 	{
5788 	  /* We only want a REG_EQUAL note if src_const != src.  */
5789 	  if (! rtx_equal_p (src, src_const))
5790 	    {
5791 	      /* Make sure that the rtx is not shared.  */
5792 	      src_const = copy_rtx (src_const);
5793 
5794 	      /* Record the actual constant value in a REG_EQUAL note,
5795 		 making a new one if one does not already exist.  */
5796 	      set_unique_reg_note (insn, REG_EQUAL, src_const);
5797 	    }
5798 	}
5799 
5800       /* Now deal with the destination.  */
5801       do_not_record = 0;
5802 
5803       /* Look within any ZERO_EXTRACT to the MEM or REG within it.  */
5804       while (GET_CODE (dest) == SUBREG
5805 	     || GET_CODE (dest) == ZERO_EXTRACT
5806 	     || GET_CODE (dest) == STRICT_LOW_PART)
5807 	dest = XEXP (dest, 0);
5808 
5809       sets[i].inner_dest = dest;
5810 
5811       if (MEM_P (dest))
5812 	{
5813 #ifdef PUSH_ROUNDING
5814 	  /* Stack pushes invalidate the stack pointer.  */
5815 	  rtx addr = XEXP (dest, 0);
5816 	  if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5817 	      && XEXP (addr, 0) == stack_pointer_rtx)
5818 	    invalidate (stack_pointer_rtx, VOIDmode);
5819 #endif
5820 	  dest = fold_rtx (dest, insn);
5821 	}
5822 
5823       /* Compute the hash code of the destination now,
5824 	 before the effects of this instruction are recorded,
5825 	 since the register values used in the address computation
5826 	 are those before this instruction.  */
5827       sets[i].dest_hash = HASH (dest, mode);
5828 
5829       /* Don't enter a bit-field in the hash table
5830 	 because the value in it after the store
5831 	 may not equal what was stored, due to truncation.  */
5832 
5833       if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5834 	{
5835 	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5836 
5837 	  if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5838 	      && GET_CODE (width) == CONST_INT
5839 	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5840 	      && ! (INTVAL (src_const)
5841 		    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5842 	    /* Exception: if the value is constant,
5843 	       and it won't be truncated, record it.  */
5844 	    ;
5845 	  else
5846 	    {
5847 	      /* This is chosen so that the destination will be invalidated
5848 		 but no new value will be recorded.
5849 		 We must invalidate because sometimes constant
5850 		 values can be recorded for bitfields.  */
5851 	      sets[i].src_elt = 0;
5852 	      sets[i].src_volatile = 1;
5853 	      src_eqv = 0;
5854 	      src_eqv_elt = 0;
5855 	    }
5856 	}
5857 
5858       /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5859 	 the insn.  */
5860       else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5861 	{
5862 	  /* One less use of the label this insn used to jump to.  */
5863 	  delete_insn (insn);
5864 	  cse_jumps_altered = 1;
5865 	  /* No more processing for this set.  */
5866 	  sets[i].rtl = 0;
5867 	}
5868 
5869       /* If this SET is now setting PC to a label, we know it used to
5870 	 be a conditional or computed branch.  */
5871       else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5872 	       && !LABEL_REF_NONLOCAL_P (src))
5873 	{
5874 	  /* Now emit a BARRIER after the unconditional jump.  */
5875 	  if (NEXT_INSN (insn) == 0
5876 	      || !BARRIER_P (NEXT_INSN (insn)))
5877 	    emit_barrier_after (insn);
5878 
5879 	  /* We reemit the jump in as many cases as possible just in
5880 	     case the form of an unconditional jump is significantly
5881 	     different than a computed jump or conditional jump.
5882 
5883 	     If this insn has multiple sets, then reemitting the
5884 	     jump is nontrivial.  So instead we just force rerecognition
5885 	     and hope for the best.  */
5886 	  if (n_sets == 1)
5887 	    {
5888 	      rtx new, note;
5889 
5890 	      new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5891 	      JUMP_LABEL (new) = XEXP (src, 0);
5892 	      LABEL_NUSES (XEXP (src, 0))++;
5893 
5894 	      /* Make sure to copy over REG_NON_LOCAL_GOTO.  */
5895 	      note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5896 	      if (note)
5897 		{
5898 		  XEXP (note, 1) = NULL_RTX;
5899 		  REG_NOTES (new) = note;
5900 		}
5901 
5902 	      delete_insn (insn);
5903 	      insn = new;
5904 
5905 	      /* Now emit a BARRIER after the unconditional jump.  */
5906 	      if (NEXT_INSN (insn) == 0
5907 		  || !BARRIER_P (NEXT_INSN (insn)))
5908 		emit_barrier_after (insn);
5909 	    }
5910 	  else
5911 	    INSN_CODE (insn) = -1;
5912 
5913 	  /* Do not bother deleting any unreachable code,
5914 	     let jump/flow do that.  */
5915 
5916 	  cse_jumps_altered = 1;
5917 	  sets[i].rtl = 0;
5918 	}
5919 
5920       /* If destination is volatile, invalidate it and then do no further
5921 	 processing for this assignment.  */
5922 
5923       else if (do_not_record)
5924 	{
5925 	  if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5926 	    invalidate (dest, VOIDmode);
5927 	  else if (MEM_P (dest))
5928 	    invalidate (dest, VOIDmode);
5929 	  else if (GET_CODE (dest) == STRICT_LOW_PART
5930 		   || GET_CODE (dest) == ZERO_EXTRACT)
5931 	    invalidate (XEXP (dest, 0), GET_MODE (dest));
5932 	  sets[i].rtl = 0;
5933 	}
5934 
5935       if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5936 	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5937 
5938 #ifdef HAVE_cc0
5939       /* If setting CC0, record what it was set to, or a constant, if it
5940 	 is equivalent to a constant.  If it is being set to a floating-point
5941 	 value, make a COMPARE with the appropriate constant of 0.  If we
5942 	 don't do this, later code can interpret this as a test against
5943 	 const0_rtx, which can cause problems if we try to put it into an
5944 	 insn as a floating-point operand.  */
5945       if (dest == cc0_rtx)
5946 	{
5947 	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5948 	  this_insn_cc0_mode = mode;
5949 	  if (FLOAT_MODE_P (mode))
5950 	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5951 					     CONST0_RTX (mode));
5952 	}
5953 #endif
5954     }
5955 
5956   /* Now enter all non-volatile source expressions in the hash table
5957      if they are not already present.
5958      Record their equivalence classes in src_elt.
5959      This way we can insert the corresponding destinations into
5960      the same classes even if the actual sources are no longer in them
5961      (having been invalidated).  */
5962 
5963   if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5964       && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5965     {
5966       struct table_elt *elt;
5967       struct table_elt *classp = sets[0].src_elt;
5968       rtx dest = SET_DEST (sets[0].rtl);
5969       enum machine_mode eqvmode = GET_MODE (dest);
5970 
5971       if (GET_CODE (dest) == STRICT_LOW_PART)
5972 	{
5973 	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5974 	  classp = 0;
5975 	}
5976       if (insert_regs (src_eqv, classp, 0))
5977 	{
5978 	  rehash_using_reg (src_eqv);
5979 	  src_eqv_hash = HASH (src_eqv, eqvmode);
5980 	}
5981       elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5982       elt->in_memory = src_eqv_in_memory;
5983       src_eqv_elt = elt;
5984 
5985       /* Check to see if src_eqv_elt is the same as a set source which
5986 	 does not yet have an elt, and if so set the elt of the set source
5987 	 to src_eqv_elt.  */
5988       for (i = 0; i < n_sets; i++)
5989 	if (sets[i].rtl && sets[i].src_elt == 0
5990 	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5991 	  sets[i].src_elt = src_eqv_elt;
5992     }
5993 
5994   for (i = 0; i < n_sets; i++)
5995     if (sets[i].rtl && ! sets[i].src_volatile
5996 	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5997       {
5998 	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5999 	  {
6000 	    /* REG_EQUAL in setting a STRICT_LOW_PART
6001 	       gives an equivalent for the entire destination register,
6002 	       not just for the subreg being stored in now.
6003 	       This is a more interesting equivalence, so we arrange later
6004 	       to treat the entire reg as the destination.  */
6005 	    sets[i].src_elt = src_eqv_elt;
6006 	    sets[i].src_hash = src_eqv_hash;
6007 	  }
6008 	else
6009 	  {
6010 	    /* Insert source and constant equivalent into hash table, if not
6011 	       already present.  */
6012 	    struct table_elt *classp = src_eqv_elt;
6013 	    rtx src = sets[i].src;
6014 	    rtx dest = SET_DEST (sets[i].rtl);
6015 	    enum machine_mode mode
6016 	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6017 
6018 	    /* It's possible that we have a source value known to be
6019 	       constant but don't have a REG_EQUAL note on the insn.
6020 	       Lack of a note will mean src_eqv_elt will be NULL.  This
6021 	       can happen where we've generated a SUBREG to access a
6022 	       CONST_INT that is already in a register in a wider mode.
6023 	       Ensure that the source expression is put in the proper
6024 	       constant class.  */
6025 	    if (!classp)
6026 	      classp = sets[i].src_const_elt;
6027 
6028 	    if (sets[i].src_elt == 0)
6029 	      {
6030 		/* Don't put a hard register source into the table if this is
6031 		   the last insn of a libcall.  In this case, we only need
6032 		   to put src_eqv_elt in src_elt.  */
6033 		if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6034 		  {
6035 		    struct table_elt *elt;
6036 
6037 		    /* Note that these insert_regs calls cannot remove
6038 		       any of the src_elt's, because they would have failed to
6039 		       match if not still valid.  */
6040 		    if (insert_regs (src, classp, 0))
6041 		      {
6042 			rehash_using_reg (src);
6043 			sets[i].src_hash = HASH (src, mode);
6044 		      }
6045 		    elt = insert (src, classp, sets[i].src_hash, mode);
6046 		    elt->in_memory = sets[i].src_in_memory;
6047 		    sets[i].src_elt = classp = elt;
6048 		  }
6049 		else
6050 		  sets[i].src_elt = classp;
6051 	      }
6052 	    if (sets[i].src_const && sets[i].src_const_elt == 0
6053 		&& src != sets[i].src_const
6054 		&& ! rtx_equal_p (sets[i].src_const, src))
6055 	      sets[i].src_elt = insert (sets[i].src_const, classp,
6056 					sets[i].src_const_hash, mode);
6057 	  }
6058       }
6059     else if (sets[i].src_elt == 0)
6060       /* If we did not insert the source into the hash table (e.g., it was
6061 	 volatile), note the equivalence class for the REG_EQUAL value, if any,
6062 	 so that the destination goes into that class.  */
6063       sets[i].src_elt = src_eqv_elt;
6064 
6065   /* Record destination addresses in the hash table.  This allows us to
6066      check if they are invalidated by other sets.  */
6067   for (i = 0; i < n_sets; i++)
6068     {
6069       if (sets[i].rtl)
6070 	{
6071 	  rtx x = sets[i].inner_dest;
6072 	  struct table_elt *elt;
6073 	  enum machine_mode mode;
6074 	  unsigned hash;
6075 
6076 	  if (MEM_P (x))
6077 	    {
6078 	      x = XEXP (x, 0);
6079 	      mode = GET_MODE (x);
6080 	      hash = HASH (x, mode);
6081 	      elt = lookup (x, hash, mode);
6082 	      if (!elt)
6083 		{
6084 		  if (insert_regs (x, NULL, 0))
6085 		    {
6086 		      rtx dest = SET_DEST (sets[i].rtl);
6087 
6088 		      rehash_using_reg (x);
6089 		      hash = HASH (x, mode);
6090 		      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6091 		    }
6092 		  elt = insert (x, NULL, hash, mode);
6093 		}
6094 
6095 	      sets[i].dest_addr_elt = elt;
6096 	    }
6097 	  else
6098 	    sets[i].dest_addr_elt = NULL;
6099 	}
6100     }
6101 
6102   invalidate_from_clobbers (x);
6103 
6104   /* Some registers are invalidated by subroutine calls.  Memory is
6105      invalidated by non-constant calls.  */
6106 
6107   if (CALL_P (insn))
6108     {
6109       if (! CONST_OR_PURE_CALL_P (insn))
6110 	invalidate_memory ();
6111       invalidate_for_call ();
6112     }
6113 
6114   /* Now invalidate everything set by this instruction.
6115      If a SUBREG or other funny destination is being set,
6116      sets[i].rtl is still nonzero, so here we invalidate the reg
6117      a part of which is being set.  */
6118 
6119   for (i = 0; i < n_sets; i++)
6120     if (sets[i].rtl)
6121       {
6122 	/* We can't use the inner dest, because the mode associated with
6123 	   a ZERO_EXTRACT is significant.  */
6124 	rtx dest = SET_DEST (sets[i].rtl);
6125 
6126 	/* Needed for registers to remove the register from its
6127 	   previous quantity's chain.
6128 	   Needed for memory if this is a nonvarying address, unless
6129 	   we have just done an invalidate_memory that covers even those.  */
6130 	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6131 	  invalidate (dest, VOIDmode);
6132 	else if (MEM_P (dest))
6133 	  invalidate (dest, VOIDmode);
6134 	else if (GET_CODE (dest) == STRICT_LOW_PART
6135 		 || GET_CODE (dest) == ZERO_EXTRACT)
6136 	  invalidate (XEXP (dest, 0), GET_MODE (dest));
6137       }
6138 
6139   /* A volatile ASM invalidates everything.  */
6140   if (NONJUMP_INSN_P (insn)
6141       && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
6142       && MEM_VOLATILE_P (PATTERN (insn)))
6143     flush_hash_table ();
6144 
6145   /* Make sure registers mentioned in destinations
6146      are safe for use in an expression to be inserted.
6147      This removes from the hash table
6148      any invalid entry that refers to one of these registers.
6149 
6150      We don't care about the return value from mention_regs because
6151      we are going to hash the SET_DEST values unconditionally.  */
6152 
6153   for (i = 0; i < n_sets; i++)
6154     {
6155       if (sets[i].rtl)
6156 	{
6157 	  rtx x = SET_DEST (sets[i].rtl);
6158 
6159 	  if (!REG_P (x))
6160 	    mention_regs (x);
6161 	  else
6162 	    {
6163 	      /* We used to rely on all references to a register becoming
6164 		 inaccessible when a register changes to a new quantity,
6165 		 since that changes the hash code.  However, that is not
6166 		 safe, since after HASH_SIZE new quantities we get a
6167 		 hash 'collision' of a register with its own invalid
6168 		 entries.  And since SUBREGs have been changed not to
6169 		 change their hash code with the hash code of the register,
6170 		 it wouldn't work any longer at all.  So we have to check
6171 		 for any invalid references lying around now.
6172 		 This code is similar to the REG case in mention_regs,
6173 		 but it knows that reg_tick has been incremented, and
6174 		 it leaves reg_in_table as -1 .  */
6175 	      unsigned int regno = REGNO (x);
6176 	      unsigned int endregno
6177 		= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6178 			   : hard_regno_nregs[regno][GET_MODE (x)]);
6179 	      unsigned int i;
6180 
6181 	      for (i = regno; i < endregno; i++)
6182 		{
6183 		  if (REG_IN_TABLE (i) >= 0)
6184 		    {
6185 		      remove_invalid_refs (i);
6186 		      REG_IN_TABLE (i) = -1;
6187 		    }
6188 		}
6189 	    }
6190 	}
6191     }
6192 
6193   /* We may have just removed some of the src_elt's from the hash table.
6194      So replace each one with the current head of the same class.
6195      Also check if destination addresses have been removed.  */
6196 
6197   for (i = 0; i < n_sets; i++)
6198     if (sets[i].rtl)
6199       {
6200 	if (sets[i].dest_addr_elt
6201 	    && sets[i].dest_addr_elt->first_same_value == 0)
6202 	  {
6203 	    /* The elt was removed, which means this destination is not
6204 	       valid after this instruction.  */
6205 	    sets[i].rtl = NULL_RTX;
6206 	  }
6207 	else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6208 	  /* If elt was removed, find current head of same class,
6209 	     or 0 if nothing remains of that class.  */
6210 	  {
6211 	    struct table_elt *elt = sets[i].src_elt;
6212 
6213 	    while (elt && elt->prev_same_value)
6214 	      elt = elt->prev_same_value;
6215 
6216 	    while (elt && elt->first_same_value == 0)
6217 	      elt = elt->next_same_value;
6218 	    sets[i].src_elt = elt ? elt->first_same_value : 0;
6219 	  }
6220       }
6221 
6222   /* Now insert the destinations into their equivalence classes.  */
6223 
6224   for (i = 0; i < n_sets; i++)
6225     if (sets[i].rtl)
6226       {
6227 	rtx dest = SET_DEST (sets[i].rtl);
6228 	struct table_elt *elt;
6229 
6230 	/* Don't record value if we are not supposed to risk allocating
6231 	   floating-point values in registers that might be wider than
6232 	   memory.  */
6233 	if ((flag_float_store
6234 	     && MEM_P (dest)
6235 	     && FLOAT_MODE_P (GET_MODE (dest)))
6236 	    /* Don't record BLKmode values, because we don't know the
6237 	       size of it, and can't be sure that other BLKmode values
6238 	       have the same or smaller size.  */
6239 	    || GET_MODE (dest) == BLKmode
6240 	    /* Don't record values of destinations set inside a libcall block
6241 	       since we might delete the libcall.  Things should have been set
6242 	       up so we won't want to reuse such a value, but we play it safe
6243 	       here.  */
6244 	    || libcall_insn
6245 	    /* If we didn't put a REG_EQUAL value or a source into the hash
6246 	       table, there is no point is recording DEST.  */
6247 	    || sets[i].src_elt == 0
6248 	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6249 	       or SIGN_EXTEND, don't record DEST since it can cause
6250 	       some tracking to be wrong.
6251 
6252 	       ??? Think about this more later.  */
6253 	    || (GET_CODE (dest) == SUBREG
6254 		&& (GET_MODE_SIZE (GET_MODE (dest))
6255 		    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6256 		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
6257 		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6258 	  continue;
6259 
6260 	/* STRICT_LOW_PART isn't part of the value BEING set,
6261 	   and neither is the SUBREG inside it.
6262 	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
6263 	if (GET_CODE (dest) == STRICT_LOW_PART)
6264 	  dest = SUBREG_REG (XEXP (dest, 0));
6265 
6266 	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6267 	  /* Registers must also be inserted into chains for quantities.  */
6268 	  if (insert_regs (dest, sets[i].src_elt, 1))
6269 	    {
6270 	      /* If `insert_regs' changes something, the hash code must be
6271 		 recalculated.  */
6272 	      rehash_using_reg (dest);
6273 	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6274 	    }
6275 
6276 	elt = insert (dest, sets[i].src_elt,
6277 		      sets[i].dest_hash, GET_MODE (dest));
6278 
6279 	elt->in_memory = (MEM_P (sets[i].inner_dest)
6280 			  && !MEM_READONLY_P (sets[i].inner_dest));
6281 
6282 	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6283 	   narrower than M2, and both M1 and M2 are the same number of words,
6284 	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6285 	   make that equivalence as well.
6286 
6287 	   However, BAR may have equivalences for which gen_lowpart
6288 	   will produce a simpler value than gen_lowpart applied to
6289 	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6290 	   BAR's equivalences.  If we don't get a simplified form, make
6291 	   the SUBREG.  It will not be used in an equivalence, but will
6292 	   cause two similar assignments to be detected.
6293 
6294 	   Note the loop below will find SUBREG_REG (DEST) since we have
6295 	   already entered SRC and DEST of the SET in the table.  */
6296 
6297 	if (GET_CODE (dest) == SUBREG
6298 	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6299 		 / UNITS_PER_WORD)
6300 		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6301 	    && (GET_MODE_SIZE (GET_MODE (dest))
6302 		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6303 	    && sets[i].src_elt != 0)
6304 	  {
6305 	    enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6306 	    struct table_elt *elt, *classp = 0;
6307 
6308 	    for (elt = sets[i].src_elt->first_same_value; elt;
6309 		 elt = elt->next_same_value)
6310 	      {
6311 		rtx new_src = 0;
6312 		unsigned src_hash;
6313 		struct table_elt *src_elt;
6314 		int byte = 0;
6315 
6316 		/* Ignore invalid entries.  */
6317 		if (!REG_P (elt->exp)
6318 		    && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6319 		  continue;
6320 
6321 		/* We may have already been playing subreg games.  If the
6322 		   mode is already correct for the destination, use it.  */
6323 		if (GET_MODE (elt->exp) == new_mode)
6324 		  new_src = elt->exp;
6325 		else
6326 		  {
6327 		    /* Calculate big endian correction for the SUBREG_BYTE.
6328 		       We have already checked that M1 (GET_MODE (dest))
6329 		       is not narrower than M2 (new_mode).  */
6330 		    if (BYTES_BIG_ENDIAN)
6331 		      byte = (GET_MODE_SIZE (GET_MODE (dest))
6332 			      - GET_MODE_SIZE (new_mode));
6333 
6334 		    new_src = simplify_gen_subreg (new_mode, elt->exp,
6335 					           GET_MODE (dest), byte);
6336 		  }
6337 
6338 		/* The call to simplify_gen_subreg fails if the value
6339 		   is VOIDmode, yet we can't do any simplification, e.g.
6340 		   for EXPR_LISTs denoting function call results.
6341 		   It is invalid to construct a SUBREG with a VOIDmode
6342 		   SUBREG_REG, hence a zero new_src means we can't do
6343 		   this substitution.  */
6344 		if (! new_src)
6345 		  continue;
6346 
6347 		src_hash = HASH (new_src, new_mode);
6348 		src_elt = lookup (new_src, src_hash, new_mode);
6349 
6350 		/* Put the new source in the hash table is if isn't
6351 		   already.  */
6352 		if (src_elt == 0)
6353 		  {
6354 		    if (insert_regs (new_src, classp, 0))
6355 		      {
6356 			rehash_using_reg (new_src);
6357 			src_hash = HASH (new_src, new_mode);
6358 		      }
6359 		    src_elt = insert (new_src, classp, src_hash, new_mode);
6360 		    src_elt->in_memory = elt->in_memory;
6361 		  }
6362 		else if (classp && classp != src_elt->first_same_value)
6363 		  /* Show that two things that we've seen before are
6364 		     actually the same.  */
6365 		  merge_equiv_classes (src_elt, classp);
6366 
6367 		classp = src_elt->first_same_value;
6368 		/* Ignore invalid entries.  */
6369 		while (classp
6370 		       && !REG_P (classp->exp)
6371 		       && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6372 		  classp = classp->next_same_value;
6373 	      }
6374 	  }
6375       }
6376 
6377   /* Special handling for (set REG0 REG1) where REG0 is the
6378      "cheapest", cheaper than REG1.  After cse, REG1 will probably not
6379      be used in the sequel, so (if easily done) change this insn to
6380      (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6381      that computed their value.  Then REG1 will become a dead store
6382      and won't cloud the situation for later optimizations.
6383 
6384      Do not make this change if REG1 is a hard register, because it will
6385      then be used in the sequel and we may be changing a two-operand insn
6386      into a three-operand insn.
6387 
6388      Also do not do this if we are operating on a copy of INSN.
6389 
6390      Also don't do this if INSN ends a libcall; this would cause an unrelated
6391      register to be set in the middle of a libcall, and we then get bad code
6392      if the libcall is deleted.  */
6393 
6394   if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
6395       && NEXT_INSN (PREV_INSN (insn)) == insn
6396       && REG_P (SET_SRC (sets[0].rtl))
6397       && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6398       && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6399     {
6400       int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6401       struct qty_table_elem *src_ent = &qty_table[src_q];
6402 
6403       if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6404 	  && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6405 	{
6406 	  rtx prev = insn;
6407 	  /* Scan for the previous nonnote insn, but stop at a basic
6408 	     block boundary.  */
6409 	  do
6410 	    {
6411 	      prev = PREV_INSN (prev);
6412 	    }
6413 	  while (prev && NOTE_P (prev)
6414 		 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6415 
6416 	  /* Do not swap the registers around if the previous instruction
6417 	     attaches a REG_EQUIV note to REG1.
6418 
6419 	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
6420 	     from the pseudo that originally shadowed an incoming argument
6421 	     to another register.  Some uses of REG_EQUIV might rely on it
6422 	     being attached to REG1 rather than REG2.
6423 
6424 	     This section previously turned the REG_EQUIV into a REG_EQUAL
6425 	     note.  We cannot do that because REG_EQUIV may provide an
6426 	     uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
6427 
6428 	  if (prev != 0 && NONJUMP_INSN_P (prev)
6429 	      && GET_CODE (PATTERN (prev)) == SET
6430 	      && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6431 	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6432 	    {
6433 	      rtx dest = SET_DEST (sets[0].rtl);
6434 	      rtx src = SET_SRC (sets[0].rtl);
6435 	      rtx note;
6436 
6437 	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6438 	      validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6439 	      validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6440 	      apply_change_group ();
6441 
6442 	      /* If INSN has a REG_EQUAL note, and this note mentions
6443 		 REG0, then we must delete it, because the value in
6444 		 REG0 has changed.  If the note's value is REG1, we must
6445 		 also delete it because that is now this insn's dest.  */
6446 	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6447 	      if (note != 0
6448 		  && (reg_mentioned_p (dest, XEXP (note, 0))
6449 		      || rtx_equal_p (src, XEXP (note, 0))))
6450 		remove_note (insn, note);
6451 	    }
6452 	}
6453     }
6454 
6455   /* If this is a conditional jump insn, record any known equivalences due to
6456      the condition being tested.  */
6457 
6458   if (JUMP_P (insn)
6459       && n_sets == 1 && GET_CODE (x) == SET
6460       && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6461     record_jump_equiv (insn, 0);
6462 
6463 #ifdef HAVE_cc0
6464   /* If the previous insn set CC0 and this insn no longer references CC0,
6465      delete the previous insn.  Here we use the fact that nothing expects CC0
6466      to be valid over an insn, which is true until the final pass.  */
6467   if (prev_insn && NONJUMP_INSN_P (prev_insn)
6468       && (tem = single_set (prev_insn)) != 0
6469       && SET_DEST (tem) == cc0_rtx
6470       && ! reg_mentioned_p (cc0_rtx, x))
6471     delete_insn (prev_insn);
6472 
6473   prev_insn_cc0 = this_insn_cc0;
6474   prev_insn_cc0_mode = this_insn_cc0_mode;
6475   prev_insn = insn;
6476 #endif
6477 }
6478 
6479 /* Remove from the hash table all expressions that reference memory.  */
6480 
6481 static void
invalidate_memory(void)6482 invalidate_memory (void)
6483 {
6484   int i;
6485   struct table_elt *p, *next;
6486 
6487   for (i = 0; i < HASH_SIZE; i++)
6488     for (p = table[i]; p; p = next)
6489       {
6490 	next = p->next_same_hash;
6491 	if (p->in_memory)
6492 	  remove_from_table (p, i);
6493       }
6494 }
6495 
6496 /* If ADDR is an address that implicitly affects the stack pointer, return
6497    1 and update the register tables to show the effect.  Else, return 0.  */
6498 
6499 static int
addr_affects_sp_p(rtx addr)6500 addr_affects_sp_p (rtx addr)
6501 {
6502   if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6503       && REG_P (XEXP (addr, 0))
6504       && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6505     {
6506       if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6507 	{
6508 	  REG_TICK (STACK_POINTER_REGNUM)++;
6509 	  /* Is it possible to use a subreg of SP?  */
6510 	  SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6511 	}
6512 
6513       /* This should be *very* rare.  */
6514       if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6515 	invalidate (stack_pointer_rtx, VOIDmode);
6516 
6517       return 1;
6518     }
6519 
6520   return 0;
6521 }
6522 
6523 /* Perform invalidation on the basis of everything about an insn
6524    except for invalidating the actual places that are SET in it.
6525    This includes the places CLOBBERed, and anything that might
6526    alias with something that is SET or CLOBBERed.
6527 
6528    X is the pattern of the insn.  */
6529 
6530 static void
invalidate_from_clobbers(rtx x)6531 invalidate_from_clobbers (rtx x)
6532 {
6533   if (GET_CODE (x) == CLOBBER)
6534     {
6535       rtx ref = XEXP (x, 0);
6536       if (ref)
6537 	{
6538 	  if (REG_P (ref) || GET_CODE (ref) == SUBREG
6539 	      || MEM_P (ref))
6540 	    invalidate (ref, VOIDmode);
6541 	  else if (GET_CODE (ref) == STRICT_LOW_PART
6542 		   || GET_CODE (ref) == ZERO_EXTRACT)
6543 	    invalidate (XEXP (ref, 0), GET_MODE (ref));
6544 	}
6545     }
6546   else if (GET_CODE (x) == PARALLEL)
6547     {
6548       int i;
6549       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6550 	{
6551 	  rtx y = XVECEXP (x, 0, i);
6552 	  if (GET_CODE (y) == CLOBBER)
6553 	    {
6554 	      rtx ref = XEXP (y, 0);
6555 	      if (REG_P (ref) || GET_CODE (ref) == SUBREG
6556 		  || MEM_P (ref))
6557 		invalidate (ref, VOIDmode);
6558 	      else if (GET_CODE (ref) == STRICT_LOW_PART
6559 		       || GET_CODE (ref) == ZERO_EXTRACT)
6560 		invalidate (XEXP (ref, 0), GET_MODE (ref));
6561 	    }
6562 	}
6563     }
6564 }
6565 
6566 /* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
6567    and replace any registers in them with either an equivalent constant
6568    or the canonical form of the register.  If we are inside an address,
6569    only do this if the address remains valid.
6570 
6571    OBJECT is 0 except when within a MEM in which case it is the MEM.
6572 
6573    Return the replacement for X.  */
6574 
6575 static rtx
cse_process_notes(rtx x,rtx object)6576 cse_process_notes (rtx x, rtx object)
6577 {
6578   enum rtx_code code = GET_CODE (x);
6579   const char *fmt = GET_RTX_FORMAT (code);
6580   int i;
6581 
6582   switch (code)
6583     {
6584     case CONST_INT:
6585     case CONST:
6586     case SYMBOL_REF:
6587     case LABEL_REF:
6588     case CONST_DOUBLE:
6589     case CONST_VECTOR:
6590     case PC:
6591     case CC0:
6592     case LO_SUM:
6593       return x;
6594 
6595     case MEM:
6596       validate_change (x, &XEXP (x, 0),
6597 		       cse_process_notes (XEXP (x, 0), x), 0);
6598       return x;
6599 
6600     case EXPR_LIST:
6601     case INSN_LIST:
6602       if (REG_NOTE_KIND (x) == REG_EQUAL)
6603 	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6604       if (XEXP (x, 1))
6605 	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6606       return x;
6607 
6608     case SIGN_EXTEND:
6609     case ZERO_EXTEND:
6610     case SUBREG:
6611       {
6612 	rtx new = cse_process_notes (XEXP (x, 0), object);
6613 	/* We don't substitute VOIDmode constants into these rtx,
6614 	   since they would impede folding.  */
6615 	if (GET_MODE (new) != VOIDmode)
6616 	  validate_change (object, &XEXP (x, 0), new, 0);
6617 	return x;
6618       }
6619 
6620     case REG:
6621       i = REG_QTY (REGNO (x));
6622 
6623       /* Return a constant or a constant register.  */
6624       if (REGNO_QTY_VALID_P (REGNO (x)))
6625 	{
6626 	  struct qty_table_elem *ent = &qty_table[i];
6627 
6628 	  if (ent->const_rtx != NULL_RTX
6629 	      && (CONSTANT_P (ent->const_rtx)
6630 		  || REG_P (ent->const_rtx)))
6631 	    {
6632 	      rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
6633 	      if (new)
6634 		return new;
6635 	    }
6636 	}
6637 
6638       /* Otherwise, canonicalize this register.  */
6639       return canon_reg (x, NULL_RTX);
6640 
6641     default:
6642       break;
6643     }
6644 
6645   for (i = 0; i < GET_RTX_LENGTH (code); i++)
6646     if (fmt[i] == 'e')
6647       validate_change (object, &XEXP (x, i),
6648 		       cse_process_notes (XEXP (x, i), object), 0);
6649 
6650   return x;
6651 }
6652 
6653 /* Process one SET of an insn that was skipped.  We ignore CLOBBERs
6654    since they are done elsewhere.  This function is called via note_stores.  */
6655 
6656 static void
invalidate_skipped_set(rtx dest,rtx set,void * data ATTRIBUTE_UNUSED)6657 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6658 {
6659   enum rtx_code code = GET_CODE (dest);
6660 
6661   if (code == MEM
6662       && ! addr_affects_sp_p (dest)	/* If this is not a stack push ...  */
6663       /* There are times when an address can appear varying and be a PLUS
6664 	 during this scan when it would be a fixed address were we to know
6665 	 the proper equivalences.  So invalidate all memory if there is
6666 	 a BLKmode or nonscalar memory reference or a reference to a
6667 	 variable address.  */
6668       && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6669 	  || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6670     {
6671       invalidate_memory ();
6672       return;
6673     }
6674 
6675   if (GET_CODE (set) == CLOBBER
6676       || CC0_P (dest)
6677       || dest == pc_rtx)
6678     return;
6679 
6680   if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6681     invalidate (XEXP (dest, 0), GET_MODE (dest));
6682   else if (code == REG || code == SUBREG || code == MEM)
6683     invalidate (dest, VOIDmode);
6684 }
6685 
6686 /* Invalidate all insns from START up to the end of the function or the
6687    next label.  This called when we wish to CSE around a block that is
6688    conditionally executed.  */
6689 
6690 static void
invalidate_skipped_block(rtx start)6691 invalidate_skipped_block (rtx start)
6692 {
6693   rtx insn;
6694 
6695   for (insn = start; insn && !LABEL_P (insn);
6696        insn = NEXT_INSN (insn))
6697     {
6698       if (! INSN_P (insn))
6699 	continue;
6700 
6701       if (CALL_P (insn))
6702 	{
6703 	  if (! CONST_OR_PURE_CALL_P (insn))
6704 	    invalidate_memory ();
6705 	  invalidate_for_call ();
6706 	}
6707 
6708       invalidate_from_clobbers (PATTERN (insn));
6709       note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6710     }
6711 }
6712 
6713 /* Find the end of INSN's basic block and return its range,
6714    the total number of SETs in all the insns of the block, the last insn of the
6715    block, and the branch path.
6716 
6717    The branch path indicates which branches should be followed.  If a nonzero
6718    path size is specified, the block should be rescanned and a different set
6719    of branches will be taken.  The branch path is only used if
6720    FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6721 
6722    DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6723    used to describe the block.  It is filled in with the information about
6724    the current block.  The incoming structure's branch path, if any, is used
6725    to construct the output branch path.  */
6726 
6727 static void
cse_end_of_basic_block(rtx insn,struct cse_basic_block_data * data,int follow_jumps,int skip_blocks)6728 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6729 			int follow_jumps, int skip_blocks)
6730 {
6731   rtx p = insn, q;
6732   int nsets = 0;
6733   int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6734   rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6735   int path_size = data->path_size;
6736   int path_entry = 0;
6737   int i;
6738 
6739   /* Update the previous branch path, if any.  If the last branch was
6740      previously PATH_TAKEN, mark it PATH_NOT_TAKEN.
6741      If it was previously PATH_NOT_TAKEN,
6742      shorten the path by one and look at the previous branch.  We know that
6743      at least one branch must have been taken if PATH_SIZE is nonzero.  */
6744   while (path_size > 0)
6745     {
6746       if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
6747 	{
6748 	  data->path[path_size - 1].status = PATH_NOT_TAKEN;
6749 	  break;
6750 	}
6751       else
6752 	path_size--;
6753     }
6754 
6755   /* If the first instruction is marked with QImode, that means we've
6756      already processed this block.  Our caller will look at DATA->LAST
6757      to figure out where to go next.  We want to return the next block
6758      in the instruction stream, not some branched-to block somewhere
6759      else.  We accomplish this by pretending our called forbid us to
6760      follow jumps, or skip blocks.  */
6761   if (GET_MODE (insn) == QImode)
6762     follow_jumps = skip_blocks = 0;
6763 
6764   /* Scan to end of this basic block.  */
6765   while (p && !LABEL_P (p))
6766     {
6767       /* Don't cse over a call to setjmp; on some machines (eg VAX)
6768 	 the regs restored by the longjmp come from
6769 	 a later time than the setjmp.  */
6770       if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
6771 	  && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6772 	break;
6773 
6774       /* A PARALLEL can have lots of SETs in it,
6775 	 especially if it is really an ASM_OPERANDS.  */
6776       if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6777 	nsets += XVECLEN (PATTERN (p), 0);
6778       else if (!NOTE_P (p))
6779 	nsets += 1;
6780 
6781       /* Ignore insns made by CSE; they cannot affect the boundaries of
6782 	 the basic block.  */
6783 
6784       if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6785 	high_cuid = INSN_CUID (p);
6786       if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6787 	low_cuid = INSN_CUID (p);
6788 
6789       /* See if this insn is in our branch path.  If it is and we are to
6790 	 take it, do so.  */
6791       if (path_entry < path_size && data->path[path_entry].branch == p)
6792 	{
6793 	  if (data->path[path_entry].status != PATH_NOT_TAKEN)
6794 	    p = JUMP_LABEL (p);
6795 
6796 	  /* Point to next entry in path, if any.  */
6797 	  path_entry++;
6798 	}
6799 
6800       /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6801 	 was specified, we haven't reached our maximum path length, there are
6802 	 insns following the target of the jump, this is the only use of the
6803 	 jump label, and the target label is preceded by a BARRIER.
6804 
6805 	 Alternatively, we can follow the jump if it branches around a
6806 	 block of code and there are no other branches into the block.
6807 	 In this case invalidate_skipped_block will be called to invalidate any
6808 	 registers set in the block when following the jump.  */
6809 
6810       else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6811 	       && JUMP_P (p)
6812 	       && GET_CODE (PATTERN (p)) == SET
6813 	       && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6814 	       && JUMP_LABEL (p) != 0
6815 	       && LABEL_NUSES (JUMP_LABEL (p)) == 1
6816 	       && NEXT_INSN (JUMP_LABEL (p)) != 0)
6817 	{
6818 	  for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6819 	    if ((!NOTE_P (q)
6820 		 || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
6821 		     && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6822 		&& (!LABEL_P (q) || LABEL_NUSES (q) != 0))
6823 	      break;
6824 
6825 	  /* If we ran into a BARRIER, this code is an extension of the
6826 	     basic block when the branch is taken.  */
6827 	  if (follow_jumps && q != 0 && BARRIER_P (q))
6828 	    {
6829 	      /* Don't allow ourself to keep walking around an
6830 		 always-executed loop.  */
6831 	      if (next_real_insn (q) == next)
6832 		{
6833 		  p = NEXT_INSN (p);
6834 		  continue;
6835 		}
6836 
6837 	      /* Similarly, don't put a branch in our path more than once.  */
6838 	      for (i = 0; i < path_entry; i++)
6839 		if (data->path[i].branch == p)
6840 		  break;
6841 
6842 	      if (i != path_entry)
6843 		break;
6844 
6845 	      data->path[path_entry].branch = p;
6846 	      data->path[path_entry++].status = PATH_TAKEN;
6847 
6848 	      /* This branch now ends our path.  It was possible that we
6849 		 didn't see this branch the last time around (when the
6850 		 insn in front of the target was a JUMP_INSN that was
6851 		 turned into a no-op).  */
6852 	      path_size = path_entry;
6853 
6854 	      p = JUMP_LABEL (p);
6855 	      /* Mark block so we won't scan it again later.  */
6856 	      PUT_MODE (NEXT_INSN (p), QImode);
6857 	    }
6858 	  /* Detect a branch around a block of code.  */
6859 	  else if (skip_blocks && q != 0 && !LABEL_P (q))
6860 	    {
6861 	      rtx tmp;
6862 
6863 	      if (next_real_insn (q) == next)
6864 		{
6865 		  p = NEXT_INSN (p);
6866 		  continue;
6867 		}
6868 
6869 	      for (i = 0; i < path_entry; i++)
6870 		if (data->path[i].branch == p)
6871 		  break;
6872 
6873 	      if (i != path_entry)
6874 		break;
6875 
6876 	      /* This is no_labels_between_p (p, q) with an added check for
6877 		 reaching the end of a function (in case Q precedes P).  */
6878 	      for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6879 		if (LABEL_P (tmp))
6880 		  break;
6881 
6882 	      if (tmp == q)
6883 		{
6884 		  data->path[path_entry].branch = p;
6885 		  data->path[path_entry++].status = PATH_AROUND;
6886 
6887 		  path_size = path_entry;
6888 
6889 		  p = JUMP_LABEL (p);
6890 		  /* Mark block so we won't scan it again later.  */
6891 		  PUT_MODE (NEXT_INSN (p), QImode);
6892 		}
6893 	    }
6894 	}
6895       p = NEXT_INSN (p);
6896     }
6897 
6898   data->low_cuid = low_cuid;
6899   data->high_cuid = high_cuid;
6900   data->nsets = nsets;
6901   data->last = p;
6902 
6903   /* If all jumps in the path are not taken, set our path length to zero
6904      so a rescan won't be done.  */
6905   for (i = path_size - 1; i >= 0; i--)
6906     if (data->path[i].status != PATH_NOT_TAKEN)
6907       break;
6908 
6909   if (i == -1)
6910     data->path_size = 0;
6911   else
6912     data->path_size = path_size;
6913 
6914   /* End the current branch path.  */
6915   data->path[path_size].branch = 0;
6916 }
6917 
6918 /* Perform cse on the instructions of a function.
6919    F is the first instruction.
6920    NREGS is one plus the highest pseudo-reg number used in the instruction.
6921 
6922    Returns 1 if jump_optimize should be redone due to simplifications
6923    in conditional jump instructions.  */
6924 
6925 int
cse_main(rtx f,int nregs)6926 cse_main (rtx f, int nregs)
6927 {
6928   struct cse_basic_block_data val;
6929   rtx insn = f;
6930   int i;
6931 
6932   init_cse_reg_info (nregs);
6933 
6934   val.path = XNEWVEC (struct branch_path, PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6935 
6936   cse_jumps_altered = 0;
6937   recorded_label_ref = 0;
6938   constant_pool_entries_cost = 0;
6939   constant_pool_entries_regcost = 0;
6940   val.path_size = 0;
6941   rtl_hooks = cse_rtl_hooks;
6942 
6943   init_recog ();
6944   init_alias_analysis ();
6945 
6946   reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
6947 
6948   /* Find the largest uid.  */
6949 
6950   max_uid = get_max_uid ();
6951   uid_cuid = XCNEWVEC (int, max_uid + 1);
6952 
6953   /* Compute the mapping from uids to cuids.
6954      CUIDs are numbers assigned to insns, like uids,
6955      except that cuids increase monotonically through the code.
6956      Don't assign cuids to line-number NOTEs, so that the distance in cuids
6957      between two insns is not affected by -g.  */
6958 
6959   for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6960     {
6961       if (!NOTE_P (insn)
6962 	  || NOTE_LINE_NUMBER (insn) < 0)
6963 	INSN_CUID (insn) = ++i;
6964       else
6965 	/* Give a line number note the same cuid as preceding insn.  */
6966 	INSN_CUID (insn) = i;
6967     }
6968 
6969   /* Loop over basic blocks.
6970      Compute the maximum number of qty's needed for each basic block
6971      (which is 2 for each SET).  */
6972   insn = f;
6973   while (insn)
6974     {
6975       cse_altered = 0;
6976       cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
6977 			      flag_cse_skip_blocks);
6978 
6979       /* If this basic block was already processed or has no sets, skip it.  */
6980       if (val.nsets == 0 || GET_MODE (insn) == QImode)
6981 	{
6982 	  PUT_MODE (insn, VOIDmode);
6983 	  insn = (val.last ? NEXT_INSN (val.last) : 0);
6984 	  val.path_size = 0;
6985 	  continue;
6986 	}
6987 
6988       cse_basic_block_start = val.low_cuid;
6989       cse_basic_block_end = val.high_cuid;
6990       max_qty = val.nsets * 2;
6991 
6992       if (dump_file)
6993 	fprintf (dump_file, ";; Processing block from %d to %d, %d sets.\n",
6994 		 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6995 		 val.nsets);
6996 
6997       /* Make MAX_QTY bigger to give us room to optimize
6998 	 past the end of this basic block, if that should prove useful.  */
6999       if (max_qty < 500)
7000 	max_qty = 500;
7001 
7002       /* If this basic block is being extended by following certain jumps,
7003          (see `cse_end_of_basic_block'), we reprocess the code from the start.
7004          Otherwise, we start after this basic block.  */
7005       if (val.path_size > 0)
7006 	cse_basic_block (insn, val.last, val.path);
7007       else
7008 	{
7009 	  int old_cse_jumps_altered = cse_jumps_altered;
7010 	  rtx temp;
7011 
7012 	  /* When cse changes a conditional jump to an unconditional
7013 	     jump, we want to reprocess the block, since it will give
7014 	     us a new branch path to investigate.  */
7015 	  cse_jumps_altered = 0;
7016 	  temp = cse_basic_block (insn, val.last, val.path);
7017 	  if (cse_jumps_altered == 0
7018 	      || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7019 	    insn = temp;
7020 
7021 	  cse_jumps_altered |= old_cse_jumps_altered;
7022 	}
7023 
7024       if (cse_altered)
7025 	ggc_collect ();
7026 
7027 #ifdef USE_C_ALLOCA
7028       alloca (0);
7029 #endif
7030     }
7031 
7032   /* Clean up.  */
7033   end_alias_analysis ();
7034   free (uid_cuid);
7035   free (reg_eqv_table);
7036   free (val.path);
7037   rtl_hooks = general_rtl_hooks;
7038 
7039   return cse_jumps_altered || recorded_label_ref;
7040 }
7041 
7042 /* Process a single basic block.  FROM and TO and the limits of the basic
7043    block.  NEXT_BRANCH points to the branch path when following jumps or
7044    a null path when not following jumps.  */
7045 
7046 static rtx
cse_basic_block(rtx from,rtx to,struct branch_path * next_branch)7047 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
7048 {
7049   rtx insn;
7050   int to_usage = 0;
7051   rtx libcall_insn = NULL_RTX;
7052   int num_insns = 0;
7053   int no_conflict = 0;
7054 
7055   /* Allocate the space needed by qty_table.  */
7056   qty_table = XNEWVEC (struct qty_table_elem, max_qty);
7057 
7058   new_basic_block ();
7059 
7060   /* TO might be a label.  If so, protect it from being deleted.  */
7061   if (to != 0 && LABEL_P (to))
7062     ++LABEL_NUSES (to);
7063 
7064   for (insn = from; insn != to; insn = NEXT_INSN (insn))
7065     {
7066       enum rtx_code code = GET_CODE (insn);
7067 
7068       /* If we have processed 1,000 insns, flush the hash table to
7069 	 avoid extreme quadratic behavior.  We must not include NOTEs
7070 	 in the count since there may be more of them when generating
7071 	 debugging information.  If we clear the table at different
7072 	 times, code generated with -g -O might be different than code
7073 	 generated with -O but not -g.
7074 
7075 	 ??? This is a real kludge and needs to be done some other way.
7076 	 Perhaps for 2.9.  */
7077       if (code != NOTE && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
7078 	{
7079 	  flush_hash_table ();
7080 	  num_insns = 0;
7081 	}
7082 
7083       /* See if this is a branch that is part of the path.  If so, and it is
7084 	 to be taken, do so.  */
7085       if (next_branch->branch == insn)
7086 	{
7087 	  enum taken status = next_branch++->status;
7088 	  if (status != PATH_NOT_TAKEN)
7089 	    {
7090 	      if (status == PATH_TAKEN)
7091 		record_jump_equiv (insn, 1);
7092 	      else
7093 		invalidate_skipped_block (NEXT_INSN (insn));
7094 
7095 	      /* Set the last insn as the jump insn; it doesn't affect cc0.
7096 		 Then follow this branch.  */
7097 #ifdef HAVE_cc0
7098 	      prev_insn_cc0 = 0;
7099 	      prev_insn = insn;
7100 #endif
7101 	      insn = JUMP_LABEL (insn);
7102 	      continue;
7103 	    }
7104 	}
7105 
7106       if (GET_MODE (insn) == QImode)
7107 	PUT_MODE (insn, VOIDmode);
7108 
7109       if (GET_RTX_CLASS (code) == RTX_INSN)
7110 	{
7111 	  rtx p;
7112 
7113 	  /* Process notes first so we have all notes in canonical forms when
7114 	     looking for duplicate operations.  */
7115 
7116 	  if (REG_NOTES (insn))
7117 	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7118 
7119 	  /* Track when we are inside in LIBCALL block.  Inside such a block,
7120 	     we do not want to record destinations.  The last insn of a
7121 	     LIBCALL block is not considered to be part of the block, since
7122 	     its destination is the result of the block and hence should be
7123 	     recorded.  */
7124 
7125 	  if (REG_NOTES (insn) != 0)
7126 	    {
7127 	      if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7128 		libcall_insn = XEXP (p, 0);
7129 	      else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7130 		{
7131 		  /* Keep libcall_insn for the last SET insn of a no-conflict
7132 		     block to prevent changing the destination.  */
7133 		  if (! no_conflict)
7134 		    libcall_insn = 0;
7135 		  else
7136 		    no_conflict = -1;
7137 		}
7138 	      else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
7139 		no_conflict = 1;
7140 	    }
7141 
7142 	  cse_insn (insn, libcall_insn);
7143 
7144 	  if (no_conflict == -1)
7145 	    {
7146 	      libcall_insn = 0;
7147 	      no_conflict = 0;
7148 	    }
7149 
7150 	  /* If we haven't already found an insn where we added a LABEL_REF,
7151 	     check this one.  */
7152 	  if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
7153 	      && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7154 			       (void *) insn))
7155 	    recorded_label_ref = 1;
7156 	}
7157 
7158       /* If INSN is now an unconditional jump, skip to the end of our
7159 	 basic block by pretending that we just did the last insn in the
7160 	 basic block.  If we are jumping to the end of our block, show
7161 	 that we can have one usage of TO.  */
7162 
7163       if (any_uncondjump_p (insn))
7164 	{
7165 	  if (to == 0)
7166 	    {
7167 	      free (qty_table);
7168 	      return 0;
7169 	    }
7170 
7171 	  if (JUMP_LABEL (insn) == to)
7172 	    to_usage = 1;
7173 
7174 	  /* Maybe TO was deleted because the jump is unconditional.
7175 	     If so, there is nothing left in this basic block.  */
7176 	  /* ??? Perhaps it would be smarter to set TO
7177 	     to whatever follows this insn,
7178 	     and pretend the basic block had always ended here.  */
7179 	  if (INSN_DELETED_P (to))
7180 	    break;
7181 
7182 	  insn = PREV_INSN (to);
7183 	}
7184 
7185       /* See if it is ok to keep on going past the label
7186 	 which used to end our basic block.  Remember that we incremented
7187 	 the count of that label, so we decrement it here.  If we made
7188 	 a jump unconditional, TO_USAGE will be one; in that case, we don't
7189 	 want to count the use in that jump.  */
7190 
7191       if (to != 0 && NEXT_INSN (insn) == to
7192 	  && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
7193 	{
7194 	  struct cse_basic_block_data val;
7195 	  rtx prev;
7196 
7197 	  insn = NEXT_INSN (to);
7198 
7199 	  /* If TO was the last insn in the function, we are done.  */
7200 	  if (insn == 0)
7201 	    {
7202 	      free (qty_table);
7203 	      return 0;
7204 	    }
7205 
7206 	  /* If TO was preceded by a BARRIER we are done with this block
7207 	     because it has no continuation.  */
7208 	  prev = prev_nonnote_insn (to);
7209 	  if (prev && BARRIER_P (prev))
7210 	    {
7211 	      free (qty_table);
7212 	      return insn;
7213 	    }
7214 
7215 	  /* Find the end of the following block.  Note that we won't be
7216 	     following branches in this case.  */
7217 	  to_usage = 0;
7218 	  val.path_size = 0;
7219 	  val.path = XNEWVEC (struct branch_path, PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7220 	  cse_end_of_basic_block (insn, &val, 0, 0);
7221 	  free (val.path);
7222 
7223 	  /* If the tables we allocated have enough space left
7224 	     to handle all the SETs in the next basic block,
7225 	     continue through it.  Otherwise, return,
7226 	     and that block will be scanned individually.  */
7227 	  if (val.nsets * 2 + next_qty > max_qty)
7228 	    break;
7229 
7230 	  cse_basic_block_start = val.low_cuid;
7231 	  cse_basic_block_end = val.high_cuid;
7232 	  to = val.last;
7233 
7234 	  /* Prevent TO from being deleted if it is a label.  */
7235 	  if (to != 0 && LABEL_P (to))
7236 	    ++LABEL_NUSES (to);
7237 
7238 	  /* Back up so we process the first insn in the extension.  */
7239 	  insn = PREV_INSN (insn);
7240 	}
7241     }
7242 
7243   gcc_assert (next_qty <= max_qty);
7244 
7245   free (qty_table);
7246 
7247   return to ? NEXT_INSN (to) : 0;
7248 }
7249 
7250 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7251    there isn't a REG_LABEL note.  Return one if so.  DATA is the insn.  */
7252 
7253 static int
check_for_label_ref(rtx * rtl,void * data)7254 check_for_label_ref (rtx *rtl, void *data)
7255 {
7256   rtx insn = (rtx) data;
7257 
7258   /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7259      we must rerun jump since it needs to place the note.  If this is a
7260      LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7261      since no REG_LABEL will be added.  */
7262   return (GET_CODE (*rtl) == LABEL_REF
7263 	  && ! LABEL_REF_NONLOCAL_P (*rtl)
7264 	  && LABEL_P (XEXP (*rtl, 0))
7265 	  && INSN_UID (XEXP (*rtl, 0)) != 0
7266 	  && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7267 }
7268 
7269 /* Count the number of times registers are used (not set) in X.
7270    COUNTS is an array in which we accumulate the count, INCR is how much
7271    we count each register usage.
7272 
7273    Don't count a usage of DEST, which is the SET_DEST of a SET which
7274    contains X in its SET_SRC.  This is because such a SET does not
7275    modify the liveness of DEST.
7276    DEST is set to pc_rtx for a trapping insn, which means that we must count
7277    uses of a SET_DEST regardless because the insn can't be deleted here.  */
7278 
7279 static void
count_reg_usage(rtx x,int * counts,rtx dest,int incr)7280 count_reg_usage (rtx x, int *counts, rtx dest, int incr)
7281 {
7282   enum rtx_code code;
7283   rtx note;
7284   const char *fmt;
7285   int i, j;
7286 
7287   if (x == 0)
7288     return;
7289 
7290   switch (code = GET_CODE (x))
7291     {
7292     case REG:
7293       if (x != dest)
7294 	counts[REGNO (x)] += incr;
7295       return;
7296 
7297     case PC:
7298     case CC0:
7299     case CONST:
7300     case CONST_INT:
7301     case CONST_DOUBLE:
7302     case CONST_VECTOR:
7303     case SYMBOL_REF:
7304     case LABEL_REF:
7305       return;
7306 
7307     case CLOBBER:
7308       /* If we are clobbering a MEM, mark any registers inside the address
7309          as being used.  */
7310       if (MEM_P (XEXP (x, 0)))
7311 	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7312       return;
7313 
7314     case SET:
7315       /* Unless we are setting a REG, count everything in SET_DEST.  */
7316       if (!REG_P (SET_DEST (x)))
7317 	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7318       count_reg_usage (SET_SRC (x), counts,
7319 		       dest ? dest : SET_DEST (x),
7320 		       incr);
7321       return;
7322 
7323     case CALL_INSN:
7324     case INSN:
7325     case JUMP_INSN:
7326     /* We expect dest to be NULL_RTX here.  If the insn may trap, mark
7327        this fact by setting DEST to pc_rtx.  */
7328       if (flag_non_call_exceptions && may_trap_p (PATTERN (x)))
7329 	dest = pc_rtx;
7330       if (code == CALL_INSN)
7331 	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
7332       count_reg_usage (PATTERN (x), counts, dest, incr);
7333 
7334       /* Things used in a REG_EQUAL note aren't dead since loop may try to
7335 	 use them.  */
7336 
7337       note = find_reg_equal_equiv_note (x);
7338       if (note)
7339 	{
7340 	  rtx eqv = XEXP (note, 0);
7341 
7342 	  if (GET_CODE (eqv) == EXPR_LIST)
7343 	  /* This REG_EQUAL note describes the result of a function call.
7344 	     Process all the arguments.  */
7345 	    do
7346 	      {
7347 		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
7348 		eqv = XEXP (eqv, 1);
7349 	      }
7350 	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
7351 	  else
7352 	    count_reg_usage (eqv, counts, dest, incr);
7353 	}
7354       return;
7355 
7356     case EXPR_LIST:
7357       if (REG_NOTE_KIND (x) == REG_EQUAL
7358 	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
7359 	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7360 	     involving registers in the address.  */
7361 	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
7362 	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7363 
7364       count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7365       return;
7366 
7367     case ASM_OPERANDS:
7368       /* If the asm is volatile, then this insn cannot be deleted,
7369 	 and so the inputs *must* be live.  */
7370       if (MEM_VOLATILE_P (x))
7371 	dest = NULL_RTX;
7372       /* Iterate over just the inputs, not the constraints as well.  */
7373       for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7374 	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
7375       return;
7376 
7377     case INSN_LIST:
7378       gcc_unreachable ();
7379 
7380     default:
7381       break;
7382     }
7383 
7384   fmt = GET_RTX_FORMAT (code);
7385   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7386     {
7387       if (fmt[i] == 'e')
7388 	count_reg_usage (XEXP (x, i), counts, dest, incr);
7389       else if (fmt[i] == 'E')
7390 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7391 	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7392     }
7393 }
7394 
7395 /* Return true if set is live.  */
7396 static bool
set_live_p(rtx set,rtx insn ATTRIBUTE_UNUSED,int * counts)7397 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
7398 	    int *counts)
7399 {
7400 #ifdef HAVE_cc0
7401   rtx tem;
7402 #endif
7403 
7404   if (set_noop_p (set))
7405     ;
7406 
7407 #ifdef HAVE_cc0
7408   else if (GET_CODE (SET_DEST (set)) == CC0
7409 	   && !side_effects_p (SET_SRC (set))
7410 	   && ((tem = next_nonnote_insn (insn)) == 0
7411 	       || !INSN_P (tem)
7412 	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7413     return false;
7414 #endif
7415   else if (!REG_P (SET_DEST (set))
7416 	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7417 	   || counts[REGNO (SET_DEST (set))] != 0
7418 	   || side_effects_p (SET_SRC (set)))
7419     return true;
7420   return false;
7421 }
7422 
7423 /* Return true if insn is live.  */
7424 
7425 static bool
insn_live_p(rtx insn,int * counts)7426 insn_live_p (rtx insn, int *counts)
7427 {
7428   int i;
7429   if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7430     return true;
7431   else if (GET_CODE (PATTERN (insn)) == SET)
7432     return set_live_p (PATTERN (insn), insn, counts);
7433   else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7434     {
7435       for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7436 	{
7437 	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7438 
7439 	  if (GET_CODE (elt) == SET)
7440 	    {
7441 	      if (set_live_p (elt, insn, counts))
7442 		return true;
7443 	    }
7444 	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7445 	    return true;
7446 	}
7447       return false;
7448     }
7449   else
7450     return true;
7451 }
7452 
7453 /* Return true if libcall is dead as a whole.  */
7454 
7455 static bool
dead_libcall_p(rtx insn,int * counts)7456 dead_libcall_p (rtx insn, int *counts)
7457 {
7458   rtx note, set, new;
7459 
7460   /* See if there's a REG_EQUAL note on this insn and try to
7461      replace the source with the REG_EQUAL expression.
7462 
7463      We assume that insns with REG_RETVALs can only be reg->reg
7464      copies at this point.  */
7465   note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7466   if (!note)
7467     return false;
7468 
7469   set = single_set (insn);
7470   if (!set)
7471     return false;
7472 
7473   new = simplify_rtx (XEXP (note, 0));
7474   if (!new)
7475     new = XEXP (note, 0);
7476 
7477   /* While changing insn, we must update the counts accordingly.  */
7478   count_reg_usage (insn, counts, NULL_RTX, -1);
7479 
7480   if (validate_change (insn, &SET_SRC (set), new, 0))
7481     {
7482       count_reg_usage (insn, counts, NULL_RTX, 1);
7483       remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7484       remove_note (insn, note);
7485       return true;
7486     }
7487 
7488   if (CONSTANT_P (new))
7489     {
7490       new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7491       if (new && validate_change (insn, &SET_SRC (set), new, 0))
7492 	{
7493 	  count_reg_usage (insn, counts, NULL_RTX, 1);
7494 	  remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7495 	  remove_note (insn, note);
7496 	  return true;
7497 	}
7498     }
7499 
7500   count_reg_usage (insn, counts, NULL_RTX, 1);
7501   return false;
7502 }
7503 
7504 /* Scan all the insns and delete any that are dead; i.e., they store a register
7505    that is never used or they copy a register to itself.
7506 
7507    This is used to remove insns made obviously dead by cse, loop or other
7508    optimizations.  It improves the heuristics in loop since it won't try to
7509    move dead invariants out of loops or make givs for dead quantities.  The
7510    remaining passes of the compilation are also sped up.  */
7511 
7512 int
delete_trivially_dead_insns(rtx insns,int nreg)7513 delete_trivially_dead_insns (rtx insns, int nreg)
7514 {
7515   int *counts;
7516   rtx insn, prev;
7517   int in_libcall = 0, dead_libcall = 0;
7518   int ndead = 0;
7519 
7520   timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7521   /* First count the number of times each register is used.  */
7522   counts = XCNEWVEC (int, nreg);
7523   for (insn = insns; insn; insn = NEXT_INSN (insn))
7524     if (INSN_P (insn))
7525       count_reg_usage (insn, counts, NULL_RTX, 1);
7526 
7527   /* Go from the last insn to the first and delete insns that only set unused
7528      registers or copy a register to itself.  As we delete an insn, remove
7529      usage counts for registers it uses.
7530 
7531      The first jump optimization pass may leave a real insn as the last
7532      insn in the function.   We must not skip that insn or we may end
7533      up deleting code that is not really dead.  */
7534   for (insn = get_last_insn (); insn; insn = prev)
7535     {
7536       int live_insn = 0;
7537 
7538       prev = PREV_INSN (insn);
7539       if (!INSN_P (insn))
7540 	continue;
7541 
7542       /* Don't delete any insns that are part of a libcall block unless
7543 	 we can delete the whole libcall block.
7544 
7545 	 Flow or loop might get confused if we did that.  Remember
7546 	 that we are scanning backwards.  */
7547       if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7548 	{
7549 	  in_libcall = 1;
7550 	  live_insn = 1;
7551 	  dead_libcall = dead_libcall_p (insn, counts);
7552 	}
7553       else if (in_libcall)
7554 	live_insn = ! dead_libcall;
7555       else
7556 	live_insn = insn_live_p (insn, counts);
7557 
7558       /* If this is a dead insn, delete it and show registers in it aren't
7559 	 being used.  */
7560 
7561       if (! live_insn)
7562 	{
7563 	  count_reg_usage (insn, counts, NULL_RTX, -1);
7564 	  delete_insn_and_edges (insn);
7565 	  ndead++;
7566 	}
7567 
7568       if (in_libcall && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7569 	{
7570 	  in_libcall = 0;
7571 	  dead_libcall = 0;
7572 	}
7573     }
7574 
7575   if (dump_file && ndead)
7576     fprintf (dump_file, "Deleted %i trivially dead insns\n",
7577 	     ndead);
7578   /* Clean up.  */
7579   free (counts);
7580   timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7581   return ndead;
7582 }
7583 
7584 /* This function is called via for_each_rtx.  The argument, NEWREG, is
7585    a condition code register with the desired mode.  If we are looking
7586    at the same register in a different mode, replace it with
7587    NEWREG.  */
7588 
7589 static int
cse_change_cc_mode(rtx * loc,void * data)7590 cse_change_cc_mode (rtx *loc, void *data)
7591 {
7592   struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
7593 
7594   if (*loc
7595       && REG_P (*loc)
7596       && REGNO (*loc) == REGNO (args->newreg)
7597       && GET_MODE (*loc) != GET_MODE (args->newreg))
7598     {
7599       validate_change (args->insn, loc, args->newreg, 1);
7600 
7601       return -1;
7602     }
7603   return 0;
7604 }
7605 
7606 /* Change the mode of any reference to the register REGNO (NEWREG) to
7607    GET_MODE (NEWREG) in INSN.  */
7608 
7609 static void
cse_change_cc_mode_insn(rtx insn,rtx newreg)7610 cse_change_cc_mode_insn (rtx insn, rtx newreg)
7611 {
7612   struct change_cc_mode_args args;
7613   int success;
7614 
7615   if (!INSN_P (insn))
7616     return;
7617 
7618   args.insn = insn;
7619   args.newreg = newreg;
7620 
7621   for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
7622   for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
7623 
7624   /* If the following assertion was triggered, there is most probably
7625      something wrong with the cc_modes_compatible back end function.
7626      CC modes only can be considered compatible if the insn - with the mode
7627      replaced by any of the compatible modes - can still be recognized.  */
7628   success = apply_change_group ();
7629   gcc_assert (success);
7630 }
7631 
7632 /* Change the mode of any reference to the register REGNO (NEWREG) to
7633    GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
7634    any instruction which modifies NEWREG.  */
7635 
7636 static void
cse_change_cc_mode_insns(rtx start,rtx end,rtx newreg)7637 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7638 {
7639   rtx insn;
7640 
7641   for (insn = start; insn != end; insn = NEXT_INSN (insn))
7642     {
7643       if (! INSN_P (insn))
7644 	continue;
7645 
7646       if (reg_set_p (newreg, insn))
7647 	return;
7648 
7649       cse_change_cc_mode_insn (insn, newreg);
7650     }
7651 }
7652 
7653 /* BB is a basic block which finishes with CC_REG as a condition code
7654    register which is set to CC_SRC.  Look through the successors of BB
7655    to find blocks which have a single predecessor (i.e., this one),
7656    and look through those blocks for an assignment to CC_REG which is
7657    equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
7658    permitted to change the mode of CC_SRC to a compatible mode.  This
7659    returns VOIDmode if no equivalent assignments were found.
7660    Otherwise it returns the mode which CC_SRC should wind up with.
7661 
7662    The main complexity in this function is handling the mode issues.
7663    We may have more than one duplicate which we can eliminate, and we
7664    try to find a mode which will work for multiple duplicates.  */
7665 
7666 static enum machine_mode
cse_cc_succs(basic_block bb,rtx cc_reg,rtx cc_src,bool can_change_mode)7667 cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7668 {
7669   bool found_equiv;
7670   enum machine_mode mode;
7671   unsigned int insn_count;
7672   edge e;
7673   rtx insns[2];
7674   enum machine_mode modes[2];
7675   rtx last_insns[2];
7676   unsigned int i;
7677   rtx newreg;
7678   edge_iterator ei;
7679 
7680   /* We expect to have two successors.  Look at both before picking
7681      the final mode for the comparison.  If we have more successors
7682      (i.e., some sort of table jump, although that seems unlikely),
7683      then we require all beyond the first two to use the same
7684      mode.  */
7685 
7686   found_equiv = false;
7687   mode = GET_MODE (cc_src);
7688   insn_count = 0;
7689   FOR_EACH_EDGE (e, ei, bb->succs)
7690     {
7691       rtx insn;
7692       rtx end;
7693 
7694       if (e->flags & EDGE_COMPLEX)
7695 	continue;
7696 
7697       if (EDGE_COUNT (e->dest->preds) != 1
7698 	  || e->dest == EXIT_BLOCK_PTR)
7699 	continue;
7700 
7701       end = NEXT_INSN (BB_END (e->dest));
7702       for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7703 	{
7704 	  rtx set;
7705 
7706 	  if (! INSN_P (insn))
7707 	    continue;
7708 
7709 	  /* If CC_SRC is modified, we have to stop looking for
7710 	     something which uses it.  */
7711 	  if (modified_in_p (cc_src, insn))
7712 	    break;
7713 
7714 	  /* Check whether INSN sets CC_REG to CC_SRC.  */
7715 	  set = single_set (insn);
7716 	  if (set
7717 	      && REG_P (SET_DEST (set))
7718 	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7719 	    {
7720 	      bool found;
7721 	      enum machine_mode set_mode;
7722 	      enum machine_mode comp_mode;
7723 
7724 	      found = false;
7725 	      set_mode = GET_MODE (SET_SRC (set));
7726 	      comp_mode = set_mode;
7727 	      if (rtx_equal_p (cc_src, SET_SRC (set)))
7728 		found = true;
7729 	      else if (GET_CODE (cc_src) == COMPARE
7730 		       && GET_CODE (SET_SRC (set)) == COMPARE
7731 		       && mode != set_mode
7732 		       && rtx_equal_p (XEXP (cc_src, 0),
7733 				       XEXP (SET_SRC (set), 0))
7734 		       && rtx_equal_p (XEXP (cc_src, 1),
7735 				       XEXP (SET_SRC (set), 1)))
7736 
7737 		{
7738 		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7739 		  if (comp_mode != VOIDmode
7740 		      && (can_change_mode || comp_mode == mode))
7741 		    found = true;
7742 		}
7743 
7744 	      if (found)
7745 		{
7746 		  found_equiv = true;
7747 		  if (insn_count < ARRAY_SIZE (insns))
7748 		    {
7749 		      insns[insn_count] = insn;
7750 		      modes[insn_count] = set_mode;
7751 		      last_insns[insn_count] = end;
7752 		      ++insn_count;
7753 
7754 		      if (mode != comp_mode)
7755 			{
7756 			  gcc_assert (can_change_mode);
7757 			  mode = comp_mode;
7758 
7759 			  /* The modified insn will be re-recognized later.  */
7760 			  PUT_MODE (cc_src, mode);
7761 			}
7762 		    }
7763 		  else
7764 		    {
7765 		      if (set_mode != mode)
7766 			{
7767 			  /* We found a matching expression in the
7768 			     wrong mode, but we don't have room to
7769 			     store it in the array.  Punt.  This case
7770 			     should be rare.  */
7771 			  break;
7772 			}
7773 		      /* INSN sets CC_REG to a value equal to CC_SRC
7774 			 with the right mode.  We can simply delete
7775 			 it.  */
7776 		      delete_insn (insn);
7777 		    }
7778 
7779 		  /* We found an instruction to delete.  Keep looking,
7780 		     in the hopes of finding a three-way jump.  */
7781 		  continue;
7782 		}
7783 
7784 	      /* We found an instruction which sets the condition
7785 		 code, so don't look any farther.  */
7786 	      break;
7787 	    }
7788 
7789 	  /* If INSN sets CC_REG in some other way, don't look any
7790 	     farther.  */
7791 	  if (reg_set_p (cc_reg, insn))
7792 	    break;
7793 	}
7794 
7795       /* If we fell off the bottom of the block, we can keep looking
7796 	 through successors.  We pass CAN_CHANGE_MODE as false because
7797 	 we aren't prepared to handle compatibility between the
7798 	 further blocks and this block.  */
7799       if (insn == end)
7800 	{
7801 	  enum machine_mode submode;
7802 
7803 	  submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7804 	  if (submode != VOIDmode)
7805 	    {
7806 	      gcc_assert (submode == mode);
7807 	      found_equiv = true;
7808 	      can_change_mode = false;
7809 	    }
7810 	}
7811     }
7812 
7813   if (! found_equiv)
7814     return VOIDmode;
7815 
7816   /* Now INSN_COUNT is the number of instructions we found which set
7817      CC_REG to a value equivalent to CC_SRC.  The instructions are in
7818      INSNS.  The modes used by those instructions are in MODES.  */
7819 
7820   newreg = NULL_RTX;
7821   for (i = 0; i < insn_count; ++i)
7822     {
7823       if (modes[i] != mode)
7824 	{
7825 	  /* We need to change the mode of CC_REG in INSNS[i] and
7826 	     subsequent instructions.  */
7827 	  if (! newreg)
7828 	    {
7829 	      if (GET_MODE (cc_reg) == mode)
7830 		newreg = cc_reg;
7831 	      else
7832 		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7833 	    }
7834 	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7835 				    newreg);
7836 	}
7837 
7838       delete_insn (insns[i]);
7839     }
7840 
7841   return mode;
7842 }
7843 
7844 /* If we have a fixed condition code register (or two), walk through
7845    the instructions and try to eliminate duplicate assignments.  */
7846 
7847 static void
cse_condition_code_reg(void)7848 cse_condition_code_reg (void)
7849 {
7850   unsigned int cc_regno_1;
7851   unsigned int cc_regno_2;
7852   rtx cc_reg_1;
7853   rtx cc_reg_2;
7854   basic_block bb;
7855 
7856   if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7857     return;
7858 
7859   cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7860   if (cc_regno_2 != INVALID_REGNUM)
7861     cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7862   else
7863     cc_reg_2 = NULL_RTX;
7864 
7865   FOR_EACH_BB (bb)
7866     {
7867       rtx last_insn;
7868       rtx cc_reg;
7869       rtx insn;
7870       rtx cc_src_insn;
7871       rtx cc_src;
7872       enum machine_mode mode;
7873       enum machine_mode orig_mode;
7874 
7875       /* Look for blocks which end with a conditional jump based on a
7876 	 condition code register.  Then look for the instruction which
7877 	 sets the condition code register.  Then look through the
7878 	 successor blocks for instructions which set the condition
7879 	 code register to the same value.  There are other possible
7880 	 uses of the condition code register, but these are by far the
7881 	 most common and the ones which we are most likely to be able
7882 	 to optimize.  */
7883 
7884       last_insn = BB_END (bb);
7885       if (!JUMP_P (last_insn))
7886 	continue;
7887 
7888       if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7889 	cc_reg = cc_reg_1;
7890       else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7891 	cc_reg = cc_reg_2;
7892       else
7893 	continue;
7894 
7895       cc_src_insn = NULL_RTX;
7896       cc_src = NULL_RTX;
7897       for (insn = PREV_INSN (last_insn);
7898 	   insn && insn != PREV_INSN (BB_HEAD (bb));
7899 	   insn = PREV_INSN (insn))
7900 	{
7901 	  rtx set;
7902 
7903 	  if (! INSN_P (insn))
7904 	    continue;
7905 	  set = single_set (insn);
7906 	  if (set
7907 	      && REG_P (SET_DEST (set))
7908 	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7909 	    {
7910 	      cc_src_insn = insn;
7911 	      cc_src = SET_SRC (set);
7912 	      break;
7913 	    }
7914 	  else if (reg_set_p (cc_reg, insn))
7915 	    break;
7916 	}
7917 
7918       if (! cc_src_insn)
7919 	continue;
7920 
7921       if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7922 	continue;
7923 
7924       /* Now CC_REG is a condition code register used for a
7925 	 conditional jump at the end of the block, and CC_SRC, in
7926 	 CC_SRC_INSN, is the value to which that condition code
7927 	 register is set, and CC_SRC is still meaningful at the end of
7928 	 the basic block.  */
7929 
7930       orig_mode = GET_MODE (cc_src);
7931       mode = cse_cc_succs (bb, cc_reg, cc_src, true);
7932       if (mode != VOIDmode)
7933 	{
7934 	  gcc_assert (mode == GET_MODE (cc_src));
7935 	  if (mode != orig_mode)
7936 	    {
7937 	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7938 
7939 	      cse_change_cc_mode_insn (cc_src_insn, newreg);
7940 
7941 	      /* Do the same in the following insns that use the
7942 		 current value of CC_REG within BB.  */
7943 	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7944 					NEXT_INSN (last_insn),
7945 					newreg);
7946 	    }
7947 	}
7948     }
7949 }
7950 
7951 
7952 /* Perform common subexpression elimination.  Nonzero value from
7953    `cse_main' means that jumps were simplified and some code may now
7954    be unreachable, so do jump optimization again.  */
7955 static bool
gate_handle_cse(void)7956 gate_handle_cse (void)
7957 {
7958   return optimize > 0;
7959 }
7960 
7961 static unsigned int
rest_of_handle_cse(void)7962 rest_of_handle_cse (void)
7963 {
7964   int tem;
7965 
7966   if (dump_file)
7967     dump_flow_info (dump_file, dump_flags);
7968 
7969   reg_scan (get_insns (), max_reg_num ());
7970 
7971   tem = cse_main (get_insns (), max_reg_num ());
7972   if (tem)
7973     rebuild_jump_labels (get_insns ());
7974   if (purge_all_dead_edges ())
7975     delete_unreachable_blocks ();
7976 
7977   delete_trivially_dead_insns (get_insns (), max_reg_num ());
7978 
7979   /* If we are not running more CSE passes, then we are no longer
7980      expecting CSE to be run.  But always rerun it in a cheap mode.  */
7981   cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7982 
7983   if (tem)
7984     delete_dead_jumptables ();
7985 
7986   if (tem || optimize > 1)
7987     cleanup_cfg (CLEANUP_EXPENSIVE);
7988   return 0;
7989 }
7990 
7991 struct tree_opt_pass pass_cse =
7992 {
7993   "cse1",                               /* name */
7994   gate_handle_cse,                      /* gate */
7995   rest_of_handle_cse,			/* execute */
7996   NULL,                                 /* sub */
7997   NULL,                                 /* next */
7998   0,                                    /* static_pass_number */
7999   TV_CSE,                               /* tv_id */
8000   0,                                    /* properties_required */
8001   0,                                    /* properties_provided */
8002   0,                                    /* properties_destroyed */
8003   0,                                    /* todo_flags_start */
8004   TODO_dump_func |
8005   TODO_ggc_collect,                     /* todo_flags_finish */
8006   's'                                   /* letter */
8007 };
8008 
8009 
8010 static bool
gate_handle_cse2(void)8011 gate_handle_cse2 (void)
8012 {
8013   return optimize > 0 && flag_rerun_cse_after_loop;
8014 }
8015 
8016 /* Run second CSE pass after loop optimizations.  */
8017 static unsigned int
rest_of_handle_cse2(void)8018 rest_of_handle_cse2 (void)
8019 {
8020   int tem;
8021 
8022   if (dump_file)
8023     dump_flow_info (dump_file, dump_flags);
8024 
8025   tem = cse_main (get_insns (), max_reg_num ());
8026 
8027   /* Run a pass to eliminate duplicated assignments to condition code
8028      registers.  We have to run this after bypass_jumps, because it
8029      makes it harder for that pass to determine whether a jump can be
8030      bypassed safely.  */
8031   cse_condition_code_reg ();
8032 
8033   purge_all_dead_edges ();
8034   delete_trivially_dead_insns (get_insns (), max_reg_num ());
8035 
8036   if (tem)
8037     {
8038       timevar_push (TV_JUMP);
8039       rebuild_jump_labels (get_insns ());
8040       delete_dead_jumptables ();
8041       cleanup_cfg (CLEANUP_EXPENSIVE);
8042       timevar_pop (TV_JUMP);
8043     }
8044   reg_scan (get_insns (), max_reg_num ());
8045   cse_not_expected = 1;
8046   return 0;
8047 }
8048 
8049 
8050 struct tree_opt_pass pass_cse2 =
8051 {
8052   "cse2",                               /* name */
8053   gate_handle_cse2,                     /* gate */
8054   rest_of_handle_cse2,			/* execute */
8055   NULL,                                 /* sub */
8056   NULL,                                 /* next */
8057   0,                                    /* static_pass_number */
8058   TV_CSE2,                              /* tv_id */
8059   0,                                    /* properties_required */
8060   0,                                    /* properties_provided */
8061   0,                                    /* properties_destroyed */
8062   0,                                    /* todo_flags_start */
8063   TODO_dump_func |
8064   TODO_ggc_collect,                     /* todo_flags_finish */
8065   't'                                   /* letter */
8066 };
8067 
8068