/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgcleanup.h"
#include "alias.h"
#include "toplev.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "regs.h"
#include "function-abi.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `REG_QTY (N)' records what quantity register N is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, `REG_QTY (N)' will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions with
   qty_table `mode' must be in the hash table for both registers and
   must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers is not the same mode as those expressions.

Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   `REG_TICK' and `REG_IN_TABLE', accessors for members of
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
   incremented whenever a value is stored in register i.
   REG_IN_TABLE (i) holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value REG_TICK (i)
   had when the references were entered.  If we want to enter a
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
   remove old references.  Until we want to enter a new entry, the
   mere fact that the two vectors don't match makes the entries be
   ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, `REG_TICK' and
   `REG_IN_TABLE' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
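
/* Illustrative example (not part of the pass): for a sequence such as

	(set (reg 100) (mem (reg 99)))
	(set (reg 101) (reg 100))
	(set (reg 102) (plus (reg 100) (const_int 4)))

   the load gives reg 100 a fresh quantity number, the copy makes
   reg 101 share that quantity, and the PLUS is hashed with its
   register operand canonicalized through the quantity number, so a
   later (plus (reg 101) (const_int 4)) finds the same table entry
   and can be replaced by reg 102.  The register numbers here are
   hypothetical pseudos.  */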

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx_insn *const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};
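
/* For example (hypothetical numbers): once the code has passed a
   branch known to satisfy (gt (reg 100) (const_int 7)), the quantity
   of reg 100 can record comparison_code == GT, comparison_const ==
   (const_int 7) and comparison_qty == -1, so an identical comparison
   seen later in the extended basic block can be folded to a known
   result.  */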

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the current and last value assigned to CC0.
   If it should happen to be a constant, it is stored in preference
   to the actual assigned value.  In case it is a constant, we store
   the mode in which the constant should be interpreted.  */

static rtx this_insn_cc0, prev_insn_cc0;
static machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;

/* Insn being scanned.  */

static rtx_insn *this_insn;
static bool optimize_this_for_speed_p;

/* Index by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;

/* The timestamp at the beginning of the current run of
   cse_extended_basic_block.  We increment this variable at the beginning of
   the current run of cse_extended_basic_block.  The timestamp field of a
   cse_reg_info entry matches the value of this variable if and only
   if the entry has been initialized during the current run of
   cse_extended_basic_block.  */
static unsigned int cse_reg_info_timestamp;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* True if CSE has altered the CFG.  */
static bool cse_cfg_altered;

/* True if CSE has altered conditional jump insns in such a way
   that jump optimization should be redone.  */
static bool cse_jumps_altered;

/* True if we put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL_OPERAND; if so, we must rerun jump after CSE
   to put in the note.  */
static bool recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT	5
#define HASH_SIZE	(1 << HASH_SHIFT)
#define HASH_MASK	(HASH_SIZE - 1)
/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M)	\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : safe_hash (X, M)) & HASH_MASK)
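
/* For instance, if pseudos 100 and 101 currently share quantity 5,
   both (reg 100) and (reg 101) hash to (((unsigned) REG << 7) + 5)
   & HASH_MASK, so equivalent registers fall into the same bucket
   without calling canon_hash.  (Illustrative register and quantity
   numbers.)  */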

/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N)							\
  (REGNO_PTR_FRAME_P (N)						\
   || (HARD_REGISTER_NUM_P (N)						\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X, MODE)							\
  (REG_P (X) ? 0 : notreg_cost (X, MODE, SET, 1))
#define COST_IN(X, MODE, OUTER, OPNO)					\
  (REG_P (X) ? 0 : notreg_cost (X, MODE, OUTER, OPNO))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)

/* Compare table_elt X and Y and return true iff X is cheaper than Y.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* Trace a path through the CFG.  */

struct branch_path
{
  /* The basic block for this path entry.  */
  basic_block bb;
};

/* This data describes a block that will be processed by
   cse_extended_basic_block.  */

struct cse_basic_block_data
{
  /* Total number of SETs in block.  */
  int nsets;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current path, indicating which basic_blocks will be processed.  */
  struct branch_path *path;
};


/* Pointers to the live in/live out bitmaps for the boundaries of the
   current EBB.  */
static bitmap cse_ebb_live_in, cse_ebb_live_out;

/* A simple bitmap to track which basic blocks have been visited
   already as part of an already processed extended basic block.  */
static sbitmap cse_visited_basic_blocks;

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, machine_mode, enum rtx_code, int);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static void remove_pseudo_from_table (rtx, unsigned);
static struct table_elt *lookup (rtx, unsigned, machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
					    machine_mode, int, int);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, machine_mode);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, poly_uint64,
					machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, machine_mode);
static inline unsigned safe_hash (rtx, machine_mode);
static inline unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx_insn *);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   machine_mode *,
					   machine_mode *);
static rtx fold_rtx (rtx, rtx_insn *);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx_insn *, bool);
static void record_jump_cond (enum rtx_code, machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx_insn *);
static void cse_prescan_path (struct cse_basic_block_data *);
static void invalidate_from_clobbers (rtx_insn *);
static void invalidate_from_sets_and_clobbers (rtx_insn *);
static void cse_extended_basic_block (struct cse_basic_block_data *);
extern void dump_class (struct table_elt*);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);

static void flush_hash_table (void);
static bool insn_live_p (rtx_insn *, int *);
static bool set_live_p (rtx, rtx_insn *, int *);
static void cse_change_cc_mode_insn (rtx_insn *, rtx);
static void cse_change_cc_mode_insns (rtx_insn *, rtx_insn *, rtx);
static machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
				       bool);


#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART		gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;

/* Nonzero if X has the form (PLUS frame-pointer integer).  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      return false;

    case PLUS:
      if (!CONST_INT_P (XEXP (x, 1)))
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}
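
/* For example, (plus (plus (reg fp) (const_int 16)) (const_int -4))
   satisfies this predicate through the recursion on PLUS, whereas
   (plus (reg fp) (reg 100)) does not because the second operand is
   not a CONST_INT.  (Informal RTL notation; reg 100 is a hypothetical
   pseudo.)  */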

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
DEBUG_FUNCTION void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (const_rtx x)
{
  int cost = 0;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x))
	{
	  unsigned int regno = REGNO (x);
	  if (!CHEAP_REGNO (regno))
	    {
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
		    return MAX_COST;
		  cost += 2;
		}
	      else
		cost += 1;
	    }
	}
    }
  return cost;
}
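
/* As an illustrative reading of the loop above: an rtx built from two
   pseudos, e.g. (plus (reg 100) (reg 101)), costs 2 (1 per pseudo);
   a non-cheap hard register operand instead contributes 2, or makes
   the whole estimate MAX_COST on targets with small register classes
   for its mode.  (Hypothetical register numbers.)  */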

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, machine_mode mode, enum rtx_code outer, int opno)
{
  scalar_int_mode int_mode, inner_mode;
  return ((GET_CODE (x) == SUBREG
	   && REG_P (SUBREG_REG (x))
	   && is_int_mode (mode, &int_mode)
	   && is_int_mode (GET_MODE (SUBREG_REG (x)), &inner_mode)
	   && GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (inner_mode)
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, inner_mode))
	  ? 0
	  : rtx_cost (x, mode, outer, opno, optimize_this_for_speed_p) * 2);
}
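
/* Consequently a lowpart SUBREG of a register whose truncation is
   known to be a no-op, e.g. (subreg:SI (reg:DI 100) 0) on a
   little-endian target, is costed at 0 just like a plain register,
   while any other non-register rtx costs twice its rtx_cost.
   (Hypothetical mode/register combination.)  */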


/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
	{
	  /* Compute a new size that is a power of 2 and no smaller
	     than the larger of NREGS and 64.  */
	  new_size = (cse_reg_info_table_size
		      ? cse_reg_info_table_size : 64);

	  while (new_size < nregs)
	    new_size *= 2;
	}
      else
	{
	  /* If we need a big table, allocate just enough to hold
	     NREGS registers.  */
	  new_size = nregs;
	}

      /* Reallocate the table with NEW_SIZE entries.  */
      free (cse_reg_info_table);
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
	 will all be considered out of date.  We do not touch those
	 entries beyond the first NREGS entries to be nice to the
	 virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
	cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}

/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}

/* Find a cse_reg_info entry for REGNO.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

  prev_insn_cc0 = 0;
}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
	  || (new_reg >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_out, firstr))
		  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new_reg >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
	qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}
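
/* Concretely (hypothetical registers): if quantity Q's chain currently
   holds only the fixed frame pointer, a newly equivalent pseudo 100 is
   appended after it, keeping the fixed register the canonical choice;
   conversely, if the chain holds only pseudo 100 and a fixed hard
   register becomes equivalent, that register is placed at the head of
   the chain.  */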

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  unsigned c_regno = REGNO (classp->exp);

		  gcc_assert (REGNO_QTY_VALID_P (c_regno));

		  /* Suppose that 5 is hard reg and 100 and 101 are
		     pseudos.  Consider

		     (set (reg:si 100) (reg:si 5))
		     (set (reg:si 5) (reg:si 100))
		     (set (reg:di 101) (reg:di 5))

		     We would now set REG_QTY (101) = REG_QTY (5), but the
		     entry for 5 is in SImode.  When we use this later in
		     copy propagation, we get the register in wrong mode.  */
		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
		    continue;

		  make_regs_eqv (regno, c_regno);
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}

/* Compute upper and lower anchors for CST.  Also compute the offset of CST
   from these anchors/bases such that *_BASE + *_OFFS = CST.  Return false iff
   CST is equal to an anchor.  */

static bool
compute_const_anchors (rtx cst,
		       HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
		       HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
{
  unsigned HOST_WIDE_INT n = UINTVAL (cst);

  *lower_base = n & ~(targetm.const_anchor - 1);
  if ((unsigned HOST_WIDE_INT) *lower_base == n)
    return false;

  *upper_base = ((n + (targetm.const_anchor - 1))
		 & ~(targetm.const_anchor - 1));
  *upper_offs = n - *upper_base;
  *lower_offs = n - *lower_base;
  return true;
}
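
/* Worked example, assuming targetm.const_anchor == 0x8000: for
   CST = (const_int 0x12345) this computes *lower_base = 0x10000,
   *lower_offs = 0x2345, *upper_base = 0x18000 and *upper_offs =
   -0x5cbb, so each base plus its offset reconstructs 0x12345.  For
   CST = (const_int 0x10000) it returns false, since the value is
   itself an anchor.  */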

/* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE.  */

static void
insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
		     machine_mode mode)
{
  struct table_elt *elt;
  unsigned hash;
  rtx anchor_exp;
  rtx exp;

  anchor_exp = gen_int_mode (anchor, mode);
  hash = HASH (anchor_exp, mode);
  elt = lookup (anchor_exp, hash, mode);
  if (!elt)
    elt = insert (anchor_exp, NULL, hash, mode);

  exp = plus_constant (mode, reg, offs);
  /* REG has just been inserted and the hash codes recomputed.  */
  mention_regs (exp);
  hash = HASH (exp, mode);

  /* Use the cost of the register rather than the whole expression.  When
     looking up constant anchors we will further offset the corresponding
     expression therefore it does not make sense to prefer REGs over
     reg-immediate additions.  Prefer instead the oldest expression.  Also
     don't prefer pseudos over hard regs so that we derive constants in
     argument registers from other argument registers rather than from the
     original pseudo that was used to synthesize the constant.  */
  insert_with_costs (exp, elt, hash, mode, COST (reg, mode), 1);
}

/* The constant CST is equivalent to the register REG.  Create
   equivalences between the two anchors of CST and the corresponding
   register-offset expressions using REG.  */

static void
insert_const_anchors (rtx reg, rtx cst, machine_mode mode)
{
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;

  if (!compute_const_anchors (cst, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
      return;

  /* Ignore anchors of value 0.  Constants accessible from zero are
     simple.  */
  if (lower_base != 0)
    insert_const_anchor (lower_base, reg, -lower_offs, mode);

  if (upper_base != 0)
    insert_const_anchor (upper_base, reg, -upper_offs, mode);
}

/* We need to express ANCHOR_ELT->exp + OFFS.  Walk the equivalence list of
   ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
   valid expression.  Return the cheapest and oldest of such expressions.  In
   *OLD, return how old the resulting expression is compared to the other
   equivalent expressions.  */

static rtx
find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
			   unsigned *old)
{
  struct table_elt *elt;
  unsigned idx;
  struct table_elt *match_elt;
  rtx match;

  /* Find the cheapest and *oldest* expression to maximize the chance of
     reusing the same pseudo.  */

  match_elt = NULL;
  match = NULL_RTX;
  for (elt = anchor_elt->first_same_value, idx = 0;
       elt;
       elt = elt->next_same_value, idx++)
    {
      if (match_elt && CHEAPER (match_elt, elt))
	return match;

      if (REG_P (elt->exp)
	  || (GET_CODE (elt->exp) == PLUS
	      && REG_P (XEXP (elt->exp, 0))
	      && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
	{
	  rtx x;

	  /* Ignore expressions that are no longer valid.  */
	  if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
	    continue;

	  x = plus_constant (GET_MODE (elt->exp), elt->exp, offs);
	  if (REG_P (x)
	      || (GET_CODE (x) == PLUS
		  && IN_RANGE (INTVAL (XEXP (x, 1)),
			       -targetm.const_anchor,
			       targetm.const_anchor - 1)))
	    {
	      match = x;
	      match_elt = elt;
	      *old = idx;
	    }
	}
    }

  return match;
}

/* Try to express the constant SRC_CONST using a register+offset expression
   derived from a constant anchor.  Return it if successful, or NULL_RTX
   otherwise.  */

static rtx
try_const_anchors (rtx src_const, machine_mode mode)
{
  struct table_elt *lower_elt, *upper_elt;
  HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
  rtx lower_anchor_rtx, upper_anchor_rtx;
  rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
  unsigned lower_old, upper_old;

  /* CONST_INT is used for CC modes, but we should leave those alone.  */
  if (GET_MODE_CLASS (mode) == MODE_CC)
    return NULL_RTX;

  gcc_assert (SCALAR_INT_MODE_P (mode));
  if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
			      &upper_base, &upper_offs))
    return NULL_RTX;

  lower_anchor_rtx = GEN_INT (lower_base);
  upper_anchor_rtx = GEN_INT (upper_base);
  lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
  upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);

  if (lower_elt)
    lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
  if (upper_elt)
    upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);

  if (!lower_exp)
    return upper_exp;
  if (!upper_exp)
    return lower_exp;

  /* Return the older expression.  */
  return (upper_old > lower_old ? upper_exp : lower_exp);
}
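
/* Continuing the worked example above (hypothetical register): if some
   register R is already known to hold 0x10000, looking up src_const
   0x12345 can yield (plus R (const_int 0x2345)), letting the caller
   synthesize the constant from R instead of materializing it from
   scratch.  */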
1355 
1356 /* Look in or update the hash table.  */
1357 
1358 /* Remove table element ELT from use in the table.
1359    HASH is its hash code, made using the HASH macro.
1360    It's an argument because often that is known in advance
1361    and we save much time not recomputing it.  */
1362 
1363 static void
remove_from_table(struct table_elt * elt,unsigned int hash)1364 remove_from_table (struct table_elt *elt, unsigned int hash)
1365 {
1366   if (elt == 0)
1367     return;
1368 
1369   /* Mark this element as removed.  See cse_insn.  */
1370   elt->first_same_value = 0;
1371 
1372   /* Remove the table element from its equivalence class.  */
1373 
1374   {
1375     struct table_elt *prev = elt->prev_same_value;
1376     struct table_elt *next = elt->next_same_value;
1377 
1378     if (next)
1379       next->prev_same_value = prev;
1380 
1381     if (prev)
1382       prev->next_same_value = next;
1383     else
1384       {
1385 	struct table_elt *newfirst = next;
1386 	while (next)
1387 	  {
1388 	    next->first_same_value = newfirst;
1389 	    next = next->next_same_value;
1390 	  }
1391       }
1392   }
1393 
1394   /* Remove the table element from its hash bucket.  */
1395 
1396   {
1397     struct table_elt *prev = elt->prev_same_hash;
1398     struct table_elt *next = elt->next_same_hash;
1399 
1400     if (next)
1401       next->prev_same_hash = prev;
1402 
1403     if (prev)
1404       prev->next_same_hash = next;
1405     else if (table[hash] == elt)
1406       table[hash] = next;
1407     else
1408       {
1409 	/* This entry is not in the proper hash bucket.  This can happen
1410 	   when two classes were merged by `merge_equiv_classes'.  Search
1411 	   for the hash bucket that it heads.  This happens only very
1412 	   rarely, so the cost is acceptable.  */
1413 	for (hash = 0; hash < HASH_SIZE; hash++)
1414 	  if (table[hash] == elt)
1415 	    table[hash] = next;
1416       }
1417   }
1418 
1419   /* Remove the table element from its related-value circular chain.  */
1420 
1421   if (elt->related_value != 0 && elt->related_value != elt)
1422     {
1423       struct table_elt *p = elt->related_value;
1424 
1425       while (p->related_value != elt)
1426 	p = p->related_value;
1427       p->related_value = elt->related_value;
1428       if (p->related_value == p)
1429 	p->related_value = 0;
1430     }
1431 
1432   /* Now add it to the free element chain.  */
1433   elt->next_same_hash = free_element_chain;
1434   free_element_chain = elt;
1435 }
1436 
1437 /* Same as above, but X is a pseudo-register.  */
1438 
1439 static void
remove_pseudo_from_table(rtx x,unsigned int hash)1440 remove_pseudo_from_table (rtx x, unsigned int hash)
1441 {
1442   struct table_elt *elt;
1443 
1444   /* Because a pseudo-register can be referenced in more than one
1445      mode, we might have to remove more than one table entry.  */
1446   while ((elt = lookup_for_remove (x, hash, VOIDmode)))
1447     remove_from_table (elt, hash);
1448 }
1449 
1450 /* Look up X in the hash table and return its table element,
1451    or 0 if X is not in the table.
1452 
1453    MODE is the machine-mode of X, or if X is an integer constant
1454    with VOIDmode then MODE is the mode with which X will be used.
1455 
1456    Here we are satisfied to find an expression whose tree structure
1457    looks like X.  */
1458 
1459 static struct table_elt *
lookup(rtx x,unsigned int hash,machine_mode mode)1460 lookup (rtx x, unsigned int hash, machine_mode mode)
1461 {
1462   struct table_elt *p;
1463 
1464   for (p = table[hash]; p; p = p->next_same_hash)
1465     if (mode == p->mode && ((x == p->exp && REG_P (x))
1466 			    || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1467       return p;
1468 
1469   return 0;
1470 }
1471 
1472 /* Like `lookup' but don't care whether the table element uses invalid regs.
1473    Also ignore discrepancies in the machine mode of a register.  */
1474 
1475 static struct table_elt *
lookup_for_remove(rtx x,unsigned int hash,machine_mode mode)1476 lookup_for_remove (rtx x, unsigned int hash, machine_mode mode)
1477 {
1478   struct table_elt *p;
1479 
1480   if (REG_P (x))
1481     {
1482       unsigned int regno = REGNO (x);
1483 
1484       /* Don't check the machine mode when comparing registers;
1485 	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
1486       for (p = table[hash]; p; p = p->next_same_hash)
1487 	if (REG_P (p->exp)
1488 	    && REGNO (p->exp) == regno)
1489 	  return p;
1490     }
1491   else
1492     {
1493       for (p = table[hash]; p; p = p->next_same_hash)
1494 	if (mode == p->mode
1495 	    && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1496 	  return p;
1497     }
1498 
1499   return 0;
1500 }
1501 
1502 /* Look for an expression equivalent to X and with code CODE.
1503    If one is found, return that expression.  */
1504 
1505 static rtx
1506 lookup_as_function (rtx x, enum rtx_code code)
1507 {
1508   struct table_elt *p
1509     = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1510 
1511   if (p == 0)
1512     return 0;
1513 
1514   for (p = p->first_same_value; p; p = p->next_same_value)
1515     if (GET_CODE (p->exp) == code
1516 	/* Make sure this is a valid entry in the table.  */
1517 	&& exp_equiv_p (p->exp, p->exp, 1, false))
1518       return p->exp;
1519 
1520   return 0;
1521 }
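
/* A sketch of a typical query, assuming X may be known equal to some
   subtraction: the call scans X's entire equivalence class for a valid
   entry with the requested code, so

     rtx diff = lookup_as_function (x, MINUS);

   yields (minus ...) rtl equivalent to X, or 0 if none is recorded.  */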
1522 
1523 /* Insert X in the hash table, assuming HASH is its hash code and
1524    CLASSP is an element of the class it should go in (or 0 if a new
1525    class should be made).  COST is the cost of X and reg_cost is the
1526    cost of registers in X.  It is inserted at the proper position to
1527    keep the class in the order cheapest first.
1528 
1529    MODE is the machine-mode of X, or if X is an integer constant
1530    with VOIDmode then MODE is the mode with which X will be used.
1531 
1532    For elements of equal cheapness, the most recent one
1533    goes in front, except that the first element in the list
1534    remains first unless a cheaper element is added.  The order of
1535    pseudo-registers does not matter, as canon_reg will be called to
1536    find the cheapest when a register is retrieved from the table.
1537 
1538    The in_memory field in the hash table element is set to 0.
1539    The caller must set it nonzero if appropriate.
1540 
1541    You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1542    and if insert_regs returns a nonzero value
1543    you must then recompute its hash code before calling here.
1544 
1545    If necessary, update table showing constant values of quantities.  */
1546 
1547 static struct table_elt *
1548 insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
1549 		   machine_mode mode, int cost, int reg_cost)
1550 {
1551   struct table_elt *elt;
1552 
1553   /* If X is a register and we haven't made a quantity for it,
1554      something is wrong.  */
1555   gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1556 
1557   /* If X is a hard register, show it is being put in the table.  */
1558   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1559     add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));
1560 
1561   /* Put an element for X into the right hash bucket.  */
1562 
1563   elt = free_element_chain;
1564   if (elt)
1565     free_element_chain = elt->next_same_hash;
1566   else
1567     elt = XNEW (struct table_elt);
1568 
1569   elt->exp = x;
1570   elt->canon_exp = NULL_RTX;
1571   elt->cost = cost;
1572   elt->regcost = reg_cost;
1573   elt->next_same_value = 0;
1574   elt->prev_same_value = 0;
1575   elt->next_same_hash = table[hash];
1576   elt->prev_same_hash = 0;
1577   elt->related_value = 0;
1578   elt->in_memory = 0;
1579   elt->mode = mode;
1580   elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1581 
1582   if (table[hash])
1583     table[hash]->prev_same_hash = elt;
1584   table[hash] = elt;
1585 
1586   /* Put it into the proper value-class.  */
1587   if (classp)
1588     {
1589       classp = classp->first_same_value;
1590       if (CHEAPER (elt, classp))
1591 	/* Insert at the head of the class.  */
1592 	{
1593 	  struct table_elt *p;
1594 	  elt->next_same_value = classp;
1595 	  classp->prev_same_value = elt;
1596 	  elt->first_same_value = elt;
1597 
1598 	  for (p = classp; p; p = p->next_same_value)
1599 	    p->first_same_value = elt;
1600 	}
1601       else
1602 	{
1603 	  /* Insert not at head of the class.  */
1604 	  /* Put it after the last element cheaper than X.  */
1605 	  struct table_elt *p, *next;
1606 
1607 	  for (p = classp;
1608 	       (next = p->next_same_value) && CHEAPER (next, elt);
1609 	       p = next)
1610 	    ;
1611 
1612 	  /* Put it after P and before NEXT.  */
1613 	  elt->next_same_value = next;
1614 	  if (next)
1615 	    next->prev_same_value = elt;
1616 
1617 	  elt->prev_same_value = p;
1618 	  p->next_same_value = elt;
1619 	  elt->first_same_value = classp;
1620 	}
1621     }
1622   else
1623     elt->first_same_value = elt;
1624 
1625   /* If this is a constant being set equivalent to a register or a register
1626      being set equivalent to a constant, note the constant equivalence.
1627 
1628      If this is a constant, it cannot be equivalent to a different constant,
1629      and a constant is the only thing that can be cheaper than a register.  So
1630      we know the register is the head of the class (before the constant was
1631      inserted).
1632 
1633      If this is a register that is not already known equivalent to a
1634      constant, we must check the entire class.
1635 
1636      If this is a register that is already known equivalent to an insn,
1637      update the qtys `const_insn' to show that `this_insn' is the latest
1638      insn making that quantity equivalent to the constant.  */
1639 
1640   if (elt->is_const && classp && REG_P (classp->exp)
1641       && !REG_P (x))
1642     {
1643       int exp_q = REG_QTY (REGNO (classp->exp));
1644       struct qty_table_elem *exp_ent = &qty_table[exp_q];
1645 
1646       exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1647       exp_ent->const_insn = this_insn;
1648     }
1649 
1650   else if (REG_P (x)
1651 	   && classp
1652 	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1653 	   && ! elt->is_const)
1654     {
1655       struct table_elt *p;
1656 
1657       for (p = classp; p != 0; p = p->next_same_value)
1658 	{
1659 	  if (p->is_const && !REG_P (p->exp))
1660 	    {
1661 	      int x_q = REG_QTY (REGNO (x));
1662 	      struct qty_table_elem *x_ent = &qty_table[x_q];
1663 
1664 	      x_ent->const_rtx
1665 		= gen_lowpart (GET_MODE (x), p->exp);
1666 	      x_ent->const_insn = this_insn;
1667 	      break;
1668 	    }
1669 	}
1670     }
1671 
1672   else if (REG_P (x)
1673 	   && qty_table[REG_QTY (REGNO (x))].const_rtx
1674 	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1675     qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1676 
1677   /* If this is a constant with symbolic value,
1678      and it has a term with an explicit integer value,
1679      link it up with related expressions.  */
1680   if (GET_CODE (x) == CONST)
1681     {
1682       rtx subexp = get_related_value (x);
1683       unsigned subhash;
1684       struct table_elt *subelt, *subelt_prev;
1685 
1686       if (subexp != 0)
1687 	{
1688 	  /* Get the integer-free subexpression in the hash table.  */
1689 	  subhash = SAFE_HASH (subexp, mode);
1690 	  subelt = lookup (subexp, subhash, mode);
1691 	  if (subelt == 0)
1692 	    subelt = insert (subexp, NULL, subhash, mode);
1693 	  /* Initialize SUBELT's circular chain if it has none.  */
1694 	  if (subelt->related_value == 0)
1695 	    subelt->related_value = subelt;
1696 	  /* Find the element in the circular chain that precedes SUBELT.  */
1697 	  subelt_prev = subelt;
1698 	  while (subelt_prev->related_value != subelt)
1699 	    subelt_prev = subelt_prev->related_value;
1700 	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1701 	     This way the element that follows SUBELT is the oldest one.  */
1702 	  elt->related_value = subelt_prev->related_value;
1703 	  subelt_prev->related_value = elt;
1704 	}
1705     }
1706 
1707   return elt;
1708 }
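
/* The invariant insert_with_costs maintains, in sketch form: a class is a
   doubly linked list sorted cheapest-first, and every member points back
   at the head, so walking any member's class looks like

     for (p = elt->first_same_value; p; p = p->next_same_value)
       gcc_checking_assert (p->first_same_value == elt->first_same_value);

   with p->cost non-decreasing along the walk.  */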
1709 
1710 /* Wrap insert_with_costs by passing the default costs.  */
1711 
1712 static struct table_elt *
1713 insert (rtx x, struct table_elt *classp, unsigned int hash,
1714 	machine_mode mode)
1715 {
1716   return insert_with_costs (x, classp, hash, mode,
1717 			    COST (x, mode), approx_reg_cost (x));
1718 }
1719 
1720 
1721 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1722    CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1723    the two classes equivalent.
1724 
1725    CLASS1 will be the surviving class; CLASS2 should not be used after this
1726    call.
1727 
1728    Any invalid entries in CLASS2 will not be copied.  */
1729 
1730 static void
1731 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1732 {
1733   struct table_elt *elt, *next, *new_elt;
1734 
1735   /* Ensure we start with the head of the classes.  */
1736   class1 = class1->first_same_value;
1737   class2 = class2->first_same_value;
1738 
1739   /* If they were already equal, forget it.  */
1740   if (class1 == class2)
1741     return;
1742 
1743   for (elt = class2; elt; elt = next)
1744     {
1745       unsigned int hash;
1746       rtx exp = elt->exp;
1747       machine_mode mode = elt->mode;
1748 
1749       next = elt->next_same_value;
1750 
1751       /* Remove old entry, make a new one in CLASS1's class.
1752 	 Don't do this for invalid entries as we cannot find their
1753 	 hash code (it also isn't necessary).  */
1754       if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1755 	{
1756 	  bool need_rehash = false;
1757 
1758 	  hash_arg_in_memory = 0;
1759 	  hash = HASH (exp, mode);
1760 
1761 	  if (REG_P (exp))
1762 	    {
1763 	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1764 	      delete_reg_equiv (REGNO (exp));
1765 	    }
1766 
1767 	  if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
1768 	    remove_pseudo_from_table (exp, hash);
1769 	  else
1770 	    remove_from_table (elt, hash);
1771 
1772 	  if (insert_regs (exp, class1, 0) || need_rehash)
1773 	    {
1774 	      rehash_using_reg (exp);
1775 	      hash = HASH (exp, mode);
1776 	    }
1777 	  new_elt = insert (exp, class1, hash, mode);
1778 	  new_elt->in_memory = hash_arg_in_memory;
1779 	  if (GET_CODE (exp) == ASM_OPERANDS && elt->cost == MAX_COST)
1780 	    new_elt->cost = MAX_COST;
1781 	}
1782     }
1783 }
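
/* Why the rehash above is needed, in sketch form: an entry's hash depends
   on the quantity numbers of the registers it mentions (see the REG case
   of hash_rtx_cb), so once insert_regs assigns EXP's registers new
   quantities the old bucket index is stale:

     if (insert_regs (exp, class1, 0) || need_rehash)
       hash = HASH (exp, mode);

   i.e. the bucket must be recomputed before EXP is re-inserted.  */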
1784 
1785 /* Flush the entire hash table.  */
1786 
1787 static void
1788 flush_hash_table (void)
1789 {
1790   int i;
1791   struct table_elt *p;
1792 
1793   for (i = 0; i < HASH_SIZE; i++)
1794     for (p = table[i]; p; p = table[i])
1795       {
1796 	/* Note that invalidate can remove elements
1797 	   after P in the current hash chain.  */
1798 	if (REG_P (p->exp))
1799 	  invalidate (p->exp, VOIDmode);
1800 	else
1801 	  remove_from_table (p, i);
1802       }
1803 }
1804 
1805 /* Check whether an anti dependence exists between X and EXP.  MODE and
1806    ADDR are as for canon_anti_dependence.  */
1807 
1808 static bool
1809 check_dependence (const_rtx x, rtx exp, machine_mode mode, rtx addr)
1810 {
1811   subrtx_iterator::array_type array;
1812   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
1813     {
1814       const_rtx x = *iter;
1815       if (MEM_P (x) && canon_anti_dependence (x, true, exp, mode, addr))
1816 	return true;
1817     }
1818   return false;
1819 }
1820 
1821 /* Remove from the hash table, or mark as invalid, all expressions whose
1822    values could be altered by storing in register X.  */
1823 
1824 static void
1825 invalidate_reg (rtx x)
1826 {
1827   gcc_assert (GET_CODE (x) == REG);
1828 
1829   /* If X is a register, dependencies on its contents are recorded
1830      through the qty number mechanism.  Just change the qty number of
1831      the register, mark it as invalid for expressions that refer to it,
1832      and remove it itself.  */
1833   unsigned int regno = REGNO (x);
1834   unsigned int hash = HASH (x, GET_MODE (x));
1835 
1836   /* Remove REGNO from any quantity list it might be on and indicate
1837      that its value might have changed.  If it is a pseudo, remove its
1838      entry from the hash table.
1839 
1840      For a hard register, we do the first two actions above for any
1841      additional hard registers corresponding to X.  Then, if any of these
1842      registers are in the table, we must remove any REG entries that
1843      overlap these registers.  */
1844 
1845   delete_reg_equiv (regno);
1846   REG_TICK (regno)++;
1847   SUBREG_TICKED (regno) = -1;
1848 
1849   if (regno >= FIRST_PSEUDO_REGISTER)
1850     remove_pseudo_from_table (x, hash);
1851   else
1852     {
1853       HOST_WIDE_INT in_table = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1854       unsigned int endregno = END_REGNO (x);
1855       unsigned int rn;
1856       struct table_elt *p, *next;
1857 
1858       CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1859 
1860       for (rn = regno + 1; rn < endregno; rn++)
1861 	{
1862 	  in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1863 	  CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1864 	  delete_reg_equiv (rn);
1865 	  REG_TICK (rn)++;
1866 	  SUBREG_TICKED (rn) = -1;
1867 	}
1868 
1869       if (in_table)
1870 	for (hash = 0; hash < HASH_SIZE; hash++)
1871 	  for (p = table[hash]; p; p = next)
1872 	    {
1873 	      next = p->next_same_hash;
1874 
1875 	      if (!REG_P (p->exp) || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1876 		continue;
1877 
1878 	      unsigned int tregno = REGNO (p->exp);
1879 	      unsigned int tendregno = END_REGNO (p->exp);
1880 	      if (tendregno > regno && tregno < endregno)
1881 		remove_from_table (p, hash);
1882 	    }
1883     }
1884 }
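
/* The overlap test above treats a multi-word hard register as the
   half-open range [REGNO, END_REGNO): e.g. invalidating a two-word
   register at regno 4 (range [4, 6)) also removes a table entry for a
   two-word register at regno 5 (range [5, 7)), because

     tendregno > regno && tregno < endregno   ==>   7 > 4 && 5 < 6.  */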
1885 
1886 /* Remove from the hash table, or mark as invalid, all expressions whose
1887    values could be altered by storing in X.  X is a register, a subreg, or
1888    a memory reference with nonvarying address (because, when a memory
1889    reference with a varying address is stored in, all memory references are
1890    removed by invalidate_memory so specific invalidation is superfluous).
1891    FULL_MODE, if not VOIDmode, indicates that this much should be
1892    invalidated instead of just the amount indicated by the mode of X.  This
1893    is only used for bitfield stores into memory.
1894 
1895    A nonvarying address may be just a register or just a symbol reference,
1896    or it may be either of those plus a numeric offset.  */
1897 
1898 static void
1899 invalidate (rtx x, machine_mode full_mode)
1900 {
1901   int i;
1902   struct table_elt *p;
1903   rtx addr;
1904 
1905   switch (GET_CODE (x))
1906     {
1907     case REG:
1908       invalidate_reg (x);
1909       return;
1910 
1911     case SUBREG:
1912       invalidate (SUBREG_REG (x), VOIDmode);
1913       return;
1914 
1915     case PARALLEL:
1916       for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1917 	invalidate (XVECEXP (x, 0, i), VOIDmode);
1918       return;
1919 
1920     case EXPR_LIST:
1921       /* This is part of a disjoint return value; extract the location in
1922 	 question ignoring the offset.  */
1923       invalidate (XEXP (x, 0), VOIDmode);
1924       return;
1925 
1926     case MEM:
1927       addr = canon_rtx (get_addr (XEXP (x, 0)));
1928       /* Calculate the canonical version of X here so that
1929 	 true_dependence doesn't generate new RTL for X on each call.  */
1930       x = canon_rtx (x);
1931 
1932       /* Remove all hash table elements that refer to overlapping pieces of
1933 	 memory.  */
1934       if (full_mode == VOIDmode)
1935 	full_mode = GET_MODE (x);
1936 
1937       for (i = 0; i < HASH_SIZE; i++)
1938 	{
1939 	  struct table_elt *next;
1940 
1941 	  for (p = table[i]; p; p = next)
1942 	    {
1943 	      next = p->next_same_hash;
1944 	      if (p->in_memory)
1945 		{
1946 		  /* Just canonicalize the expression once;
1947 		     otherwise each time we call invalidate
1948 		     true_dependence will canonicalize the
1949 		     expression again.  */
1950 		  if (!p->canon_exp)
1951 		    p->canon_exp = canon_rtx (p->exp);
1952 		  if (check_dependence (p->canon_exp, x, full_mode, addr))
1953 		    remove_from_table (p, i);
1954 		}
1955 	    }
1956 	}
1957       return;
1958 
1959     default:
1960       gcc_unreachable ();
1961     }
1962 }
1963 
1964 /* Invalidate DEST.  Used when DEST is not going to be added
1965    into the hash table for some reason, e.g. do_not_record
1966    flagged on it.  */
1967 
1968 static void
1969 invalidate_dest (rtx dest)
1970 {
1971   if (REG_P (dest)
1972       || GET_CODE (dest) == SUBREG
1973       || MEM_P (dest))
1974     invalidate (dest, VOIDmode);
1975   else if (GET_CODE (dest) == STRICT_LOW_PART
1976 	   || GET_CODE (dest) == ZERO_EXTRACT)
1977     invalidate (XEXP (dest, 0), GET_MODE (dest));
1978 }
1979 
1980 /* Remove all expressions that refer to register REGNO,
1981    since they are already invalid, and we are about to
1982    mark that register valid again and don't want the old
1983    expressions to reappear as valid.  */
1984 
1985 static void
1986 remove_invalid_refs (unsigned int regno)
1987 {
1988   unsigned int i;
1989   struct table_elt *p, *next;
1990 
1991   for (i = 0; i < HASH_SIZE; i++)
1992     for (p = table[i]; p; p = next)
1993       {
1994 	next = p->next_same_hash;
1995 	if (!REG_P (p->exp) && refers_to_regno_p (regno, p->exp))
1996 	  remove_from_table (p, i);
1997       }
1998 }
1999 
2000 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
2001    and mode MODE.  */
2002 static void
2003 remove_invalid_subreg_refs (unsigned int regno, poly_uint64 offset,
2004 			    machine_mode mode)
2005 {
2006   unsigned int i;
2007   struct table_elt *p, *next;
2008 
2009   for (i = 0; i < HASH_SIZE; i++)
2010     for (p = table[i]; p; p = next)
2011       {
2012 	rtx exp = p->exp;
2013 	next = p->next_same_hash;
2014 
2015 	if (!REG_P (exp)
2016 	    && (GET_CODE (exp) != SUBREG
2017 		|| !REG_P (SUBREG_REG (exp))
2018 		|| REGNO (SUBREG_REG (exp)) != regno
2019 		|| ranges_maybe_overlap_p (SUBREG_BYTE (exp),
2020 					   GET_MODE_SIZE (GET_MODE (exp)),
2021 					   offset, GET_MODE_SIZE (mode)))
2022 	    && refers_to_regno_p (regno, p->exp))
2023 	  remove_from_table (p, i);
2024       }
2025 }
2026 
2027 /* Recompute the hash codes of any valid entries in the hash table that
2028    reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2029 
2030    This is called when we make a jump equivalence.  */
2031 
2032 static void
2033 rehash_using_reg (rtx x)
2034 {
2035   unsigned int i;
2036   struct table_elt *p, *next;
2037   unsigned hash;
2038 
2039   if (GET_CODE (x) == SUBREG)
2040     x = SUBREG_REG (x);
2041 
2042   /* If X is not a register or if the register is known not to be in any
2043      valid entries in the table, we have no work to do.  */
2044 
2045   if (!REG_P (x)
2046       || REG_IN_TABLE (REGNO (x)) < 0
2047       || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2048     return;
2049 
2050   /* Scan all hash chains looking for valid entries that mention X.
2051      If we find one and it is in the wrong hash chain, move it.  */
2052 
2053   for (i = 0; i < HASH_SIZE; i++)
2054     for (p = table[i]; p; p = next)
2055       {
2056 	next = p->next_same_hash;
2057 	if (reg_mentioned_p (x, p->exp)
2058 	    && exp_equiv_p (p->exp, p->exp, 1, false)
2059 	    && i != (hash = SAFE_HASH (p->exp, p->mode)))
2060 	  {
2061 	    if (p->next_same_hash)
2062 	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
2063 
2064 	    if (p->prev_same_hash)
2065 	      p->prev_same_hash->next_same_hash = p->next_same_hash;
2066 	    else
2067 	      table[i] = p->next_same_hash;
2068 
2069 	    p->next_same_hash = table[hash];
2070 	    p->prev_same_hash = 0;
2071 	    if (table[hash])
2072 	      table[hash]->prev_same_hash = p;
2073 	    table[hash] = p;
2074 	  }
2075       }
2076 }
2077 
2078 /* Remove from the hash table any expression that is a call-clobbered
2079    register in INSN.  Also update their TICK values.  */
2080 
2081 static void
2082 invalidate_for_call (rtx_insn *insn)
2083 {
2084   unsigned int regno;
2085   unsigned hash;
2086   struct table_elt *p, *next;
2087   int in_table = 0;
2088   hard_reg_set_iterator hrsi;
2089 
2090   /* Go through all the hard registers.  For each that might be clobbered
2091      in call insn INSN, remove the register from quantity chains and update
2092      reg_tick if defined.  Also see if any of these registers is currently
2093      in the table.
2094 
2095      ??? We could be more precise for partially-clobbered registers,
2096      and only invalidate values that actually occupy the clobbered part
2097      of the registers.  It doesn't seem worth the effort though, since
2098      we shouldn't see this situation much before RA.  Whatever choice
2099      we make here has to be consistent with the table walk below,
2100      so any change to this test will require a change there too.  */
2101   HARD_REG_SET callee_clobbers
2102     = insn_callee_abi (insn).full_and_partial_reg_clobbers ();
2103   EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, regno, hrsi)
2104     {
2105       delete_reg_equiv (regno);
2106       if (REG_TICK (regno) >= 0)
2107 	{
2108 	  REG_TICK (regno)++;
2109 	  SUBREG_TICKED (regno) = -1;
2110 	}
2111       in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2112     }
2113 
2114   /* In the case where we have no call-clobbered hard registers in the
2115      table, we are done.  Otherwise, scan the table and remove any
2116      entry that overlaps a call-clobbered register.  */
2117 
2118   if (in_table)
2119     for (hash = 0; hash < HASH_SIZE; hash++)
2120       for (p = table[hash]; p; p = next)
2121 	{
2122 	  next = p->next_same_hash;
2123 
2124 	  if (!REG_P (p->exp)
2125 	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2126 	    continue;
2127 
2128 	  /* This must use the same test as above rather than the
2129 	     more accurate clobbers_reg_p.  */
2130 	  if (overlaps_hard_reg_set_p (callee_clobbers, GET_MODE (p->exp),
2131 				       REGNO (p->exp)))
2132 	    remove_from_table (p, hash);
2133 	}
2134 }
2135 
2136 /* Given an expression X of type CONST,
2137    and ELT which is its table entry (or 0 if it
2138    is not in the hash table),
2139    return an alternate expression for X as a register plus integer.
2140    If none can be found, return 0.  */
2141 
2142 static rtx
2143 use_related_value (rtx x, struct table_elt *elt)
2144 {
2145   struct table_elt *relt = 0;
2146   struct table_elt *p, *q;
2147   HOST_WIDE_INT offset;
2148 
2149   /* First, is there anything related known?
2150      If we have a table element, we can tell from that.
2151      Otherwise, must look it up.  */
2152 
2153   if (elt != 0 && elt->related_value != 0)
2154     relt = elt;
2155   else if (elt == 0 && GET_CODE (x) == CONST)
2156     {
2157       rtx subexp = get_related_value (x);
2158       if (subexp != 0)
2159 	relt = lookup (subexp,
2160 		       SAFE_HASH (subexp, GET_MODE (subexp)),
2161 		       GET_MODE (subexp));
2162     }
2163 
2164   if (relt == 0)
2165     return 0;
2166 
2167   /* Search all related table entries for one that has an
2168      equivalent register.  */
2169 
2170   p = relt;
2171   while (1)
2172     {
2173       /* This loop is strange in that it is executed in two different cases.
2174 	 The first is when X is already in the table.  Then it is searching
2175 	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2176 	 X is not in the table.  Then RELT points to a class for the related
2177 	 value.
2178 
2179 	 Ensure that, whatever case we are in, we ignore classes that have
2180 	 the same value as X.  */
2181 
2182       if (rtx_equal_p (x, p->exp))
2183 	q = 0;
2184       else
2185 	for (q = p->first_same_value; q; q = q->next_same_value)
2186 	  if (REG_P (q->exp))
2187 	    break;
2188 
2189       if (q)
2190 	break;
2191 
2192       p = p->related_value;
2193 
2194       /* We went all the way around, so there is nothing to be found.
2195 	 Alternatively, perhaps RELT was in the table for some other reason
2196 	 and it has no related values recorded.  */
2197       if (p == relt || p == 0)
2198 	break;
2199     }
2200 
2201   if (q == 0)
2202     return 0;
2203 
2204   offset = (get_integer_term (x) - get_integer_term (p->exp));
2205 	  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2206   return plus_constant (q->mode, q->exp, offset);
2207 }
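
/* A worked example, assuming S is some SYMBOL_REF: let X be
   (const (plus S 16)) with no table entry, while (const (plus S 4)) is
   in the table with (reg R) in its class.  The integer terms are 16 and
   4, so OFFSET is 12 and the result is

     (plus (reg R) (const_int 12))

   in Q's mode, i.e. a register-plus-integer form for X.  */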
2208 
2209 
2210 /* Hash a string.  Just add its bytes up.  */
2211 static inline unsigned
2212 hash_rtx_string (const char *ps)
2213 {
2214   unsigned hash = 0;
2215   const unsigned char *p = (const unsigned char *) ps;
2216 
2217   if (p)
2218     while (*p)
2219       hash += *p++;
2220 
2221   return hash;
2222 }
2223 
2224 /* Same as hash_rtx, but call CB on each rtx if it is not NULL.
2225    When the callback returns true, we continue with the new rtx.  */
2226 
2227 unsigned
2228 hash_rtx_cb (const_rtx x, machine_mode mode,
2229              int *do_not_record_p, int *hash_arg_in_memory_p,
2230              bool have_reg_qty, hash_rtx_callback_function cb)
2231 {
2232   int i, j;
2233   unsigned hash = 0;
2234   enum rtx_code code;
2235   const char *fmt;
2236   machine_mode newmode;
2237   rtx newx;
2238 
2239   /* Used to turn recursion into iteration.  We can't rely on GCC's
2240      tail-recursion elimination since we need to keep accumulating values
2241      in HASH.  */
2242  repeat:
2243   if (x == 0)
2244     return hash;
2245 
2246   /* Invoke the callback first.  */
2247   if (cb != NULL
2248       && ((*cb) (x, mode, &newx, &newmode)))
2249     {
2250       hash += hash_rtx_cb (newx, newmode, do_not_record_p,
2251                            hash_arg_in_memory_p, have_reg_qty, cb);
2252       return hash;
2253     }
2254 
2255   code = GET_CODE (x);
2256   switch (code)
2257     {
2258     case REG:
2259       {
2260 	unsigned int regno = REGNO (x);
2261 
2262 	if (do_not_record_p && !reload_completed)
2263 	  {
2264 	    /* On some machines, we can't record any non-fixed hard register,
2265 	       because extending its life will cause reload problems.  We
2266 	       consider ap, fp, sp, gp to be fixed for this purpose.
2267 
2268 	       We also consider CCmode registers to be fixed for this purpose;
2269 	       failure to do so leads to failure to simplify 0<100 type of
2270 	       conditionals.
2271 
2272 	       On all machines, we can't record any global registers.
2273 	       Nor should we record any register that is in a small
2274 	       class, as defined by TARGET_CLASS_LIKELY_SPILLED_P.  */
2275 	    bool record;
2276 
2277 	    if (regno >= FIRST_PSEUDO_REGISTER)
2278 	      record = true;
2279 	    else if (x == frame_pointer_rtx
2280 		     || x == hard_frame_pointer_rtx
2281 		     || x == arg_pointer_rtx
2282 		     || x == stack_pointer_rtx
2283 		     || x == pic_offset_table_rtx)
2284 	      record = true;
2285 	    else if (global_regs[regno])
2286 	      record = false;
2287 	    else if (fixed_regs[regno])
2288 	      record = true;
2289 	    else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2290 	      record = true;
2291 	    else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
2292 	      record = false;
2293 	    else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno)))
2294 	      record = false;
2295 	    else
2296 	      record = true;
2297 
2298 	    if (!record)
2299 	      {
2300 		*do_not_record_p = 1;
2301 		return 0;
2302 	      }
2303 	  }
2304 
2305 	hash += ((unsigned int) REG << 7);
2306         hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2307 	return hash;
2308       }
2309 
2310     /* We handle SUBREG of a REG specially because the underlying
2311        reg changes its hash value with every value change; we don't
2312        want to have to forget unrelated subregs when one subreg changes.  */
2313     case SUBREG:
2314       {
2315 	if (REG_P (SUBREG_REG (x)))
2316 	  {
2317 	    hash += (((unsigned int) SUBREG << 7)
2318 		     + REGNO (SUBREG_REG (x))
2319 		     + (constant_lower_bound (SUBREG_BYTE (x))
2320 			/ UNITS_PER_WORD));
2321 	    return hash;
2322 	  }
2323 	break;
2324       }
2325 
2326     case CONST_INT:
2327       hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2328                + (unsigned int) INTVAL (x));
2329       return hash;
2330 
2331     case CONST_WIDE_INT:
2332       for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
2333 	hash += CONST_WIDE_INT_ELT (x, i);
2334       return hash;
2335 
2336     case CONST_POLY_INT:
2337       {
2338 	inchash::hash h;
2339 	h.add_int (hash);
2340 	for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2341 	  h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
2342 	return h.end ();
2343       }
2344 
2345     case CONST_DOUBLE:
2346       /* This is like the general case, except that it only counts
2347 	 the integers representing the constant.  */
2348       hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2349       if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
2350 	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2351 		 + (unsigned int) CONST_DOUBLE_HIGH (x));
2352       else
2353 	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2354       return hash;
2355 
2356     case CONST_FIXED:
2357       hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2358       hash += fixed_hash (CONST_FIXED_VALUE (x));
2359       return hash;
2360 
2361     case CONST_VECTOR:
2362       {
2363 	int units;
2364 	rtx elt;
2365 
2366 	units = const_vector_encoded_nelts (x);
2367 
2368 	for (i = 0; i < units; ++i)
2369 	  {
2370 	    elt = CONST_VECTOR_ENCODED_ELT (x, i);
2371 	    hash += hash_rtx_cb (elt, GET_MODE (elt),
2372                                  do_not_record_p, hash_arg_in_memory_p,
2373                                  have_reg_qty, cb);
2374 	  }
2375 
2376 	return hash;
2377       }
2378 
2379       /* Assume there is only one rtx object for any given label.  */
2380     case LABEL_REF:
2381       /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2382 	 differences and differences between each stage's debugging dumps.  */
2383       hash += (((unsigned int) LABEL_REF << 7)
2384 	       + CODE_LABEL_NUMBER (label_ref_label (x)));
2385       return hash;
2386 
2387     case SYMBOL_REF:
2388       {
2389 	/* Don't hash on the symbol's address to avoid bootstrap differences.
2390 	   Different hash values may cause expressions to be recorded in
2391 	   different orders and thus different registers to be used in the
2392 	   final assembler.  This also avoids differences in the dump files
2393 	   between various stages.  */
2394 	unsigned int h = 0;
2395 	const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2396 
2397 	while (*p)
2398 	  h += (h << 7) + *p++; /* ??? revisit */
2399 
2400 	hash += ((unsigned int) SYMBOL_REF << 7) + h;
2401 	return hash;
2402       }
2403 
2404     case MEM:
2405       /* We don't record if marked volatile or if BLKmode since we don't
2406 	 know the size of the move.  */
2407       if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2408 	{
2409 	  *do_not_record_p = 1;
2410 	  return 0;
2411 	}
2412       if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2413 	*hash_arg_in_memory_p = 1;
2414 
2415       /* Now that we have already found this special case,
2416 	 might as well speed it up as much as possible.  */
2417       hash += (unsigned) MEM;
2418       x = XEXP (x, 0);
2419       goto repeat;
2420 
2421     case USE:
2422       /* A USE that mentions non-volatile memory needs special
2423 	 handling since the MEM may be BLKmode which normally
2424 	 prevents an entry from being made.  Pure calls are
2425 	 marked by a USE which mentions BLKmode memory.
2426 	 See calls.c:emit_call_1.  */
2427       if (MEM_P (XEXP (x, 0))
2428 	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
2429 	{
2430 	  hash += (unsigned) USE;
2431 	  x = XEXP (x, 0);
2432 
2433 	  if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2434 	    *hash_arg_in_memory_p = 1;
2435 
2436 	  /* Now that we have already found this special case,
2437 	     might as well speed it up as much as possible.  */
2438 	  hash += (unsigned) MEM;
2439 	  x = XEXP (x, 0);
2440 	  goto repeat;
2441 	}
2442       break;
2443 
2444     case PRE_DEC:
2445     case PRE_INC:
2446     case POST_DEC:
2447     case POST_INC:
2448     case PRE_MODIFY:
2449     case POST_MODIFY:
2450     case PC:
2451     case CC0:
2452     case CALL:
2453     case UNSPEC_VOLATILE:
2454       if (do_not_record_p) {
2455         *do_not_record_p = 1;
2456         return 0;
2457       }
2458       else
2459         return hash;
2460       break;
2461 
2462     case ASM_OPERANDS:
2463       if (do_not_record_p && MEM_VOLATILE_P (x))
2464 	{
2465 	  *do_not_record_p = 1;
2466 	  return 0;
2467 	}
2468       else
2469 	{
2470 	  /* We don't want to take the filename and line into account.  */
2471 	  hash += (unsigned) code + (unsigned) GET_MODE (x)
2472 	    + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2473 	    + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2474 	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2475 
2476 	  if (ASM_OPERANDS_INPUT_LENGTH (x))
2477 	    {
2478 	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2479 		{
2480 		  hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
2481                                         GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2482                                         do_not_record_p, hash_arg_in_memory_p,
2483                                         have_reg_qty, cb)
2484 			   + hash_rtx_string
2485                            (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2486 		}
2487 
2488 	      hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2489 	      x = ASM_OPERANDS_INPUT (x, 0);
2490 	      mode = GET_MODE (x);
2491 	      goto repeat;
2492 	    }
2493 
2494 	  return hash;
2495 	}
2496       break;
2497 
2498     default:
2499       break;
2500     }
2501 
2502   i = GET_RTX_LENGTH (code) - 1;
2503   hash += (unsigned) code + (unsigned) GET_MODE (x);
2504   fmt = GET_RTX_FORMAT (code);
2505   for (; i >= 0; i--)
2506     {
2507       switch (fmt[i])
2508 	{
2509 	case 'e':
2510 	  /* If we are about to do the last recursive call
2511 	     needed at this level, change it into iteration.
2512 	     This function  is called enough to be worth it.  */
2513 	     This function is called enough to be worth it.  */
2514 	    {
2515 	      x = XEXP (x, i);
2516 	      goto repeat;
2517 	    }
2518 
2519 	  hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
2520                                hash_arg_in_memory_p,
2521                                have_reg_qty, cb);
2522 	  break;
2523 
2524 	case 'E':
2525 	  for (j = 0; j < XVECLEN (x, i); j++)
2526 	    hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
2527                                  hash_arg_in_memory_p,
2528                                  have_reg_qty, cb);
2529 	  break;
2530 
2531 	case 's':
2532 	  hash += hash_rtx_string (XSTR (x, i));
2533 	  break;
2534 
2535 	case 'i':
2536 	  hash += (unsigned int) XINT (x, i);
2537 	  break;
2538 
2539 	case 'p':
2540 	  hash += constant_lower_bound (SUBREG_BYTE (x));
2541 	  break;
2542 
2543 	case '0': case 't':
2544 	  /* Unused.  */
2545 	  break;
2546 
2547 	default:
2548 	  gcc_unreachable ();
2549 	}
2550     }
2551 
2552   return hash;
2553 }
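
/* The `repeat' label above is hand-written tail-call elimination.  The
   natural recursion for the last operand,

     return hash + hash_rtx_cb (XEXP (x, 0), mode, ...);

   is not a true tail call because HASH keeps accumulating, so X is
   overwritten instead and control jumps back to `repeat'.  */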
2554 
2555 /* Hash an rtx.  We are careful to make sure the value is never negative.
2556    Equivalent registers hash identically.
2557    MODE is used in hashing for CONST_INTs only;
2558    otherwise the mode of X is used.
2559 
2560    Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2561 
2562    If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2563    a MEM rtx which does not have the MEM_READONLY_P flag set.
2564 
2565    Note that cse_insn knows that the hash code of a MEM expression
2566    is just (int) MEM plus the hash code of the address.  */
2567 
2568 unsigned
2569 hash_rtx (const_rtx x, machine_mode mode, int *do_not_record_p,
2570 	  int *hash_arg_in_memory_p, bool have_reg_qty)
2571 {
2572   return hash_rtx_cb (x, mode, do_not_record_p,
2573                       hash_arg_in_memory_p, have_reg_qty, NULL);
2574 }
2575 
2576 /* Hash an rtx X for cse via hash_rtx.
2577    Stores 1 in do_not_record if any subexpression is volatile.
2578    Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2579    does not have the MEM_READONLY_P flag set.  */
2580 
2581 static inline unsigned
2582 canon_hash (rtx x, machine_mode mode)
2583 {
2584   return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2585 }
2586 
2587 /* Like canon_hash but with no side effects, i.e. do_not_record
2588    and hash_arg_in_memory are not changed.  */
2589 
2590 static inline unsigned
2591 safe_hash (rtx x, machine_mode mode)
2592 {
2593   int dummy_do_not_record;
2594   return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2595 }
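
/* The split between the two wrappers, in sketch form: canon_hash feeds
   the file-level do_not_record and hash_arg_in_memory flags and is used
   when recording an expression, while safe_hash leaves them untouched
   and backs pure queries such as

     p = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

   (HASH and SAFE_HASH are presumably the masked bucket-index wrappers
   around these two functions defined earlier in this file).  */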
2596 
2597 /* Return 1 iff X and Y would canonicalize into the same thing,
2598    without actually constructing the canonicalization of either one.
2599    If VALIDATE is nonzero,
2600    we assume X is an expression being processed from the rtl
2601    and Y was found in the hash table.  We check register refs
2602    in Y for being marked as valid.
2603 
2604    If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */
2605 
2606 int
2607 exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
2608 {
2609   int i, j;
2610   enum rtx_code code;
2611   const char *fmt;
2612 
2613   /* Note: it is incorrect to assume an expression is equivalent to itself
2614      if VALIDATE is nonzero.  */
2615   if (x == y && !validate)
2616     return 1;
2617 
2618   if (x == 0 || y == 0)
2619     return x == y;
2620 
2621   code = GET_CODE (x);
2622   if (code != GET_CODE (y))
2623     return 0;
2624 
2625   /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2626   if (GET_MODE (x) != GET_MODE (y))
2627     return 0;
2628 
2629   /* MEMs referring to different address space are not equivalent.  */
2630   if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2631     return 0;
2632 
2633   switch (code)
2634     {
2635     case PC:
2636     case CC0:
2637     CASE_CONST_UNIQUE:
2638       return x == y;
2639 
2640     case CONST_VECTOR:
2641       if (!same_vector_encodings_p (x, y))
2642 	return false;
2643       break;
2644 
2645     case LABEL_REF:
2646       return label_ref_label (x) == label_ref_label (y);
2647 
2648     case SYMBOL_REF:
2649       return XSTR (x, 0) == XSTR (y, 0);
2650 
2651     case REG:
2652       if (for_gcse)
2653 	return REGNO (x) == REGNO (y);
2654       else
2655 	{
2656 	  unsigned int regno = REGNO (y);
2657 	  unsigned int i;
2658 	  unsigned int endregno = END_REGNO (y);
2659 
2660 	  /* If the quantities are not the same, the expressions are not
2661 	     equivalent.  If they are and we are not to validate, they
2662 	     are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2663 
2664 	  if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2665 	    return 0;
2666 
2667 	  if (! validate)
2668 	    return 1;
2669 
2670 	  for (i = regno; i < endregno; i++)
2671 	    if (REG_IN_TABLE (i) != REG_TICK (i))
2672 	      return 0;
2673 
2674 	  return 1;
2675 	}
2676 
2677     case MEM:
2678       if (for_gcse)
2679 	{
2680 	  /* A volatile mem should not be considered equivalent to any
2681 	     other.  */
2682 	  if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2683 	    return 0;
2684 
2685 	  /* Can't merge two expressions in different alias sets, since we
2686 	     can decide that the expression is transparent in a block when
2687 	     it isn't, due to it being set with the different alias set.
2688 
2689 	     Also, can't merge two expressions with different MEM_ATTRS.
2690 	     They could e.g. be two different entities allocated into the
2691 	     same space on the stack (see e.g. PR25130).  In that case, the
2692 	     MEM addresses can be the same, even though the two MEMs are
2693 	     absolutely not equivalent.
2694 
2695 	     But because really all MEM attributes should be the same for
2696 	     equivalent MEMs, we just use the invariant that MEMs that have
2697 	     the same attributes share the same mem_attrs data structure.  */
2698 	  if (!mem_attrs_eq_p (MEM_ATTRS (x), MEM_ATTRS (y)))
2699 	    return 0;
2700 
2701 	  /* If we are handling exceptions, we cannot consider two expressions
2702 	     with different trapping status as equivalent, because simple_mem
2703 	     might accept one and reject the other.  */
2704 	  if (cfun->can_throw_non_call_exceptions
2705 	      && (MEM_NOTRAP_P (x) != MEM_NOTRAP_P (y)))
2706 	    return 0;
2707 	}
2708       break;
2709 
2710     /*  For commutative operations, check both orders.  */
2711     case PLUS:
2712     case MULT:
2713     case AND:
2714     case IOR:
2715     case XOR:
2716     case NE:
2717     case EQ:
2718       return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2719 			     validate, for_gcse)
2720 	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2721 				validate, for_gcse))
2722 	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2723 				validate, for_gcse)
2724 		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2725 				   validate, for_gcse)));
2726 
2727     case ASM_OPERANDS:
2728       /* We don't use the generic code below because we want to
2729 	 disregard filename and line numbers.  */
2730 
2731       /* A volatile asm isn't equivalent to any other.  */
2732       if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2733 	return 0;
2734 
2735       if (GET_MODE (x) != GET_MODE (y)
2736 	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2737 	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2738 		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2739 	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2740 	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2741 	return 0;
2742 
2743       if (ASM_OPERANDS_INPUT_LENGTH (x))
2744 	{
2745 	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2746 	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2747 			       ASM_OPERANDS_INPUT (y, i),
2748 			       validate, for_gcse)
2749 		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2750 			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2751 	      return 0;
2752 	}
2753 
2754       return 1;
2755 
2756     default:
2757       break;
2758     }
2759 
2760   /* Compare the elements.  If any pair of corresponding elements
2761      fail to match, return 0 for the whole thing.  */
2762 
2763   fmt = GET_RTX_FORMAT (code);
2764   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2765     {
2766       switch (fmt[i])
2767 	{
2768 	case 'e':
2769 	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2770 			      validate, for_gcse))
2771 	    return 0;
2772 	  break;
2773 
2774 	case 'E':
2775 	  if (XVECLEN (x, i) != XVECLEN (y, i))
2776 	    return 0;
2777 	  for (j = 0; j < XVECLEN (x, i); j++)
2778 	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2779 				validate, for_gcse))
2780 	      return 0;
2781 	  break;
2782 
2783 	case 's':
2784 	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2785 	    return 0;
2786 	  break;
2787 
2788 	case 'i':
2789 	  if (XINT (x, i) != XINT (y, i))
2790 	    return 0;
2791 	  break;
2792 
2793 	case 'w':
2794 	  if (XWINT (x, i) != XWINT (y, i))
2795 	    return 0;
2796 	  break;
2797 
2798 	case 'p':
2799 	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
2800 	    return 0;
2801 	  break;
2802 
2803 	case '0':
2804 	case 't':
2805 	  break;
2806 
2807 	default:
2808 	  gcc_unreachable ();
2809 	}
2810     }
2811 
2812   return 1;
2813 }
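
/* For the commutative codes handled above, both operand orders are
   tried, so e.g. (plus:SI a b) and (plus:SI b a) are recognized as
   equivalent:

     exp_equiv_p (gen_rtx_PLUS (SImode, a, b),
		  gen_rtx_PLUS (SImode, b, a), 0, false)

   returns 1 whenever A and B compare equal to themselves.  */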
2814 
2815 /* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
2816    the result if necessary.  INSN is as for canon_reg.  */
2817 
2818 static void
2819 validate_canon_reg (rtx *xloc, rtx_insn *insn)
2820 {
2821   if (*xloc)
2822     {
2823       rtx new_rtx = canon_reg (*xloc, insn);
2824 
2825       /* If replacing pseudo with hard reg or vice versa, ensure the
2826          insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2827       gcc_assert (insn && new_rtx);
2828       validate_change (insn, xloc, new_rtx, 1);
2829     }
2830 }
2831 
2832 /* Canonicalize an expression:
2833    replace each register reference inside it
2834    with the "oldest" equivalent register.
2835 
2836    If INSN is nonzero validate_change is used to ensure that INSN remains valid
2837    after we make our substitution.  The calls are made with IN_GROUP nonzero
2838    so apply_change_group must be called upon the outermost return from this
2839    function (unless INSN is zero).  The result of apply_change_group can
2840    generally be discarded since the changes we are making are optional.  */
2841 
2842 static rtx
2843 canon_reg (rtx x, rtx_insn *insn)
2844 {
2845   int i;
2846   enum rtx_code code;
2847   const char *fmt;
2848 
2849   if (x == 0)
2850     return x;
2851 
2852   code = GET_CODE (x);
2853   switch (code)
2854     {
2855     case PC:
2856     case CC0:
2857     case CONST:
2858     CASE_CONST_ANY:
2859     case SYMBOL_REF:
2860     case LABEL_REF:
2861     case ADDR_VEC:
2862     case ADDR_DIFF_VEC:
2863       return x;
2864 
2865     case REG:
2866       {
2867 	int first;
2868 	int q;
2869 	struct qty_table_elem *ent;
2870 
2871 	/* Never replace a hard reg, because hard regs can appear
2872 	   in more than one machine mode, and we must preserve the mode
2873 	   of each occurrence.  Also, some hard regs appear in
2874 	   MEMs that are shared and mustn't be altered.  Don't try to
2875 	   replace any reg that maps to a reg of class NO_REGS.  */
2876 	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2877 	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2878 	  return x;
2879 
2880 	q = REG_QTY (REGNO (x));
2881 	ent = &qty_table[q];
2882 	first = ent->first_reg;
2883 	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2884 		: REGNO_REG_CLASS (first) == NO_REGS ? x
2885 		: gen_rtx_REG (ent->mode, first));
2886       }
2887 
2888     default:
2889       break;
2890     }
2891 
2892   fmt = GET_RTX_FORMAT (code);
2893   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2894     {
2895       int j;
2896 
2897       if (fmt[i] == 'e')
2898 	validate_canon_reg (&XEXP (x, i), insn);
2899       else if (fmt[i] == 'E')
2900 	for (j = 0; j < XVECLEN (x, i); j++)
2901 	  validate_canon_reg (&XVECEXP (x, i, j), insn);
2902     }
2903 
2904   return x;
2905 }
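
/* A sketch of the calling protocol described above: replacements are
   queued with IN_GROUP nonzero, so the outermost caller commits them as
   one group:

     x = canon_reg (x, insn);
     if (insn)
       apply_change_group ();

   The group's result can usually be ignored, since these replacements
   are an optional improvement rather than a correctness requirement.  */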
2906 
2907 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2908    operation (EQ, NE, GT, etc.), follow it back through the hash table to
2909    find what values are being compared.
2910 
2911    *PARG1 and *PARG2 are updated to contain the rtx representing the values
2912    actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
2913    was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2914    compared to produce cc0.
2915 
2916    The return value is the comparison operator and is either CODE itself
2917    or the code corresponding to the inverse of the comparison.  */
2918 
2919 static enum rtx_code
2920 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
2921 		      machine_mode *pmode1, machine_mode *pmode2)
2922 {
2923   rtx arg1, arg2;
2924   hash_set<rtx> *visited = NULL;
2925   /* Set nonzero when we find something of interest.  */
2926   rtx x = NULL;
2927 
2928   arg1 = *parg1, arg2 = *parg2;
2929 
2930   /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
2931 
2932   while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2933     {
2934       int reverse_code = 0;
2935       struct table_elt *p = 0;
2936 
2937       /* Remember state from previous iteration.  */
2938       if (x)
2939 	{
2940 	  if (!visited)
2941 	    visited = new hash_set<rtx>;
2942 	  visited->add (x);
2943 	  x = 0;
2944 	}
2945 
2946       /* If arg1 is a COMPARE, extract the comparison arguments from it.
2947 	 On machines with CC0, this is the only case that can occur, since
2948 	 fold_rtx will return the COMPARE or item being compared with zero
2949 	 when given CC0.  */
2950 
2951       if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2952 	x = arg1;
2953 
2954       /* If ARG1 is a comparison operator and CODE is testing for
2955 	 STORE_FLAG_VALUE, get the inner arguments.  */
2956 
2957       else if (COMPARISON_P (arg1))
2958 	{
2959 #ifdef FLOAT_STORE_FLAG_VALUE
2960 	  REAL_VALUE_TYPE fsfv;
2961 #endif
2962 
2963 	  if (code == NE
2964 	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2965 		  && code == LT && STORE_FLAG_VALUE == -1)
2966 #ifdef FLOAT_STORE_FLAG_VALUE
2967 	      || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
2968 		  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2969 		      REAL_VALUE_NEGATIVE (fsfv)))
2970 #endif
2971 	      )
2972 	    x = arg1;
2973 	  else if (code == EQ
2974 		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2975 		       && code == GE && STORE_FLAG_VALUE == -1)
2976 #ifdef FLOAT_STORE_FLAG_VALUE
2977 		   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
2978 		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2979 			   REAL_VALUE_NEGATIVE (fsfv)))
2980 #endif
2981 		   )
2982 	    x = arg1, reverse_code = 1;
2983 	}
2984 
2985       /* ??? We could also check for
2986 
2987 	 (ne (and (eq (...) (const_int 1))) (const_int 0))
2988 
2989 	 and related forms, but let's wait until we see them occurring.  */
2990 
2991       if (x == 0)
2992 	/* Look up ARG1 in the hash table and see if it has an equivalence
2993 	   that lets us see what is being compared.  */
2994 	p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
2995       if (p)
2996 	{
2997 	  p = p->first_same_value;
2998 
2999 	  /* If what we compare is already known to be constant, that is as
3000 	     good as it gets.
3001 	     We need to break the loop in this case, because otherwise we
3002 	     can have an infinite loop when looking at a reg that is known
3003 	     to be a constant which is the same as a comparison of a reg
3004 	     against zero which appears later in the insn stream, which in
3005 	     turn is constant and the same as the comparison of the first reg
3006 	     against zero...  */
3007 	  if (p->is_const)
3008 	    break;
3009 	}
3010 
3011       for (; p; p = p->next_same_value)
3012 	{
3013 	  machine_mode inner_mode = GET_MODE (p->exp);
3014 #ifdef FLOAT_STORE_FLAG_VALUE
3015 	  REAL_VALUE_TYPE fsfv;
3016 #endif
3017 
3018 	  /* If the entry isn't valid, skip it.  */
3019 	  if (! exp_equiv_p (p->exp, p->exp, 1, false))
3020 	    continue;
3021 
3022 	  /* If it's a comparison we've used before, skip it.  */
3023 	  if (visited && visited->contains (p->exp))
3024 	    continue;
3025 
3026 	  if (GET_CODE (p->exp) == COMPARE
3027 	      /* Another possibility is that this machine has a compare insn
3028 		 that includes the comparison code.  In that case, ARG1 would
3029 		 be equivalent to a comparison operation that would set ARG1 to
3030 		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
3031 		 ORIG_CODE is the actual comparison being done; if it is an EQ,
3032 		 we must reverse ORIG_CODE.  On machine with a negative value
3033 		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3034 	      || ((code == NE
3035 		   || (code == LT
3036 		       && val_signbit_known_set_p (inner_mode,
3037 						   STORE_FLAG_VALUE))
3038 #ifdef FLOAT_STORE_FLAG_VALUE
3039 		   || (code == LT
3040 		       && SCALAR_FLOAT_MODE_P (inner_mode)
3041 		       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3042 			   REAL_VALUE_NEGATIVE (fsfv)))
3043 #endif
3044 		   )
3045 		  && COMPARISON_P (p->exp)))
3046 	    {
3047 	      x = p->exp;
3048 	      break;
3049 	    }
3050 	  else if ((code == EQ
3051 		    || (code == GE
3052 			&& val_signbit_known_set_p (inner_mode,
3053 						    STORE_FLAG_VALUE))
3054 #ifdef FLOAT_STORE_FLAG_VALUE
3055 		    || (code == GE
3056 			&& SCALAR_FLOAT_MODE_P (inner_mode)
3057 			&& (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3058 			    REAL_VALUE_NEGATIVE (fsfv)))
3059 #endif
3060 		    )
3061 		   && COMPARISON_P (p->exp))
3062 	    {
3063 	      reverse_code = 1;
3064 	      x = p->exp;
3065 	      break;
3066 	    }
3067 
3068 	  /* If this is a non-trapping address, e.g. fp + constant, the
3069 	     equivalent is a better operand since it may let us predict
3070 	     the value of the comparison.  */
3071 	  else if (!rtx_addr_can_trap_p (p->exp))
3072 	    {
3073 	      arg1 = p->exp;
3074 	      continue;
3075 	    }
3076 	}
3077 
3078       /* If we didn't find a useful equivalence for ARG1, we are done.
3079 	 Otherwise, set up for the next iteration.  */
3080       if (x == 0)
3081 	break;
3082 
3083       /* If we need to reverse the comparison, make sure that is
3084 	 possible -- we can't necessarily infer the value of GE from LT
3085 	 with floating-point operands.  */
3086       if (reverse_code)
3087 	{
3088 	  enum rtx_code reversed = reversed_comparison_code (x, NULL);
3089 	  if (reversed == UNKNOWN)
3090 	    break;
3091 	  else
3092 	    code = reversed;
3093 	}
3094       else if (COMPARISON_P (x))
3095 	code = GET_CODE (x);
3096       arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3097     }
3098 
3099   /* Return our results.  Return the modes from before fold_rtx
3100      because fold_rtx might produce const_int, and then it's too late.  */
3101   *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3102   *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3103 
3104   if (visited)
3105     delete visited;
3106   return code;
3107 }
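
/* A sketch of the intended use: given a test such as
   (ne (cc0) (const_int 0)),

     code = find_comparison_args (code, &arg1, &arg2, &mode1, &mode2);

   returns with ARG1 and ARG2 holding the values actually compared and
   with CODE possibly replaced by the reversed comparison when the
   equivalence was found through an EQ-style entry.  */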
3108 
3109 /* If X is a nontrivial arithmetic operation on an argument for which
3110    a constant value can be determined, return the result of operating
3111    on that value, as a constant.  Otherwise, return X, possibly with
3112    one or more operands changed to a forward-propagated constant.
3113 
3114    If X is a register whose contents are known, we do NOT return
3115    those contents here; equiv_constant is called to perform that task.
3116    For SUBREGs and MEMs, we do that both here and in equiv_constant.
3117 
3118    INSN is the insn that we may be modifying.  If it is 0, make a copy
3119    of X before modifying it.  */
3120 
3121 static rtx
3122 fold_rtx (rtx x, rtx_insn *insn)
3123 {
3124   enum rtx_code code;
3125   machine_mode mode;
3126   const char *fmt;
3127   int i;
3128   rtx new_rtx = 0;
3129   int changed = 0;
3130   poly_int64 xval;
3131 
3132   /* Operands of X.  */
3133   /* Workaround -Wmaybe-uninitialized false positive during
3134      profiledbootstrap by initializing them.  */
3135   rtx folded_arg0 = NULL_RTX;
3136   rtx folded_arg1 = NULL_RTX;
3137 
3138   /* Constant equivalents of first three operands of X;
3139      0 when no such equivalent is known.  */
3140   rtx const_arg0;
3141   rtx const_arg1;
3142   rtx const_arg2;
3143 
3144   /* The mode of the first operand of X.  We need this for sign and zero
3145      extends.  */
3146   machine_mode mode_arg0;
3147 
3148   if (x == 0)
3149     return x;
3150 
3151   /* Try to perform some initial simplifications on X.  */
3152   code = GET_CODE (x);
3153   switch (code)
3154     {
3155     case MEM:
3156     case SUBREG:
3157     /* The first operand of a SIGN/ZERO_EXTRACT has a different meaning
3158        than it would in other contexts.  Basically its mode does not
3159        signify the size of the object read.  That information is carried
3160        by the size operand.  If we happen to have a MEM of the appropriate
3161        mode in our tables with a constant value we could simplify the
3162        extraction incorrectly if we allowed substitution of that value
3163        for the MEM.   */
3164     case ZERO_EXTRACT:
3165     case SIGN_EXTRACT:
3166       if ((new_rtx = equiv_constant (x)) != NULL_RTX)
3167         return new_rtx;
3168       return x;
3169 
3170     case CONST:
3171     CASE_CONST_ANY:
3172     case SYMBOL_REF:
3173     case LABEL_REF:
3174     case REG:
3175     case PC:
3176       /* No use simplifying an EXPR_LIST,
3177 	 since EXPR_LISTs are used only for lists of args
3178 	 in a function call's REG_EQUAL note.  */
3179     case EXPR_LIST:
3180       return x;
3181 
3182     case CC0:
3183       return prev_insn_cc0;
3184 
3185     case ASM_OPERANDS:
3186       if (insn)
3187 	{
3188 	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3189 	    validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3190 			     fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3191 	}
3192       return x;
3193 
3194     case CALL:
3195       if (NO_FUNCTION_CSE && CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3196 	return x;
3197       break;
3198 
3199     /* Anything else goes through the loop below.  */
3200     default:
3201       break;
3202     }
3203 
3204   mode = GET_MODE (x);
3205   const_arg0 = 0;
3206   const_arg1 = 0;
3207   const_arg2 = 0;
3208   mode_arg0 = VOIDmode;
3209 
3210   /* Try folding our operands.
3211      Then see which ones have constant values known.  */
3212 
3213   fmt = GET_RTX_FORMAT (code);
3214   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3215     if (fmt[i] == 'e')
3216       {
3217 	rtx folded_arg = XEXP (x, i), const_arg;
3218 	machine_mode mode_arg = GET_MODE (folded_arg);
3219 
3220 	switch (GET_CODE (folded_arg))
3221 	  {
3222 	  case MEM:
3223 	  case REG:
3224 	  case SUBREG:
3225 	    const_arg = equiv_constant (folded_arg);
3226 	    break;
3227 
3228 	  case CONST:
3229 	  CASE_CONST_ANY:
3230 	  case SYMBOL_REF:
3231 	  case LABEL_REF:
3232 	    const_arg = folded_arg;
3233 	    break;
3234 
3235 	  case CC0:
3236 	    /* The cc0-user and cc0-setter may be in different blocks if
3237 	       the cc0-setter potentially traps.  In that case PREV_INSN_CC0
3238 	       will have been cleared as we exited the block with the
3239 	       setter.
3240 
3241 	       While we could potentially track cc0 in this case, it just
3242 	       doesn't seem to be worth it given that cc0 targets are not
3243 	       terribly common or important these days and trapping math
3244 	       is rarely used.  The combination of those two conditions
3245 	       necessary to trip this situation is exceedingly rare in the
3246 	       real world.  */
3247 	    if (!prev_insn_cc0)
3248 	      {
3249 		const_arg = NULL_RTX;
3250 	      }
3251 	    else
3252 	      {
3253 		folded_arg = prev_insn_cc0;
3254 		mode_arg = prev_insn_cc0_mode;
3255 		const_arg = equiv_constant (folded_arg);
3256 	      }
3257 	    break;
3258 
3259 	  default:
3260 	    folded_arg = fold_rtx (folded_arg, insn);
3261 	    const_arg = equiv_constant (folded_arg);
3262 	    break;
3263 	  }
3264 
3265 	/* For the first three operands, see if the operand
3266 	   is constant or equivalent to a constant.  */
3267 	switch (i)
3268 	  {
3269 	  case 0:
3270 	    folded_arg0 = folded_arg;
3271 	    const_arg0 = const_arg;
3272 	    mode_arg0 = mode_arg;
3273 	    break;
3274 	  case 1:
3275 	    folded_arg1 = folded_arg;
3276 	    const_arg1 = const_arg;
3277 	    break;
3278 	  case 2:
3279 	    const_arg2 = const_arg;
3280 	    break;
3281 	  }
3282 
3283 	/* Pick the least expensive of the argument and an equivalent constant
3284 	   argument.  */
3285 	if (const_arg != 0
3286 	    && const_arg != folded_arg
3287 	    && (COST_IN (const_arg, mode_arg, code, i)
3288 		<= COST_IN (folded_arg, mode_arg, code, i))
3289 
3290 	    /* It's not safe to substitute the operand of a conversion
3291 	       operator with a constant, as the conversion's identity
3292 	       depends upon the mode of its operand.  This optimization
3293 	       is handled by the call to simplify_unary_operation.  */
3294 	    && (GET_RTX_CLASS (code) != RTX_UNARY
3295 		|| GET_MODE (const_arg) == mode_arg0
3296 		|| (code != ZERO_EXTEND
3297 		    && code != SIGN_EXTEND
3298 		    && code != TRUNCATE
3299 		    && code != FLOAT_TRUNCATE
3300 		    && code != FLOAT_EXTEND
3301 		    && code != FLOAT
3302 		    && code != FIX
3303 		    && code != UNSIGNED_FLOAT
3304 		    && code != UNSIGNED_FIX)))
3305 	  folded_arg = const_arg;
3306 
3307 	if (folded_arg == XEXP (x, i))
3308 	  continue;
3309 
3310 	if (insn == NULL_RTX && !changed)
3311 	  x = copy_rtx (x);
3312 	changed = 1;
3313 	validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
3314       }
3315 
3316   if (changed)
3317     {
3318       /* Canonicalize X if necessary, and keep const_argN and folded_argN
3319 	 consistent with the order in X.  */
3320       if (canonicalize_change_group (insn, x))
3321 	{
3322 	  std::swap (const_arg0, const_arg1);
3323 	  std::swap (folded_arg0, folded_arg1);
3324 	}
3325 
3326       apply_change_group ();
3327     }
3328 
3329   /* If X is an arithmetic operation, see if we can simplify it.  */
3330 
3331   switch (GET_RTX_CLASS (code))
3332     {
3333     case RTX_UNARY:
3334       {
3335 	/* We can't simplify extension ops unless we know the
3336 	   original mode.  */
3337 	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3338 	    && mode_arg0 == VOIDmode)
3339 	  break;
3340 
3341 	new_rtx = simplify_unary_operation (code, mode,
3342 					    const_arg0 ? const_arg0 : folded_arg0,
3343 					    mode_arg0);
3344       }
3345       break;
3346 
3347     case RTX_COMPARE:
3348     case RTX_COMM_COMPARE:
3349       /* See what items are actually being compared and set FOLDED_ARG[01]
3350 	 to those values and CODE to the actual comparison code.  If any are
3351 	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
3352 	 do anything if both operands are already known to be constant.  */
3353 
3354       /* ??? Vector mode comparisons are not supported yet.  */
3355       if (VECTOR_MODE_P (mode))
3356 	break;
3357 
3358       if (const_arg0 == 0 || const_arg1 == 0)
3359 	{
3360 	  struct table_elt *p0, *p1;
3361 	  rtx true_rtx, false_rtx;
3362 	  machine_mode mode_arg1;
3363 
3364 	  if (SCALAR_FLOAT_MODE_P (mode))
3365 	    {
3366 #ifdef FLOAT_STORE_FLAG_VALUE
3367 	      true_rtx = (const_double_from_real_value
3368 			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
3369 #else
3370 	      true_rtx = NULL_RTX;
3371 #endif
3372 	      false_rtx = CONST0_RTX (mode);
3373 	    }
3374 	  else
3375 	    {
3376 	      true_rtx = const_true_rtx;
3377 	      false_rtx = const0_rtx;
3378 	    }
3379 
3380 	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3381 				       &mode_arg0, &mode_arg1);
3382 
3383 	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3384 	     what kinds of things are being compared, so we can't do
3385 	     anything with this comparison.  */
3386 
3387 	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3388 	    break;
3389 
3390 	  const_arg0 = equiv_constant (folded_arg0);
3391 	  const_arg1 = equiv_constant (folded_arg1);
3392 
3393 	  /* If we do not now have two constants being compared, see
3394 	     if we can nevertheless deduce some things about the
3395 	     comparison.  */
3396 	  if (const_arg0 == 0 || const_arg1 == 0)
3397 	    {
3398 	      if (const_arg1 != NULL)
3399 		{
3400 		  rtx cheapest_simplification;
3401 		  int cheapest_cost;
3402 		  rtx simp_result;
3403 		  struct table_elt *p;
3404 
3405 		  /* See if we can find an equivalent of folded_arg0
3406 		     that gets us a cheaper expression, possibly a
3407 		     constant through simplifications.  */
3408 		     constant, through simplification.  */
3409 			      mode_arg0);
3410 
3411 		  if (p != NULL)
3412 		    {
3413 		      cheapest_simplification = x;
3414 		      cheapest_cost = COST (x, mode);
3415 
3416 		      for (p = p->first_same_value; p != NULL; p = p->next_same_value)
3417 			{
3418 			  int cost;
3419 
3420 			  /* If the entry isn't valid, skip it.  */
3421 			  if (! exp_equiv_p (p->exp, p->exp, 1, false))
3422 			    continue;
3423 
3424 			  /* Try to simplify using this equivalence.  */
3425 			  simp_result
3426 			    = simplify_relational_operation (code, mode,
3427 							     mode_arg0,
3428 							     p->exp,
3429 							     const_arg1);
3430 
3431 			  if (simp_result == NULL)
3432 			    continue;
3433 
3434 			  cost = COST (simp_result, mode);
3435 			  if (cost < cheapest_cost)
3436 			    {
3437 			      cheapest_cost = cost;
3438 			      cheapest_simplification = simp_result;
3439 			    }
3440 			}
3441 
3442 		      /* If we have a cheaper expression now, use that
3443 			 and try folding it further, from the top.  */
3444 		      if (cheapest_simplification != x)
3445 			return fold_rtx (copy_rtx (cheapest_simplification),
3446 					 insn);
3447 		    }
3448 		}
3449 
3450 	      /* See if the two operands are the same.  */
3451 
3452 	      if ((REG_P (folded_arg0)
3453 		   && REG_P (folded_arg1)
3454 		   && (REG_QTY (REGNO (folded_arg0))
3455 		       == REG_QTY (REGNO (folded_arg1))))
3456 		  || ((p0 = lookup (folded_arg0,
3457 				    SAFE_HASH (folded_arg0, mode_arg0),
3458 				    mode_arg0))
3459 		      && (p1 = lookup (folded_arg1,
3460 				       SAFE_HASH (folded_arg1, mode_arg0),
3461 				       mode_arg0))
3462 		      && p0->first_same_value == p1->first_same_value))
3463 		folded_arg1 = folded_arg0;
3464 
3465 	      /* If FOLDED_ARG0 is a register, see if the comparison we are
3466 		 doing now is either the same as we did before or the reverse
3467 		 (we only check the reverse if not floating-point).  */
3468 	      else if (REG_P (folded_arg0))
3469 		{
3470 		  int qty = REG_QTY (REGNO (folded_arg0));
3471 
3472 		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3473 		    {
3474 		      struct qty_table_elem *ent = &qty_table[qty];
3475 
3476 		      if ((comparison_dominates_p (ent->comparison_code, code)
3477 			   || (! FLOAT_MODE_P (mode_arg0)
3478 			       && comparison_dominates_p (ent->comparison_code,
3479 						          reverse_condition (code))))
3480 			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
3481 			      || (const_arg1
3482 				  && rtx_equal_p (ent->comparison_const,
3483 						  const_arg1))
3484 			      || (REG_P (folded_arg1)
3485 				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3486 			{
3487 			  if (comparison_dominates_p (ent->comparison_code, code))
3488 			    {
3489 			      if (true_rtx)
3490 				return true_rtx;
3491 			      else
3492 				break;
3493 			    }
3494 			  else
3495 			    return false_rtx;
3496 			}
3497 		    }
3498 		}
3499 	    }
3500 	}
3501 
3502       /* If we are comparing against zero, see if the first operand is
3503 	 equivalent to an IOR with a constant.  If so, we may be able to
3504 	 determine the result of this comparison.  */
3505       if (const_arg1 == const0_rtx && !const_arg0)
3506 	{
3507 	  rtx y = lookup_as_function (folded_arg0, IOR);
3508 	  rtx inner_const;
3509 
3510 	  if (y != 0
3511 	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3512 	      && CONST_INT_P (inner_const)
3513 	      && INTVAL (inner_const) != 0)
3514 	    folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
3515 	}
3516 
3517       {
3518 	rtx op0 = const_arg0 ? const_arg0 : copy_rtx (folded_arg0);
3519 	rtx op1 = const_arg1 ? const_arg1 : copy_rtx (folded_arg1);
3520 	new_rtx = simplify_relational_operation (code, mode, mode_arg0,
3521 						 op0, op1);
3522       }
3523       break;
3524 
3525     case RTX_BIN_ARITH:
3526     case RTX_COMM_ARITH:
3527       switch (code)
3528 	{
3529 	case PLUS:
3530 	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
3531 	     with that LABEL_REF as its second operand.  If so, the result is
3532 	     the first operand of that MINUS.  This handles switches with an
3533 	     ADDR_DIFF_VEC table.  */
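	  /* For instance, if folded_arg0 is known to be equivalent to
	     (minus (label_ref L1) (label_ref L2)) and const_arg1 is
	     (label_ref L2), the sum reduces to (label_ref L1).  */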
3534 	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3535 	    {
3536 	      rtx y
3537 		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
3538 		: lookup_as_function (folded_arg0, MINUS);
3539 
3540 	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3541 		  && label_ref_label (XEXP (y, 1)) == label_ref_label (const_arg1))
3542 		return XEXP (y, 0);
3543 
3544 	      /* Now try for a CONST of a MINUS like the above.  */
3545 	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3546 			: lookup_as_function (folded_arg0, CONST))) != 0
3547 		  && GET_CODE (XEXP (y, 0)) == MINUS
3548 		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3549 		  && label_ref_label (XEXP (XEXP (y, 0), 1)) == label_ref_label (const_arg1))
3550 		return XEXP (XEXP (y, 0), 0);
3551 	    }
3552 
3553 	  /* Likewise if the operands are in the other order.  */
3554 	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3555 	    {
3556 	      rtx y
3557 		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
3558 		: lookup_as_function (folded_arg1, MINUS);
3559 
3560 	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3561 		  && label_ref_label (XEXP (y, 1)) == label_ref_label (const_arg0))
3562 		return XEXP (y, 0);
3563 
3564 	      /* Now try for a CONST of a MINUS like the above.  */
3565 	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3566 			: lookup_as_function (folded_arg1, CONST))) != 0
3567 		  && GET_CODE (XEXP (y, 0)) == MINUS
3568 		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3569 		  && label_ref_label (XEXP (XEXP (y, 0), 1)) == label_ref_label (const_arg0))
3570 		return XEXP (XEXP (y, 0), 0);
3571 	    }
3572 
3573 	  /* If second operand is a register equivalent to a negative
3574 	     CONST_INT, see if we can find a register equivalent to the
3575 	     positive constant.  Make a MINUS if so.  Don't do this for
3576 	     a non-negative constant since we might then alternate between
3577 	     choosing positive and negative constants.  Having the positive
3578 	     constant previously-used is the more common case.  Be sure
3579 	     the resulting constant is non-negative; if const_arg1 were
3580 	     the smallest negative number this would overflow: depending
3581 	     on the mode, this would either just be the same value (and
3582 	     hence not save anything) or be incorrect.  */
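	  /* For instance (hypothetical): if (reg:SI 101) is equivalent to
	     (const_int -4) and some (reg:SI 102) is known to hold
	     (const_int 4), then (plus:SI (reg:SI 100) (reg:SI 101)) can
	     become (minus:SI (reg:SI 100) (reg:SI 102)).  */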
3583 	  if (const_arg1 != 0 && CONST_INT_P (const_arg1)
3584 	      && INTVAL (const_arg1) < 0
3585 	      /* This used to test
3586 
3587 	         -INTVAL (const_arg1) >= 0
3588 
3589 		 But the Sun V5.0 compilers mis-compiled that test.  So
3590 		 instead we test for the problematic value in a more direct
3591 		 manner and hope the Sun compilers get it correct.  */
3592 	      && INTVAL (const_arg1) !=
3593 	        (HOST_WIDE_INT_1 << (HOST_BITS_PER_WIDE_INT - 1))
3594 	      && REG_P (folded_arg1))
3595 	    {
3596 	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
3597 	      struct table_elt *p
3598 		= lookup (new_const, SAFE_HASH (new_const, mode), mode);
3599 
3600 	      if (p)
3601 		for (p = p->first_same_value; p; p = p->next_same_value)
3602 		  if (REG_P (p->exp))
3603 		    return simplify_gen_binary (MINUS, mode, folded_arg0,
3604 						canon_reg (p->exp, NULL));
3605 	    }
3606 	  goto from_plus;
3607 
3608 	case MINUS:
3609 	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3610 	     If so, produce (PLUS Z C2-C).  */
3611 	  if (const_arg1 != 0 && poly_int_rtx_p (const_arg1, &xval))
3612 	    {
3613 	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3614 	      if (y && poly_int_rtx_p (XEXP (y, 1)))
3615 		return fold_rtx (plus_constant (mode, copy_rtx (y), -xval),
3616 				 NULL);
3617 	    }
3618 
3619 	  /* Fall through.  */
3620 
3621 	from_plus:
3622 	case SMIN:    case SMAX:      case UMIN:    case UMAX:
3623 	case IOR:     case AND:       case XOR:
3624 	case MULT:
3625 	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
3626 	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3627 	     is known to be of similar form, we may be able to replace the
3628 	     operation with a combined operation.  This may eliminate the
3629 	     intermediate operation if every use is simplified in this way.
3630 	     Note that the similar optimization done by combine.c only works
3631 	     if the intermediate operation's result has only one reference.  */
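	  /* For instance (hypothetical): if REG is known equivalent to
	     (plus Z (const_int 4)), then (plus REG (const_int 3)) can be
	     rewritten as (plus Z (const_int 7)), possibly leaving the
	     intermediate addition dead.  */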
3632 
3633 	  if (REG_P (folded_arg0)
3634 	      && const_arg1 && CONST_INT_P (const_arg1))
3635 	    {
3636 	      int is_shift
3637 		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3638 	      rtx y, inner_const, new_const;
3639 	      rtx canon_const_arg1 = const_arg1;
3640 	      enum rtx_code associate_code;
3641 
3642 	      if (is_shift
3643 		  && (INTVAL (const_arg1) >= GET_MODE_UNIT_PRECISION (mode)
3644 		      || INTVAL (const_arg1) < 0))
3645 		{
3646 		  if (SHIFT_COUNT_TRUNCATED)
3647 		    canon_const_arg1 = gen_int_shift_amount
3648 		      (mode, (INTVAL (const_arg1)
3649 			      & (GET_MODE_UNIT_BITSIZE (mode) - 1)));
3650 		  else
3651 		    break;
3652 		}
3653 
3654 	      y = lookup_as_function (folded_arg0, code);
3655 	      if (y == 0)
3656 		break;
3657 
3658 	      /* If we have compiled a statement like
3659 		 "if (x == (x & mask1))", and now are looking at
3660 		 "x & mask2", we will have a case where the first operand
3661 		 of Y is the same as our first operand.  Unless we detect
3662 		 this case, an infinite loop will result.  */
3663 	      if (XEXP (y, 0) == folded_arg0)
3664 		break;
3665 
3666 	      inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
3667 	      if (!inner_const || !CONST_INT_P (inner_const))
3668 		break;
3669 
3670 	      /* Don't associate these operations if they are a PLUS with the
3671 		 same constant and it is a power of two.  These might be doable
3672 		 with a pre- or post-increment.  Similarly for two subtracts of
3673 		 identical powers of two with post-decrement.  */
3674 
3675 	      if (code == PLUS && const_arg1 == inner_const
3676 		  && ((HAVE_PRE_INCREMENT
3677 			  && pow2p_hwi (INTVAL (const_arg1)))
3678 		      || (HAVE_POST_INCREMENT
3679 			  && pow2p_hwi (INTVAL (const_arg1)))
3680 		      || (HAVE_PRE_DECREMENT
3681 			  && pow2p_hwi (- INTVAL (const_arg1)))
3682 		      || (HAVE_POST_DECREMENT
3683 			  && pow2p_hwi (- INTVAL (const_arg1)))))
3684 		break;
3685 
3686 	      /* ??? Vector mode shifts by scalar
3687 		 shift operand are not supported yet.  */
3688 	      if (is_shift && VECTOR_MODE_P (mode))
3689                 break;
3690 
3691 	      if (is_shift
3692 		  && (INTVAL (inner_const) >= GET_MODE_UNIT_PRECISION (mode)
3693 		      || INTVAL (inner_const) < 0))
3694 		{
3695 		  if (SHIFT_COUNT_TRUNCATED)
3696 		    inner_const = gen_int_shift_amount
3697 		      (mode, (INTVAL (inner_const)
3698 			      & (GET_MODE_UNIT_BITSIZE (mode) - 1)));
3699 		  else
3700 		    break;
3701 		}
3702 
3703 	      /* Compute the code used to compose the constants.  For example,
3704 		 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */
3705 
3706 	      associate_code = (is_shift || code == MINUS ? PLUS : code);
3707 
3708 	      new_const = simplify_binary_operation (associate_code, mode,
3709 						     canon_const_arg1,
3710 						     inner_const);
3711 
3712 	      if (new_const == 0)
3713 		break;
3714 
3715 	      /* If we are associating shift operations, don't let this
3716 		 produce a shift of the size of the object or larger.
3717 		 This could occur when we follow a sign-extend by a right
3718 		 shift on a machine that does a sign-extend as a pair
3719 		 of shifts.  */
3720 
3721 	      if (is_shift
3722 		  && CONST_INT_P (new_const)
3723 		  && INTVAL (new_const) >= GET_MODE_UNIT_PRECISION (mode))
3724 		{
3725 		  /* As an exception, we can turn an ASHIFTRT of this
3726 		     form into a shift of the number of bits - 1.  */
3727 		  if (code == ASHIFTRT)
3728 		    new_const = gen_int_shift_amount
3729 		      (mode, GET_MODE_UNIT_BITSIZE (mode) - 1);
3730 		  else if (!side_effects_p (XEXP (y, 0)))
3731 		    return CONST0_RTX (mode);
3732 		  else
3733 		    break;
3734 		}
3735 
3736 	      y = copy_rtx (XEXP (y, 0));
3737 
3738 	      /* If Y contains our first operand (the most common way this
3739 		 can happen is if Y is a MEM), we would go into an infinite
3740 		 loop if we tried to fold it.  So don't in that case.  */
3741 
3742 	      if (! reg_mentioned_p (folded_arg0, y))
3743 		y = fold_rtx (y, insn);
3744 
3745 	      return simplify_gen_binary (code, mode, y, new_const);
3746 	    }
3747 	  break;
3748 
3749 	case DIV:       case UDIV:
3750 	  /* ??? The associative optimization performed immediately above is
3751 	     also possible for DIV and UDIV using associate_code of MULT.
3752 	     However, we would need extra code to verify that the
3753 	     multiplication does not overflow, that is, there is no overflow
3754 	     in the calculation of new_const.  */
3755 	  break;
3756 
3757 	default:
3758 	  break;
3759 	}
3760 
3761       new_rtx = simplify_binary_operation (code, mode,
3762 				       const_arg0 ? const_arg0 : folded_arg0,
3763 				       const_arg1 ? const_arg1 : folded_arg1);
3764       break;
3765 
3766     case RTX_OBJ:
3767       /* (lo_sum (high X) X) is simply X.  */
3768       if (code == LO_SUM && const_arg0 != 0
3769 	  && GET_CODE (const_arg0) == HIGH
3770 	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
3771 	return const_arg1;
3772       break;
3773 
3774     case RTX_TERNARY:
3775     case RTX_BITFIELD_OPS:
3776       new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
3777 					const_arg0 ? const_arg0 : folded_arg0,
3778 					const_arg1 ? const_arg1 : folded_arg1,
3779 					const_arg2 ? const_arg2 : XEXP (x, 2));
3780       break;
3781 
3782     default:
3783       break;
3784     }
3785 
3786   return new_rtx ? new_rtx : x;
3787 }
3788 
3789 /* Return a constant value currently equivalent to X.
3790    Return 0 if we don't know one.  */
3791 
3792 static rtx
3793 equiv_constant (rtx x)
3794 {
3795   if (REG_P (x)
3796       && REGNO_QTY_VALID_P (REGNO (x)))
3797     {
3798       int x_q = REG_QTY (REGNO (x));
3799       struct qty_table_elem *x_ent = &qty_table[x_q];
3800 
3801       if (x_ent->const_rtx)
3802 	x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
3803     }
3804 
3805   if (x == 0 || CONSTANT_P (x))
3806     return x;
3807 
3808   if (GET_CODE (x) == SUBREG)
3809     {
3810       machine_mode mode = GET_MODE (x);
3811       machine_mode imode = GET_MODE (SUBREG_REG (x));
3812       rtx new_rtx;
3813 
3814       /* See if we previously assigned a constant value to this SUBREG.  */
3815       if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
3816 	  || (new_rtx = lookup_as_function (x, CONST_WIDE_INT)) != 0
3817 	  || (NUM_POLY_INT_COEFFS > 1
3818 	      && (new_rtx = lookup_as_function (x, CONST_POLY_INT)) != 0)
3819           || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
3820           || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
3821         return new_rtx;
3822 
3823       /* If we didn't and if doing so makes sense, see if we previously
3824 	 assigned a constant value to the enclosing word mode SUBREG.  */
3825       if (known_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD)
3826 	  && known_lt (UNITS_PER_WORD, GET_MODE_SIZE (imode)))
3827 	{
3828 	  poly_int64 byte = (SUBREG_BYTE (x)
3829 			     - subreg_lowpart_offset (mode, word_mode));
3830 	  if (known_ge (byte, 0) && multiple_p (byte, UNITS_PER_WORD))
3831 	    {
3832 	      rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
3833 	      new_rtx = lookup_as_function (y, CONST_INT);
3834 	      if (new_rtx)
3835 		return gen_lowpart (mode, new_rtx);
3836 	    }
3837 	}
3838 
3839       /* Otherwise see if we already have a constant for the inner REG,
3840 	 and if that is enough to calculate an equivalent constant for
3841 	 the subreg.  Note that the upper bits of paradoxical subregs
3842 	 are undefined, so they cannot be said to equal anything.  */
3843       if (REG_P (SUBREG_REG (x))
3844 	  && !paradoxical_subreg_p (x)
3845 	  && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
3846         return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));
3847 
3848       return 0;
3849     }
3850 
3851   /* If X is a MEM, see if it is a constant-pool reference, or look it up in
3852      the hash table in case its value was seen before.  */
3853 
3854   if (MEM_P (x))
3855     {
3856       struct table_elt *elt;
3857 
3858       x = avoid_constant_pool_reference (x);
3859       if (CONSTANT_P (x))
3860 	return x;
3861 
3862       elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
3863       if (elt == 0)
3864 	return 0;
3865 
3866       for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3867 	if (elt->is_const && CONSTANT_P (elt->exp))
3868 	  return elt->exp;
3869     }
3870 
3871   return 0;
3872 }
3873 
3874 /* Given INSN, a jump insn, TAKEN indicates if we are following the
3875    "taken" branch.
3876 
3877    In certain cases, this can cause us to add an equivalence.  For example,
3878    if we are following the taken case of
3879 	if (i == 2)
3880    we can add the fact that `i' and '2' are now equivalent.
3881 
3882    In any case, we can record that this comparison was passed.  If the same
3883    comparison is seen later, we will know its value.  */
3884 
3885 static void
3886 record_jump_equiv (rtx_insn *insn, bool taken)
3887 {
3888   int cond_known_true;
3889   rtx op0, op1;
3890   rtx set;
3891   machine_mode mode, mode0, mode1;
3892   int reversed_nonequality = 0;
3893   enum rtx_code code;
3894 
3895   /* Ensure this is the right kind of insn.  */
3896   gcc_assert (any_condjump_p (insn));
3897 
3898   set = pc_set (insn);
3899 
3900   /* See if this jump condition is known true or false.  */
3901   if (taken)
3902     cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
3903   else
3904     cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
3905 
3906   /* Get the type of comparison being done and the operands being compared.
3907      If we had to reverse a non-equality condition, record that fact so we
3908      know that it isn't valid for floating-point.  */
3909   code = GET_CODE (XEXP (SET_SRC (set), 0));
3910   op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
3911   op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
3912 
3913   /* On a cc0 target the cc0-setter and cc0-user may end up in different
3914      blocks.  When that happens the tracking of the cc0-setter via
3915      PREV_INSN_CC0 is spoiled.  That means that fold_rtx may return
3916      NULL_RTX.  In those cases, there's nothing to record.  */
3917   if (op0 == NULL_RTX || op1 == NULL_RTX)
3918     return;
3919 
3920   code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
3921   if (! cond_known_true)
3922     {
3923       code = reversed_comparison_code_parts (code, op0, op1, insn);
3924 
3925       /* Don't remember if we can't find the inverse.  */
3926       if (code == UNKNOWN)
3927 	return;
3928     }
3929 
3930   /* The mode is the mode of the non-constant.  */
3931   mode = mode0;
3932   if (mode1 != VOIDmode)
3933     mode = mode1;
3934 
3935   record_jump_cond (code, mode, op0, op1, reversed_nonequality);
3936 }
3937 
3938 /* Yet another form of subreg creation.  In this case, we want something in
3939    MODE, and we should assume OP has MODE iff it is naturally modeless.  */
3940 
3941 static rtx
3942 record_jump_cond_subreg (machine_mode mode, rtx op)
3943 {
3944   machine_mode op_mode = GET_MODE (op);
3945   if (op_mode == mode || op_mode == VOIDmode)
3946     return op;
3947   return lowpart_subreg (mode, op, op_mode);
3948 }
3949 
3950 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
3951    REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
3952    Make any useful entries we can with that information.  Called from
3953    above function and called recursively.  */
3954 
3955 static void
3956 record_jump_cond (enum rtx_code code, machine_mode mode, rtx op0,
3957 		  rtx op1, int reversed_nonequality)
3958 {
3959   unsigned op0_hash, op1_hash;
3960   int op0_in_memory, op1_in_memory;
3961   struct table_elt *op0_elt, *op1_elt;
3962 
3963   /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
3964      we know that they are also equal in the smaller mode (this is also
3965      true for all smaller modes whether or not there is a SUBREG, but
3966      is not worth testing for with no SUBREG).  */
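
  /* For example (hypothetical): if (subreg:SI (reg:QI 100) 0) is known
     equal to (reg:SI 101), then (reg:QI 100) must equal the QImode
     lowpart of (reg:SI 101), and that narrower equality is recorded.  */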
3967 
3968   /* Note that GET_MODE (op0) may not equal MODE.  */
3969   if (code == EQ && paradoxical_subreg_p (op0))
3970     {
3971       machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3972       rtx tem = record_jump_cond_subreg (inner_mode, op1);
3973       if (tem)
3974 	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3975 			  reversed_nonequality);
3976     }
3977 
3978   if (code == EQ && paradoxical_subreg_p (op1))
3979     {
3980       machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3981       rtx tem = record_jump_cond_subreg (inner_mode, op0);
3982       if (tem)
3983 	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3984 			  reversed_nonequality);
3985     }
3986 
3987   /* Similarly, if this is an NE comparison, and either is a SUBREG
3988      making a smaller mode, we know the whole thing is also NE.  */
3989 
3990   /* Note that GET_MODE (op0) may not equal MODE;
3991      if we test MODE instead, we can get an infinite recursion
3992      alternating between two modes each wider than MODE.  */
3993 
3994   if (code == NE
3995       && partial_subreg_p (op0)
3996       && subreg_lowpart_p (op0))
3997     {
3998       machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3999       rtx tem = record_jump_cond_subreg (inner_mode, op1);
4000       if (tem)
4001 	record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4002 			  reversed_nonequality);
4003     }
4004 
4005   if (code == NE
4006       && partial_subreg_p (op1)
4007       && subreg_lowpart_p (op1))
4008     {
4009       machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4010       rtx tem = record_jump_cond_subreg (inner_mode, op0);
4011       if (tem)
4012 	record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4013 			  reversed_nonequality);
4014     }
4015 
4016   /* Hash both operands.  */
4017 
4018   do_not_record = 0;
4019   hash_arg_in_memory = 0;
4020   op0_hash = HASH (op0, mode);
4021   op0_in_memory = hash_arg_in_memory;
4022 
4023   if (do_not_record)
4024     return;
4025 
4026   do_not_record = 0;
4027   hash_arg_in_memory = 0;
4028   op1_hash = HASH (op1, mode);
4029   op1_in_memory = hash_arg_in_memory;
4030 
4031   if (do_not_record)
4032     return;
4033 
4034   /* Look up both operands.  */
4035   op0_elt = lookup (op0, op0_hash, mode);
4036   op1_elt = lookup (op1, op1_hash, mode);
4037 
4038   /* If both operands are already equivalent or if they are not in the
4039      table but are identical, do nothing.  */
4040   if ((op0_elt != 0 && op1_elt != 0
4041        && op0_elt->first_same_value == op1_elt->first_same_value)
4042       || op0 == op1 || rtx_equal_p (op0, op1))
4043     return;
4044 
4045   /* If we aren't setting two things equal, all we can do is save this
4046      comparison.  Similarly if this is floating-point.  In the latter
4047      case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4048      If we record the equality, we might inadvertently delete code
4049      whose intent was to change -0 to +0, e.g. "if (x == 0.0) x = 0.0;".  */
4050 
4051   if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4052     {
4053       struct qty_table_elem *ent;
4054       int qty;
4055 
4056       /* If we reversed a floating-point comparison, if OP0 is not a
4057 	 register, or if OP1 is neither a register nor a constant, we can't
4058 	 do anything.  */
4059 
4060       if (!REG_P (op1))
4061 	op1 = equiv_constant (op1);
4062 
4063       if ((reversed_nonequality && FLOAT_MODE_P (mode))
4064 	  || !REG_P (op0) || op1 == 0)
4065 	return;
4066 
4067       /* Put OP0 in the hash table if it isn't already.  This gives it a
4068 	 new quantity number.  */
4069       if (op0_elt == 0)
4070 	{
4071 	  if (insert_regs (op0, NULL, 0))
4072 	    {
4073 	      rehash_using_reg (op0);
4074 	      op0_hash = HASH (op0, mode);
4075 
4076 	      /* If OP0 is contained in OP1, this changes its hash code
4077 		 as well.  Faster to rehash than to check, except
4078 		 for the simple case of a constant.  */
4079 	      if (! CONSTANT_P (op1))
4080 		op1_hash = HASH (op1, mode);
4081 	    }
4082 
4083 	  op0_elt = insert (op0, NULL, op0_hash, mode);
4084 	  op0_elt->in_memory = op0_in_memory;
4085 	}
4086 
4087       qty = REG_QTY (REGNO (op0));
4088       ent = &qty_table[qty];
4089 
4090       ent->comparison_code = code;
4091       if (REG_P (op1))
4092 	{
4093 	  /* Look it up again--in case op0 and op1 are the same.  */
4094 	  op1_elt = lookup (op1, op1_hash, mode);
4095 
4096 	  /* Put OP1 in the hash table so it gets a new quantity number.  */
4097 	  if (op1_elt == 0)
4098 	    {
4099 	      if (insert_regs (op1, NULL, 0))
4100 		{
4101 		  rehash_using_reg (op1);
4102 		  op1_hash = HASH (op1, mode);
4103 		}
4104 
4105 	      op1_elt = insert (op1, NULL, op1_hash, mode);
4106 	      op1_elt->in_memory = op1_in_memory;
4107 	    }
4108 
4109 	  ent->comparison_const = NULL_RTX;
4110 	  ent->comparison_qty = REG_QTY (REGNO (op1));
4111 	}
4112       else
4113 	{
4114 	  ent->comparison_const = op1;
4115 	  ent->comparison_qty = -1;
4116 	}
4117 
4118       return;
4119     }
4120 
4121   /* If either side is still missing an equivalence, make it now,
4122      then merge the equivalences.  */
4123 
4124   if (op0_elt == 0)
4125     {
4126       if (insert_regs (op0, NULL, 0))
4127 	{
4128 	  rehash_using_reg (op0);
4129 	  op0_hash = HASH (op0, mode);
4130 	}
4131 
4132       op0_elt = insert (op0, NULL, op0_hash, mode);
4133       op0_elt->in_memory = op0_in_memory;
4134     }
4135 
4136   if (op1_elt == 0)
4137     {
4138       if (insert_regs (op1, NULL, 0))
4139 	{
4140 	  rehash_using_reg (op1);
4141 	  op1_hash = HASH (op1, mode);
4142 	}
4143 
4144       op1_elt = insert (op1, NULL, op1_hash, mode);
4145       op1_elt->in_memory = op1_in_memory;
4146     }
4147 
4148   merge_equiv_classes (op0_elt, op1_elt);
4149 }
4150 
4151 /* CSE processing for one instruction.
4152 
4153    Most "true" common subexpressions are optimized away in GIMPLE,
4154    but the few that "leak through" are cleaned up by cse_insn, and complex
4155    addressing modes are often formed here.
4156 
4157    The main function is cse_insn, and between here and that function
4158    a couple of helper functions are defined to keep the size of cse_insn
4159    within reasonable proportions.
4160 
4161    Data is shared between the main and helper functions via STRUCT SET,
4162    which contains all data related to every set in the instruction that
4163    is being processed.
4164 
4165    Note that cse_insn processes all sets in the instruction.  Most
4166    passes in GCC only process simple SET insns or single_set insns, but
4167    CSE processes insns with multiple sets as well.  */
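
/* For example (hypothetical), a two-output divmod insn such as

     (parallel [(set (reg:SI 100) (udiv:SI (reg:SI 102) (reg:SI 103)))
		(set (reg:SI 101) (umod:SI (reg:SI 102) (reg:SI 103)))])

   has no single_set, but cse_insn considers both of its sets.  */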
4168 
4169 /* Data on one SET contained in the instruction.  */
4170 
4171 struct set
4172 {
4173   /* The SET rtx itself.  */
4174   rtx rtl;
4175   /* The SET_SRC of the rtx (the original value, if it is changing).  */
4176   rtx src;
4177   /* The hash-table element for the SET_SRC of the SET.  */
4178   struct table_elt *src_elt;
4179   /* Hash value for the SET_SRC.  */
4180   unsigned src_hash;
4181   /* Hash value for the SET_DEST.  */
4182   unsigned dest_hash;
4183   /* The SET_DEST, with SUBREG, etc., stripped.  */
4184   rtx inner_dest;
4185   /* Nonzero if the SET_SRC is in memory.  */
4186   char src_in_memory;
4187   /* Nonzero if the SET_SRC contains something
4188      whose value cannot be predicted and understood.  */
4189   char src_volatile;
4190   /* Original machine mode, in case it becomes a CONST_INT.
4191      The size of this field should match the size of the mode
4192      field of struct rtx_def (see rtl.h).  */
4193   ENUM_BITFIELD(machine_mode) mode : 8;
4194   /* Hash value of constant equivalent for SET_SRC.  */
4195   unsigned src_const_hash;
4196   /* A constant equivalent for SET_SRC, if any.  */
4197   rtx src_const;
4198   /* Table entry for constant equivalent for SET_SRC, if any.  */
4199   struct table_elt *src_const_elt;
4200   /* Table entry for the destination address.  */
4201   struct table_elt *dest_addr_elt;
4202 };
4203 
4204 /* Special handling for (set REG0 REG1) where REG0 is the
4205    "cheapest", cheaper than REG1.  After cse, REG1 will probably not
4206    be used in the sequel, so (if easily done) change this insn to
4207    (set REG1 REG0) and replace REG1 with REG0 in the previous insn
4208    that computed their value.  Then REG1 will become a dead store
4209    and won't cloud the situation for later optimizations.
4210 
4211    Do not make this change if REG1 is a hard register, because it will
4212    then be used in the sequel and we may be changing a two-operand insn
4213    into a three-operand insn.
4214 
4215    This is the last transformation that cse_insn will try to do.  */
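
/* An illustrative sketch (register numbers hypothetical):

     (set (reg 200) (plus (reg 300) (reg 301)))   <- previous insn
     (set (reg 100) (reg 200))                    <- this insn; REG 100 cheaper

   becomes

     (set (reg 100) (plus (reg 300) (reg 301)))
     (set (reg 200) (reg 100))                    <- now a likely dead store  */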
4216 
4217 static void
4218 try_back_substitute_reg (rtx set, rtx_insn *insn)
4219 {
4220   rtx dest = SET_DEST (set);
4221   rtx src = SET_SRC (set);
4222 
4223   if (REG_P (dest)
4224       && REG_P (src) && ! HARD_REGISTER_P (src)
4225       && REGNO_QTY_VALID_P (REGNO (src)))
4226     {
4227       int src_q = REG_QTY (REGNO (src));
4228       struct qty_table_elem *src_ent = &qty_table[src_q];
4229 
4230       if (src_ent->first_reg == REGNO (dest))
4231 	{
4232 	  /* Scan for the previous nonnote insn, but stop at a basic
4233 	     block boundary.  */
4234 	  rtx_insn *prev = insn;
4235 	  rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
4236 	  do
4237 	    {
4238 	      prev = PREV_INSN (prev);
4239 	    }
4240 	  while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));
4241 
4242 	  /* Do not swap the registers around if the previous instruction
4243 	     attaches a REG_EQUIV note to REG1.
4244 
4245 	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
4246 	     from the pseudo that originally shadowed an incoming argument
4247 	     to another register.  Some uses of REG_EQUIV might rely on it
4248 	     being attached to REG1 rather than REG2.
4249 
4250 	     This section previously turned the REG_EQUIV into a REG_EQUAL
4251 	     note.  We cannot do that because REG_EQUIV may provide an
4252 	     uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
4253 	  if (NONJUMP_INSN_P (prev)
4254 	      && GET_CODE (PATTERN (prev)) == SET
4255 	      && SET_DEST (PATTERN (prev)) == src
4256 	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
4257 	    {
4258 	      rtx note;
4259 
4260 	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
4261 	      validate_change (insn, &SET_DEST (set), src, 1);
4262 	      validate_change (insn, &SET_SRC (set), dest, 1);
4263 	      apply_change_group ();
4264 
4265 	      /* If INSN has a REG_EQUAL note, and this note mentions
4266 		 REG0, then we must delete it, because the value in
4267 		 REG0 has changed.  If the note's value is REG1, we must
4268 		 also delete it because that is now this insn's dest.  */
4269 	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
4270 	      if (note != 0
4271 		  && (reg_mentioned_p (dest, XEXP (note, 0))
4272 		      || rtx_equal_p (src, XEXP (note, 0))))
4273 		remove_note (insn, note);
4274 
4275 	      /* If INSN has a REG_ARGS_SIZE note, move it to PREV.  */
4276 	      note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4277 	      if (note != 0)
4278 		{
4279 		  remove_note (insn, note);
4280 		  gcc_assert (!find_reg_note (prev, REG_ARGS_SIZE, NULL_RTX));
4281 		  set_unique_reg_note (prev, REG_ARGS_SIZE, XEXP (note, 0));
4282 		}
4283 	    }
4284 	}
4285     }
4286 }
4287 
4288 /* Record all the SETs in this instruction into SETS_PTR,
4289    and return the number of recorded sets.  */
4290 static int
4291 find_sets_in_insn (rtx_insn *insn, struct set **psets)
4292 {
4293   struct set *sets = *psets;
4294   int n_sets = 0;
4295   rtx x = PATTERN (insn);
4296 
4297   if (GET_CODE (x) == SET)
4298     {
4299       /* Ignore SETs that are unconditional jumps.
4300 	 They never need cse processing, so this does not hurt.
4301 	 The reason is not efficiency but rather
4302 	 so that we can test at the end for instructions
4303 	 that have been simplified to unconditional jumps
4304 	 and not be misled by unchanged instructions
4305 	 that were unconditional jumps to begin with.  */
4306       if (SET_DEST (x) == pc_rtx
4307 	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
4308 	;
4309       /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4310 	 The hard function value register is used only once, to copy to
4311 	 someplace else, so it isn't worth cse'ing.  */
4312       else if (GET_CODE (SET_SRC (x)) == CALL)
4313 	;
4314       else
4315 	sets[n_sets++].rtl = x;
4316     }
4317   else if (GET_CODE (x) == PARALLEL)
4318     {
4319       int i, lim = XVECLEN (x, 0);
4320 
4321       /* Go over the expressions of the PARALLEL in forward order, to
4322 	 put them in the same order in the SETS array.  */
4323       for (i = 0; i < lim; i++)
4324 	{
4325 	  rtx y = XVECEXP (x, 0, i);
4326 	  if (GET_CODE (y) == SET)
4327 	    {
4328 	      /* As above, we ignore unconditional jumps and call-insns and
4329 		 ignore the result of apply_change_group.  */
4330 	      if (SET_DEST (y) == pc_rtx
4331 		  && GET_CODE (SET_SRC (y)) == LABEL_REF)
4332 		;
4333 	      else if (GET_CODE (SET_SRC (y)) == CALL)
4334 		;
4335 	      else
4336 		sets[n_sets++].rtl = y;
4337 	    }
4338 	}
4339     }
4340 
4341   return n_sets;
4342 }
4343 
4344 /* Subroutine of canonicalize_insn.  X is an ASM_OPERANDS in INSN.  */
4345 
4346 static void
4347 canon_asm_operands (rtx x, rtx_insn *insn)
4348 {
4349   for (int i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
4350     {
4351       rtx input = ASM_OPERANDS_INPUT (x, i);
4352       if (!(REG_P (input) && HARD_REGISTER_P (input)))
4353 	{
4354 	  input = canon_reg (input, insn);
4355 	  validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1);
4356 	}
4357     }
4358 }
4359 
4360 /* Where possible, substitute every register reference in the N_SETS
4361    number of SETS in INSN with the canonical register.
4362 
4363    Register canonicalization propagates the earliest register (i.e.
4364    one that is set before INSN) with the same value.  This is a very
4365    useful, simple form of CSE, to clean up warts from expanding GIMPLE
4366    to RTL.  For instance, a CONST for an address is usually expanded
4367    multiple times to loads into different registers, thus creating many
4368    subexpressions of the form:
4369 
4370    (set (reg1) (some_const))
4371    (set (mem (... reg1 ...)) (thing))
4372    (set (reg2) (some_const))
4373    (set (mem (... reg2 ...)) (thing))
4374 
4375    After canonicalizing, the code takes the following form:
4376 
4377    (set (reg1) (some_const))
4378    (set (mem (... reg1 ...)) (thing))
4379    (set (reg2) (some_const))
4380    (set (mem (... reg1 ...)) (thing))
4381 
4382    The set to reg2 is now trivially dead, and the memory reference (or
4383    address, or whatever) may be a candidate for further CSEing.
4384 
4385    In this function, the result of apply_change_group can be ignored;
4386    see canon_reg.  */
4387 
4388 static void
4389 canonicalize_insn (rtx_insn *insn, struct set **psets, int n_sets)
4390 {
4391   struct set *sets = *psets;
4392   rtx tem;
4393   rtx x = PATTERN (insn);
4394   int i;
4395 
4396   if (CALL_P (insn))
4397     {
4398       for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4399 	if (GET_CODE (XEXP (tem, 0)) != SET)
4400 	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4401     }
4402 
4403   if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
4404     {
4405       canon_reg (SET_SRC (x), insn);
4406       apply_change_group ();
4407       fold_rtx (SET_SRC (x), insn);
4408     }
4409   else if (GET_CODE (x) == CLOBBER)
4410     {
4411       /* If we clobber memory, canon the address.
4412 	 This does nothing when a register is clobbered
4413 	 because we have already invalidated the reg.  */
4414       if (MEM_P (XEXP (x, 0)))
4415 	canon_reg (XEXP (x, 0), insn);
4416     }
4417   else if (GET_CODE (x) == USE
4418 	   && ! (REG_P (XEXP (x, 0))
4419 		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4420     /* Canonicalize a USE of a pseudo register or memory location.  */
4421     canon_reg (x, insn);
4422   else if (GET_CODE (x) == ASM_OPERANDS)
4423     canon_asm_operands (x, insn);
4424   else if (GET_CODE (x) == CALL)
4425     {
4426       canon_reg (x, insn);
4427       apply_change_group ();
4428       fold_rtx (x, insn);
4429     }
4430   else if (DEBUG_INSN_P (insn))
4431     canon_reg (PATTERN (insn), insn);
4432   else if (GET_CODE (x) == PARALLEL)
4433     {
4434       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4435 	{
4436 	  rtx y = XVECEXP (x, 0, i);
4437 	  if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
4438 	    {
4439 	      canon_reg (SET_SRC (y), insn);
4440 	      apply_change_group ();
4441 	      fold_rtx (SET_SRC (y), insn);
4442 	    }
4443 	  else if (GET_CODE (y) == CLOBBER)
4444 	    {
4445 	      if (MEM_P (XEXP (y, 0)))
4446 		canon_reg (XEXP (y, 0), insn);
4447 	    }
4448 	  else if (GET_CODE (y) == USE
4449 		   && ! (REG_P (XEXP (y, 0))
4450 			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4451 	    canon_reg (y, insn);
4452 	  else if (GET_CODE (y) == ASM_OPERANDS)
4453 	    canon_asm_operands (y, insn);
4454 	  else if (GET_CODE (y) == CALL)
4455 	    {
4456 	      canon_reg (y, insn);
4457 	      apply_change_group ();
4458 	      fold_rtx (y, insn);
4459 	    }
4460 	}
4461     }
4462 
4463   if (n_sets == 1 && REG_NOTES (insn) != 0
4464       && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
4465     {
4466       /* We potentially will process this insn many times.  Therefore,
4467 	 drop the REG_EQUAL note if it is equal to the SET_SRC of the
4468 	 unique set in INSN.
4469 
4470 	 Do not do so if the REG_EQUAL note is for a STRICT_LOW_PART,
4471 	 because cse_insn handles those specially.  */
4472       if (GET_CODE (SET_DEST (sets[0].rtl)) != STRICT_LOW_PART
4473 	  && rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)))
4474 	remove_note (insn, tem);
4475       else
4476 	{
4477 	  canon_reg (XEXP (tem, 0), insn);
4478 	  apply_change_group ();
4479 	  XEXP (tem, 0) = fold_rtx (XEXP (tem, 0), insn);
4480 	  df_notes_rescan (insn);
4481 	}
4482     }
4483 
4484   /* Canonicalize sources and addresses of destinations.
4485      We do this in a separate pass to avoid problems when a MATCH_DUP is
4486      present in the insn pattern.  In that case, we want to ensure that
4487      we don't break the duplicate nature of the pattern.  So we will replace
4488      both operands at the same time.  Otherwise, we would fail to find an
4489      equivalent substitution in the loop calling validate_change below.
4490 
4491      We used to suppress canonicalization of DEST if it appears in SRC,
4492      but we don't do this any more.  */
4493 
4494   for (i = 0; i < n_sets; i++)
4495     {
4496       rtx dest = SET_DEST (sets[i].rtl);
4497       rtx src = SET_SRC (sets[i].rtl);
4498       rtx new_rtx = canon_reg (src, insn);
4499 
4500       validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
4501 
4502       if (GET_CODE (dest) == ZERO_EXTRACT)
4503 	{
4504 	  validate_change (insn, &XEXP (dest, 1),
4505 			   canon_reg (XEXP (dest, 1), insn), 1);
4506 	  validate_change (insn, &XEXP (dest, 2),
4507 			   canon_reg (XEXP (dest, 2), insn), 1);
4508 	}
4509 
4510       while (GET_CODE (dest) == SUBREG
4511 	     || GET_CODE (dest) == ZERO_EXTRACT
4512 	     || GET_CODE (dest) == STRICT_LOW_PART)
4513 	dest = XEXP (dest, 0);
4514 
4515       if (MEM_P (dest))
4516 	canon_reg (dest, insn);
4517     }
4518 
4519   /* Now that we have done all the replacements, we can apply the change
4520      group and see if they all work.  Note that this will cause some
4521      canonicalizations that would have worked individually not to be applied
4522      because some other canonicalization didn't work, but this should not
4523      occur often.
4524 
4525      The result of apply_change_group can be ignored; see canon_reg.  */
4526 
4527   apply_change_group ();
4528 }
4529 
4530 /* Main function of CSE.
4531    First simplify sources and addresses of all assignments
4532    in the instruction, using previously-computed equivalent values.
4533    Then install the new sources and destinations in the table
4534    of available values.  */
4535 
4536 static void
4537 cse_insn (rtx_insn *insn)
4538 {
4539   rtx x = PATTERN (insn);
4540   int i;
4541   rtx tem;
4542   int n_sets = 0;
4543 
4544   rtx src_eqv = 0;
4545   struct table_elt *src_eqv_elt = 0;
4546   int src_eqv_volatile = 0;
4547   int src_eqv_in_memory = 0;
4548   unsigned src_eqv_hash = 0;
4549 
4550   struct set *sets = (struct set *) 0;
4551 
4552   if (GET_CODE (x) == SET)
4553     sets = XALLOCA (struct set);
4554   else if (GET_CODE (x) == PARALLEL)
4555     sets = XALLOCAVEC (struct set, XVECLEN (x, 0));
4556 
4557   this_insn = insn;
4558   /* Records what this insn does to set CC0.  */
4559   this_insn_cc0 = 0;
4560   this_insn_cc0_mode = VOIDmode;
4561 
4562   /* Find all regs explicitly clobbered in this insn,
4563      to ensure they are not replaced with any other regs
4564      elsewhere in this insn.  */
4565   invalidate_from_sets_and_clobbers (insn);
4566 
4567   /* Record all the SETs in this instruction.  */
4568   n_sets = find_sets_in_insn (insn, &sets);
4569 
4570   /* Substitute the canonical register where possible.  */
4571   canonicalize_insn (insn, &sets, n_sets);
4572 
4573   /* If this insn has a REG_EQUAL note, store the equivalent value in SRC_EQV,
4574      if different, or if the DEST is a STRICT_LOW_PART/ZERO_EXTRACT.  The
4575      latter condition is necessary because SRC_EQV is handled specially for
4576      this case, and if it isn't set, then there will be no equivalence
4577      for the destination.  */
4578   if (n_sets == 1 && REG_NOTES (insn) != 0
4579       && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
4580     {
4581 
4582       if (GET_CODE (SET_DEST (sets[0].rtl)) != ZERO_EXTRACT
4583 	  && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4584 	      || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4585 	src_eqv = copy_rtx (XEXP (tem, 0));
4586       /* If DEST is of the form ZERO_EXTRACT, as in:
4587 	 (set (zero_extract:SI (reg:SI 119)
4588 		  (const_int 16 [0x10])
4589 		  (const_int 16 [0x10]))
4590 	      (const_int 51154 [0xc7d2]))
4591 	 REG_EQUAL note will specify the value of register (reg:SI 119) at this
4592 	 point.  Note that this is different from SRC_EQV.  We can, however,
4593 	 calculate SRC_EQV with the position and width of ZERO_EXTRACT.  */
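      /* A worked instance of the computation below (hypothetical note
	 value, assuming !BITS_BIG_ENDIAN): with pos 16 and width 16, a
	 REG_EQUAL value of (const_int 0xc7d21234) gives shift = 16 and
	 mask = 0xffff, so SRC_EQV = (0xc7d21234 >> 16) & 0xffff = 0xc7d2.  */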
4594       else if (GET_CODE (SET_DEST (sets[0].rtl)) == ZERO_EXTRACT
4595 	       && CONST_INT_P (XEXP (tem, 0))
4596 	       && CONST_INT_P (XEXP (SET_DEST (sets[0].rtl), 1))
4597 	       && CONST_INT_P (XEXP (SET_DEST (sets[0].rtl), 2)))
4598 	{
4599 	  rtx dest_reg = XEXP (SET_DEST (sets[0].rtl), 0);
4600 	  /* This is the mode of XEXP (tem, 0) as well.  */
4601 	  scalar_int_mode dest_mode
4602 	    = as_a <scalar_int_mode> (GET_MODE (dest_reg));
4603 	  rtx width = XEXP (SET_DEST (sets[0].rtl), 1);
4604 	  rtx pos = XEXP (SET_DEST (sets[0].rtl), 2);
4605 	  HOST_WIDE_INT val = INTVAL (XEXP (tem, 0));
4606 	  HOST_WIDE_INT mask;
4607 	  unsigned int shift;
4608 	  if (BITS_BIG_ENDIAN)
4609 	    shift = (GET_MODE_PRECISION (dest_mode)
4610 		     - INTVAL (pos) - INTVAL (width));
4611 	  else
4612 	    shift = INTVAL (pos);
4613 	  if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
4614 	    mask = HOST_WIDE_INT_M1;
4615 	  else
4616 	    mask = (HOST_WIDE_INT_1 << INTVAL (width)) - 1;
4617 	  val = (val >> shift) & mask;
4618 	  src_eqv = GEN_INT (val);
4619 	}
4620     }
4621 
4622   /* Set sets[i].src_elt to the class each source belongs to.
4623      Detect assignments from or to volatile things
4624      and set set[i] to zero so they will be ignored
4625      in the rest of this function.
4626 
4627      Nothing in this loop changes the hash table or the register chains.  */
4628 
4629   for (i = 0; i < n_sets; i++)
4630     {
4631       bool repeat = false;
4632       bool noop_insn = false;
4633       rtx src, dest;
4634       rtx src_folded;
4635       struct table_elt *elt = 0, *p;
4636       machine_mode mode;
4637       rtx src_eqv_here;
4638       rtx src_const = 0;
4639       rtx src_related = 0;
4640       bool src_related_is_const_anchor = false;
4641       struct table_elt *src_const_elt = 0;
4642       int src_cost = MAX_COST;
4643       int src_eqv_cost = MAX_COST;
4644       int src_folded_cost = MAX_COST;
4645       int src_related_cost = MAX_COST;
4646       int src_elt_cost = MAX_COST;
4647       int src_regcost = MAX_COST;
4648       int src_eqv_regcost = MAX_COST;
4649       int src_folded_regcost = MAX_COST;
4650       int src_related_regcost = MAX_COST;
4651       int src_elt_regcost = MAX_COST;
4652       /* Set nonzero if we need to call force_const_mem on the
4653 	 contents of src_folded before using it.  */
4654       int src_folded_force_flag = 0;
4655       scalar_int_mode int_mode;
4656 
4657       dest = SET_DEST (sets[i].rtl);
4658       src = SET_SRC (sets[i].rtl);
4659 
4660       /* If SRC is a constant that has no machine mode,
4661 	 hash it with the destination's machine mode.
4662 	 This way we can keep different modes separate.  */
4663 
4664       mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4665       sets[i].mode = mode;
4666 
4667       if (src_eqv)
4668 	{
4669 	  machine_mode eqvmode = mode;
4670 	  if (GET_CODE (dest) == STRICT_LOW_PART)
4671 	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4672 	  do_not_record = 0;
4673 	  hash_arg_in_memory = 0;
4674 	  src_eqv_hash = HASH (src_eqv, eqvmode);
4675 
4676 	  /* Find the equivalence class for the equivalent expression.  */
4677 
4678 	  if (!do_not_record)
4679 	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4680 
4681 	  src_eqv_volatile = do_not_record;
4682 	  src_eqv_in_memory = hash_arg_in_memory;
4683 	}
4684 
4685       /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4686 	 value of the INNER register, not the destination.  So it is not
4687 	 a valid substitution for the source.  But save it for later.  */
4688       if (GET_CODE (dest) == STRICT_LOW_PART)
4689 	src_eqv_here = 0;
4690       else
4691 	src_eqv_here = src_eqv;
4692 
4693       /* Simplify any foldable subexpressions in SRC.  Then get the fully-
4694 	 simplified result, which may not necessarily be valid.  */
4695       src_folded = fold_rtx (src, NULL);
4696 
4697 #if 0
4698       /* ??? This caused bad code to be generated for the m68k port with -O2.
4699 	 Suppose src is (CONST_INT -1), and that after truncation src_folded
4700 	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
4701 	 At the end we will add src and src_const to the same equivalence
4702 	 class.  We now have 3 and -1 on the same equivalence class.  This
4703 	 causes later instructions to be mis-optimized.  */
4704       /* If storing a constant in a bitfield, pre-truncate the constant
4705 	 so we will be able to record it later.  */
4706       if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4707 	{
4708 	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4709 
4710 	  if (CONST_INT_P (src)
4711 	      && CONST_INT_P (width)
4712 	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4713 	      && (INTVAL (src) & (HOST_WIDE_INT_M1U << INTVAL (width))))
4714 	    src_folded
4715 	      = GEN_INT (INTVAL (src) & ((HOST_WIDE_INT_1
4716 					  << INTVAL (width)) - 1));
4717 	}
4718 #endif
4719 
4720       /* Compute SRC's hash code, and also notice if it
4721 	 should not be recorded at all.  In that case,
4722 	 prevent any further processing of this assignment.
4723 
4724 	 We set DO_NOT_RECORD if the destination has a REG_UNUSED note.
4725 	 This avoids getting the source register into the tables, where it
4726 	 may be invalidated later (via REG_QTY), then trigger an ICE upon
4727 	 re-insertion.
4728 
4729 	 This is only a problem in multi-set insns.  If it were a single
4730 	 set the dead copy would have been removed.  If the RHS were anything
4731 	 but a simple REG, we would not call insert_regs and thus there
4732 	 would be no potential for triggering the ICE.  */
4733       do_not_record = (REG_P (dest)
4734 		       && REG_P (src)
4735 		       && find_reg_note (insn, REG_UNUSED, dest));
4736       hash_arg_in_memory = 0;
4737 
4738       sets[i].src = src;
4739       sets[i].src_hash = HASH (src, mode);
4740       sets[i].src_volatile = do_not_record;
4741       sets[i].src_in_memory = hash_arg_in_memory;
4742 
4743       /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4744 	 a pseudo, do not record SRC.  Using SRC as a replacement for
4745 	 anything else will be incorrect in that situation.  Note that
4746 	 this usually occurs only for stack slots, in which case all the
4747 	 RTL would be referring to SRC, so we don't lose any optimization
4748 	 opportunities by not having SRC in the hash table.  */
4749 
4750       if (MEM_P (src)
4751 	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4752 	  && REG_P (dest)
4753 	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4754 	sets[i].src_volatile = 1;
4755 
4756       else if (GET_CODE (src) == ASM_OPERANDS
4757 	       && GET_CODE (x) == PARALLEL)
4758 	{
4759 	  /* Do not record result of a non-volatile inline asm with
4760 	     more than one result.  */
4761 	  if (n_sets > 1)
4762 	    sets[i].src_volatile = 1;
4763 
4764 	  int j, lim = XVECLEN (x, 0);
4765 	  for (j = 0; j < lim; j++)
4766 	    {
4767 	      rtx y = XVECEXP (x, 0, j);
4768 	      /* And do not record result of a non-volatile inline asm
4769 		 with "memory" clobber.  */
4770 	      if (GET_CODE (y) == CLOBBER && MEM_P (XEXP (y, 0)))
4771 		{
4772 		  sets[i].src_volatile = 1;
4773 		  break;
4774 		}
4775 	    }
4776 	}
4777 
4778 #if 0
4779       /* It is no longer clear why we used to do this, but it doesn't
4780 	 appear to still be needed.  So let's try without it since this
4781 	 code hurts cse'ing widened ops.  */
4782       /* If source is a paradoxical subreg (such as QI treated as an SI),
4783 	 treat it as volatile.  It may do the work of an SI in one context
4784 	 where the extra bits are not being used, but cannot replace an SI
4785 	 in general.  */
4786       if (paradoxical_subreg_p (src))
4787 	sets[i].src_volatile = 1;
4788 #endif
4789 
4790       /* Locate all possible equivalent forms for SRC.  Try to replace
4791          SRC in the insn with each cheaper equivalent.
4792 
4793          We have the following types of equivalents: SRC itself, a folded
4794          version, a value given in a REG_EQUAL note, or a value related
4795 	 to a constant.
4796 
4797          Each of these equivalents may be part of an additional class
4798          of equivalents (if more than one is in the table, they must be in
4799          the same class; we check for this).
4800 
4801 	 If the source is volatile, we don't do any table lookups.
4802 
4803          We note any constant equivalent for possible later use in a
4804          REG_NOTE.  */
4805 
4806       if (!sets[i].src_volatile)
4807 	elt = lookup (src, sets[i].src_hash, mode);
4808 
4809       sets[i].src_elt = elt;
4810 
4811       if (elt && src_eqv_here && src_eqv_elt)
4812 	{
4813 	  if (elt->first_same_value != src_eqv_elt->first_same_value)
4814 	    {
4815 	      /* The REG_EQUAL is indicating that two formerly distinct
4816 		 classes are now equivalent.  So merge them.  */
4817 	      merge_equiv_classes (elt, src_eqv_elt);
4818 	      src_eqv_hash = HASH (src_eqv, elt->mode);
4819 	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4820 	    }
4821 
4822 	  src_eqv_here = 0;
4823 	}
4824 
4825       else if (src_eqv_elt)
4826 	elt = src_eqv_elt;
4827 
4828       /* Try to find a constant somewhere and record it in `src_const'.
4829 	 Record its table element, if any, in `src_const_elt'.  Look in
4830 	 any known equivalences first.  (If the constant is not in the
4831 	 table, also set `sets[i].src_const_hash').  */
4832       if (elt)
4833 	for (p = elt->first_same_value; p; p = p->next_same_value)
4834 	  if (p->is_const)
4835 	    {
4836 	      src_const = p->exp;
4837 	      src_const_elt = elt;
4838 	      break;
4839 	    }
4840 
4841       if (src_const == 0
4842 	  && (CONSTANT_P (src_folded)
4843 	      /* Consider (minus (label_ref L1) (label_ref L2)) as
4844 		 "constant" here so we will record it. This allows us
4845 		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
4846 	      || (GET_CODE (src_folded) == MINUS
4847 		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4848 		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4849 	src_const = src_folded, src_const_elt = elt;
4850       else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4851 	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4852 
4853       /* If we don't know if the constant is in the table, get its
4854 	 hash code and look it up.  */
4855       if (src_const && src_const_elt == 0)
4856 	{
4857 	  sets[i].src_const_hash = HASH (src_const, mode);
4858 	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4859 	}
4860 
4861       sets[i].src_const = src_const;
4862       sets[i].src_const_elt = src_const_elt;
4863 
4864       /* If the constant and our source are both in the table, mark them as
4865 	 equivalent.  Otherwise, if a constant is in the table but the source
4866 	 isn't, set ELT to it.  */
4867       if (src_const_elt && elt
4868 	  && src_const_elt->first_same_value != elt->first_same_value)
4869 	merge_equiv_classes (elt, src_const_elt);
4870       else if (src_const_elt && elt == 0)
4871 	elt = src_const_elt;
4872 
4873       /* See if there is a register linearly related to a constant
4874          equivalent of SRC.  */
4875       if (src_const
4876 	  && (GET_CODE (src_const) == CONST
4877 	      || (src_const_elt && src_const_elt->related_value != 0)))
4878 	{
4879 	  src_related = use_related_value (src_const, src_const_elt);
4880 	  if (src_related)
4881 	    {
4882 	      struct table_elt *src_related_elt
4883 		= lookup (src_related, HASH (src_related, mode), mode);
4884 	      if (src_related_elt && elt)
4885 		{
4886 		  if (elt->first_same_value
4887 		      != src_related_elt->first_same_value)
4888 		    /* This can occur when we previously saw a CONST
4889 		       involving a SYMBOL_REF and then see the SYMBOL_REF
4890 		       twice.  Merge the involved classes.  */
4891 		    merge_equiv_classes (elt, src_related_elt);
4892 
4893 		  src_related = 0;
4894 		  src_related_elt = 0;
4895 		}
4896 	      else if (src_related_elt && elt == 0)
4897 		elt = src_related_elt;
4898 	    }
4899 	}
4900 
4901       /* See if we have a CONST_INT that is already in a register in a
4902 	 wider mode.  */
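      /* For example, if (const_int 7) is already known to be in
	 (reg:DI 100), a QImode use of the constant can become the low
	 part of that register, e.g. (subreg:QI (reg:DI 100) 0) on a
	 little-endian target, instead of materializing 7 again.  */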
4903 
4904       if (src_const && src_related == 0 && CONST_INT_P (src_const)
4905 	  && is_int_mode (mode, &int_mode)
4906 	  && GET_MODE_PRECISION (int_mode) < BITS_PER_WORD)
4907 	{
4908 	  opt_scalar_int_mode wider_mode_iter;
4909 	  FOR_EACH_WIDER_MODE (wider_mode_iter, int_mode)
4910 	    {
4911 	      scalar_int_mode wider_mode = wider_mode_iter.require ();
4912 	      if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
4913 		break;
4914 
4915 	      struct table_elt *const_elt
4916 		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4917 
4918 	      if (const_elt == 0)
4919 		continue;
4920 
4921 	      for (const_elt = const_elt->first_same_value;
4922 		   const_elt; const_elt = const_elt->next_same_value)
4923 		if (REG_P (const_elt->exp))
4924 		  {
4925 		    src_related = gen_lowpart (int_mode, const_elt->exp);
4926 		    break;
4927 		  }
4928 
4929 	      if (src_related != 0)
4930 		break;
4931 	    }
4932 	}
4933 
4934       /* Another possibility is that we have an AND with a constant in
4935 	 a mode narrower than a word.  If so, it might have been generated
4936 	 as part of an "if" which would narrow the AND.  If we already
4937 	 have done the AND in a wider mode, we can use a SUBREG of that
4938 	 value.  */
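      /* For example, if (reg:SI 101) is known to hold
	 (and:SI (reg:SI 100) (const_int 15)), an HImode AND of the
	 same value with 15 can use the low part of (reg:SI 101),
	 since the masked bits agree in both modes.  (The registers
	 and modes are illustrative.)  */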
4939 
4940       if (flag_expensive_optimizations && ! src_related
4941 	  && is_a <scalar_int_mode> (mode, &int_mode)
4942 	  && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
4943 	  && GET_MODE_SIZE (int_mode) < UNITS_PER_WORD)
4944 	{
4945 	  opt_scalar_int_mode tmode_iter;
4946 	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4947 
4948 	  FOR_EACH_WIDER_MODE (tmode_iter, int_mode)
4949 	    {
4950 	      scalar_int_mode tmode = tmode_iter.require ();
4951 	      if (GET_MODE_SIZE (tmode) > UNITS_PER_WORD)
4952 		break;
4953 
4954 	      rtx inner = gen_lowpart (tmode, XEXP (src, 0));
4955 	      struct table_elt *larger_elt;
4956 
4957 	      if (inner)
4958 		{
4959 		  PUT_MODE (new_and, tmode);
4960 		  XEXP (new_and, 0) = inner;
4961 		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4962 		  if (larger_elt == 0)
4963 		    continue;
4964 
4965 		  for (larger_elt = larger_elt->first_same_value;
4966 		       larger_elt; larger_elt = larger_elt->next_same_value)
4967 		    if (REG_P (larger_elt->exp))
4968 		      {
4969 			src_related
4970 			  = gen_lowpart (int_mode, larger_elt->exp);
4971 			break;
4972 		      }
4973 
4974 		  if (src_related)
4975 		    break;
4976 		}
4977 	    }
4978 	}
4979 
4980       /* See if a MEM has already been loaded with a widening operation;
4981 	 if it has, we can use a subreg of that.  Many CISC machines
4982 	 also have such operations, but this is only likely to be
4983 	 beneficial on machines whose loads extend implicitly.  */
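      /* For example, on a hypothetical target where QImode loads are
	 zero-extended, if (zero_extend:SI (mem:QI addr)) already lives
	 in (reg:SI 100), a later QImode read of the same location can
	 use the low part of (reg:SI 100) instead of reloading memory.  */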
4984 
4985       rtx_code extend_op;
4986       if (flag_expensive_optimizations && src_related == 0
4987 	  && MEM_P (src) && ! do_not_record
4988 	  && is_a <scalar_int_mode> (mode, &int_mode)
4989 	  && (extend_op = load_extend_op (int_mode)) != UNKNOWN)
4990 	{
4991 	  struct rtx_def memory_extend_buf;
4992 	  rtx memory_extend_rtx = &memory_extend_buf;
4993 
4994 	  /* Set what we are trying to extend and the operation it might
4995 	     have been extended with.  */
4996 	  memset (memory_extend_rtx, 0, sizeof (*memory_extend_rtx));
4997 	  PUT_CODE (memory_extend_rtx, extend_op);
4998 	  XEXP (memory_extend_rtx, 0) = src;
4999 
5000 	  opt_scalar_int_mode tmode_iter;
5001 	  FOR_EACH_WIDER_MODE (tmode_iter, int_mode)
5002 	    {
5003 	      struct table_elt *larger_elt;
5004 
5005 	      scalar_int_mode tmode = tmode_iter.require ();
5006 	      if (GET_MODE_SIZE (tmode) > UNITS_PER_WORD)
5007 		break;
5008 
5009 	      PUT_MODE (memory_extend_rtx, tmode);
5010 	      larger_elt = lookup (memory_extend_rtx,
5011 				   HASH (memory_extend_rtx, tmode), tmode);
5012 	      if (larger_elt == 0)
5013 		continue;
5014 
5015 	      for (larger_elt = larger_elt->first_same_value;
5016 		   larger_elt; larger_elt = larger_elt->next_same_value)
5017 		if (REG_P (larger_elt->exp))
5018 		  {
5019 		    src_related = gen_lowpart (int_mode, larger_elt->exp);
5020 		    break;
5021 		  }
5022 
5023 	      if (src_related)
5024 		break;
5025 	    }
5026 	}
5027 
5028       /* Try to express the constant using a register+offset expression
5029 	 derived from a constant anchor.  */
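      /* For example, with a const_anchor of 256, if (reg:SI 100) is
	 known to hold 4096, the constant 4100 may become available as
	 (plus:SI (reg:SI 100) (const_int 4)), which can be cheaper
	 than synthesizing 4100 from scratch.  (The values are
	 illustrative.)  */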
5030 
5031       if (targetm.const_anchor
5032 	  && !src_related
5033 	  && src_const
5034 	  && CONST_INT_P (src_const))
5035 	{
5036 	  src_related = try_const_anchors (src_const, mode);
5037 	  src_related_is_const_anchor = src_related != NULL_RTX;
5038 	}
5039 
5040 
5041       if (src == src_folded)
5042 	src_folded = 0;
5043 
5044       /* At this point, ELT, if nonzero, points to a class of expressions
5045          equivalent to the source of this SET, while SRC, SRC_EQV, SRC_FOLDED
5046 	 and SRC_RELATED, if nonzero, each contain additional equivalent
5047 	 expressions.  Prune these latter expressions by deleting any that are
5048 	 already in the equivalence class.
5049 
5050 	 Check for an equivalent identical to the destination.  If found,
5051 	 this is the preferred equivalent since it will likely lead to
5052 	 elimination of the insn.  Indicate this by placing it in
5053 	 `src_related'.  */
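      /* For example, if the class of (plus:SI (reg:SI 101) (reg:SI 102))
	 already contains the destination (reg:SI 100), choosing that
	 entry turns this insn into a register self-copy, which the
	 no-op handling below can delete.  */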
5054 
5055       if (elt)
5056 	elt = elt->first_same_value;
5057       for (p = elt; p; p = p->next_same_value)
5058 	{
5059 	  enum rtx_code code = GET_CODE (p->exp);
5060 
5061 	  /* If the expression is not valid, ignore it.  Then we do not
5062 	     have to check for validity below.  In most cases, we can use
5063 	     `rtx_equal_p', since canonicalization has already been done.  */
5064 	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5065 	    continue;
5066 
5067 	  /* Also skip paradoxical subregs, unless that's what we're
5068 	     looking for.  */
5069 	  if (paradoxical_subreg_p (p->exp)
5070 	      && ! (src != 0
5071 		    && GET_CODE (src) == SUBREG
5072 		    && GET_MODE (src) == GET_MODE (p->exp)
5073 		    && partial_subreg_p (GET_MODE (SUBREG_REG (src)),
5074 					 GET_MODE (SUBREG_REG (p->exp)))))
5075 	    continue;
5076 
5077 	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5078 	    src = 0;
5079 	  else if (src_folded && GET_CODE (src_folded) == code
5080 		   && rtx_equal_p (src_folded, p->exp))
5081 	    src_folded = 0;
5082 	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5083 		   && rtx_equal_p (src_eqv_here, p->exp))
5084 	    src_eqv_here = 0;
5085 	  else if (src_related && GET_CODE (src_related) == code
5086 		   && rtx_equal_p (src_related, p->exp))
5087 	    src_related = 0;
5088 
5089 	  /* If this is the same as the destination of the insn, we want
5090 	     to prefer it.  Copy it to src_related; the code below will
5091 	     then give it a negative cost.  */
5092 	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5093 	    src_related = p->exp;
5094 	}
5095 
5096       /* Find the cheapest valid equivalent, trying all the available
5097          possibilities.  Prefer items not in the hash table to ones
5098          that are when they are equal cost.  Note that we can never
5099          worsen an insn as the current contents will also succeed.
5100 	 If we find an equivalent identical to the destination, use it as best,
5101 	 since this insn will probably be eliminated in that case.  */
5102       if (src)
5103 	{
5104 	  if (rtx_equal_p (src, dest))
5105 	    src_cost = src_regcost = -1;
5106 	  else
5107 	    {
5108 	      src_cost = COST (src, mode);
5109 	      src_regcost = approx_reg_cost (src);
5110 	    }
5111 	}
5112 
5113       if (src_eqv_here)
5114 	{
5115 	  if (rtx_equal_p (src_eqv_here, dest))
5116 	    src_eqv_cost = src_eqv_regcost = -1;
5117 	  else
5118 	    {
5119 	      src_eqv_cost = COST (src_eqv_here, mode);
5120 	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
5121 	    }
5122 	}
5123 
5124       if (src_folded)
5125 	{
5126 	  if (rtx_equal_p (src_folded, dest))
5127 	    src_folded_cost = src_folded_regcost = -1;
5128 	  else
5129 	    {
5130 	      src_folded_cost = COST (src_folded, mode);
5131 	      src_folded_regcost = approx_reg_cost (src_folded);
5132 	    }
5133 	}
5134 
5135       if (src_related)
5136 	{
5137 	  if (rtx_equal_p (src_related, dest))
5138 	    src_related_cost = src_related_regcost = -1;
5139 	  else
5140 	    {
5141 	      src_related_cost = COST (src_related, mode);
5142 	      src_related_regcost = approx_reg_cost (src_related);
5143 
5144 	      /* If a const-anchor is used to synthesize a constant that
5145 		 normally requires multiple instructions then slightly prefer
5146 		 it over the original sequence.  These instructions are likely
5147 		 to become redundant now.  We can't compare against the cost
5148 		 of src_eqv_here because, on MIPS for example, multi-insn
5149 		 constants have zero cost; they are assumed to be hoisted from
5150 		 loops.  */
5151 	      if (src_related_is_const_anchor
5152 		  && src_related_cost == src_cost
5153 		  && src_eqv_here)
5154 		src_related_cost--;
5155 	    }
5156 	}
5157 
5158       /* If this was an indirect jump insn, a known label will really be
5159 	 cheaper even though it looks more expensive.  */
5160       if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5161 	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5162 
5163       /* This loop terminates when a replacement is made.  It must terminate,
5164          since the current contents will be tested and will always be valid.  */
5165       while (1)
5166 	{
5167 	  rtx trial;
5168 
5169 	  /* Skip invalid entries.  */
5170 	  while (elt && !REG_P (elt->exp)
5171 		 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5172 	    elt = elt->next_same_value;
5173 
5174 	  /* A paradoxical subreg would be bad here: it'll be the right
5175 	     size, but later may be adjusted so that the upper bits aren't
5176 	     what we want.  So reject it.  */
5177 	  if (elt != 0
5178 	      && paradoxical_subreg_p (elt->exp)
5179 	      /* It is okay, though, if the rtx we're trying to match
5180 		 will ignore any of the bits we can't predict.  */
5181 	      && ! (src != 0
5182 		    && GET_CODE (src) == SUBREG
5183 		    && GET_MODE (src) == GET_MODE (elt->exp)
5184 		    && partial_subreg_p (GET_MODE (SUBREG_REG (src)),
5185 					 GET_MODE (SUBREG_REG (elt->exp)))))
5186 	    {
5187 	      elt = elt->next_same_value;
5188 	      continue;
5189 	    }
5190 
5191 	  if (elt)
5192 	    {
5193 	      src_elt_cost = elt->cost;
5194 	      src_elt_regcost = elt->regcost;
5195 	    }
5196 
5197 	  /* Find the cheapest one and skip it for next time.  For items
5198 	     of equal cost, use this order:
5199 	     src_folded, src, src_eqv, src_related and hash table entry.  */
5200 	  if (src_folded
5201 	      && preferable (src_folded_cost, src_folded_regcost,
5202 			     src_cost, src_regcost) <= 0
5203 	      && preferable (src_folded_cost, src_folded_regcost,
5204 			     src_eqv_cost, src_eqv_regcost) <= 0
5205 	      && preferable (src_folded_cost, src_folded_regcost,
5206 			     src_related_cost, src_related_regcost) <= 0
5207 	      && preferable (src_folded_cost, src_folded_regcost,
5208 			     src_elt_cost, src_elt_regcost) <= 0)
5209 	    {
5210 	      trial = src_folded, src_folded_cost = MAX_COST;
5211 	      if (src_folded_force_flag)
5212 		{
5213 		  rtx forced = force_const_mem (mode, trial);
5214 		  if (forced)
5215 		    trial = forced;
5216 		}
5217 	    }
5218 	  else if (src
5219 		   && preferable (src_cost, src_regcost,
5220 				  src_eqv_cost, src_eqv_regcost) <= 0
5221 		   && preferable (src_cost, src_regcost,
5222 				  src_related_cost, src_related_regcost) <= 0
5223 		   && preferable (src_cost, src_regcost,
5224 				  src_elt_cost, src_elt_regcost) <= 0)
5225 	    trial = src, src_cost = MAX_COST;
5226 	  else if (src_eqv_here
5227 		   && preferable (src_eqv_cost, src_eqv_regcost,
5228 				  src_related_cost, src_related_regcost) <= 0
5229 		   && preferable (src_eqv_cost, src_eqv_regcost,
5230 				  src_elt_cost, src_elt_regcost) <= 0)
5231 	    trial = src_eqv_here, src_eqv_cost = MAX_COST;
5232 	  else if (src_related
5233 		   && preferable (src_related_cost, src_related_regcost,
5234 				  src_elt_cost, src_elt_regcost) <= 0)
5235 	    trial = src_related, src_related_cost = MAX_COST;
5236 	  else
5237 	    {
5238 	      trial = elt->exp;
5239 	      elt = elt->next_same_value;
5240 	      src_elt_cost = MAX_COST;
5241 	    }
5242 
5243 	  /* Try to optimize
5244 	     (set (reg:M N) (const_int A))
5245 	     (set (reg:M2 O) (const_int B))
5246 	     (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
5247 		  (reg:M2 O)).  */
5248 	  if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5249 	      && CONST_INT_P (trial)
5250 	      && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
5251 	      && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
5252 	      && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
5253 	      && (known_ge
5254 		  (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl))),
5255 		   INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))))
5256 	      && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
5257 		  + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
5258 		  <= HOST_BITS_PER_WIDE_INT))
5259 	    {
5260 	      rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
5261 	      rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5262 	      rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
5263 	      unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
5264 	      struct table_elt *dest_elt
5265 		= lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
5266 	      rtx dest_cst = NULL;
5267 
5268 	      if (dest_elt)
5269 		for (p = dest_elt->first_same_value; p; p = p->next_same_value)
5270 		  if (p->is_const && CONST_INT_P (p->exp))
5271 		    {
5272 		      dest_cst = p->exp;
5273 		      break;
5274 		    }
5275 	      if (dest_cst)
5276 		{
5277 		  HOST_WIDE_INT val = INTVAL (dest_cst);
5278 		  HOST_WIDE_INT mask;
5279 		  unsigned int shift;
5280 		  /* This is the mode of DEST_CST as well.  */
5281 		  scalar_int_mode dest_mode
5282 		    = as_a <scalar_int_mode> (GET_MODE (dest_reg));
5283 		  if (BITS_BIG_ENDIAN)
5284 		    shift = GET_MODE_PRECISION (dest_mode)
5285 			    - INTVAL (pos) - INTVAL (width);
5286 		  else
5287 		    shift = INTVAL (pos);
5288 		  if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
5289 		    mask = HOST_WIDE_INT_M1;
5290 		  else
5291 		    mask = (HOST_WIDE_INT_1 << INTVAL (width)) - 1;
5292 		  val &= ~(mask << shift);
5293 		  val |= (INTVAL (trial) & mask) << shift;
5294 		  val = trunc_int_for_mode (val, dest_mode);
5295 		  validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
5296 					   dest_reg, 1);
5297 		  validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5298 					   GEN_INT (val), 1);
5299 		  if (apply_change_group ())
5300 		    {
5301 		      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5302 		      if (note)
5303 			{
5304 			  remove_note (insn, note);
5305 			  df_notes_rescan (insn);
5306 			}
5307 		      src_eqv = NULL_RTX;
5308 		      src_eqv_elt = NULL;
5309 		      src_eqv_volatile = 0;
5310 		      src_eqv_in_memory = 0;
5311 		      src_eqv_hash = 0;
5312 		      repeat = true;
5313 		      break;
5314 		    }
5315 		}
5316 	    }
5317 
5318 	  /* We don't normally have an insn matching (set (pc) (pc)), so
5319 	     check for this separately here.  We will delete such an
5320 	     insn below.
5321 
5322 	     For other cases such as a table jump or conditional jump
5323 	     where we know the ultimate target, go ahead and replace the
5324 	     operand.  While that may not make a valid insn, we will
5325 	     reemit the jump below (and also insert any necessary
5326 	     barriers).  */
5327 	  if (n_sets == 1 && dest == pc_rtx
5328 	      && (trial == pc_rtx
5329 		  || (GET_CODE (trial) == LABEL_REF
5330 		      && ! condjump_p (insn))))
5331 	    {
5332 	      /* Don't substitute non-local labels; doing so confuses the CFG.  */
5333 	      if (GET_CODE (trial) == LABEL_REF
5334 		  && LABEL_REF_NONLOCAL_P (trial))
5335 		continue;
5336 
5337 	      SET_SRC (sets[i].rtl) = trial;
5338 	      cse_jumps_altered = true;
5339 	      break;
5340 	    }
5341 
5342 	  /* Similarly, lots of targets don't allow no-op
5343 	     (set (mem x) (mem x)) moves.  Even (set (reg x) (reg x))
5344 	     might be impossible for certain registers (like CC registers).  */
5345 	  else if (n_sets == 1
5346 		   && !CALL_P (insn)
5347 		   && (MEM_P (trial) || REG_P (trial))
5348 		   && rtx_equal_p (trial, dest)
5349 		   && !side_effects_p (dest)
5350 		   && (cfun->can_delete_dead_exceptions
5351 		       || insn_nothrow_p (insn))
5352 		   /* We can only remove the later store if the earlier access
5353 		      aliases at least everything the later one aliases.  */
5354 		   && (!MEM_P (trial)
5355 		       || ((MEM_ALIAS_SET (dest) == MEM_ALIAS_SET (trial)
5356 			    || alias_set_subset_of (MEM_ALIAS_SET (dest),
5357 						    MEM_ALIAS_SET (trial)))
5358 			    && (!MEM_EXPR (trial)
5359 				|| refs_same_for_tbaa_p (MEM_EXPR (trial),
5360 							 MEM_EXPR (dest))))))
5361 	    {
5362 	      SET_SRC (sets[i].rtl) = trial;
5363 	      noop_insn = true;
5364 	      break;
5365 	    }
5366 
5367 	  /* Reject certain invalid forms of CONST that we create.  */
5368 	  else if (CONSTANT_P (trial)
5369 		   && GET_CODE (trial) == CONST
5370 		   /* Reject cases that will cause decode_rtx_const to
5371 		      die.  On the alpha when simplifying a switch, we
5372 		      get (const (truncate (minus (label_ref)
5373 		      (label_ref)))).  */
5374 		   && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5375 		       /* Likewise on IA-64, except without the
5376 			  truncate.  */
5377 		       || (GET_CODE (XEXP (trial, 0)) == MINUS
5378 			   && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5379 			   && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5380 	    /* Do nothing for this case.  */
5381 	    ;
5382 
5383 	  /* Do not replace anything with a MEM unless the replacement
5384 	     is a no-op.  This allows this loop to terminate.  */
5385 	  else if (MEM_P (trial) && !rtx_equal_p (trial, SET_SRC (sets[i].rtl)))
5386 	    /* Do nothing for this case.  */
5387 	    ;
5388 
5389 	  /* Look for a substitution that makes a valid insn.  */
5390 	  else if (validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5391 					    trial, 0))
5392 	    {
5393 	      rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);
5394 
5395 	      /* The result of apply_change_group can be ignored; see
5396 		 canon_reg.  */
5397 
5398 	      validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
5399 	      apply_change_group ();
5400 
5401 	      break;
5402 	    }
5403 
5404 	  /* If we previously found constant pool entries for
5405 	     constants and this is a constant, try making a
5406 	     pool entry.  Put it in src_folded unless we have already done
5407 	     so, since that is where it likely came from.  */
5408 
5409 	  else if (constant_pool_entries_cost
5410 		   && CONSTANT_P (trial)
5411 		   && (src_folded == 0
5412 		       || (!MEM_P (src_folded)
5413 			   && ! src_folded_force_flag))
5414 		   && GET_MODE_CLASS (mode) != MODE_CC
5415 		   && mode != VOIDmode)
5416 	    {
5417 	      src_folded_force_flag = 1;
5418 	      src_folded = trial;
5419 	      src_folded_cost = constant_pool_entries_cost;
5420 	      src_folded_regcost = constant_pool_entries_regcost;
5421 	    }
5422 	}
5423 
5424       /* If we changed the insn too much, handle this set from scratch.  */
5425       if (repeat)
5426 	{
5427 	  i--;
5428 	  continue;
5429 	}
5430 
5431       src = SET_SRC (sets[i].rtl);
5432 
5433       /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5434 	 However, there is an important exception:  If both are registers
5435 	 that are not the head of their equivalence class, replace SET_SRC
5436 	 with the head of the class.  If we do not do this, we will have
5437 	 both registers live over a portion of the basic block.  This way,
5438 	 their lifetimes will likely abut instead of overlapping.  */
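      /* For example, if regs 101 and 102 are equivalent and reg 101
	 heads the class, rewriting (set (reg 102) (reg 102)) as
	 (set (reg 102) (reg 101)) lets the two lifetimes abut rather
	 than overlap.  (The register numbers are illustrative.)  */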
5439       if (REG_P (dest)
5440 	  && REGNO_QTY_VALID_P (REGNO (dest)))
5441 	{
5442 	  int dest_q = REG_QTY (REGNO (dest));
5443 	  struct qty_table_elem *dest_ent = &qty_table[dest_q];
5444 
5445 	  if (dest_ent->mode == GET_MODE (dest)
5446 	      && dest_ent->first_reg != REGNO (dest)
5447 	      && REG_P (src) && REGNO (src) == REGNO (dest)
5448 	      /* Don't do this if the original insn had a hard reg as
5449 		 SET_SRC or SET_DEST.  */
5450 	      && (!REG_P (sets[i].src)
5451 		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5452 	      && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5453 	    /* We can't call canon_reg here because it won't do anything if
5454 	       SRC is a hard register.  */
5455 	    {
5456 	      int src_q = REG_QTY (REGNO (src));
5457 	      struct qty_table_elem *src_ent = &qty_table[src_q];
5458 	      int first = src_ent->first_reg;
5459 	      rtx new_src
5460 		= (first >= FIRST_PSEUDO_REGISTER
5461 		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5462 
5463 	      /* We must use validate_change even for this, because this
5464 		 might be a special no-op instruction, suitable only to
5465 		 tag notes onto.  */
5466 	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5467 		{
5468 		  src = new_src;
5469 		  /* If we had a constant that is cheaper than what we are now
5470 		     setting SRC to, use that constant.  We ignored it when we
5471 		     thought we could make this into a no-op.  */
5472 		  if (src_const && COST (src_const, mode) < COST (src, mode)
5473 		      && validate_change (insn, &SET_SRC (sets[i].rtl),
5474 					  src_const, 0))
5475 		    src = src_const;
5476 		}
5477 	    }
5478 	}
5479 
5480       /* If we made a change, recompute SRC values.  */
5481       if (src != sets[i].src)
5482 	{
5483 	  do_not_record = 0;
5484 	  hash_arg_in_memory = 0;
5485 	  sets[i].src = src;
5486 	  sets[i].src_hash = HASH (src, mode);
5487 	  sets[i].src_volatile = do_not_record;
5488 	  sets[i].src_in_memory = hash_arg_in_memory;
5489 	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5490 	}
5491 
5492       /* If this is a single SET, we are setting a register, and we have an
5493 	 equivalent constant, we want to add a REG_EQUAL note if the constant
5494 	 is different from the source.  We don't want to do it for a constant
5495 	 pseudo since verifying that this pseudo hasn't been eliminated is a
5496 	 pain; moreover such a note won't help anything.
5497 
5498 	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5499 	 which can be created for a reference to a compile time computable
5500 	 entry in a jump table.  */
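      /* For example, if the source of (set (reg:SI 100)
	 (plus:SI (reg:SI 101) (const_int 4))) is known to equal
	 (const_int 12), this attaches a REG_EQUAL note recording 12
	 for the benefit of later passes.  (The values are
	 illustrative.)  */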
5501       if (n_sets == 1
5502 	  && REG_P (dest)
5503 	  && src_const
5504 	  && !REG_P (src_const)
5505 	  && !(GET_CODE (src_const) == SUBREG
5506 	       && REG_P (SUBREG_REG (src_const)))
5507 	  && !(GET_CODE (src_const) == CONST
5508 	       && GET_CODE (XEXP (src_const, 0)) == MINUS
5509 	       && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5510 	       && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF)
5511 	  && !rtx_equal_p (src, src_const))
5512 	{
5513 	  /* Make sure that the rtx is not shared.  */
5514 	  src_const = copy_rtx (src_const);
5515 
5516 	  /* Record the actual constant value in a REG_EQUAL note,
5517 	     making a new one if one does not already exist.  */
5518 	  set_unique_reg_note (insn, REG_EQUAL, src_const);
5519 	  df_notes_rescan (insn);
5520 	}
5521 
5522       /* Now deal with the destination.  */
5523       do_not_record = 0;
5524 
5525       /* Look through SUBREG, ZERO_EXTRACT or STRICT_LOW_PART to the inner MEM or REG.  */
5526       while (GET_CODE (dest) == SUBREG
5527 	     || GET_CODE (dest) == ZERO_EXTRACT
5528 	     || GET_CODE (dest) == STRICT_LOW_PART)
5529 	dest = XEXP (dest, 0);
5530 
5531       sets[i].inner_dest = dest;
5532 
5533       if (MEM_P (dest))
5534 	{
5535 #ifdef PUSH_ROUNDING
5536 	  /* Stack pushes invalidate the stack pointer.  */
5537 	  rtx addr = XEXP (dest, 0);
5538 	  if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5539 	      && XEXP (addr, 0) == stack_pointer_rtx)
5540 	    invalidate (stack_pointer_rtx, VOIDmode);
5541 #endif
5542 	  dest = fold_rtx (dest, insn);
5543 	}
5544 
5545       /* Compute the hash code of the destination now,
5546 	 before the effects of this instruction are recorded,
5547 	 since the register values used in the address computation
5548 	 are those before this instruction.  */
5549       sets[i].dest_hash = HASH (dest, mode);
5550 
5551       /* Don't enter a bit-field in the hash table
5552 	 because the value in it after the store
5553 	 may not equal what was stored, due to truncation.  */
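      /* For example, storing (const_int 31) into a 4-bit field leaves
	 15 in it, so recording the original constant would later yield
	 a wrong equivalence; only provably untruncated constants are
	 recorded below.  */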
5554 
5555       if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5556 	{
5557 	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5558 
5559 	  if (src_const != 0 && CONST_INT_P (src_const)
5560 	      && CONST_INT_P (width)
5561 	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5562 	      && ! (INTVAL (src_const)
5563 		    & (HOST_WIDE_INT_M1U << INTVAL (width))))
5564 	    /* Exception: if the value is constant,
5565 	       and it won't be truncated, record it.  */
5566 	    ;
5567 	  else
5568 	    {
5569 	      /* This is chosen so that the destination will be invalidated
5570 		 but no new value will be recorded.
5571 		 We must invalidate because sometimes constant
5572 		 values can be recorded for bitfields.  */
5573 	      sets[i].src_elt = 0;
5574 	      sets[i].src_volatile = 1;
5575 	      src_eqv = 0;
5576 	      src_eqv_elt = 0;
5577 	    }
5578 	}
5579 
5580       /* If there is only one set in a JUMP_INSN and it is now a no-op,
5581 	 we can delete the insn.  */
5582       else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5583 	{
5584 	  /* One less use of the label this insn used to jump to.  */
5585 	  cse_cfg_altered |= delete_insn_and_edges (insn);
5586 	  cse_jumps_altered = true;
5587 	  /* No more processing for this set.  */
5588 	  sets[i].rtl = 0;
5589 	}
5590 
5591       /* Similarly for no-op moves.  */
5592       else if (noop_insn)
5593 	{
5594 	  if (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
5595 	    cse_cfg_altered = true;
5596 	  cse_cfg_altered |= delete_insn_and_edges (insn);
5597 	  /* No more processing for this set.  */
5598 	  sets[i].rtl = 0;
5599 	}
5600 
5601       /* If this SET is now setting PC to a label, we know it used to
5602 	 be a conditional or computed branch.  */
5603       else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5604 	       && !LABEL_REF_NONLOCAL_P (src))
5605 	{
5606 	  /* We reemit the jump in as many cases as possible just in
5607 	     case the form of an unconditional jump is significantly
5608 	     different from a computed jump or conditional jump.
5609 
5610 	     If this insn has multiple sets, then reemitting the
5611 	     jump is nontrivial.  So instead we just force rerecognition
5612 	     and hope for the best.  */
5613 	  if (n_sets == 1)
5614 	    {
5615 	      rtx_jump_insn *new_rtx;
5616 	      rtx note;
5617 
5618 	      rtx_insn *seq = targetm.gen_jump (XEXP (src, 0));
5619 	      new_rtx = emit_jump_insn_before (seq, insn);
5620 	      JUMP_LABEL (new_rtx) = XEXP (src, 0);
5621 	      LABEL_NUSES (XEXP (src, 0))++;
5622 
5623 	      /* Make sure to copy over REG_NON_LOCAL_GOTO.  */
5624 	      note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5625 	      if (note)
5626 		{
5627 		  XEXP (note, 1) = NULL_RTX;
5628 		  REG_NOTES (new_rtx) = note;
5629 		}
5630 
5631 	      cse_cfg_altered |= delete_insn_and_edges (insn);
5632 	      insn = new_rtx;
5633 	    }
5634 	  else
5635 	    INSN_CODE (insn) = -1;
5636 
5637 	  /* Do not bother deleting any unreachable code; let the jump optimizer do it.  */
5638 	  cse_jumps_altered = true;
5639 	  sets[i].rtl = 0;
5640 	}
5641 
5642       /* If destination is volatile, invalidate it and then do no further
5643 	 processing for this assignment.  */
5644 
5645       else if (do_not_record)
5646 	{
5647 	  invalidate_dest (dest);
5648 	  sets[i].rtl = 0;
5649 	}
5650 
5651       if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5652 	{
5653 	  do_not_record = 0;
5654 	  sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5655 	  if (do_not_record)
5656 	    {
5657 	      invalidate_dest (SET_DEST (sets[i].rtl));
5658 	      sets[i].rtl = 0;
5659 	    }
5660 	}
5661 
5662       /* If setting CC0, record what it was set to, or a constant, if it
5663 	 is equivalent to a constant.  If it is being set to a floating-point
5664 	 value, make a COMPARE with the appropriate constant of 0.  If we
5665 	 don't do this, later code can interpret this as a test against
5666 	 const0_rtx, which can cause problems if we try to put it into an
5667 	 insn as a floating-point operand.  */
5668       if (dest == cc0_rtx)
5669 	{
5670 	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5671 	  this_insn_cc0_mode = mode;
5672 	  if (FLOAT_MODE_P (mode))
5673 	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5674 					     CONST0_RTX (mode));
5675 	}
5676     }
5677 
5678   /* Now enter all non-volatile source expressions in the hash table
5679      if they are not already present.
5680      Record their equivalence classes in src_elt.
5681      This way we can insert the corresponding destinations into
5682      the same classes even if the actual sources are no longer in them
5683      (having been invalidated).  */
5684 
5685   if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5686       && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5687     {
5688       struct table_elt *elt;
5689       struct table_elt *classp = sets[0].src_elt;
5690       rtx dest = SET_DEST (sets[0].rtl);
5691       machine_mode eqvmode = GET_MODE (dest);
5692 
5693       if (GET_CODE (dest) == STRICT_LOW_PART)
5694 	{
5695 	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5696 	  classp = 0;
5697 	}
5698       if (insert_regs (src_eqv, classp, 0))
5699 	{
5700 	  rehash_using_reg (src_eqv);
5701 	  src_eqv_hash = HASH (src_eqv, eqvmode);
5702 	}
5703       elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5704       elt->in_memory = src_eqv_in_memory;
5705       src_eqv_elt = elt;
5706 
5707       /* Check to see if src_eqv_elt is the same as a set source which
5708 	 does not yet have an elt, and if so set the elt of the set source
5709 	 to src_eqv_elt.  */
5710       for (i = 0; i < n_sets; i++)
5711 	if (sets[i].rtl && sets[i].src_elt == 0
5712 	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5713 	  sets[i].src_elt = src_eqv_elt;
5714     }
5715 
5716   for (i = 0; i < n_sets; i++)
5717     if (sets[i].rtl && ! sets[i].src_volatile
5718 	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5719       {
5720 	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5721 	  {
5722 	    /* REG_EQUAL in setting a STRICT_LOW_PART
5723 	       gives an equivalent for the entire destination register,
5724 	       not just for the subreg being stored in now.
5725 	       This is a more interesting equivalence, so we arrange later
5726 	       to treat the entire reg as the destination.  */
5727 	    sets[i].src_elt = src_eqv_elt;
5728 	    sets[i].src_hash = src_eqv_hash;
5729 	  }
5730 	else
5731 	  {
5732 	    /* Insert source and constant equivalent into hash table, if not
5733 	       already present.  */
5734 	    struct table_elt *classp = src_eqv_elt;
5735 	    rtx src = sets[i].src;
5736 	    rtx dest = SET_DEST (sets[i].rtl);
5737 	    machine_mode mode
5738 	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5739 
5740 	    /* It's possible that we have a source value known to be
5741 	       constant but don't have a REG_EQUAL note on the insn.
5742 	       Lack of a note will mean src_eqv_elt will be NULL.  This
5743 	       can happen where we've generated a SUBREG to access a
5744 	       CONST_INT that is already in a register in a wider mode.
5745 	       Ensure that the source expression is put in the proper
5746 	       constant class.  */
5747 	    if (!classp)
5748 	      classp = sets[i].src_const_elt;
5749 
5750 	    if (sets[i].src_elt == 0)
5751 	      {
5752 		struct table_elt *elt;
5753 
5754 		/* Note that these insert_regs calls cannot remove
5755 		   any of the src_elt's, because they would have failed to
5756 		   match if not still valid.  */
5757 		if (insert_regs (src, classp, 0))
5758 		  {
5759 		    rehash_using_reg (src);
5760 		    sets[i].src_hash = HASH (src, mode);
5761 		  }
5762 		elt = insert (src, classp, sets[i].src_hash, mode);
5763 		elt->in_memory = sets[i].src_in_memory;
5764 		/* If inline asm has any clobbers, ensure we only reuse
5765 		   existing inline asms and never try to put the ASM_OPERANDS
5766 		   into an insn that isn't inline asm.  */
5767 		if (GET_CODE (src) == ASM_OPERANDS
5768 		    && GET_CODE (x) == PARALLEL)
5769 		  elt->cost = MAX_COST;
5770 		sets[i].src_elt = classp = elt;
5771 	      }
5772 	    if (sets[i].src_const && sets[i].src_const_elt == 0
5773 		&& src != sets[i].src_const
5774 		&& ! rtx_equal_p (sets[i].src_const, src))
5775 	      sets[i].src_elt = insert (sets[i].src_const, classp,
5776 					sets[i].src_const_hash, mode);
5777 	  }
5778       }
5779     else if (sets[i].src_elt == 0)
5780       /* If we did not insert the source into the hash table (e.g., it was
5781 	 volatile), note the equivalence class for the REG_EQUAL value, if any,
5782 	 so that the destination goes into that class.  */
5783       sets[i].src_elt = src_eqv_elt;
5784 
5785   /* Record destination addresses in the hash table.  This allows us to
5786      check if they are invalidated by other sets.  */
5787   for (i = 0; i < n_sets; i++)
5788     {
5789       if (sets[i].rtl)
5790 	{
5791 	  rtx x = sets[i].inner_dest;
5792 	  struct table_elt *elt;
5793 	  machine_mode mode;
5794 	  unsigned hash;
5795 
5796 	  if (MEM_P (x))
5797 	    {
5798 	      x = XEXP (x, 0);
5799 	      mode = GET_MODE (x);
5800 	      hash = HASH (x, mode);
5801 	      elt = lookup (x, hash, mode);
5802 	      if (!elt)
5803 		{
5804 		  if (insert_regs (x, NULL, 0))
5805 		    {
5806 		      rtx dest = SET_DEST (sets[i].rtl);
5807 
5808 		      rehash_using_reg (x);
5809 		      hash = HASH (x, mode);
5810 		      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5811 		    }
5812 		  elt = insert (x, NULL, hash, mode);
5813 		}
5814 
5815 	      sets[i].dest_addr_elt = elt;
5816 	    }
5817 	  else
5818 	    sets[i].dest_addr_elt = NULL;
5819 	}
5820     }
5821 
5822   invalidate_from_clobbers (insn);
5823 
5824   /* Some registers are invalidated by subroutine calls.  Memory is
5825      invalidated by non-constant calls.  */
5826 
5827   if (CALL_P (insn))
5828     {
5829       if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
5830 	invalidate_memory ();
5831       else
5832 	/* For const/pure calls, invalidate any argument slots, because
5833 	   those are owned by the callee.  */
5834 	for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5835 	  if (GET_CODE (XEXP (tem, 0)) == USE
5836 	      && MEM_P (XEXP (XEXP (tem, 0), 0)))
5837 	    invalidate (XEXP (XEXP (tem, 0), 0), VOIDmode);
5838       invalidate_for_call (insn);
5839     }
5840 
5841   /* Now invalidate everything set by this instruction.
5842      If a SUBREG or other funny destination is being set,
5843      sets[i].rtl is still nonzero, so here we invalidate the reg
5844      a part of which is being set.  */
5845 
5846   for (i = 0; i < n_sets; i++)
5847     if (sets[i].rtl)
5848       {
5849 	/* We can't use the inner dest, because the mode associated with
5850 	   a ZERO_EXTRACT is significant.  */
5851 	rtx dest = SET_DEST (sets[i].rtl);
5852 
5853 	/* Needed for registers to remove the register from its
5854 	   previous quantity's chain.
5855 	   Needed for memory if this is a nonvarying address, unless
5856 	   we have just done an invalidate_memory that covers even those.  */
5857 	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5858 	  invalidate (dest, VOIDmode);
5859 	else if (MEM_P (dest))
5860 	  invalidate (dest, VOIDmode);
5861 	else if (GET_CODE (dest) == STRICT_LOW_PART
5862 		 || GET_CODE (dest) == ZERO_EXTRACT)
5863 	  invalidate (XEXP (dest, 0), GET_MODE (dest));
5864       }
5865 
5866   /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
5867      the regs restored by the longjmp come from a later time
5868      than the setjmp.  */
5869   if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
5870     {
5871       flush_hash_table ();
5872       goto done;
5873     }
5874 
5875   /* Make sure registers mentioned in destinations
5876      are safe for use in an expression to be inserted.
5877      This removes from the hash table
5878      any invalid entry that refers to one of these registers.
5879 
5880      We don't care about the return value from mention_regs because
5881      we are going to hash the SET_DEST values unconditionally.  */
5882 
5883   for (i = 0; i < n_sets; i++)
5884     {
5885       if (sets[i].rtl)
5886 	{
5887 	  rtx x = SET_DEST (sets[i].rtl);
5888 
5889 	  if (!REG_P (x))
5890 	    mention_regs (x);
5891 	  else
5892 	    {
5893 	      /* We used to rely on all references to a register becoming
5894 		 inaccessible when a register changes to a new quantity,
5895 		 since that changes the hash code.  However, that is not
5896 		 safe, since after HASH_SIZE new quantities we get a
5897 		 hash 'collision' of a register with its own invalid
5898 		 entries.  And since SUBREGs have been changed not to
5899 		 change their hash code with the hash code of the register,
5900 		 it wouldn't work any longer at all.  So we have to check
5901 		 for any invalid references lying around now.
5902 		 This code is similar to the REG case in mention_regs,
5903 		 but it knows that reg_tick has been incremented, and
5904 		 it leaves reg_in_table as -1.  */
5905 	      unsigned int regno = REGNO (x);
5906 	      unsigned int endregno = END_REGNO (x);
5907 	      unsigned int i;
5908 
5909 	      for (i = regno; i < endregno; i++)
5910 		{
5911 		  if (REG_IN_TABLE (i) >= 0)
5912 		    {
5913 		      remove_invalid_refs (i);
5914 		      REG_IN_TABLE (i) = -1;
5915 		    }
5916 		}
5917 	    }
5918 	}
5919     }
5920 
5921   /* We may have just removed some of the src_elt's from the hash table.
5922      So replace each one with the current head of the same class.
5923      Also check if destination addresses have been removed.  */
5924 
5925   for (i = 0; i < n_sets; i++)
5926     if (sets[i].rtl)
5927       {
5928 	if (sets[i].dest_addr_elt
5929 	    && sets[i].dest_addr_elt->first_same_value == 0)
5930 	  {
5931 	    /* The elt was removed, which means this destination is not
5932 	       valid after this instruction.  */
5933 	    sets[i].rtl = NULL_RTX;
5934 	  }
5935 	else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5936 	  /* If elt was removed, find current head of same class,
5937 	     or 0 if nothing remains of that class.  */
5938 	  {
5939 	    struct table_elt *elt = sets[i].src_elt;
5940 
5941 	    while (elt && elt->prev_same_value)
5942 	      elt = elt->prev_same_value;
5943 
5944 	    while (elt && elt->first_same_value == 0)
5945 	      elt = elt->next_same_value;
5946 	    sets[i].src_elt = elt ? elt->first_same_value : 0;
5947 	  }
5948       }
5949 
5950   /* Now insert the destinations into their equivalence classes.  */
5951 
5952   for (i = 0; i < n_sets; i++)
5953     if (sets[i].rtl)
5954       {
5955 	rtx dest = SET_DEST (sets[i].rtl);
5956 	struct table_elt *elt;
5957 
5958 	/* Don't record value if we are not supposed to risk allocating
5959 	   floating-point values in registers that might be wider than
5960 	   memory.  */
5961 	if ((flag_float_store
5962 	     && MEM_P (dest)
5963 	     && FLOAT_MODE_P (GET_MODE (dest)))
5964 	    /* Don't record BLKmode values, because we don't know their
5965 	       size, and can't be sure that other BLKmode values
5966 	       have the same or smaller size.  */
5967 	    || GET_MODE (dest) == BLKmode
5968 	    /* If we didn't put a REG_EQUAL value or a source into the hash
5969 	       table, there is no point in recording DEST.  */
5970 	    || sets[i].src_elt == 0)
5971 	  continue;
5972 
5973 	/* STRICT_LOW_PART isn't part of the value BEING set,
5974 	   and neither is the SUBREG inside it.
5975 	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
5976 	if (GET_CODE (dest) == STRICT_LOW_PART)
5977 	  dest = SUBREG_REG (XEXP (dest, 0));
5978 
5979 	if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5980 	  /* Registers must also be inserted into chains for quantities.  */
5981 	  if (insert_regs (dest, sets[i].src_elt, 1))
5982 	    {
5983 	      /* If `insert_regs' changes something, the hash code must be
5984 		 recalculated.  */
5985 	      rehash_using_reg (dest);
5986 	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5987 	    }
5988 
5989 	/* If DEST is a paradoxical SUBREG, don't record DEST since the bits
5990 	   outside the mode of GET_MODE (SUBREG_REG (dest)) are undefined.  */
5991 	if (paradoxical_subreg_p (dest))
5992 	  continue;
5993 
5994 	elt = insert (dest, sets[i].src_elt,
5995 		      sets[i].dest_hash, GET_MODE (dest));
5996 
5997 	/* If this is a constant, insert the constant anchors with the
5998 	   equivalent register-offset expressions using register DEST.  */
5999 	if (targetm.const_anchor
6000 	    && REG_P (dest)
6001 	    && SCALAR_INT_MODE_P (GET_MODE (dest))
6002 	    && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
6003 	  insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));
6004 
6005 	elt->in_memory = (MEM_P (sets[i].inner_dest)
6006 			  && !MEM_READONLY_P (sets[i].inner_dest));
6007 
6008 	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6009 	   narrower than M2, and both M1 and M2 are the same number of words,
6010 	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6011 	   make that equivalence as well.
6012 
6013 	   However, BAR may have equivalences for which gen_lowpart
6014 	   will produce a simpler value than gen_lowpart applied to
6015 	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6016 	   BAR's equivalences.  If we don't get a simplified form, make
6017 	   the SUBREG.  It will not be used in an equivalence, but will
6018 	   cause two similar assignments to be detected.
6019 
6020 	   Note the loop below will find SUBREG_REG (DEST) since we have
6021 	   already entered SRC and DEST of the SET in the table.  */
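	/* For example, (set (subreg:SI (reg:HI 100) 0) (reg:SI 101)) on
	   a target with 32-bit words also tells us that (reg:HI 100)
	   equals the low part of (reg:SI 101), so that HImode
	   equivalence is recorded too.  (The modes and registers are
	   illustrative.)  */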
6022 
6023 	if (GET_CODE (dest) == SUBREG
6024 	    && (known_equal_after_align_down
6025 		(GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1,
6026 		 GET_MODE_SIZE (GET_MODE (dest)) - 1,
6027 		 UNITS_PER_WORD))
6028 	    && !partial_subreg_p (dest)
6029 	    && sets[i].src_elt != 0)
6030 	  {
6031 	    machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6032 	    struct table_elt *elt, *classp = 0;
6033 
6034 	    for (elt = sets[i].src_elt->first_same_value; elt;
6035 		 elt = elt->next_same_value)
6036 	      {
6037 		rtx new_src = 0;
6038 		unsigned src_hash;
6039 		struct table_elt *src_elt;
6040 
6041 		/* Ignore invalid entries.  */
6042 		if (!REG_P (elt->exp)
6043 		    && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6044 		  continue;
6045 
6046 		/* We may have already been playing subreg games.  If the
6047 		   mode is already correct for the destination, use it.  */
6048 		if (GET_MODE (elt->exp) == new_mode)
6049 		  new_src = elt->exp;
6050 		else
6051 		  {
6052 		    poly_uint64 byte
6053 		      = subreg_lowpart_offset (new_mode, GET_MODE (dest));
6054 		    new_src = simplify_gen_subreg (new_mode, elt->exp,
6055 					           GET_MODE (dest), byte);
6056 		  }
6057 
6058 		/* The call to simplify_gen_subreg fails if the value
6059 		   is VOIDmode, yet we can't do any simplification, e.g.
6060 		   for EXPR_LISTs denoting function call results.
6061 		   It is invalid to construct a SUBREG with a VOIDmode
6062 		   SUBREG_REG, hence a zero new_src means we can't do
6063 		   this substitution.  */
6064 		if (! new_src)
6065 		  continue;
6066 
6067 		src_hash = HASH (new_src, new_mode);
6068 		src_elt = lookup (new_src, src_hash, new_mode);
6069 
6070 		/* Put the new source in the hash table if it isn't
6071 		   there already.  */
6072 		if (src_elt == 0)
6073 		  {
6074 		    if (insert_regs (new_src, classp, 0))
6075 		      {
6076 			rehash_using_reg (new_src);
6077 			src_hash = HASH (new_src, new_mode);
6078 		      }
6079 		    src_elt = insert (new_src, classp, src_hash, new_mode);
6080 		    src_elt->in_memory = elt->in_memory;
6081 		    if (GET_CODE (new_src) == ASM_OPERANDS
6082 			&& elt->cost == MAX_COST)
6083 		      src_elt->cost = MAX_COST;
6084 		  }
6085 		else if (classp && classp != src_elt->first_same_value)
6086 		  /* Show that two things that we've seen before are
6087 		     actually the same.  */
6088 		  merge_equiv_classes (src_elt, classp);
6089 
6090 		classp = src_elt->first_same_value;
6091 		/* Ignore invalid entries.  */
6092 		while (classp
6093 		       && !REG_P (classp->exp)
6094 		       && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6095 		  classp = classp->next_same_value;
6096 	      }
6097 	  }
6098       }
6099 
6100   /* Special handling for (set REG0 REG1) where REG0 is the
6101      "cheapest", cheaper than REG1.  After cse, REG1 will probably not
6102      be used in the sequel, so (if easily done) change this insn to
6103      (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6104      that computed their value.  Then REG1 will become a dead store
6105      and won't cloud the situation for later optimizations.
6106 
6107      Do not make this change if REG1 is a hard register, because it will
6108      then be used in the sequel and we may be changing a two-operand insn
6109      into a three-operand insn.
6110 
6111      Also do not do this if we are operating on a copy of INSN.  */
6112 
6113   if (n_sets == 1 && sets[0].rtl)
6114     try_back_substitute_reg (sets[0].rtl, insn);
6115 
6116 done:;
6117 }
6118 
6119 /* Remove from the hash table all expressions that reference memory.  */
6120 
6121 static void
6122 invalidate_memory (void)
6123 {
6124   int i;
6125   struct table_elt *p, *next;
6126 
6127   for (i = 0; i < HASH_SIZE; i++)
6128     for (p = table[i]; p; p = next)
6129       {
6130 	next = p->next_same_hash;
6131 	if (p->in_memory)
6132 	  remove_from_table (p, i);
6133       }
6134 }
6135 
6136 /* Perform invalidation on the basis of everything about INSN,
6137    except for invalidating the actual places that are SET in it.
6138    This includes the places CLOBBERed, and anything that might
6139    alias with something that is SET or CLOBBERed.  */
6140 
6141 static void
6142 invalidate_from_clobbers (rtx_insn *insn)
6143 {
6144   rtx x = PATTERN (insn);
6145 
6146   if (GET_CODE (x) == CLOBBER)
6147     {
6148       rtx ref = XEXP (x, 0);
6149       if (ref)
6150 	{
6151 	  if (REG_P (ref) || GET_CODE (ref) == SUBREG
6152 	      || MEM_P (ref))
6153 	    invalidate (ref, VOIDmode);
6154 	  else if (GET_CODE (ref) == STRICT_LOW_PART
6155 		   || GET_CODE (ref) == ZERO_EXTRACT)
6156 	    invalidate (XEXP (ref, 0), GET_MODE (ref));
6157 	}
6158     }
6159   else if (GET_CODE (x) == PARALLEL)
6160     {
6161       int i;
6162       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6163 	{
6164 	  rtx y = XVECEXP (x, 0, i);
6165 	  if (GET_CODE (y) == CLOBBER)
6166 	    {
6167 	      rtx ref = XEXP (y, 0);
6168 	      if (REG_P (ref) || GET_CODE (ref) == SUBREG
6169 		  || MEM_P (ref))
6170 		invalidate (ref, VOIDmode);
6171 	      else if (GET_CODE (ref) == STRICT_LOW_PART
6172 		       || GET_CODE (ref) == ZERO_EXTRACT)
6173 		invalidate (XEXP (ref, 0), GET_MODE (ref));
6174 	    }
6175 	}
6176     }
6177 }
6178 
6179 /* Perform invalidation on the basis of everything about INSN.
6180    This includes the places CLOBBERed, and anything that might
6181    alias with something that is SET or CLOBBERed.  */
6182 
6183 static void
6184 invalidate_from_sets_and_clobbers (rtx_insn *insn)
6185 {
6186   rtx tem;
6187   rtx x = PATTERN (insn);
6188 
6189   if (CALL_P (insn))
6190     {
6191       for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6192 	{
6193 	  rtx temx = XEXP (tem, 0);
6194 	  if (GET_CODE (temx) == CLOBBER)
6195 	    invalidate (SET_DEST (temx), VOIDmode);
6196 	}
6197     }
6198 
6199   /* Ensure we invalidate the destination register of a CALL insn.
6200      This is necessary for machines where this register is a fixed_reg,
6201      because no other code would invalidate it.  */
6202   if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL)
6203     invalidate (SET_DEST (x), VOIDmode);
6204 
6205   else if (GET_CODE (x) == PARALLEL)
6206     {
6207       int i;
6208 
6209       for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6210 	{
6211 	  rtx y = XVECEXP (x, 0, i);
6212 	  if (GET_CODE (y) == CLOBBER)
6213 	    {
6214 	      rtx clobbered = XEXP (y, 0);
6215 
6216 	      if (REG_P (clobbered)
6217 		  || GET_CODE (clobbered) == SUBREG)
6218 		invalidate (clobbered, VOIDmode);
6219 	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
6220 		       || GET_CODE (clobbered) == ZERO_EXTRACT)
6221 		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6222 	    }
6223 	  else if (GET_CODE (y) == SET && GET_CODE (SET_SRC (y)) == CALL)
6224 	    invalidate (SET_DEST (y), VOIDmode);
6225 	}
6226     }
6227 }
6228 
6229 static rtx cse_process_note (rtx);
6230 
6231 /* A simplify_replace_fn_rtx callback for cse_process_note.  Process X,
6232    part of the REG_NOTES of an insn.  Replace any registers with either
6233    an equivalent constant or the canonical form of the register.
6234    Only replace addresses if the containing MEM remains valid.
6235 
6236    Return the replacement for X, or null if it should be simplified
6237    recursively.  */
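/* For instance (hypothetical example): given a note
   (REG_EQUAL (plus (reg 100) (const_int 8))) where reg 100 is known to
   hold (const_int 4), the register is replaced by the constant, so the
   note can then be simplified to (const_int 12); a register with no
   constant equivalent is merely replaced by its canonical equivalent
   register.  */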
6238 
6239 static rtx
6240 cse_process_note_1 (rtx x, const_rtx, void *)
6241 {
6242   if (MEM_P (x))
6243     {
6244       validate_change (x, &XEXP (x, 0), cse_process_note (XEXP (x, 0)), false);
6245       return x;
6246     }
6247 
6248   if (REG_P (x))
6249     {
6250       int i = REG_QTY (REGNO (x));
6251 
6252       /* Return a constant or a constant register.  */
6253       if (REGNO_QTY_VALID_P (REGNO (x)))
6254 	{
6255 	  struct qty_table_elem *ent = &qty_table[i];
6256 
6257 	  if (ent->const_rtx != NULL_RTX
6258 	      && (CONSTANT_P (ent->const_rtx)
6259 		  || REG_P (ent->const_rtx)))
6260 	    {
6261 	      rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
6262 	      if (new_rtx)
6263 		return copy_rtx (new_rtx);
6264 	    }
6265 	}
6266 
6267       /* Otherwise, canonicalize this register.  */
6268       return canon_reg (x, NULL);
6269     }
6270 
6271   return NULL_RTX;
6272 }
6273 
6274 /* Process X, part of the REG_NOTES of an insn.  Replace any registers in it
6275    with either an equivalent constant or the canonical form of the register.
6276    Only replace addresses if the containing MEM remains valid.  */
6277 
6278 static rtx
6279 cse_process_note (rtx x)
6280 {
6281   return simplify_replace_fn_rtx (x, NULL_RTX, cse_process_note_1, NULL);
6282 }
6283 
6284 
6285 /* Find a path in the CFG, starting with FIRST_BB to perform CSE on.
6286 
6287    DATA is a pointer to a struct cse_basic_block_data, that is used to
6288    describe the path.
6289    It is filled with a queue of basic blocks, starting with FIRST_BB
6290    and following a trace through the CFG.
6291 
6292    If all paths starting at FIRST_BB have been followed, or no new path
6293    starting at FIRST_BB can be constructed, this function returns FALSE.
6294    Otherwise, DATA->path is filled and the function returns TRUE indicating
6295    that a path to follow was found.
6296 
6297    If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
6298    block in the path will be FIRST_BB.  */
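/* As an illustration (hypothetical CFG): if B1 ends in a conditional
   jump to B2 or B3, and B2 has B1 as its single predecessor, the first
   call may build the path B1, B2 along the branch edge.  A later call
   backtracks and, if B3 also has a single predecessor, builds B1, B3
   along the fallthrough edge; when no further path starting at B1 can
   be built, the function returns FALSE.  */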
6299 
6300 static bool
6301 cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
6302 	       int follow_jumps)
6303 {
6304   basic_block bb;
6305   edge e;
6306   int path_size;
6307 
6308   bitmap_set_bit (cse_visited_basic_blocks, first_bb->index);
6309 
6310   /* See if there is a previous path.  */
6311   path_size = data->path_size;
6312 
6313   /* There is a previous path.  Make sure it started with FIRST_BB.  */
6314   if (path_size)
6315     gcc_assert (data->path[0].bb == first_bb);
6316 
6317   /* There was only one basic block in the last path.  Clear the path and
6318      return, so that paths starting at another basic block can be tried.  */
6319   if (path_size == 1)
6320     {
6321       path_size = 0;
6322       goto done;
6323     }
6324 
6325   /* If the path was empty from the beginning, construct a new path.  */
6326   if (path_size == 0)
6327     data->path[path_size++].bb = first_bb;
6328   else
6329     {
6330       /* Otherwise, path_size must be equal to or greater than 2, because
6331 	 a previous path exists that is at least two basic blocks long.
6332 
6333 	 Update the previous branch path, if any.  If the last branch was
6334 	 previously along the branch edge, take the fallthrough edge now.  */
6335       while (path_size >= 2)
6336 	{
6337 	  basic_block last_bb_in_path, previous_bb_in_path;
6338 	  edge e;
6339 
6340 	  --path_size;
6341 	  last_bb_in_path = data->path[path_size].bb;
6342 	  previous_bb_in_path = data->path[path_size - 1].bb;
6343 
6344 	  /* If we previously followed a path along the branch edge, try
6345 	     the fallthru edge now.  */
6346 	  if (EDGE_COUNT (previous_bb_in_path->succs) == 2
6347 	      && any_condjump_p (BB_END (previous_bb_in_path))
6348 	      && (e = find_edge (previous_bb_in_path, last_bb_in_path))
6349 	      && e == BRANCH_EDGE (previous_bb_in_path))
6350 	    {
6351 	      bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
6352 	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
6353 		  && single_pred_p (bb)
6354 		  /* We used to assert here that we would only see blocks
6355 		     that we have not visited yet.  But we may end up
6356 		     visiting basic blocks twice if the CFG has changed
6357 		     in this run of cse_main, because when the CFG changes
6358 		     the topological sort of the CFG also changes.  A basic
6359 		     block that previously had more than one predecessor
6360 		     may now have a single predecessor, and become part of
6361 		     a path that starts at another basic block.
6362 
6363 		     We still want to visit each basic block only once, so
6364 		     halt the path here if we have already visited BB.  */
6365 		  && !bitmap_bit_p (cse_visited_basic_blocks, bb->index))
6366 		{
6367 		  bitmap_set_bit (cse_visited_basic_blocks, bb->index);
6368 		  data->path[path_size++].bb = bb;
6369 		  break;
6370 		}
6371 	    }
6372 
6373 	  data->path[path_size].bb = NULL;
6374 	}
6375 
6376       /* If only one block remains in the path, bail.  */
6377       if (path_size == 1)
6378 	{
6379 	  path_size = 0;
6380 	  goto done;
6381 	}
6382     }
6383 
6384   /* Extend the path if possible.  */
6385   if (follow_jumps)
6386     {
6387       bb = data->path[path_size - 1].bb;
6388       while (bb && path_size < param_max_cse_path_length)
6389 	{
6390 	  if (single_succ_p (bb))
6391 	    e = single_succ_edge (bb);
6392 	  else if (EDGE_COUNT (bb->succs) == 2
6393 		   && any_condjump_p (BB_END (bb)))
6394 	    {
6395 	      /* First try to follow the branch.  If that doesn't lead
6396 		 to a useful path, follow the fallthru edge.  */
6397 	      e = BRANCH_EDGE (bb);
6398 	      if (!single_pred_p (e->dest))
6399 		e = FALLTHRU_EDGE (bb);
6400 	    }
6401 	  else
6402 	    e = NULL;
6403 
6404 	  if (e
6405 	      && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
6406 	      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
6407 	      && single_pred_p (e->dest)
6408 	      /* Avoid visiting basic blocks twice.  The large comment
6409 		 above explains why this can happen.  */
6410 	      && !bitmap_bit_p (cse_visited_basic_blocks, e->dest->index))
6411 	    {
6412 	      basic_block bb2 = e->dest;
6413 	      bitmap_set_bit (cse_visited_basic_blocks, bb2->index);
6414 	      data->path[path_size++].bb = bb2;
6415 	      bb = bb2;
6416 	    }
6417 	  else
6418 	    bb = NULL;
6419 	}
6420     }
6421 
6422 done:
6423   data->path_size = path_size;
6424   return path_size != 0;
6425 }
6426 
6427 /* Dump the path in DATA to file F.  NSETS is the number of sets
6428    in the path.  */
6429 
6430 static void
6431 cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
6432 {
6433   int path_entry;
6434 
6435   fprintf (f, ";; Following path with %d sets: ", nsets);
6436   for (path_entry = 0; path_entry < data->path_size; path_entry++)
6437     fprintf (f, "%d ", (data->path[path_entry].bb)->index);
6438   fputc ('\n', f);
6439   fflush (f);
6440 }
6441 
6442 
6443 /* Return true if BB has exception handling successor edges.  */
6444 
6445 static bool
6446 have_eh_succ_edges (basic_block bb)
6447 {
6448   edge e;
6449   edge_iterator ei;
6450 
6451   FOR_EACH_EDGE (e, ei, bb->succs)
6452     if (e->flags & EDGE_EH)
6453       return true;
6454 
6455   return false;
6456 }
6457 
6458 
6459 /* Scan to the end of the path described by DATA.  Store an estimate of
6460    the total number of SETs of all insns in the path in DATA->nsets.  */
6461 
6462 static void
6463 cse_prescan_path (struct cse_basic_block_data *data)
6464 {
6465   int nsets = 0;
6466   int path_size = data->path_size;
6467   int path_entry;
6468 
6469   /* Scan to end of each basic block in the path.  */
6470   for (path_entry = 0; path_entry < path_size; path_entry++)
6471     {
6472       basic_block bb;
6473       rtx_insn *insn;
6474 
6475       bb = data->path[path_entry].bb;
6476 
6477       FOR_BB_INSNS (bb, insn)
6478 	{
6479 	  if (!INSN_P (insn))
6480 	    continue;
6481 
6482 	  /* A PARALLEL can have lots of SETs in it,
6483 	     especially if it is really an ASM_OPERANDS.  */
6484 	  if (GET_CODE (PATTERN (insn)) == PARALLEL)
6485 	    nsets += XVECLEN (PATTERN (insn), 0);
6486 	  else
6487 	    nsets += 1;
6488 	}
6489     }
6490 
6491   data->nsets = nsets;
6492 }
6493 
6494 /* Return true if the pattern of INSN uses a LABEL_REF for which
6495    there isn't a REG_LABEL_OPERAND note.  */
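/* An example of such an insn (hypothetical RTL) is one that takes the
   address of a code label, e.g. (set (reg 100) (label_ref ...)), where
   the label is not a jump target of the insn; jump must then be rerun
   so the missing REG_LABEL_OPERAND note gets added.  */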
6496 
6497 static bool
6498 check_for_label_ref (rtx_insn *insn)
6499 {
6500   /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
6501      note for it, we must rerun jump since it needs to place the note.  If
6502      this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
6503      don't do this since no REG_LABEL_OPERAND will be added.  */
6504   subrtx_iterator::array_type array;
6505   FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
6506     {
6507       const_rtx x = *iter;
6508       if (GET_CODE (x) == LABEL_REF
6509 	  && !LABEL_REF_NONLOCAL_P (x)
6510 	  && (!JUMP_P (insn)
6511 	      || !label_is_jump_target_p (label_ref_label (x), insn))
6512 	  && LABEL_P (label_ref_label (x))
6513 	  && INSN_UID (label_ref_label (x)) != 0
6514 	  && !find_reg_note (insn, REG_LABEL_OPERAND, label_ref_label (x)))
6515 	return true;
6516     }
6517   return false;
6518 }
6519 
6520 /* Process a single extended basic block described by EBB_DATA.  */
6521 
6522 static void
6523 cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
6524 {
6525   int path_size = ebb_data->path_size;
6526   int path_entry;
6527   int num_insns = 0;
6528 
6529   /* Allocate the space needed by qty_table.  */
6530   qty_table = XNEWVEC (struct qty_table_elem, max_qty);
6531 
6532   new_basic_block ();
6533   cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
6534   cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
6535   for (path_entry = 0; path_entry < path_size; path_entry++)
6536     {
6537       basic_block bb;
6538       rtx_insn *insn;
6539 
6540       bb = ebb_data->path[path_entry].bb;
6541 
6542       /* Invalidate recorded information for eh regs if there is an EH
6543 	 edge pointing to that bb.  */
6544       if (bb_has_eh_pred (bb))
6545 	{
6546 	  df_ref def;
6547 
6548 	  FOR_EACH_ARTIFICIAL_DEF (def, bb->index)
6549 	    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
6550 	      invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
6551 	}
6552 
6553       optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
6554       FOR_BB_INSNS (bb, insn)
6555 	{
6556 	  /* If we have processed 1,000 insns, flush the hash table to
6557 	     avoid extreme quadratic behavior.  We must not include NOTEs
6558 	     in the count since there may be more of them when generating
6559 	     debugging information.  If we clear the table at different
6560 	     times, code generated with -g -O might be different than code
6561 	     generated with -O but not -g.
6562 
6563 	     FIXME: This is a real kludge and needs to be done some other
6564 		    way.  */
6565 	  if (NONDEBUG_INSN_P (insn)
6566 	      && num_insns++ > param_max_cse_insns)
6567 	    {
6568 	      flush_hash_table ();
6569 	      num_insns = 0;
6570 	    }
6571 
6572 	  if (INSN_P (insn))
6573 	    {
6574 	      /* Process notes first so we have all notes in canonical forms
6575 		 when looking for duplicate operations.  */
6576 	      bool changed = false;
6577 	      for (rtx note = REG_NOTES (insn); note; note = XEXP (note, 1))
6578 		if (REG_NOTE_KIND (note) == REG_EQUAL)
6579 		  {
6580 		    rtx newval = cse_process_note (XEXP (note, 0));
6581 		    if (newval != XEXP (note, 0))
6582 		      {
6583 			XEXP (note, 0) = newval;
6584 			changed = true;
6585 		      }
6586 		  }
6587 	      if (changed)
6588 		df_notes_rescan (insn);
6589 
6590 	      cse_insn (insn);
6591 
6592 	      /* If we haven't already found an insn where we added a LABEL_REF,
6593 		 check this one.  */
6594 	      if (INSN_P (insn) && !recorded_label_ref
6595 		  && check_for_label_ref (insn))
6596 		recorded_label_ref = true;
6597 
6598 	      if (HAVE_cc0 && NONDEBUG_INSN_P (insn))
6599 		{
6600 		  /* If the previous insn sets CC0 and this insn no
6601 		     longer references CC0, delete the previous insn.
6602 		     Here we use fact that nothing expects CC0 to be
6603 		     valid over an insn, which is true until the final
6604 		     pass.  */
6605 		  rtx_insn *prev_insn;
6606 		  rtx tem;
6607 
6608 		  prev_insn = prev_nonnote_nondebug_insn (insn);
6609 		  if (prev_insn && NONJUMP_INSN_P (prev_insn)
6610 		      && (tem = single_set (prev_insn)) != NULL_RTX
6611 		      && SET_DEST (tem) == cc0_rtx
6612 		      && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
6613 		    delete_insn (prev_insn);
6614 
6615 		  /* If this insn is not the last insn in the basic
6616 		     block, it will be PREV_INSN(insn) in the next
6617 		     iteration.  If we recorded any CC0-related
6618 		     information for this insn, remember it.  */
6619 		  if (insn != BB_END (bb))
6620 		    {
6621 		      prev_insn_cc0 = this_insn_cc0;
6622 		      prev_insn_cc0_mode = this_insn_cc0_mode;
6623 		    }
6624 		}
6625 	    }
6626 	}
6627 
6628       /* With non-call exceptions, we are not always able to update
6629 	 the CFG properly inside cse_insn.  So clean up possibly
6630 	 redundant EH edges here.  */
6631       if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
6632 	cse_cfg_altered |= purge_dead_edges (bb);
6633 
6634       /* If we changed a conditional jump, we may have terminated
6635 	 the path we are following.  Check that by verifying that
6636 	 the edge we would take still exists.  If the edge does
6637 	 not exist anymore, purge the remainder of the path.
6638 	 Note that this will cause us to return to the caller.  */
6639       if (path_entry < path_size - 1)
6640 	{
6641 	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6642 	  if (!find_edge (bb, next_bb))
6643 	    {
6644 	      do
6645 		{
6646 		  path_size--;
6647 
6648 		  /* If we truncate the path, we must also reset the
6649 		     visited bit on the remaining blocks in the path,
6650 		     or we will never visit them at all.  */
6651 		  bitmap_clear_bit (cse_visited_basic_blocks,
6652 			     ebb_data->path[path_size].bb->index);
6653 		  ebb_data->path[path_size].bb = NULL;
6654 		}
6655 	      while (path_size - 1 != path_entry);
6656 	      ebb_data->path_size = path_size;
6657 	    }
6658 	}
6659 
6660       /* If this is a conditional jump insn, record any known
6661 	 equivalences due to the condition being tested.  */
6662       insn = BB_END (bb);
6663       if (path_entry < path_size - 1
6664 	  && EDGE_COUNT (bb->succs) == 2
6665 	  && JUMP_P (insn)
6666 	  && single_set (insn)
6667 	  && any_condjump_p (insn))
6668 	{
6669 	  basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6670 	  bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
6671 	  record_jump_equiv (insn, taken);
6672 	}
6673 
6674       /* Clear the CC0-tracking related insns, they can't provide
6675 	 useful information across basic block boundaries.  */
6676       prev_insn_cc0 = 0;
6677     }
6678 
6679   gcc_assert (next_qty <= max_qty);
6680 
6681   free (qty_table);
6682 }
6683 
6684 
6685 /* Perform cse on the instructions of a function.
6686    F is the first instruction.
6687    NREGS is one plus the highest pseudo-reg number used in the instructions.
6688 
6689    Return 2 if jump optimizations should be redone due to simplifications
6690    in conditional jump instructions.
6691    Return 1 if the CFG should be cleaned up because it has been modified.
6692    Return 0 otherwise.  */
6693 
6694 static int
6695 cse_main (rtx_insn *f ATTRIBUTE_UNUSED, int nregs)
6696 {
6697   struct cse_basic_block_data ebb_data;
6698   basic_block bb;
6699   int *rc_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
6700   int i, n_blocks;
6701 
6702   /* CSE doesn't use dominance info but can invalidate it in different ways.
6703      For simplicity free dominance info here.  */
6704   free_dominance_info (CDI_DOMINATORS);
6705 
6706   df_set_flags (DF_LR_RUN_DCE);
6707   df_note_add_problem ();
6708   df_analyze ();
6709   df_set_flags (DF_DEFER_INSN_RESCAN);
6710 
6711   reg_scan (get_insns (), max_reg_num ());
6712   init_cse_reg_info (nregs);
6713 
6714   ebb_data.path = XNEWVEC (struct branch_path,
6715 			   param_max_cse_path_length);
6716 
6717   cse_cfg_altered = false;
6718   cse_jumps_altered = false;
6719   recorded_label_ref = false;
6720   constant_pool_entries_cost = 0;
6721   constant_pool_entries_regcost = 0;
6722   ebb_data.path_size = 0;
6723   ebb_data.nsets = 0;
6724   rtl_hooks = cse_rtl_hooks;
6725 
6726   init_recog ();
6727   init_alias_analysis ();
6728 
6729   reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
6730 
6731   /* Set up the table of already visited basic blocks.  */
6732   cse_visited_basic_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
6733   bitmap_clear (cse_visited_basic_blocks);
6734 
6735   /* Loop over basic blocks in reverse completion order (RPO),
6736      excluding the ENTRY and EXIT blocks.  */
6737   n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
6738   i = 0;
6739   while (i < n_blocks)
6740     {
6741       /* Find the first block in the RPO queue that we have not yet
6742 	 processed before.  */
6743       do
6744 	{
6745 	  bb = BASIC_BLOCK_FOR_FN (cfun, rc_order[i++]);
6746 	}
6747       while (bitmap_bit_p (cse_visited_basic_blocks, bb->index)
6748 	     && i < n_blocks);
6749 
6750       /* Find all paths starting with BB, and process them.  */
6751       while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
6752 	{
6753 	  /* Pre-scan the path.  */
6754 	  cse_prescan_path (&ebb_data);
6755 
6756 	  /* If this basic block has no sets, skip it.  */
6757 	  if (ebb_data.nsets == 0)
6758 	    continue;
6759 
6760 	  /* Get a reasonable estimate for the maximum number of qty's
6761 	     needed for this path.  For this, we take the number of sets
6762 	     and multiply that by MAX_RECOG_OPERANDS.  */
6763 	  max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;
6764 
6765 	  /* Dump the path we're about to process.  */
6766 	  if (dump_file)
6767 	    cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);
6768 
6769 	  cse_extended_basic_block (&ebb_data);
6770 	}
6771     }
6772 
6773   /* Clean up.  */
6774   end_alias_analysis ();
6775   free (reg_eqv_table);
6776   free (ebb_data.path);
6777   sbitmap_free (cse_visited_basic_blocks);
6778   free (rc_order);
6779   rtl_hooks = general_rtl_hooks;
6780 
6781   if (cse_jumps_altered || recorded_label_ref)
6782     return 2;
6783   else if (cse_cfg_altered)
6784     return 1;
6785   else
6786     return 0;
6787 }
6788 
6789 /* Count the number of times registers are used (not set) in X.
6790    COUNTS is an array in which we accumulate the count, INCR is how much
6791    we count each register usage.
6792 
6793    Don't count a usage of DEST, which is the SET_DEST of a SET which
6794    contains X in its SET_SRC.  This is because such a SET does not
6795    modify the liveness of DEST.
6796    DEST is set to pc_rtx for a trapping insn, or for an insn with side effects.
6797    We must then count uses of a SET_DEST regardless, because the insn can't be
6798    deleted here.  */
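/* For example (hypothetical): for the insn
   (set (reg 100) (plus (reg 101) (reg 101))), a call with INCR == 1
   adds 2 to COUNTS[101] and nothing to COUNTS[100], since the SET_DEST
   register is skipped; calling again with INCR == -1 undoes those
   counts when the insn is deleted.  */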
6799 
6800 static void
6801 count_reg_usage (rtx x, int *counts, rtx dest, int incr)
6802 {
6803   enum rtx_code code;
6804   rtx note;
6805   const char *fmt;
6806   int i, j;
6807 
6808   if (x == 0)
6809     return;
6810 
6811   switch (code = GET_CODE (x))
6812     {
6813     case REG:
6814       if (x != dest)
6815 	counts[REGNO (x)] += incr;
6816       return;
6817 
6818     case PC:
6819     case CC0:
6820     case CONST:
6821     CASE_CONST_ANY:
6822     case SYMBOL_REF:
6823     case LABEL_REF:
6824       return;
6825 
6826     case CLOBBER:
6827       /* If we are clobbering a MEM, mark any registers inside the address
6828          as being used.  */
6829       if (MEM_P (XEXP (x, 0)))
6830 	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
6831       return;
6832 
6833     case SET:
6834       /* Unless we are setting a REG, count everything in SET_DEST.  */
6835       if (!REG_P (SET_DEST (x)))
6836 	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
6837       count_reg_usage (SET_SRC (x), counts,
6838 		       dest ? dest : SET_DEST (x),
6839 		       incr);
6840       return;
6841 
6842     case DEBUG_INSN:
6843       return;
6844 
6845     case CALL_INSN:
6846     case INSN:
6847     case JUMP_INSN:
6848       /* We expect dest to be NULL_RTX here.  If the insn may throw,
6849 	 or if it cannot be deleted due to side-effects, mark this fact
6850 	 by setting DEST to pc_rtx.  */
6851       if ((!cfun->can_delete_dead_exceptions && !insn_nothrow_p (x))
6852 	  || side_effects_p (PATTERN (x)))
6853 	dest = pc_rtx;
6854       if (code == CALL_INSN)
6855 	count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
6856       count_reg_usage (PATTERN (x), counts, dest, incr);
6857 
6858       /* Things used in a REG_EQUAL note aren't dead since loop may try to
6859 	 use them.  */
6860 
6861       note = find_reg_equal_equiv_note (x);
6862       if (note)
6863 	{
6864 	  rtx eqv = XEXP (note, 0);
6865 
6866 	  if (GET_CODE (eqv) == EXPR_LIST)
6867 	  /* This REG_EQUAL note describes the result of a function call.
6868 	     Process all the arguments.  */
6869 	    do
6870 	      {
6871 		count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
6872 		eqv = XEXP (eqv, 1);
6873 	      }
6874 	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
6875 	  else
6876 	    count_reg_usage (eqv, counts, dest, incr);
6877 	}
6878       return;
6879 
6880     case EXPR_LIST:
6881       if (REG_NOTE_KIND (x) == REG_EQUAL
6882 	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
6883 	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
6884 	     involving registers in the address.  */
6885 	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
6886 	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
6887 
6888       count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
6889       return;
6890 
6891     case ASM_OPERANDS:
6892       /* Iterate over just the inputs, not the constraints as well.  */
6893       for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
6894 	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
6895       return;
6896 
6897     case INSN_LIST:
6898     case INT_LIST:
6899       gcc_unreachable ();
6900 
6901     default:
6902       break;
6903     }
6904 
6905   fmt = GET_RTX_FORMAT (code);
6906   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6907     {
6908       if (fmt[i] == 'e')
6909 	count_reg_usage (XEXP (x, i), counts, dest, incr);
6910       else if (fmt[i] == 'E')
6911 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6912 	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
6913     }
6914 }
6915 
6916 /* Return true if X is a dead register.  */
6917 
6918 static inline int
6919 is_dead_reg (const_rtx x, int *counts)
6920 {
6921   return (REG_P (x)
6922 	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
6923 	  && counts[REGNO (x)] == 0);
6924 }
6925 
6926 /* Return true if set is live.  */
6927 static bool
6928 set_live_p (rtx set, rtx_insn *insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
6929 	    int *counts)
6930 {
6931   rtx_insn *tem;
6932 
6933   if (set_noop_p (set))
6934     ;
6935 
6936   else if (GET_CODE (SET_DEST (set)) == CC0
6937 	   && !side_effects_p (SET_SRC (set))
6938 	   && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX
6939 	       || !INSN_P (tem)
6940 	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
6941     return false;
6942   else if (!is_dead_reg (SET_DEST (set), counts)
6943 	   || side_effects_p (SET_SRC (set)))
6944     return true;
6945   return false;
6946 }
6947 
6948 /* Return true if insn is live.  */
6949 
6950 static bool
6951 insn_live_p (rtx_insn *insn, int *counts)
6952 {
6953   int i;
6954   if (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
6955     return true;
6956   else if (GET_CODE (PATTERN (insn)) == SET)
6957     return set_live_p (PATTERN (insn), insn, counts);
6958   else if (GET_CODE (PATTERN (insn)) == PARALLEL)
6959     {
6960       for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6961 	{
6962 	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
6963 
6964 	  if (GET_CODE (elt) == SET)
6965 	    {
6966 	      if (set_live_p (elt, insn, counts))
6967 		return true;
6968 	    }
6969 	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
6970 	    return true;
6971 	}
6972       return false;
6973     }
6974   else if (DEBUG_INSN_P (insn))
6975     {
6976       rtx_insn *next;
6977 
6978       if (DEBUG_MARKER_INSN_P (insn))
6979 	return true;
6980 
6981       for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
6982 	if (NOTE_P (next))
6983 	  continue;
6984 	else if (!DEBUG_INSN_P (next))
6985 	  return true;
6986 	/* If we find an inspection point, such as a debug begin stmt,
6987 	   we want to keep the earlier debug insn.  */
6988 	else if (DEBUG_MARKER_INSN_P (next))
6989 	  return true;
6990 	else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
6991 	  return false;
6992 
6993       return true;
6994     }
6995   else
6996     return true;
6997 }
6998 
6999 /* Count the number of stores into pseudo.  Callback for note_stores.  */
7000 
7001 static void
7002 count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data)
7003 {
7004   int *counts = (int *) data;
7005   if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
7006     counts[REGNO (x)]++;
7007 }
7008 
7009 /* Return true if DEBUG_INSN pattern PAT needs to be reset because some dead
7010    pseudo doesn't have a replacement.  COUNTS[X] is zero if register X
7011    is dead and REPLACEMENTS[X] is null if it has no replacement.
7012    Set *SEEN_REPL to true if we see a dead register that does have
7013    a replacement.  */
7014 
7015 static bool
7016 is_dead_debug_insn (const_rtx pat, int *counts, rtx *replacements,
7017 		    bool *seen_repl)
7018 {
7019   subrtx_iterator::array_type array;
7020   FOR_EACH_SUBRTX (iter, array, pat, NONCONST)
7021     {
7022       const_rtx x = *iter;
7023       if (is_dead_reg (x, counts))
7024 	{
7025 	  if (replacements && replacements[REGNO (x)] != NULL_RTX)
7026 	    *seen_repl = true;
7027 	  else
7028 	    return true;
7029 	}
7030     }
7031   return false;
7032 }
7033 
7034 /* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR.
7035    Callback for simplify_replace_fn_rtx.  */
7036 
7037 static rtx
7038 replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data)
7039 {
7040   rtx *replacements = (rtx *) data;
7041 
7042   if (REG_P (x)
7043       && REGNO (x) >= FIRST_PSEUDO_REGISTER
7044       && replacements[REGNO (x)] != NULL_RTX)
7045     {
7046       if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)]))
7047 	return replacements[REGNO (x)];
7048       return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)],
7049 			     GET_MODE (replacements[REGNO (x)]));
7050     }
7051   return NULL_RTX;
7052 }
7053 
7054 /* Scan all the insns and delete any that are dead; i.e., they store a register
7055    that is never used or they copy a register to itself.
7056 
7057    This is used to remove insns made obviously dead by cse, loop or other
7058    optimizations.  It improves the heuristics in loop since it won't try to
7059    move dead invariants out of loops or make givs for dead quantities.  The
7060    remaining passes of the compilation are also sped up.  */
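/* Illustrative example (not from the sources): if CSE rewrites a
   second evaluation of A + B into a copy of the register holding the
   first result, the copy's destination may never be read again; its
   entry in COUNTS stays zero, insn_live_p returns false, and the copy
   is deleted below.  */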
7061 
7062 int
7063 delete_trivially_dead_insns (rtx_insn *insns, int nreg)
7064 {
7065   int *counts;
7066   rtx_insn *insn, *prev;
7067   rtx *replacements = NULL;
7068   int ndead = 0;
7069 
7070   timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7071   /* First count the number of times each register is used.  */
7072   if (MAY_HAVE_DEBUG_BIND_INSNS)
7073     {
7074       counts = XCNEWVEC (int, nreg * 3);
7075       for (insn = insns; insn; insn = NEXT_INSN (insn))
7076 	if (DEBUG_BIND_INSN_P (insn))
7077 	  count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
7078 			   NULL_RTX, 1);
7079 	else if (INSN_P (insn))
7080 	  {
7081 	    count_reg_usage (insn, counts, NULL_RTX, 1);
7082 	    note_stores (insn, count_stores, counts + nreg * 2);
7083 	  }
7084       /* If there can be debug insns, COUNTS are 3 consecutive arrays.
7085 	 First one counts how many times each pseudo is used outside
7086 	 of debug insns, second counts how many times each pseudo is
7087 	 used in debug insns and third counts how many times a pseudo
7088 	 is stored.  */
7089     }
7090   else
7091     {
7092       counts = XCNEWVEC (int, nreg);
7093       for (insn = insns; insn; insn = NEXT_INSN (insn))
7094 	if (INSN_P (insn))
7095 	  count_reg_usage (insn, counts, NULL_RTX, 1);
7096       /* If no debug insns can be present, COUNTS is just an array
7097 	 which counts how many times each pseudo is used.  */
7098     }
7099   /* The pseudo PIC register should be considered used, since new
7100      uses of it may be generated later.  */
7101   if (!reload_completed
7102       && pic_offset_table_rtx
7103       && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
7104     counts[REGNO (pic_offset_table_rtx)]++;
7105   /* Go from the last insn to the first and delete insns that only set unused
7106      registers or copy a register to itself.  As we delete an insn, remove
7107      usage counts for registers it uses.
7108 
7109      The first jump optimization pass may leave a real insn as the last
7110      insn in the function.   We must not skip that insn or we may end
7111      up deleting code that is not really dead.
7112 
7113      If some otherwise unused register is only used in DEBUG_INSNs,
7114      try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before
7115      the setter.  Then go through DEBUG_INSNs and if a DEBUG_EXPR
7116      has been created for the unused register, replace it with
7117      the DEBUG_EXPR, otherwise reset the DEBUG_INSN.  */
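  /* A hypothetical sketch: if (set (reg 100) (plus (reg 101) (reg 102)))
     is dead except for one debug bind mentioning reg 100, a DEBUG_EXPR D
     is created, a (var_location D (plus (reg 101) (reg 102))) bind is
     emitted before the dying setter, and later debug insns have reg 100
     rewritten to refer to D instead.  */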
7118   for (insn = get_last_insn (); insn; insn = prev)
7119     {
7120       int live_insn = 0;
7121 
7122       prev = PREV_INSN (insn);
7123       if (!INSN_P (insn))
7124 	continue;
7125 
7126       live_insn = insn_live_p (insn, counts);
7127 
7128       /* If this is a dead insn, delete it and show registers in it aren't
7129 	 being used.  */
7130 
7131       if (! live_insn && dbg_cnt (delete_trivial_dead))
7132 	{
7133 	  if (DEBUG_INSN_P (insn))
7134 	    {
7135 	      if (DEBUG_BIND_INSN_P (insn))
7136 		count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
7137 				 NULL_RTX, -1);
7138 	    }
7139 	  else
7140 	    {
7141 	      rtx set;
7142 	      if (MAY_HAVE_DEBUG_BIND_INSNS
7143 		  && (set = single_set (insn)) != NULL_RTX
7144 		  && is_dead_reg (SET_DEST (set), counts)
7145 		  /* Used at least once in some DEBUG_INSN.  */
7146 		  && counts[REGNO (SET_DEST (set)) + nreg] > 0
7147 		  /* And set exactly once.  */
7148 		  && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1
7149 		  && !side_effects_p (SET_SRC (set))
7150 		  && asm_noperands (PATTERN (insn)) < 0)
7151 		{
7152 		  rtx dval, bind_var_loc;
7153 		  rtx_insn *bind;
7154 
7155 		  /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
7156 		  dval = make_debug_expr_from_rtl (SET_DEST (set));
7157 
7158 		  /* Emit a debug bind insn before the insn in which
7159 		     reg dies.  */
7160 		  bind_var_loc =
7161 		    gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)),
7162 					  DEBUG_EXPR_TREE_DECL (dval),
7163 					  SET_SRC (set),
7164 					  VAR_INIT_STATUS_INITIALIZED);
7165 		  count_reg_usage (bind_var_loc, counts + nreg, NULL_RTX, 1);
7166 
7167 		  bind = emit_debug_insn_before (bind_var_loc, insn);
7168 		  df_insn_rescan (bind);
7169 
7170 		  if (replacements == NULL)
7171 		    replacements = XCNEWVEC (rtx, nreg);
7172 		  replacements[REGNO (SET_DEST (set))] = dval;
7173 		}
7174 
7175 	      count_reg_usage (insn, counts, NULL_RTX, -1);
7176 	      ndead++;
7177 	    }
7178 	  cse_cfg_altered |= delete_insn_and_edges (insn);
7179 	}
7180     }
7181 
7182   if (MAY_HAVE_DEBUG_BIND_INSNS)
7183     {
7184       for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
7185 	if (DEBUG_BIND_INSN_P (insn))
7186 	  {
7187 	    /* If this debug insn references a dead register that wasn't replaced
7188 	       with a DEBUG_EXPR, reset the DEBUG_INSN.  */
7189 	    bool seen_repl = false;
7190 	    if (is_dead_debug_insn (INSN_VAR_LOCATION_LOC (insn),
7191 				    counts, replacements, &seen_repl))
7192 	      {
7193 		INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
7194 		df_insn_rescan (insn);
7195 	      }
7196 	    else if (seen_repl)
7197 	      {
7198 		INSN_VAR_LOCATION_LOC (insn)
7199 		  = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
7200 					     NULL_RTX, replace_dead_reg,
7201 					     replacements);
7202 		df_insn_rescan (insn);
7203 	      }
7204 	  }
7205       free (replacements);
7206     }
7207 
7208   if (dump_file && ndead)
7209     fprintf (dump_file, "Deleted %i trivially dead insns\n",
7210 	     ndead);
7211   /* Clean up.  */
7212   free (counts);
7213   timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7214   return ndead;
7215 }
7216 
7217 /* If LOC contains references to NEWREG in a different mode, change them
7218    to use NEWREG instead.  */
7219 
7220 static void
7221 cse_change_cc_mode (subrtx_ptr_iterator::array_type &array,
7222 		    rtx *loc, rtx_insn *insn, rtx newreg)
7223 {
7224   FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
7225     {
7226       rtx *loc = *iter;
7227       rtx x = *loc;
7228       if (x
7229 	  && REG_P (x)
7230 	  && REGNO (x) == REGNO (newreg)
7231 	  && GET_MODE (x) != GET_MODE (newreg))
7232 	{
7233 	  validate_change (insn, loc, newreg, 1);
7234 	  iter.skip_subrtxes ();
7235 	}
7236     }
7237 }
7238 
7239 /* Change the mode of any reference to the register REGNO (NEWREG) to
7240    GET_MODE (NEWREG) in INSN.  */
7241 
7242 static void
7243 cse_change_cc_mode_insn (rtx_insn *insn, rtx newreg)
7244 {
7245   int success;
7246 
7247   if (!INSN_P (insn))
7248     return;
7249 
7250   subrtx_ptr_iterator::array_type array;
7251   cse_change_cc_mode (array, &PATTERN (insn), insn, newreg);
7252   cse_change_cc_mode (array, &REG_NOTES (insn), insn, newreg);
7253 
7254   /* If the following assertion was triggered, there is most probably
7255      something wrong with the cc_modes_compatible back end function.
7256      CC modes only can be considered compatible if the insn - with the mode
7257      replaced by any of the compatible modes - can still be recognized.  */
7258   success = apply_change_group ();
7259   gcc_assert (success);
7260 }
7261 
7262 /* Change the mode of any reference to the register REGNO (NEWREG) to
7263    GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
7264    any instruction which modifies NEWREG.  */
7265 
7266 static void
7267 cse_change_cc_mode_insns (rtx_insn *start, rtx_insn *end, rtx newreg)
7268 {
7269   rtx_insn *insn;
7270 
7271   for (insn = start; insn != end; insn = NEXT_INSN (insn))
7272     {
7273       if (! INSN_P (insn))
7274 	continue;
7275 
7276       if (reg_set_p (newreg, insn))
7277 	return;
7278 
7279       cse_change_cc_mode_insn (insn, newreg);
7280     }
7281 }
7282 
7283 /* BB is a basic block which finishes with CC_REG as a condition code
7284    register which is set to CC_SRC.  Look through the successors of BB
7285    to find blocks which have a single predecessor (i.e., this one),
7286    and look through those blocks for an assignment to CC_REG which is
7287    equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
7288    permitted to change the mode of CC_SRC to a compatible mode.  This
7289    returns VOIDmode if no equivalent assignments were found.
7290    Otherwise it returns the mode which CC_SRC should wind up with.
7291    ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
7292    but is passed unmodified down to recursive calls in order to prevent
7293    endless recursion.
7294 
7295    The main complexity in this function is handling the mode issues.
7296    We may have more than one duplicate which we can eliminate, and we
7297    try to find a mode which will work for multiple duplicates.  */
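/* A hypothetical example: if this block sets (reg:CCmode 17) from
   (compare (reg 100) (reg 101)) and a successor block recomputes the
   same comparison in CCZmode, and the target's cc_modes_compatible
   hook reports a common mode for the two, the duplicate assignment is
   deleted and the uses of register 17 are rewritten to the common
   mode.  */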
7298 
7299 static machine_mode
7300 cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
7301 	      bool can_change_mode)
7302 {
7303   bool found_equiv;
7304   machine_mode mode;
7305   unsigned int insn_count;
7306   edge e;
7307   rtx_insn *insns[2];
7308   machine_mode modes[2];
7309   rtx_insn *last_insns[2];
7310   unsigned int i;
7311   rtx newreg;
7312   edge_iterator ei;
7313 
7314   /* We expect to have two successors.  Look at both before picking
7315      the final mode for the comparison.  If we have more successors
7316      (i.e., some sort of table jump, although that seems unlikely),
7317      then we require all beyond the first two to use the same
7318      mode.  */
7319 
7320   found_equiv = false;
7321   mode = GET_MODE (cc_src);
7322   insn_count = 0;
7323   FOR_EACH_EDGE (e, ei, bb->succs)
7324     {
7325       rtx_insn *insn;
7326       rtx_insn *end;
7327 
7328       if (e->flags & EDGE_COMPLEX)
7329 	continue;
7330 
7331       if (EDGE_COUNT (e->dest->preds) != 1
7332 	  || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
7333 	  /* Avoid endless recursion on unreachable blocks.  */
7334 	  || e->dest == orig_bb)
7335 	continue;
7336 
7337       end = NEXT_INSN (BB_END (e->dest));
7338       for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7339 	{
7340 	  rtx set;
7341 
7342 	  if (! INSN_P (insn))
7343 	    continue;
7344 
7345 	  /* If CC_SRC is modified, we have to stop looking for
7346 	     something which uses it.  */
7347 	  if (modified_in_p (cc_src, insn))
7348 	    break;
7349 
7350 	  /* Check whether INSN sets CC_REG to CC_SRC.  */
7351 	  set = single_set (insn);
7352 	  if (set
7353 	      && REG_P (SET_DEST (set))
7354 	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7355 	    {
7356 	      bool found;
7357 	      machine_mode set_mode;
7358 	      machine_mode comp_mode;
7359 
7360 	      found = false;
7361 	      set_mode = GET_MODE (SET_SRC (set));
7362 	      comp_mode = set_mode;
7363 	      if (rtx_equal_p (cc_src, SET_SRC (set)))
7364 		found = true;
7365 	      else if (GET_CODE (cc_src) == COMPARE
7366 		       && GET_CODE (SET_SRC (set)) == COMPARE
7367 		       && mode != set_mode
7368 		       && rtx_equal_p (XEXP (cc_src, 0),
7369 				       XEXP (SET_SRC (set), 0))
7370 		       && rtx_equal_p (XEXP (cc_src, 1),
7371 				       XEXP (SET_SRC (set), 1)))
7372 
7373 		{
7374 		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7375 		  if (comp_mode != VOIDmode
7376 		      && (can_change_mode || comp_mode == mode))
7377 		    found = true;
7378 		}
7379 
7380 	      if (found)
7381 		{
7382 		  found_equiv = true;
7383 		  if (insn_count < ARRAY_SIZE (insns))
7384 		    {
7385 		      insns[insn_count] = insn;
7386 		      modes[insn_count] = set_mode;
7387 		      last_insns[insn_count] = end;
7388 		      ++insn_count;
7389 
7390 		      if (mode != comp_mode)
7391 			{
7392 			  gcc_assert (can_change_mode);
7393 			  mode = comp_mode;
7394 
7395 			  /* The modified insn will be re-recognized later.  */
7396 			  PUT_MODE (cc_src, mode);
7397 			}
7398 		    }
7399 		  else
7400 		    {
7401 		      if (set_mode != mode)
7402 			{
7403 			  /* We found a matching expression in the
7404 			     wrong mode, but we don't have room to
7405 			     store it in the array.  Punt.  This case
7406 			     should be rare.  */
7407 			  break;
7408 			}
7409 		      /* INSN sets CC_REG to a value equal to CC_SRC
7410 			 with the right mode.  We can simply delete
7411 			 it.  */
7412 		      delete_insn (insn);
7413 		    }
7414 
7415 		  /* We found an instruction to delete.  Keep looking,
7416 		     in the hopes of finding a three-way jump.  */
7417 		  continue;
7418 		}
7419 
7420 	      /* We found an instruction which sets the condition
7421 		 code, so don't look any farther.  */
7422 	      break;
7423 	    }
7424 
7425 	  /* If INSN sets CC_REG in some other way, don't look any
7426 	     farther.  */
7427 	  if (reg_set_p (cc_reg, insn))
7428 	    break;
7429 	}
7430 
7431       /* If we fell off the bottom of the block, we can keep looking
7432 	 through successors.  We pass CAN_CHANGE_MODE as false because
7433 	 we aren't prepared to handle compatibility between the
7434 	 further blocks and this block.  */
7435       if (insn == end)
7436 	{
7437 	  machine_mode submode;
7438 
7439 	  submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
7440 	  if (submode != VOIDmode)
7441 	    {
7442 	      gcc_assert (submode == mode);
7443 	      found_equiv = true;
7444 	      can_change_mode = false;
7445 	    }
7446 	}
7447     }
7448 
7449   if (! found_equiv)
7450     return VOIDmode;
7451 
7452   /* Now INSN_COUNT is the number of instructions we found which set
7453      CC_REG to a value equivalent to CC_SRC.  The instructions are in
7454      INSNS.  The modes used by those instructions are in MODES.  */
7455 
7456   newreg = NULL_RTX;
7457   for (i = 0; i < insn_count; ++i)
7458     {
7459       if (modes[i] != mode)
7460 	{
7461 	  /* We need to change the mode of CC_REG in INSNS[i] and
7462 	     subsequent instructions.  */
7463 	  if (! newreg)
7464 	    {
7465 	      if (GET_MODE (cc_reg) == mode)
7466 		newreg = cc_reg;
7467 	      else
7468 		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7469 	    }
7470 	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7471 				    newreg);
7472 	}
7473 
7474       cse_cfg_altered |= delete_insn_and_edges (insns[i]);
7475     }
7476 
7477   return mode;
7478 }
7479 
7480 /* If we have a fixed condition code register (or two), walk through
7481    the instructions and try to eliminate duplicate assignments.  */
7482 
7483 static void
7484 cse_condition_code_reg (void)
7485 {
7486   unsigned int cc_regno_1;
7487   unsigned int cc_regno_2;
7488   rtx cc_reg_1;
7489   rtx cc_reg_2;
7490   basic_block bb;
7491 
7492   if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7493     return;
7494 
7495   cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7496   if (cc_regno_2 != INVALID_REGNUM)
7497     cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7498   else
7499     cc_reg_2 = NULL_RTX;
7500 
7501   FOR_EACH_BB_FN (bb, cfun)
7502     {
7503       rtx_insn *last_insn;
7504       rtx cc_reg;
7505       rtx_insn *insn;
7506       rtx_insn *cc_src_insn;
7507       rtx cc_src;
7508       machine_mode mode;
7509       machine_mode orig_mode;
7510 
7511       /* Look for blocks which end with a conditional jump based on a
7512 	 condition code register.  Then look for the instruction which
7513 	 sets the condition code register.  Then look through the
7514 	 successor blocks for instructions which set the condition
7515 	 code register to the same value.  There are other possible
7516 	 uses of the condition code register, but these are by far the
7517 	 most common and the ones which we are most likely to be able
7518 	 to optimize.  */
7519 
7520       last_insn = BB_END (bb);
7521       if (!JUMP_P (last_insn))
7522 	continue;
7523 
7524       if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7525 	cc_reg = cc_reg_1;
7526       else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7527 	cc_reg = cc_reg_2;
7528       else
7529 	continue;
7530 
7531       cc_src_insn = NULL;
7532       cc_src = NULL_RTX;
7533       for (insn = PREV_INSN (last_insn);
7534 	   insn && insn != PREV_INSN (BB_HEAD (bb));
7535 	   insn = PREV_INSN (insn))
7536 	{
7537 	  rtx set;
7538 
7539 	  if (! INSN_P (insn))
7540 	    continue;
7541 	  set = single_set (insn);
7542 	  if (set
7543 	      && REG_P (SET_DEST (set))
7544 	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7545 	    {
7546 	      cc_src_insn = insn;
7547 	      cc_src = SET_SRC (set);
7548 	      break;
7549 	    }
7550 	  else if (reg_set_p (cc_reg, insn))
7551 	    break;
7552 	}
7553 
7554       if (! cc_src_insn)
7555 	continue;
7556 
7557       if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7558 	continue;
7559 
7560       /* Now CC_REG is a condition code register used for a
7561 	 conditional jump at the end of the block, and CC_SRC, in
7562 	 CC_SRC_INSN, is the value to which that condition code
7563 	 register is set, and CC_SRC is still meaningful at the end of
7564 	 the basic block.  */
7565 
7566       orig_mode = GET_MODE (cc_src);
7567       mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
7568       if (mode != VOIDmode)
7569 	{
7570 	  gcc_assert (mode == GET_MODE (cc_src));
7571 	  if (mode != orig_mode)
7572 	    {
7573 	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7574 
7575 	      cse_change_cc_mode_insn (cc_src_insn, newreg);
7576 
7577 	      /* Do the same in the following insns that use the
7578 		 current value of CC_REG within BB.  */
7579 	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7580 					NEXT_INSN (last_insn),
7581 					newreg);
7582 	    }
7583 	}
7584     }
7585 }
7586 
7587 
7588 /* Perform common subexpression elimination.  Nonzero value from
7589    `cse_main' means that jumps were simplified and some code may now
7590    be unreachable, so do jump optimization again.  */
7591 static unsigned int
7592 rest_of_handle_cse (void)
7593 {
7594   int tem;
7595 
7596   if (dump_file)
7597     dump_flow_info (dump_file, dump_flags);
7598 
7599   tem = cse_main (get_insns (), max_reg_num ());
7600 
7601   /* If we are not running more CSE passes, then we are no longer
7602      expecting CSE to be run.  But always rerun it in a cheap mode.  */
7603   cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7604 
7605   if (tem == 2)
7606     {
7607       timevar_push (TV_JUMP);
7608       rebuild_jump_labels (get_insns ());
7609       cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
7610       timevar_pop (TV_JUMP);
7611     }
7612   else if (tem == 1 || optimize > 1)
7613     cse_cfg_altered |= cleanup_cfg (0);
7614 
7615   return 0;
7616 }
7617 
7618 namespace {
7619 
7620 const pass_data pass_data_cse =
7621 {
7622   RTL_PASS, /* type */
7623   "cse1", /* name */
7624   OPTGROUP_NONE, /* optinfo_flags */
7625   TV_CSE, /* tv_id */
7626   0, /* properties_required */
7627   0, /* properties_provided */
7628   0, /* properties_destroyed */
7629   0, /* todo_flags_start */
7630   TODO_df_finish, /* todo_flags_finish */
7631 };
7632 
7633 class pass_cse : public rtl_opt_pass
7634 {
7635 public:
7636   pass_cse (gcc::context *ctxt)
7637     : rtl_opt_pass (pass_data_cse, ctxt)
7638   {}
7639 
7640   /* opt_pass methods: */
7641   virtual bool gate (function *) { return optimize > 0; }
7642   virtual unsigned int execute (function *) { return rest_of_handle_cse (); }
7643 
7644 }; // class pass_cse
7645 
7646 } // anon namespace
7647 
7648 rtl_opt_pass *
7649 make_pass_cse (gcc::context *ctxt)
7650 {
7651   return new pass_cse (ctxt);
7652 }
7653 
7654 
7655 /* Run second CSE pass after loop optimizations.  */
7656 static unsigned int
7657 rest_of_handle_cse2 (void)
7658 {
7659   int tem;
7660 
7661   if (dump_file)
7662     dump_flow_info (dump_file, dump_flags);
7663 
7664   tem = cse_main (get_insns (), max_reg_num ());
7665 
7666   /* Run a pass to eliminate duplicated assignments to condition code
7667      registers.  We have to run this after bypass_jumps, because it
7668      makes it harder for that pass to determine whether a jump can be
7669      bypassed safely.  */
7670   cse_condition_code_reg ();
7671 
7672   delete_trivially_dead_insns (get_insns (), max_reg_num ());
7673 
7674   if (tem == 2)
7675     {
7676       timevar_push (TV_JUMP);
7677       rebuild_jump_labels (get_insns ());
7678       cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
7679       timevar_pop (TV_JUMP);
7680     }
7681   else if (tem == 1 || cse_cfg_altered)
7682     cse_cfg_altered |= cleanup_cfg (0);
7683 
7684   cse_not_expected = 1;
7685   return 0;
7686 }
7687 
7688 
7689 namespace {
7690 
7691 const pass_data pass_data_cse2 =
7692 {
7693   RTL_PASS, /* type */
7694   "cse2", /* name */
7695   OPTGROUP_NONE, /* optinfo_flags */
7696   TV_CSE2, /* tv_id */
7697   0, /* properties_required */
7698   0, /* properties_provided */
7699   0, /* properties_destroyed */
7700   0, /* todo_flags_start */
7701   TODO_df_finish, /* todo_flags_finish */
7702 };
7703 
7704 class pass_cse2 : public rtl_opt_pass
7705 {
7706 public:
7707   pass_cse2 (gcc::context *ctxt)
7708     : rtl_opt_pass (pass_data_cse2, ctxt)
7709   {}
7710 
7711   /* opt_pass methods: */
7712   virtual bool gate (function *)
7713     {
7714       return optimize > 0 && flag_rerun_cse_after_loop;
7715     }
7716 
7717   virtual unsigned int execute (function *) { return rest_of_handle_cse2 (); }
7718 
7719 }; // class pass_cse2
7720 
7721 } // anon namespace
7722 
7723 rtl_opt_pass *
7724 make_pass_cse2 (gcc::context *ctxt)
7725 {
7726   return new pass_cse2 (ctxt);
7727 }
7728 
7729 /* Run a local CSE pass after global optimizations.  */
7730 static unsigned int
7731 rest_of_handle_cse_after_global_opts (void)
7732 {
7733   int save_cfj;
7734   int tem;
7735 
7736   /* We only want to do local CSE, so don't follow jumps.  */
7737   save_cfj = flag_cse_follow_jumps;
7738   flag_cse_follow_jumps = 0;
7739 
7740   rebuild_jump_labels (get_insns ());
7741   tem = cse_main (get_insns (), max_reg_num ());
7742   cse_cfg_altered |= purge_all_dead_edges ();
7743   delete_trivially_dead_insns (get_insns (), max_reg_num ());
7744 
7745   cse_not_expected = !flag_rerun_cse_after_loop;
7746 
7747   /* If cse altered any jumps, rerun jump opts to clean things up.  */
7748   if (tem == 2)
7749     {
7750       timevar_push (TV_JUMP);
7751       rebuild_jump_labels (get_insns ());
7752       cse_cfg_altered |= cleanup_cfg (CLEANUP_CFG_CHANGED);
7753       timevar_pop (TV_JUMP);
7754     }
7755   else if (tem == 1 || cse_cfg_altered)
7756     cse_cfg_altered |= cleanup_cfg (0);
7757 
7758   flag_cse_follow_jumps = save_cfj;
7759   return 0;
7760 }
7761 
7762 namespace {
7763 
7764 const pass_data pass_data_cse_after_global_opts =
7765 {
7766   RTL_PASS, /* type */
7767   "cse_local", /* name */
7768   OPTGROUP_NONE, /* optinfo_flags */
7769   TV_CSE, /* tv_id */
7770   0, /* properties_required */
7771   0, /* properties_provided */
7772   0, /* properties_destroyed */
7773   0, /* todo_flags_start */
7774   TODO_df_finish, /* todo_flags_finish */
7775 };
7776 
7777 class pass_cse_after_global_opts : public rtl_opt_pass
7778 {
7779 public:
7780   pass_cse_after_global_opts (gcc::context *ctxt)
7781     : rtl_opt_pass (pass_data_cse_after_global_opts, ctxt)
7782   {}
7783 
7784   /* opt_pass methods: */
7785   virtual bool gate (function *)
7786     {
7787       return optimize > 0 && flag_rerun_cse_after_global_opts;
7788     }
7789 
7790   virtual unsigned int execute (function *)
7791     {
7792       return rest_of_handle_cse_after_global_opts ();
7793     }
7794 
7795 }; // class pass_cse_after_global_opts
7796 
7797 } // anon namespace
7798 
7799 rtl_opt_pass *
7800 make_pass_cse_after_global_opts (gcc::context *ctxt)
7801 {
7802   return new pass_cse_after_global_opts (ctxt);
7803 }
7804