/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "basic-block.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "timevar.h"
#include "except.h"
#include "target.h"
#include "params.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"
#include "df.h"
#include "dbgcnt.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.
   When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `REG_QTY (N)' records what quantity register N is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, `REG_QTY (N)' will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, then REG expressions
   with the qty_table `mode' are in the hash table for both registers,
   and they are in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers is not the same mode as those expressions.
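
   As a small illustration (a hypothetical scan, not output of the pass):
   after processing

     (set (reg 101) (reg 100))

   register 101 receives the same quantity number as register 100, so both
   sit on one qty_table chain and either may stand in for the other.  A
   later load such as

     (set (reg 101) (mem (reg 102)))

   assigns register 101 a value in some other way, so it is given a fresh
   quantity number and is removed from the chain of register 100.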

Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   `REG_TICK' and `REG_IN_TABLE', accessors for members of
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
   incremented whenever a value is stored in register i.
   REG_IN_TABLE (i) holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value REG_TICK (i)
   had when the references were entered.  If we want to enter a
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
   remove old references.  Until we want to enter a new entry, the
   mere fact that the two vectors don't match makes the entries be
   ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, `REG_TICK' and
   `REG_IN_TABLE' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
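
/* A worked example of the REG_TICK / REG_IN_TABLE bookkeeping described
   above (the numbers are illustrative only): suppose an expression using
   register 100 is entered in the table while REG_TICK (100) == 2, so
   REG_IN_TABLE (100) is set to 2.  A later store into register 100 bumps
   REG_TICK (100) to 3.  Nothing is scanned at that point, but lookups of
   the stale entries now fail because the two counters disagree.  Only when
   a new expression mentioning register 100 is about to be entered do we
   pay for removing the old references (see mention_regs and
   remove_invalid_refs below).  */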

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;
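
/* As an illustrative sketch of the comparison members (not a transcript of
   the pass): once the branch of

     (set (pc) (if_then_else (lt (reg 100) (const_int 10))
                             (label_ref ...) (pc)))

   is known to have been taken, the qty_table entry for register 100's
   quantity can record comparison_code == LT, comparison_const ==
   (const_int 10) and comparison_qty == -1, so an equivalent later test
   can be folded.  record_jump_equiv and record_jump_cond do the actual
   recording.  */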

/* Structure used to pass arguments via for_each_rtx to function
   cse_change_cc_mode.  */
struct change_cc_mode_args
{
  rtx insn;
  rtx newreg;
};

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the current and last value assigned to CC0.
   If it should happen to be a constant, it is stored in preference
   to the actual assigned value.  In case it is a constant, we store
   the mode in which the constant should be interpreted.  */

static rtx this_insn_cc0, prev_insn_cc0;
static enum machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
#endif

/* Insn being scanned.  */

static rtx this_insn;
static bool optimize_this_for_speed_p;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};
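
/* A note on the `timestamp' field above: entries are initialized lazily.
   Invalidating every register at the start of an extended basic block only
   increments cse_reg_info_timestamp (see new_basic_block below), and
   get_cse_reg_info then re-initializes any entry whose timestamp no longer
   matches, roughly

     if (p->timestamp != cse_reg_info_timestamp)
       get_cse_reg_info_1 (regno);

   so a whole-table reset costs a single increment rather than a pass over
   the table.  */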

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;

/* The timestamp at the beginning of the current run of
   cse_extended_basic_block.  We increment this variable at the beginning of
   the current run of cse_extended_basic_block.  The timestamp field of a
   cse_reg_info entry matches the value of this variable if and only
   if the entry has been initialized during the current run of
   cse_extended_basic_block.  */
static unsigned int cse_reg_info_timestamp;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* True if CSE has altered the CFG.  */
static bool cse_cfg_altered;

/* True if CSE has altered conditional jump insns in such a way
   that jump optimization should be redone.  */
static bool cse_jumps_altered;

/* True if we have put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL_OPERAND; if so, we must rerun jump after CSE
   to put in the note.  */
static bool recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};
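
/* An illustrative example of the chains above (not output of the pass):
   if (reg 100), (reg 105) and (plus (reg 97) (const_int 8)) are currently
   known to hold the same value, they sit on one `next_same_value' chain,
   ordered by increasing cost, and each points at the head of that chain
   through `first_same_value'.  Independently, a symbolic constant such as
   (const (plus (symbol_ref "x") (const_int 8))) and its related expression
   (symbol_ref "x") are linked through `related_value', which is what lets
   use_related_value find a register already holding a nearby constant
   address.  */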

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT 5
#define HASH_SIZE (1 << HASH_SHIFT)
#define HASH_MASK (HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M) \
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
  : canon_hash (X, M)) & HASH_MASK)

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M) \
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
  : safe_hash (X, M)) & HASH_MASK)

/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  (REGNO_PTR_FRAME_P(N) \
   || (HARD_REGISTER_NUM_P (N) \
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET, 1))
#define COST_IN(X, OUTER, OPNO) (REG_P (X) ? 0 : notreg_cost (X, OUTER, OPNO))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
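
/* The hash macros above are normally paired with the table routines
   declared below; a typical use elsewhere in this file looks roughly like

     unsigned hash = HASH (x, mode);
     struct table_elt *elt = lookup (x, hash, mode);

   where MODE is GET_MODE (x) except for integer constants, whose mode is
   taken from context as described at the top of the file.  */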

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register N represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)

/* Compare table_elt X and Y and return true iff X is cheaper than Y.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* Trace a path through the CFG.  */

struct branch_path
{
  /* The basic block for this path entry.  */
  basic_block bb;
};

/* This data describes a block that will be processed by
   cse_extended_basic_block.  */

struct cse_basic_block_data
{
  /* Total number of SETs in block.  */
  int nsets;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current path, indicating which basic_blocks will be processed.  */
  struct branch_path *path;
};


/* Pointers to the live in/live out bitmaps for the boundaries of the
   current EBB.  */
static bitmap cse_ebb_live_in, cse_ebb_live_out;

/* A simple bitmap to track which basic blocks have been visited
   already as part of an already processed extended basic block.  */
static sbitmap cse_visited_basic_blocks;

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code, int);
static int approx_reg_cost_1 (rtx *, void *);
static int approx_reg_cost (rtx);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, enum machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static void remove_pseudo_from_table (rtx, unsigned);
static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
					    enum machine_mode, int, int);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 enum machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, enum machine_mode);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
					enum machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, enum machine_mode);
static inline unsigned safe_hash (rtx, enum machine_mode);
static inline unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   enum machine_mode *,
					   enum machine_mode *);
static rtx fold_rtx (rtx, rtx);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx, bool);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx);
static void cse_prescan_path (struct cse_basic_block_data *);
static void invalidate_from_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx, bool *);
static void cse_extended_basic_block (struct cse_basic_block_data *);
static void count_reg_usage (rtx, int *, rtx, int);
static int check_for_label_ref (rtx *, void *);
extern void dump_class (struct table_elt*);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
static int check_dependence (rtx *, void *);

static void flush_hash_table (void);
static bool insn_live_p (rtx, int *);
static bool set_live_p (rtx, rtx, int *);
static int cse_change_cc_mode (rtx *, void *);
static void cse_change_cc_mode_insn (rtx, rtx);
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
static enum machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
				       bool);


#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return true;
      return false;

    case PLUS:
      if (!CONST_INT_P (XEXP (x, 1)))
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}
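
/* For example, fixed_base_plus_p accepts (reg fp), (plus (reg fp)
   (const_int 8)) and, through the recursion on XEXP (x, 0), nested forms
   such as (plus (plus (reg fp) (const_int 8)) (const_int 4)); it rejects
   any sum whose second operand is not a CONST_INT.  */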

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (rtx *xp, void *data)
{
  rtx x = *xp;
  int *cost_p = (int *) data;

  if (x && REG_P (x))
    {
      unsigned int regno = REGNO (x);

      if (! CHEAP_REGNO (regno))
	{
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
		return 1;
	      *cost_p += 2;
	    }
	  else
	    *cost_p += 1;
	}
    }

  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer, int opno)
{
  return ((GET_CODE (x) == SUBREG
	   && REG_P (SUBREG_REG (x))
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (x),
					     GET_MODE (SUBREG_REG (x))))
	  ? 0
	  : rtx_cost (x, outer, opno, optimize_this_for_speed_p) * 2);
}


/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
	{
	  /* Compute a new size that is a power of 2 and no smaller
	     than the larger of NREGS and 64.  */
	  new_size = (cse_reg_info_table_size
		      ? cse_reg_info_table_size : 64);

	  while (new_size < nregs)
	    new_size *= 2;
	}
      else
	{
	  /* If we need a big table, allocate just enough to hold
	     NREGS registers.  */
	  new_size = nregs;
	}

      /* Reallocate the table with NEW_SIZE entries.  */
      free (cse_reg_info_table);
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
	 will all be considered out of date.  We do not touch those
	 entries beyond the first NREGS entries to be nice to the
	 virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
	cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}

/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}

/* Find a cse_reg_info entry for REGNO.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old_reg);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old_reg));

  REG_QTY (new_reg) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.
     Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
      && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
	  || (new_reg >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || (bitmap_bit_p (cse_ebb_live_out, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_out, firstr))
		  || (bitmap_bit_p (cse_ebb_live_in, new_reg)
		      && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
    {
      reg_eqv_table[firstr].prev = new_reg;
      reg_eqv_table[new_reg].next = firstr;
      reg_eqv_table[new_reg].prev = -1;
      ent->first_reg = new_reg;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new_reg >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
      else
	qty_table[q].last_reg = new_reg;
      reg_eqv_table[lastr].next = new_reg;
      reg_eqv_table[new_reg].prev = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
REGNO_QTY_VALID_P (reg)) 995*e4b17023SJohn Marino return; 996*e4b17023SJohn Marino 997*e4b17023SJohn Marino ent = &qty_table[q]; 998*e4b17023SJohn Marino 999*e4b17023SJohn Marino p = reg_eqv_table[reg].prev; 1000*e4b17023SJohn Marino n = reg_eqv_table[reg].next; 1001*e4b17023SJohn Marino 1002*e4b17023SJohn Marino if (n != -1) 1003*e4b17023SJohn Marino reg_eqv_table[n].prev = p; 1004*e4b17023SJohn Marino else 1005*e4b17023SJohn Marino ent->last_reg = p; 1006*e4b17023SJohn Marino if (p != -1) 1007*e4b17023SJohn Marino reg_eqv_table[p].next = n; 1008*e4b17023SJohn Marino else 1009*e4b17023SJohn Marino ent->first_reg = n; 1010*e4b17023SJohn Marino 1011*e4b17023SJohn Marino REG_QTY (reg) = -reg - 1; 1012*e4b17023SJohn Marino } 1013*e4b17023SJohn Marino 1014*e4b17023SJohn Marino /* Remove any invalid expressions from the hash table 1015*e4b17023SJohn Marino that refer to any of the registers contained in expression X. 1016*e4b17023SJohn Marino 1017*e4b17023SJohn Marino Make sure that newly inserted references to those registers 1018*e4b17023SJohn Marino as subexpressions will be considered valid. 1019*e4b17023SJohn Marino 1020*e4b17023SJohn Marino mention_regs is not called when a register itself 1021*e4b17023SJohn Marino is being stored in the table. 1022*e4b17023SJohn Marino 1023*e4b17023SJohn Marino Return 1 if we have done something that may have changed the hash code 1024*e4b17023SJohn Marino of X. */ 1025*e4b17023SJohn Marino 1026*e4b17023SJohn Marino static int 1027*e4b17023SJohn Marino mention_regs (rtx x) 1028*e4b17023SJohn Marino { 1029*e4b17023SJohn Marino enum rtx_code code; 1030*e4b17023SJohn Marino int i, j; 1031*e4b17023SJohn Marino const char *fmt; 1032*e4b17023SJohn Marino int changed = 0; 1033*e4b17023SJohn Marino 1034*e4b17023SJohn Marino if (x == 0) 1035*e4b17023SJohn Marino return 0; 1036*e4b17023SJohn Marino 1037*e4b17023SJohn Marino code = GET_CODE (x); 1038*e4b17023SJohn Marino if (code == REG) 1039*e4b17023SJohn Marino { 1040*e4b17023SJohn Marino unsigned int regno = REGNO (x); 1041*e4b17023SJohn Marino unsigned int endregno = END_REGNO (x); 1042*e4b17023SJohn Marino unsigned int i; 1043*e4b17023SJohn Marino 1044*e4b17023SJohn Marino for (i = regno; i < endregno; i++) 1045*e4b17023SJohn Marino { 1046*e4b17023SJohn Marino if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i)) 1047*e4b17023SJohn Marino remove_invalid_refs (i); 1048*e4b17023SJohn Marino 1049*e4b17023SJohn Marino REG_IN_TABLE (i) = REG_TICK (i); 1050*e4b17023SJohn Marino SUBREG_TICKED (i) = -1; 1051*e4b17023SJohn Marino } 1052*e4b17023SJohn Marino 1053*e4b17023SJohn Marino return 0; 1054*e4b17023SJohn Marino } 1055*e4b17023SJohn Marino 1056*e4b17023SJohn Marino /* If this is a SUBREG, we don't want to discard other SUBREGs of the same 1057*e4b17023SJohn Marino pseudo if they don't use overlapping words. We handle only pseudos 1058*e4b17023SJohn Marino here for simplicity. */ 1059*e4b17023SJohn Marino if (code == SUBREG && REG_P (SUBREG_REG (x)) 1060*e4b17023SJohn Marino && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER) 1061*e4b17023SJohn Marino { 1062*e4b17023SJohn Marino unsigned int i = REGNO (SUBREG_REG (x)); 1063*e4b17023SJohn Marino 1064*e4b17023SJohn Marino if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i)) 1065*e4b17023SJohn Marino { 1066*e4b17023SJohn Marino /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and 1067*e4b17023SJohn Marino the last store to this register really stored into this 1068*e4b17023SJohn Marino subreg, then remove the memory of this subreg. 
1069*e4b17023SJohn Marino Otherwise, remove any memory of the entire register and 1070*e4b17023SJohn Marino all its subregs from the table. */ 1071*e4b17023SJohn Marino if (REG_TICK (i) - REG_IN_TABLE (i) > 1 1072*e4b17023SJohn Marino || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x))) 1073*e4b17023SJohn Marino remove_invalid_refs (i); 1074*e4b17023SJohn Marino else 1075*e4b17023SJohn Marino remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x)); 1076*e4b17023SJohn Marino } 1077*e4b17023SJohn Marino 1078*e4b17023SJohn Marino REG_IN_TABLE (i) = REG_TICK (i); 1079*e4b17023SJohn Marino SUBREG_TICKED (i) = REGNO (SUBREG_REG (x)); 1080*e4b17023SJohn Marino return 0; 1081*e4b17023SJohn Marino } 1082*e4b17023SJohn Marino 1083*e4b17023SJohn Marino /* If X is a comparison or a COMPARE and either operand is a register 1084*e4b17023SJohn Marino that does not have a quantity, give it one. This is so that a later 1085*e4b17023SJohn Marino call to record_jump_equiv won't cause X to be assigned a different 1086*e4b17023SJohn Marino hash code and not found in the table after that call. 1087*e4b17023SJohn Marino 1088*e4b17023SJohn Marino It is not necessary to do this here, since rehash_using_reg can 1089*e4b17023SJohn Marino fix up the table later, but doing this here eliminates the need to 1090*e4b17023SJohn Marino call that expensive function in the most common case where the only 1091*e4b17023SJohn Marino use of the register is in the comparison. */ 1092*e4b17023SJohn Marino 1093*e4b17023SJohn Marino if (code == COMPARE || COMPARISON_P (x)) 1094*e4b17023SJohn Marino { 1095*e4b17023SJohn Marino if (REG_P (XEXP (x, 0)) 1096*e4b17023SJohn Marino && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))) 1097*e4b17023SJohn Marino if (insert_regs (XEXP (x, 0), NULL, 0)) 1098*e4b17023SJohn Marino { 1099*e4b17023SJohn Marino rehash_using_reg (XEXP (x, 0)); 1100*e4b17023SJohn Marino changed = 1; 1101*e4b17023SJohn Marino } 1102*e4b17023SJohn Marino 1103*e4b17023SJohn Marino if (REG_P (XEXP (x, 1)) 1104*e4b17023SJohn Marino && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))) 1105*e4b17023SJohn Marino if (insert_regs (XEXP (x, 1), NULL, 0)) 1106*e4b17023SJohn Marino { 1107*e4b17023SJohn Marino rehash_using_reg (XEXP (x, 1)); 1108*e4b17023SJohn Marino changed = 1; 1109*e4b17023SJohn Marino } 1110*e4b17023SJohn Marino } 1111*e4b17023SJohn Marino 1112*e4b17023SJohn Marino fmt = GET_RTX_FORMAT (code); 1113*e4b17023SJohn Marino for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 1114*e4b17023SJohn Marino if (fmt[i] == 'e') 1115*e4b17023SJohn Marino changed |= mention_regs (XEXP (x, i)); 1116*e4b17023SJohn Marino else if (fmt[i] == 'E') 1117*e4b17023SJohn Marino for (j = 0; j < XVECLEN (x, i); j++) 1118*e4b17023SJohn Marino changed |= mention_regs (XVECEXP (x, i, j)); 1119*e4b17023SJohn Marino 1120*e4b17023SJohn Marino return changed; 1121*e4b17023SJohn Marino } 1122*e4b17023SJohn Marino 1123*e4b17023SJohn Marino /* Update the register quantities for inserting X into the hash table 1124*e4b17023SJohn Marino with a value equivalent to CLASSP. 1125*e4b17023SJohn Marino (If the class does not contain a REG, it is irrelevant.) 1126*e4b17023SJohn Marino If MODIFIED is nonzero, X is a destination; it is being modified. 1127*e4b17023SJohn Marino Note that delete_reg_equiv should be called on a register 1128*e4b17023SJohn Marino before insert_regs is done on that register with MODIFIED != 0. 
1129*e4b17023SJohn Marino 1130*e4b17023SJohn Marino Nonzero value means that elements of reg_qty have changed 1131*e4b17023SJohn Marino so X's hash code may be different. */ 1132*e4b17023SJohn Marino 1133*e4b17023SJohn Marino static int 1134*e4b17023SJohn Marino insert_regs (rtx x, struct table_elt *classp, int modified) 1135*e4b17023SJohn Marino { 1136*e4b17023SJohn Marino if (REG_P (x)) 1137*e4b17023SJohn Marino { 1138*e4b17023SJohn Marino unsigned int regno = REGNO (x); 1139*e4b17023SJohn Marino int qty_valid; 1140*e4b17023SJohn Marino 1141*e4b17023SJohn Marino /* If REGNO is in the equivalence table already but is of the 1142*e4b17023SJohn Marino wrong mode for that equivalence, don't do anything here. */ 1143*e4b17023SJohn Marino 1144*e4b17023SJohn Marino qty_valid = REGNO_QTY_VALID_P (regno); 1145*e4b17023SJohn Marino if (qty_valid) 1146*e4b17023SJohn Marino { 1147*e4b17023SJohn Marino struct qty_table_elem *ent = &qty_table[REG_QTY (regno)]; 1148*e4b17023SJohn Marino 1149*e4b17023SJohn Marino if (ent->mode != GET_MODE (x)) 1150*e4b17023SJohn Marino return 0; 1151*e4b17023SJohn Marino } 1152*e4b17023SJohn Marino 1153*e4b17023SJohn Marino if (modified || ! qty_valid) 1154*e4b17023SJohn Marino { 1155*e4b17023SJohn Marino if (classp) 1156*e4b17023SJohn Marino for (classp = classp->first_same_value; 1157*e4b17023SJohn Marino classp != 0; 1158*e4b17023SJohn Marino classp = classp->next_same_value) 1159*e4b17023SJohn Marino if (REG_P (classp->exp) 1160*e4b17023SJohn Marino && GET_MODE (classp->exp) == GET_MODE (x)) 1161*e4b17023SJohn Marino { 1162*e4b17023SJohn Marino unsigned c_regno = REGNO (classp->exp); 1163*e4b17023SJohn Marino 1164*e4b17023SJohn Marino gcc_assert (REGNO_QTY_VALID_P (c_regno)); 1165*e4b17023SJohn Marino 1166*e4b17023SJohn Marino /* Suppose that 5 is hard reg and 100 and 101 are 1167*e4b17023SJohn Marino pseudos. Consider 1168*e4b17023SJohn Marino 1169*e4b17023SJohn Marino (set (reg:si 100) (reg:si 5)) 1170*e4b17023SJohn Marino (set (reg:si 5) (reg:si 100)) 1171*e4b17023SJohn Marino (set (reg:di 101) (reg:di 5)) 1172*e4b17023SJohn Marino 1173*e4b17023SJohn Marino We would now set REG_QTY (101) = REG_QTY (5), but the 1174*e4b17023SJohn Marino entry for 5 is in SImode. When we use this later in 1175*e4b17023SJohn Marino copy propagation, we get the register in wrong mode. */ 1176*e4b17023SJohn Marino if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x)) 1177*e4b17023SJohn Marino continue; 1178*e4b17023SJohn Marino 1179*e4b17023SJohn Marino make_regs_eqv (regno, c_regno); 1180*e4b17023SJohn Marino return 1; 1181*e4b17023SJohn Marino } 1182*e4b17023SJohn Marino 1183*e4b17023SJohn Marino /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger 1184*e4b17023SJohn Marino than REG_IN_TABLE to find out if there was only a single preceding 1185*e4b17023SJohn Marino invalidation - for the SUBREG - or another one, which would be 1186*e4b17023SJohn Marino for the full register. However, if we find here that REG_TICK 1187*e4b17023SJohn Marino indicates that the register is invalid, it means that it has 1188*e4b17023SJohn Marino been invalidated in a separate operation. The SUBREG might be used 1189*e4b17023SJohn Marino now (then this is a recursive call), or we might use the full REG 1190*e4b17023SJohn Marino now and a SUBREG of it later. So bump up REG_TICK so that 1191*e4b17023SJohn Marino mention_regs will do the right thing. */ 1192*e4b17023SJohn Marino if (! 
modified 1193*e4b17023SJohn Marino && REG_IN_TABLE (regno) >= 0 1194*e4b17023SJohn Marino && REG_TICK (regno) == REG_IN_TABLE (regno) + 1) 1195*e4b17023SJohn Marino REG_TICK (regno)++; 1196*e4b17023SJohn Marino make_new_qty (regno, GET_MODE (x)); 1197*e4b17023SJohn Marino return 1; 1198*e4b17023SJohn Marino } 1199*e4b17023SJohn Marino 1200*e4b17023SJohn Marino return 0; 1201*e4b17023SJohn Marino } 1202*e4b17023SJohn Marino 1203*e4b17023SJohn Marino /* If X is a SUBREG, we will likely be inserting the inner register in the 1204*e4b17023SJohn Marino table. If that register doesn't have an assigned quantity number at 1205*e4b17023SJohn Marino this point but does later, the insertion that we will be doing now will 1206*e4b17023SJohn Marino not be accessible because its hash code will have changed. So assign 1207*e4b17023SJohn Marino a quantity number now. */ 1208*e4b17023SJohn Marino 1209*e4b17023SJohn Marino else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)) 1210*e4b17023SJohn Marino && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x)))) 1211*e4b17023SJohn Marino { 1212*e4b17023SJohn Marino insert_regs (SUBREG_REG (x), NULL, 0); 1213*e4b17023SJohn Marino mention_regs (x); 1214*e4b17023SJohn Marino return 1; 1215*e4b17023SJohn Marino } 1216*e4b17023SJohn Marino else 1217*e4b17023SJohn Marino return mention_regs (x); 1218*e4b17023SJohn Marino } 1219*e4b17023SJohn Marino 1220*e4b17023SJohn Marino 1221*e4b17023SJohn Marino /* Compute upper and lower anchors for CST. Also compute the offset of CST 1222*e4b17023SJohn Marino from these anchors/bases such that *_BASE + *_OFFS = CST. Return false iff 1223*e4b17023SJohn Marino CST is equal to an anchor. */ 1224*e4b17023SJohn Marino 1225*e4b17023SJohn Marino static bool 1226*e4b17023SJohn Marino compute_const_anchors (rtx cst, 1227*e4b17023SJohn Marino HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs, 1228*e4b17023SJohn Marino HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs) 1229*e4b17023SJohn Marino { 1230*e4b17023SJohn Marino HOST_WIDE_INT n = INTVAL (cst); 1231*e4b17023SJohn Marino 1232*e4b17023SJohn Marino *lower_base = n & ~(targetm.const_anchor - 1); 1233*e4b17023SJohn Marino if (*lower_base == n) 1234*e4b17023SJohn Marino return false; 1235*e4b17023SJohn Marino 1236*e4b17023SJohn Marino *upper_base = 1237*e4b17023SJohn Marino (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1); 1238*e4b17023SJohn Marino *upper_offs = n - *upper_base; 1239*e4b17023SJohn Marino *lower_offs = n - *lower_base; 1240*e4b17023SJohn Marino return true; 1241*e4b17023SJohn Marino } 1242*e4b17023SJohn Marino 1243*e4b17023SJohn Marino /* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE. 
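   For illustration (the anchor size is a target knob; 0x8000 is only an
   assumed value): with targetm.const_anchor == 0x8000 and a constant
   0x12345, compute_const_anchors above produces
     lower_base == 0x10000, lower_offs ==  0x2345,
     upper_base == 0x18000, upper_offs == -0x5cbb,
   so a register REG known to hold 0x12345 lets us record
   0x10000 == (REG + -0x2345) and 0x18000 == (REG + 0x5cbb).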
*/ 1244*e4b17023SJohn Marino 1245*e4b17023SJohn Marino static void 1246*e4b17023SJohn Marino insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs, 1247*e4b17023SJohn Marino enum machine_mode mode) 1248*e4b17023SJohn Marino { 1249*e4b17023SJohn Marino struct table_elt *elt; 1250*e4b17023SJohn Marino unsigned hash; 1251*e4b17023SJohn Marino rtx anchor_exp; 1252*e4b17023SJohn Marino rtx exp; 1253*e4b17023SJohn Marino 1254*e4b17023SJohn Marino anchor_exp = GEN_INT (anchor); 1255*e4b17023SJohn Marino hash = HASH (anchor_exp, mode); 1256*e4b17023SJohn Marino elt = lookup (anchor_exp, hash, mode); 1257*e4b17023SJohn Marino if (!elt) 1258*e4b17023SJohn Marino elt = insert (anchor_exp, NULL, hash, mode); 1259*e4b17023SJohn Marino 1260*e4b17023SJohn Marino exp = plus_constant (reg, offs); 1261*e4b17023SJohn Marino /* REG has just been inserted and the hash codes recomputed. */ 1262*e4b17023SJohn Marino mention_regs (exp); 1263*e4b17023SJohn Marino hash = HASH (exp, mode); 1264*e4b17023SJohn Marino 1265*e4b17023SJohn Marino /* Use the cost of the register rather than the whole expression. When 1266*e4b17023SJohn Marino looking up constant anchors we will further offset the corresponding 1267*e4b17023SJohn Marino expression therefore it does not make sense to prefer REGs over 1268*e4b17023SJohn Marino reg-immediate additions. Prefer instead the oldest expression. Also 1269*e4b17023SJohn Marino don't prefer pseudos over hard regs so that we derive constants in 1270*e4b17023SJohn Marino argument registers from other argument registers rather than from the 1271*e4b17023SJohn Marino original pseudo that was used to synthesize the constant. */ 1272*e4b17023SJohn Marino insert_with_costs (exp, elt, hash, mode, COST (reg), 1); 1273*e4b17023SJohn Marino } 1274*e4b17023SJohn Marino 1275*e4b17023SJohn Marino /* The constant CST is equivalent to the register REG. Create 1276*e4b17023SJohn Marino equivalences between the two anchors of CST and the corresponding 1277*e4b17023SJohn Marino register-offset expressions using REG. */ 1278*e4b17023SJohn Marino 1279*e4b17023SJohn Marino static void 1280*e4b17023SJohn Marino insert_const_anchors (rtx reg, rtx cst, enum machine_mode mode) 1281*e4b17023SJohn Marino { 1282*e4b17023SJohn Marino HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs; 1283*e4b17023SJohn Marino 1284*e4b17023SJohn Marino if (!compute_const_anchors (cst, &lower_base, &lower_offs, 1285*e4b17023SJohn Marino &upper_base, &upper_offs)) 1286*e4b17023SJohn Marino return; 1287*e4b17023SJohn Marino 1288*e4b17023SJohn Marino /* Ignore anchors of value 0. Constants accessible from zero are 1289*e4b17023SJohn Marino simple. */ 1290*e4b17023SJohn Marino if (lower_base != 0) 1291*e4b17023SJohn Marino insert_const_anchor (lower_base, reg, -lower_offs, mode); 1292*e4b17023SJohn Marino 1293*e4b17023SJohn Marino if (upper_base != 0) 1294*e4b17023SJohn Marino insert_const_anchor (upper_base, reg, -upper_offs, mode); 1295*e4b17023SJohn Marino } 1296*e4b17023SJohn Marino 1297*e4b17023SJohn Marino /* We need to express ANCHOR_ELT->exp + OFFS. Walk the equivalence list of 1298*e4b17023SJohn Marino ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a 1299*e4b17023SJohn Marino valid expression. Return the cheapest and oldest of such expressions. In 1300*e4b17023SJohn Marino *OLD, return how old the resulting expression is compared to the other 1301*e4b17023SJohn Marino equivalent expressions. 
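   Only two shapes of entries qualify: a bare REG, and
   (plus REG (const_int N)).  As a sketch of the intended result (the
   register number is invented for the example): if the anchor value is
   known to live in (reg:SI 100), then for OFFS == 0x2345 this returns
   (plus:SI (reg:SI 100) (const_int 0x2345)), provided that constant term
   stays inside the IN_RANGE check against targetm.const_anchor below.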
*/ 1302*e4b17023SJohn Marino 1303*e4b17023SJohn Marino static rtx 1304*e4b17023SJohn Marino find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs, 1305*e4b17023SJohn Marino unsigned *old) 1306*e4b17023SJohn Marino { 1307*e4b17023SJohn Marino struct table_elt *elt; 1308*e4b17023SJohn Marino unsigned idx; 1309*e4b17023SJohn Marino struct table_elt *match_elt; 1310*e4b17023SJohn Marino rtx match; 1311*e4b17023SJohn Marino 1312*e4b17023SJohn Marino /* Find the cheapest and *oldest* expression to maximize the chance of 1313*e4b17023SJohn Marino reusing the same pseudo. */ 1314*e4b17023SJohn Marino 1315*e4b17023SJohn Marino match_elt = NULL; 1316*e4b17023SJohn Marino match = NULL_RTX; 1317*e4b17023SJohn Marino for (elt = anchor_elt->first_same_value, idx = 0; 1318*e4b17023SJohn Marino elt; 1319*e4b17023SJohn Marino elt = elt->next_same_value, idx++) 1320*e4b17023SJohn Marino { 1321*e4b17023SJohn Marino if (match_elt && CHEAPER (match_elt, elt)) 1322*e4b17023SJohn Marino return match; 1323*e4b17023SJohn Marino 1324*e4b17023SJohn Marino if (REG_P (elt->exp) 1325*e4b17023SJohn Marino || (GET_CODE (elt->exp) == PLUS 1326*e4b17023SJohn Marino && REG_P (XEXP (elt->exp, 0)) 1327*e4b17023SJohn Marino && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT)) 1328*e4b17023SJohn Marino { 1329*e4b17023SJohn Marino rtx x; 1330*e4b17023SJohn Marino 1331*e4b17023SJohn Marino /* Ignore expressions that are no longer valid. */ 1332*e4b17023SJohn Marino if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false)) 1333*e4b17023SJohn Marino continue; 1334*e4b17023SJohn Marino 1335*e4b17023SJohn Marino x = plus_constant (elt->exp, offs); 1336*e4b17023SJohn Marino if (REG_P (x) 1337*e4b17023SJohn Marino || (GET_CODE (x) == PLUS 1338*e4b17023SJohn Marino && IN_RANGE (INTVAL (XEXP (x, 1)), 1339*e4b17023SJohn Marino -targetm.const_anchor, 1340*e4b17023SJohn Marino targetm.const_anchor - 1))) 1341*e4b17023SJohn Marino { 1342*e4b17023SJohn Marino match = x; 1343*e4b17023SJohn Marino match_elt = elt; 1344*e4b17023SJohn Marino *old = idx; 1345*e4b17023SJohn Marino } 1346*e4b17023SJohn Marino } 1347*e4b17023SJohn Marino } 1348*e4b17023SJohn Marino 1349*e4b17023SJohn Marino return match; 1350*e4b17023SJohn Marino } 1351*e4b17023SJohn Marino 1352*e4b17023SJohn Marino /* Try to express the constant SRC_CONST using a register+offset expression 1353*e4b17023SJohn Marino derived from a constant anchor. Return it if successful or NULL_RTX, 1354*e4b17023SJohn Marino otherwise. 
*/ 1355*e4b17023SJohn Marino 1356*e4b17023SJohn Marino static rtx 1357*e4b17023SJohn Marino try_const_anchors (rtx src_const, enum machine_mode mode) 1358*e4b17023SJohn Marino { 1359*e4b17023SJohn Marino struct table_elt *lower_elt, *upper_elt; 1360*e4b17023SJohn Marino HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs; 1361*e4b17023SJohn Marino rtx lower_anchor_rtx, upper_anchor_rtx; 1362*e4b17023SJohn Marino rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX; 1363*e4b17023SJohn Marino unsigned lower_old, upper_old; 1364*e4b17023SJohn Marino 1365*e4b17023SJohn Marino if (!compute_const_anchors (src_const, &lower_base, &lower_offs, 1366*e4b17023SJohn Marino &upper_base, &upper_offs)) 1367*e4b17023SJohn Marino return NULL_RTX; 1368*e4b17023SJohn Marino 1369*e4b17023SJohn Marino lower_anchor_rtx = GEN_INT (lower_base); 1370*e4b17023SJohn Marino upper_anchor_rtx = GEN_INT (upper_base); 1371*e4b17023SJohn Marino lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode); 1372*e4b17023SJohn Marino upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode); 1373*e4b17023SJohn Marino 1374*e4b17023SJohn Marino if (lower_elt) 1375*e4b17023SJohn Marino lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old); 1376*e4b17023SJohn Marino if (upper_elt) 1377*e4b17023SJohn Marino upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old); 1378*e4b17023SJohn Marino 1379*e4b17023SJohn Marino if (!lower_exp) 1380*e4b17023SJohn Marino return upper_exp; 1381*e4b17023SJohn Marino if (!upper_exp) 1382*e4b17023SJohn Marino return lower_exp; 1383*e4b17023SJohn Marino 1384*e4b17023SJohn Marino /* Return the older expression. */ 1385*e4b17023SJohn Marino return (upper_old > lower_old ? upper_exp : lower_exp); 1386*e4b17023SJohn Marino } 1387*e4b17023SJohn Marino 1388*e4b17023SJohn Marino /* Look in or update the hash table. */ 1389*e4b17023SJohn Marino 1390*e4b17023SJohn Marino /* Remove table element ELT from use in the table. 1391*e4b17023SJohn Marino HASH is its hash code, made using the HASH macro. 1392*e4b17023SJohn Marino It's an argument because often that is known in advance 1393*e4b17023SJohn Marino and we save much time not recomputing it. */ 1394*e4b17023SJohn Marino 1395*e4b17023SJohn Marino static void 1396*e4b17023SJohn Marino remove_from_table (struct table_elt *elt, unsigned int hash) 1397*e4b17023SJohn Marino { 1398*e4b17023SJohn Marino if (elt == 0) 1399*e4b17023SJohn Marino return; 1400*e4b17023SJohn Marino 1401*e4b17023SJohn Marino /* Mark this element as removed. See cse_insn. */ 1402*e4b17023SJohn Marino elt->first_same_value = 0; 1403*e4b17023SJohn Marino 1404*e4b17023SJohn Marino /* Remove the table element from its equivalence class. 
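   This is an ordinary doubly linked list deletion on the
   next_same_value/prev_same_value links.  For example, with a class
   A <-> B <-> C (A being the first_same_value of all three), deleting B
   leaves A <-> C; deleting A instead makes B the new head, and the loop
   below walks the survivors to repoint their first_same_value at B.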
*/ 1405*e4b17023SJohn Marino 1406*e4b17023SJohn Marino { 1407*e4b17023SJohn Marino struct table_elt *prev = elt->prev_same_value; 1408*e4b17023SJohn Marino struct table_elt *next = elt->next_same_value; 1409*e4b17023SJohn Marino 1410*e4b17023SJohn Marino if (next) 1411*e4b17023SJohn Marino next->prev_same_value = prev; 1412*e4b17023SJohn Marino 1413*e4b17023SJohn Marino if (prev) 1414*e4b17023SJohn Marino prev->next_same_value = next; 1415*e4b17023SJohn Marino else 1416*e4b17023SJohn Marino { 1417*e4b17023SJohn Marino struct table_elt *newfirst = next; 1418*e4b17023SJohn Marino while (next) 1419*e4b17023SJohn Marino { 1420*e4b17023SJohn Marino next->first_same_value = newfirst; 1421*e4b17023SJohn Marino next = next->next_same_value; 1422*e4b17023SJohn Marino } 1423*e4b17023SJohn Marino } 1424*e4b17023SJohn Marino } 1425*e4b17023SJohn Marino 1426*e4b17023SJohn Marino /* Remove the table element from its hash bucket. */ 1427*e4b17023SJohn Marino 1428*e4b17023SJohn Marino { 1429*e4b17023SJohn Marino struct table_elt *prev = elt->prev_same_hash; 1430*e4b17023SJohn Marino struct table_elt *next = elt->next_same_hash; 1431*e4b17023SJohn Marino 1432*e4b17023SJohn Marino if (next) 1433*e4b17023SJohn Marino next->prev_same_hash = prev; 1434*e4b17023SJohn Marino 1435*e4b17023SJohn Marino if (prev) 1436*e4b17023SJohn Marino prev->next_same_hash = next; 1437*e4b17023SJohn Marino else if (table[hash] == elt) 1438*e4b17023SJohn Marino table[hash] = next; 1439*e4b17023SJohn Marino else 1440*e4b17023SJohn Marino { 1441*e4b17023SJohn Marino /* This entry is not in the proper hash bucket. This can happen 1442*e4b17023SJohn Marino when two classes were merged by `merge_equiv_classes'. Search 1443*e4b17023SJohn Marino for the hash bucket that it heads. This happens only very 1444*e4b17023SJohn Marino rarely, so the cost is acceptable. */ 1445*e4b17023SJohn Marino for (hash = 0; hash < HASH_SIZE; hash++) 1446*e4b17023SJohn Marino if (table[hash] == elt) 1447*e4b17023SJohn Marino table[hash] = next; 1448*e4b17023SJohn Marino } 1449*e4b17023SJohn Marino } 1450*e4b17023SJohn Marino 1451*e4b17023SJohn Marino /* Remove the table element from its related-value circular chain. */ 1452*e4b17023SJohn Marino 1453*e4b17023SJohn Marino if (elt->related_value != 0 && elt->related_value != elt) 1454*e4b17023SJohn Marino { 1455*e4b17023SJohn Marino struct table_elt *p = elt->related_value; 1456*e4b17023SJohn Marino 1457*e4b17023SJohn Marino while (p->related_value != elt) 1458*e4b17023SJohn Marino p = p->related_value; 1459*e4b17023SJohn Marino p->related_value = elt->related_value; 1460*e4b17023SJohn Marino if (p->related_value == p) 1461*e4b17023SJohn Marino p->related_value = 0; 1462*e4b17023SJohn Marino } 1463*e4b17023SJohn Marino 1464*e4b17023SJohn Marino /* Now add it to the free element chain. */ 1465*e4b17023SJohn Marino elt->next_same_hash = free_element_chain; 1466*e4b17023SJohn Marino free_element_chain = elt; 1467*e4b17023SJohn Marino } 1468*e4b17023SJohn Marino 1469*e4b17023SJohn Marino /* Same as above, but X is a pseudo-register. */ 1470*e4b17023SJohn Marino 1471*e4b17023SJohn Marino static void 1472*e4b17023SJohn Marino remove_pseudo_from_table (rtx x, unsigned int hash) 1473*e4b17023SJohn Marino { 1474*e4b17023SJohn Marino struct table_elt *elt; 1475*e4b17023SJohn Marino 1476*e4b17023SJohn Marino /* Because a pseudo-register can be referenced in more than one 1477*e4b17023SJohn Marino mode, we might have to remove more than one table entry. 
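   For instance, both (reg:SI 100) and (reg:DI 100) may have been entered
   at different times.  lookup_for_remove deliberately ignores the mode of
   a REG, so the loop below simply repeats until no entry for the pseudo
   remains in this hash bucket.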
*/ 1478*e4b17023SJohn Marino while ((elt = lookup_for_remove (x, hash, VOIDmode))) 1479*e4b17023SJohn Marino remove_from_table (elt, hash); 1480*e4b17023SJohn Marino } 1481*e4b17023SJohn Marino 1482*e4b17023SJohn Marino /* Look up X in the hash table and return its table element, 1483*e4b17023SJohn Marino or 0 if X is not in the table. 1484*e4b17023SJohn Marino 1485*e4b17023SJohn Marino MODE is the machine-mode of X, or if X is an integer constant 1486*e4b17023SJohn Marino with VOIDmode then MODE is the mode with which X will be used. 1487*e4b17023SJohn Marino 1488*e4b17023SJohn Marino Here we are satisfied to find an expression whose tree structure 1489*e4b17023SJohn Marino looks like X. */ 1490*e4b17023SJohn Marino 1491*e4b17023SJohn Marino static struct table_elt * 1492*e4b17023SJohn Marino lookup (rtx x, unsigned int hash, enum machine_mode mode) 1493*e4b17023SJohn Marino { 1494*e4b17023SJohn Marino struct table_elt *p; 1495*e4b17023SJohn Marino 1496*e4b17023SJohn Marino for (p = table[hash]; p; p = p->next_same_hash) 1497*e4b17023SJohn Marino if (mode == p->mode && ((x == p->exp && REG_P (x)) 1498*e4b17023SJohn Marino || exp_equiv_p (x, p->exp, !REG_P (x), false))) 1499*e4b17023SJohn Marino return p; 1500*e4b17023SJohn Marino 1501*e4b17023SJohn Marino return 0; 1502*e4b17023SJohn Marino } 1503*e4b17023SJohn Marino 1504*e4b17023SJohn Marino /* Like `lookup' but don't care whether the table element uses invalid regs. 1505*e4b17023SJohn Marino Also ignore discrepancies in the machine mode of a register. */ 1506*e4b17023SJohn Marino 1507*e4b17023SJohn Marino static struct table_elt * 1508*e4b17023SJohn Marino lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode) 1509*e4b17023SJohn Marino { 1510*e4b17023SJohn Marino struct table_elt *p; 1511*e4b17023SJohn Marino 1512*e4b17023SJohn Marino if (REG_P (x)) 1513*e4b17023SJohn Marino { 1514*e4b17023SJohn Marino unsigned int regno = REGNO (x); 1515*e4b17023SJohn Marino 1516*e4b17023SJohn Marino /* Don't check the machine mode when comparing registers; 1517*e4b17023SJohn Marino invalidating (REG:SI 0) also invalidates (REG:DF 0). */ 1518*e4b17023SJohn Marino for (p = table[hash]; p; p = p->next_same_hash) 1519*e4b17023SJohn Marino if (REG_P (p->exp) 1520*e4b17023SJohn Marino && REGNO (p->exp) == regno) 1521*e4b17023SJohn Marino return p; 1522*e4b17023SJohn Marino } 1523*e4b17023SJohn Marino else 1524*e4b17023SJohn Marino { 1525*e4b17023SJohn Marino for (p = table[hash]; p; p = p->next_same_hash) 1526*e4b17023SJohn Marino if (mode == p->mode 1527*e4b17023SJohn Marino && (x == p->exp || exp_equiv_p (x, p->exp, 0, false))) 1528*e4b17023SJohn Marino return p; 1529*e4b17023SJohn Marino } 1530*e4b17023SJohn Marino 1531*e4b17023SJohn Marino return 0; 1532*e4b17023SJohn Marino } 1533*e4b17023SJohn Marino 1534*e4b17023SJohn Marino /* Look for an expression equivalent to X and with code CODE. 1535*e4b17023SJohn Marino If one is found, return that expression. 
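   A typical use (the code argument here is only an example) is
   lookup_as_function (x, CONST_INT), which asks whether X is currently
   known to be equal to some integer constant and, if so, returns that
   CONST_INT instead of X.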
*/
1536*e4b17023SJohn Marino
1537*e4b17023SJohn Marino static rtx
1538*e4b17023SJohn Marino lookup_as_function (rtx x, enum rtx_code code)
1539*e4b17023SJohn Marino {
1540*e4b17023SJohn Marino   struct table_elt *p
1541*e4b17023SJohn Marino     = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1542*e4b17023SJohn Marino
1543*e4b17023SJohn Marino   if (p == 0)
1544*e4b17023SJohn Marino     return 0;
1545*e4b17023SJohn Marino
1546*e4b17023SJohn Marino   for (p = p->first_same_value; p; p = p->next_same_value)
1547*e4b17023SJohn Marino     if (GET_CODE (p->exp) == code
1548*e4b17023SJohn Marino         /* Make sure this is a valid entry in the table.  */
1549*e4b17023SJohn Marino         && exp_equiv_p (p->exp, p->exp, 1, false))
1550*e4b17023SJohn Marino       return p->exp;
1551*e4b17023SJohn Marino
1552*e4b17023SJohn Marino   return 0;
1553*e4b17023SJohn Marino }
1554*e4b17023SJohn Marino
1555*e4b17023SJohn Marino /* Insert X in the hash table, assuming HASH is its hash code and
1556*e4b17023SJohn Marino    CLASSP is an element of the class it should go in (or 0 if a new
1557*e4b17023SJohn Marino    class should be made).  COST is the cost of X and reg_cost is the
1558*e4b17023SJohn Marino    cost of registers in X.  It is inserted at the proper position to
1559*e4b17023SJohn Marino    keep the class in the order cheapest first.
1560*e4b17023SJohn Marino
1561*e4b17023SJohn Marino    MODE is the machine-mode of X, or if X is an integer constant
1562*e4b17023SJohn Marino    with VOIDmode then MODE is the mode with which X will be used.
1563*e4b17023SJohn Marino
1564*e4b17023SJohn Marino    For elements of equal cheapness, the most recent one
1565*e4b17023SJohn Marino    goes in front, except that the first element in the list
1566*e4b17023SJohn Marino    remains first unless a cheaper element is added.  The order of
1567*e4b17023SJohn Marino    pseudo-registers does not matter, as canon_reg will be called to
1568*e4b17023SJohn Marino    find the cheapest when a register is retrieved from the table.
1569*e4b17023SJohn Marino
1570*e4b17023SJohn Marino    The in_memory field in the hash table element is set to 0.
1571*e4b17023SJohn Marino    The caller must set it nonzero if appropriate.
1572*e4b17023SJohn Marino
1573*e4b17023SJohn Marino    You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1574*e4b17023SJohn Marino    and if insert_regs returns a nonzero value
1575*e4b17023SJohn Marino    you must then recompute its hash code before calling here.
1576*e4b17023SJohn Marino
1577*e4b17023SJohn Marino    If necessary, update table showing constant values of quantities.  */
1578*e4b17023SJohn Marino
1579*e4b17023SJohn Marino static struct table_elt *
1580*e4b17023SJohn Marino insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
1581*e4b17023SJohn Marino                    enum machine_mode mode, int cost, int reg_cost)
1582*e4b17023SJohn Marino {
1583*e4b17023SJohn Marino   struct table_elt *elt;
1584*e4b17023SJohn Marino
1585*e4b17023SJohn Marino   /* If X is a register and we haven't made a quantity for it,
1586*e4b17023SJohn Marino      something is wrong.  */
1587*e4b17023SJohn Marino   gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1588*e4b17023SJohn Marino
1589*e4b17023SJohn Marino   /* If X is a hard register, show it is being put in the table.  */
1590*e4b17023SJohn Marino   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1591*e4b17023SJohn Marino     add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));
1592*e4b17023SJohn Marino
1593*e4b17023SJohn Marino   /* Put an element for X into the right hash bucket.
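   (The new element is linked in at the head of table[hash] below; the
   cost-based ordering described above applies to the same-value chain,
   which is set up further down.)  For example, a class holding
   (const_int 4), (reg:SI 100) and (plus:SI (reg:SI 101) (reg:SI 102))
   would normally be kept in that order, the constant being cheapest and
   the PLUS most expensive.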
*/ 1594*e4b17023SJohn Marino 1595*e4b17023SJohn Marino elt = free_element_chain; 1596*e4b17023SJohn Marino if (elt) 1597*e4b17023SJohn Marino free_element_chain = elt->next_same_hash; 1598*e4b17023SJohn Marino else 1599*e4b17023SJohn Marino elt = XNEW (struct table_elt); 1600*e4b17023SJohn Marino 1601*e4b17023SJohn Marino elt->exp = x; 1602*e4b17023SJohn Marino elt->canon_exp = NULL_RTX; 1603*e4b17023SJohn Marino elt->cost = cost; 1604*e4b17023SJohn Marino elt->regcost = reg_cost; 1605*e4b17023SJohn Marino elt->next_same_value = 0; 1606*e4b17023SJohn Marino elt->prev_same_value = 0; 1607*e4b17023SJohn Marino elt->next_same_hash = table[hash]; 1608*e4b17023SJohn Marino elt->prev_same_hash = 0; 1609*e4b17023SJohn Marino elt->related_value = 0; 1610*e4b17023SJohn Marino elt->in_memory = 0; 1611*e4b17023SJohn Marino elt->mode = mode; 1612*e4b17023SJohn Marino elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x)); 1613*e4b17023SJohn Marino 1614*e4b17023SJohn Marino if (table[hash]) 1615*e4b17023SJohn Marino table[hash]->prev_same_hash = elt; 1616*e4b17023SJohn Marino table[hash] = elt; 1617*e4b17023SJohn Marino 1618*e4b17023SJohn Marino /* Put it into the proper value-class. */ 1619*e4b17023SJohn Marino if (classp) 1620*e4b17023SJohn Marino { 1621*e4b17023SJohn Marino classp = classp->first_same_value; 1622*e4b17023SJohn Marino if (CHEAPER (elt, classp)) 1623*e4b17023SJohn Marino /* Insert at the head of the class. */ 1624*e4b17023SJohn Marino { 1625*e4b17023SJohn Marino struct table_elt *p; 1626*e4b17023SJohn Marino elt->next_same_value = classp; 1627*e4b17023SJohn Marino classp->prev_same_value = elt; 1628*e4b17023SJohn Marino elt->first_same_value = elt; 1629*e4b17023SJohn Marino 1630*e4b17023SJohn Marino for (p = classp; p; p = p->next_same_value) 1631*e4b17023SJohn Marino p->first_same_value = elt; 1632*e4b17023SJohn Marino } 1633*e4b17023SJohn Marino else 1634*e4b17023SJohn Marino { 1635*e4b17023SJohn Marino /* Insert not at head of the class. */ 1636*e4b17023SJohn Marino /* Put it after the last element cheaper than X. */ 1637*e4b17023SJohn Marino struct table_elt *p, *next; 1638*e4b17023SJohn Marino 1639*e4b17023SJohn Marino for (p = classp; 1640*e4b17023SJohn Marino (next = p->next_same_value) && CHEAPER (next, elt); 1641*e4b17023SJohn Marino p = next) 1642*e4b17023SJohn Marino ; 1643*e4b17023SJohn Marino 1644*e4b17023SJohn Marino /* Put it after P and before NEXT. */ 1645*e4b17023SJohn Marino elt->next_same_value = next; 1646*e4b17023SJohn Marino if (next) 1647*e4b17023SJohn Marino next->prev_same_value = elt; 1648*e4b17023SJohn Marino 1649*e4b17023SJohn Marino elt->prev_same_value = p; 1650*e4b17023SJohn Marino p->next_same_value = elt; 1651*e4b17023SJohn Marino elt->first_same_value = classp; 1652*e4b17023SJohn Marino } 1653*e4b17023SJohn Marino } 1654*e4b17023SJohn Marino else 1655*e4b17023SJohn Marino elt->first_same_value = elt; 1656*e4b17023SJohn Marino 1657*e4b17023SJohn Marino /* If this is a constant being set equivalent to a register or a register 1658*e4b17023SJohn Marino being set equivalent to a constant, note the constant equivalence. 1659*e4b17023SJohn Marino 1660*e4b17023SJohn Marino If this is a constant, it cannot be equivalent to a different constant, 1661*e4b17023SJohn Marino and a constant is the only thing that can be cheaper than a register. So 1662*e4b17023SJohn Marino we know the register is the head of the class (before the constant was 1663*e4b17023SJohn Marino inserted). 
1664*e4b17023SJohn Marino 1665*e4b17023SJohn Marino If this is a register that is not already known equivalent to a 1666*e4b17023SJohn Marino constant, we must check the entire class. 1667*e4b17023SJohn Marino 1668*e4b17023SJohn Marino If this is a register that is already known equivalent to an insn, 1669*e4b17023SJohn Marino update the qtys `const_insn' to show that `this_insn' is the latest 1670*e4b17023SJohn Marino insn making that quantity equivalent to the constant. */ 1671*e4b17023SJohn Marino 1672*e4b17023SJohn Marino if (elt->is_const && classp && REG_P (classp->exp) 1673*e4b17023SJohn Marino && !REG_P (x)) 1674*e4b17023SJohn Marino { 1675*e4b17023SJohn Marino int exp_q = REG_QTY (REGNO (classp->exp)); 1676*e4b17023SJohn Marino struct qty_table_elem *exp_ent = &qty_table[exp_q]; 1677*e4b17023SJohn Marino 1678*e4b17023SJohn Marino exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x); 1679*e4b17023SJohn Marino exp_ent->const_insn = this_insn; 1680*e4b17023SJohn Marino } 1681*e4b17023SJohn Marino 1682*e4b17023SJohn Marino else if (REG_P (x) 1683*e4b17023SJohn Marino && classp 1684*e4b17023SJohn Marino && ! qty_table[REG_QTY (REGNO (x))].const_rtx 1685*e4b17023SJohn Marino && ! elt->is_const) 1686*e4b17023SJohn Marino { 1687*e4b17023SJohn Marino struct table_elt *p; 1688*e4b17023SJohn Marino 1689*e4b17023SJohn Marino for (p = classp; p != 0; p = p->next_same_value) 1690*e4b17023SJohn Marino { 1691*e4b17023SJohn Marino if (p->is_const && !REG_P (p->exp)) 1692*e4b17023SJohn Marino { 1693*e4b17023SJohn Marino int x_q = REG_QTY (REGNO (x)); 1694*e4b17023SJohn Marino struct qty_table_elem *x_ent = &qty_table[x_q]; 1695*e4b17023SJohn Marino 1696*e4b17023SJohn Marino x_ent->const_rtx 1697*e4b17023SJohn Marino = gen_lowpart (GET_MODE (x), p->exp); 1698*e4b17023SJohn Marino x_ent->const_insn = this_insn; 1699*e4b17023SJohn Marino break; 1700*e4b17023SJohn Marino } 1701*e4b17023SJohn Marino } 1702*e4b17023SJohn Marino } 1703*e4b17023SJohn Marino 1704*e4b17023SJohn Marino else if (REG_P (x) 1705*e4b17023SJohn Marino && qty_table[REG_QTY (REGNO (x))].const_rtx 1706*e4b17023SJohn Marino && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode) 1707*e4b17023SJohn Marino qty_table[REG_QTY (REGNO (x))].const_insn = this_insn; 1708*e4b17023SJohn Marino 1709*e4b17023SJohn Marino /* If this is a constant with symbolic value, 1710*e4b17023SJohn Marino and it has a term with an explicit integer value, 1711*e4b17023SJohn Marino link it up with related expressions. */ 1712*e4b17023SJohn Marino if (GET_CODE (x) == CONST) 1713*e4b17023SJohn Marino { 1714*e4b17023SJohn Marino rtx subexp = get_related_value (x); 1715*e4b17023SJohn Marino unsigned subhash; 1716*e4b17023SJohn Marino struct table_elt *subelt, *subelt_prev; 1717*e4b17023SJohn Marino 1718*e4b17023SJohn Marino if (subexp != 0) 1719*e4b17023SJohn Marino { 1720*e4b17023SJohn Marino /* Get the integer-free subexpression in the hash table. */ 1721*e4b17023SJohn Marino subhash = SAFE_HASH (subexp, mode); 1722*e4b17023SJohn Marino subelt = lookup (subexp, subhash, mode); 1723*e4b17023SJohn Marino if (subelt == 0) 1724*e4b17023SJohn Marino subelt = insert (subexp, NULL, subhash, mode); 1725*e4b17023SJohn Marino /* Initialize SUBELT's circular chain if it has none. */ 1726*e4b17023SJohn Marino if (subelt->related_value == 0) 1727*e4b17023SJohn Marino subelt->related_value = subelt; 1728*e4b17023SJohn Marino /* Find the element in the circular chain that precedes SUBELT. 
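   For example (ring contents invented): if the ring is currently
   SUBELT -> E1 -> SUBELT, the walk below stops at E1 and the insertion
   that follows yields SUBELT -> E1 -> ELT -> SUBELT, so the new element
   always sits immediately before SUBELT.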
*/ 1729*e4b17023SJohn Marino subelt_prev = subelt; 1730*e4b17023SJohn Marino while (subelt_prev->related_value != subelt) 1731*e4b17023SJohn Marino subelt_prev = subelt_prev->related_value; 1732*e4b17023SJohn Marino /* Put new ELT into SUBELT's circular chain just before SUBELT. 1733*e4b17023SJohn Marino This way the element that follows SUBELT is the oldest one. */ 1734*e4b17023SJohn Marino elt->related_value = subelt_prev->related_value; 1735*e4b17023SJohn Marino subelt_prev->related_value = elt; 1736*e4b17023SJohn Marino } 1737*e4b17023SJohn Marino } 1738*e4b17023SJohn Marino 1739*e4b17023SJohn Marino return elt; 1740*e4b17023SJohn Marino } 1741*e4b17023SJohn Marino 1742*e4b17023SJohn Marino /* Wrap insert_with_costs by passing the default costs. */ 1743*e4b17023SJohn Marino 1744*e4b17023SJohn Marino static struct table_elt * 1745*e4b17023SJohn Marino insert (rtx x, struct table_elt *classp, unsigned int hash, 1746*e4b17023SJohn Marino enum machine_mode mode) 1747*e4b17023SJohn Marino { 1748*e4b17023SJohn Marino return 1749*e4b17023SJohn Marino insert_with_costs (x, classp, hash, mode, COST (x), approx_reg_cost (x)); 1750*e4b17023SJohn Marino } 1751*e4b17023SJohn Marino 1752*e4b17023SJohn Marino 1753*e4b17023SJohn Marino /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from 1754*e4b17023SJohn Marino CLASS2 into CLASS1. This is done when we have reached an insn which makes 1755*e4b17023SJohn Marino the two classes equivalent. 1756*e4b17023SJohn Marino 1757*e4b17023SJohn Marino CLASS1 will be the surviving class; CLASS2 should not be used after this 1758*e4b17023SJohn Marino call. 1759*e4b17023SJohn Marino 1760*e4b17023SJohn Marino Any invalid entries in CLASS2 will not be copied. */ 1761*e4b17023SJohn Marino 1762*e4b17023SJohn Marino static void 1763*e4b17023SJohn Marino merge_equiv_classes (struct table_elt *class1, struct table_elt *class2) 1764*e4b17023SJohn Marino { 1765*e4b17023SJohn Marino struct table_elt *elt, *next, *new_elt; 1766*e4b17023SJohn Marino 1767*e4b17023SJohn Marino /* Ensure we start with the head of the classes. */ 1768*e4b17023SJohn Marino class1 = class1->first_same_value; 1769*e4b17023SJohn Marino class2 = class2->first_same_value; 1770*e4b17023SJohn Marino 1771*e4b17023SJohn Marino /* If they were already equal, forget it. */ 1772*e4b17023SJohn Marino if (class1 == class2) 1773*e4b17023SJohn Marino return; 1774*e4b17023SJohn Marino 1775*e4b17023SJohn Marino for (elt = class2; elt; elt = next) 1776*e4b17023SJohn Marino { 1777*e4b17023SJohn Marino unsigned int hash; 1778*e4b17023SJohn Marino rtx exp = elt->exp; 1779*e4b17023SJohn Marino enum machine_mode mode = elt->mode; 1780*e4b17023SJohn Marino 1781*e4b17023SJohn Marino next = elt->next_same_value; 1782*e4b17023SJohn Marino 1783*e4b17023SJohn Marino /* Remove old entry, make a new one in CLASS1's class. 1784*e4b17023SJohn Marino Don't do this for invalid entries as we cannot find their 1785*e4b17023SJohn Marino hash code (it also isn't necessary). 
*/ 1786*e4b17023SJohn Marino if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false)) 1787*e4b17023SJohn Marino { 1788*e4b17023SJohn Marino bool need_rehash = false; 1789*e4b17023SJohn Marino 1790*e4b17023SJohn Marino hash_arg_in_memory = 0; 1791*e4b17023SJohn Marino hash = HASH (exp, mode); 1792*e4b17023SJohn Marino 1793*e4b17023SJohn Marino if (REG_P (exp)) 1794*e4b17023SJohn Marino { 1795*e4b17023SJohn Marino need_rehash = REGNO_QTY_VALID_P (REGNO (exp)); 1796*e4b17023SJohn Marino delete_reg_equiv (REGNO (exp)); 1797*e4b17023SJohn Marino } 1798*e4b17023SJohn Marino 1799*e4b17023SJohn Marino if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER) 1800*e4b17023SJohn Marino remove_pseudo_from_table (exp, hash); 1801*e4b17023SJohn Marino else 1802*e4b17023SJohn Marino remove_from_table (elt, hash); 1803*e4b17023SJohn Marino 1804*e4b17023SJohn Marino if (insert_regs (exp, class1, 0) || need_rehash) 1805*e4b17023SJohn Marino { 1806*e4b17023SJohn Marino rehash_using_reg (exp); 1807*e4b17023SJohn Marino hash = HASH (exp, mode); 1808*e4b17023SJohn Marino } 1809*e4b17023SJohn Marino new_elt = insert (exp, class1, hash, mode); 1810*e4b17023SJohn Marino new_elt->in_memory = hash_arg_in_memory; 1811*e4b17023SJohn Marino } 1812*e4b17023SJohn Marino } 1813*e4b17023SJohn Marino } 1814*e4b17023SJohn Marino 1815*e4b17023SJohn Marino /* Flush the entire hash table. */ 1816*e4b17023SJohn Marino 1817*e4b17023SJohn Marino static void 1818*e4b17023SJohn Marino flush_hash_table (void) 1819*e4b17023SJohn Marino { 1820*e4b17023SJohn Marino int i; 1821*e4b17023SJohn Marino struct table_elt *p; 1822*e4b17023SJohn Marino 1823*e4b17023SJohn Marino for (i = 0; i < HASH_SIZE; i++) 1824*e4b17023SJohn Marino for (p = table[i]; p; p = table[i]) 1825*e4b17023SJohn Marino { 1826*e4b17023SJohn Marino /* Note that invalidate can remove elements 1827*e4b17023SJohn Marino after P in the current hash chain. */ 1828*e4b17023SJohn Marino if (REG_P (p->exp)) 1829*e4b17023SJohn Marino invalidate (p->exp, VOIDmode); 1830*e4b17023SJohn Marino else 1831*e4b17023SJohn Marino remove_from_table (p, i); 1832*e4b17023SJohn Marino } 1833*e4b17023SJohn Marino } 1834*e4b17023SJohn Marino 1835*e4b17023SJohn Marino /* Function called for each rtx to check whether true dependence exist. */ 1836*e4b17023SJohn Marino struct check_dependence_data 1837*e4b17023SJohn Marino { 1838*e4b17023SJohn Marino enum machine_mode mode; 1839*e4b17023SJohn Marino rtx exp; 1840*e4b17023SJohn Marino rtx addr; 1841*e4b17023SJohn Marino }; 1842*e4b17023SJohn Marino 1843*e4b17023SJohn Marino static int 1844*e4b17023SJohn Marino check_dependence (rtx *x, void *data) 1845*e4b17023SJohn Marino { 1846*e4b17023SJohn Marino struct check_dependence_data *d = (struct check_dependence_data *) data; 1847*e4b17023SJohn Marino if (*x && MEM_P (*x)) 1848*e4b17023SJohn Marino return canon_true_dependence (d->exp, d->mode, d->addr, *x, NULL_RTX); 1849*e4b17023SJohn Marino else 1850*e4b17023SJohn Marino return 0; 1851*e4b17023SJohn Marino } 1852*e4b17023SJohn Marino 1853*e4b17023SJohn Marino /* Remove from the hash table, or mark as invalid, all expressions whose 1854*e4b17023SJohn Marino values could be altered by storing in X. X is a register, a subreg, or 1855*e4b17023SJohn Marino a memory reference with nonvarying address (because, when a memory 1856*e4b17023SJohn Marino reference with a varying address is stored in, all memory references are 1857*e4b17023SJohn Marino removed by invalidate_memory so specific invalidation is superfluous). 
1858*e4b17023SJohn Marino FULL_MODE, if not VOIDmode, indicates that this much should be 1859*e4b17023SJohn Marino invalidated instead of just the amount indicated by the mode of X. This 1860*e4b17023SJohn Marino is only used for bitfield stores into memory. 1861*e4b17023SJohn Marino 1862*e4b17023SJohn Marino A nonvarying address may be just a register or just a symbol reference, 1863*e4b17023SJohn Marino or it may be either of those plus a numeric offset. */ 1864*e4b17023SJohn Marino 1865*e4b17023SJohn Marino static void 1866*e4b17023SJohn Marino invalidate (rtx x, enum machine_mode full_mode) 1867*e4b17023SJohn Marino { 1868*e4b17023SJohn Marino int i; 1869*e4b17023SJohn Marino struct table_elt *p; 1870*e4b17023SJohn Marino rtx addr; 1871*e4b17023SJohn Marino 1872*e4b17023SJohn Marino switch (GET_CODE (x)) 1873*e4b17023SJohn Marino { 1874*e4b17023SJohn Marino case REG: 1875*e4b17023SJohn Marino { 1876*e4b17023SJohn Marino /* If X is a register, dependencies on its contents are recorded 1877*e4b17023SJohn Marino through the qty number mechanism. Just change the qty number of 1878*e4b17023SJohn Marino the register, mark it as invalid for expressions that refer to it, 1879*e4b17023SJohn Marino and remove it itself. */ 1880*e4b17023SJohn Marino unsigned int regno = REGNO (x); 1881*e4b17023SJohn Marino unsigned int hash = HASH (x, GET_MODE (x)); 1882*e4b17023SJohn Marino 1883*e4b17023SJohn Marino /* Remove REGNO from any quantity list it might be on and indicate 1884*e4b17023SJohn Marino that its value might have changed. If it is a pseudo, remove its 1885*e4b17023SJohn Marino entry from the hash table. 1886*e4b17023SJohn Marino 1887*e4b17023SJohn Marino For a hard register, we do the first two actions above for any 1888*e4b17023SJohn Marino additional hard registers corresponding to X. Then, if any of these 1889*e4b17023SJohn Marino registers are in the table, we must remove any REG entries that 1890*e4b17023SJohn Marino overlap these registers. 
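   For example (hard register numbering assumed only for illustration):
   if X is (reg:DI 0) and DImode occupies hard registers 0 and 1, an
   existing entry for (reg:SI 1) overlaps the store and is removed by the
   scan below, even though no rtx for register 1 appears in X itself.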
*/ 1891*e4b17023SJohn Marino 1892*e4b17023SJohn Marino delete_reg_equiv (regno); 1893*e4b17023SJohn Marino REG_TICK (regno)++; 1894*e4b17023SJohn Marino SUBREG_TICKED (regno) = -1; 1895*e4b17023SJohn Marino 1896*e4b17023SJohn Marino if (regno >= FIRST_PSEUDO_REGISTER) 1897*e4b17023SJohn Marino remove_pseudo_from_table (x, hash); 1898*e4b17023SJohn Marino else 1899*e4b17023SJohn Marino { 1900*e4b17023SJohn Marino HOST_WIDE_INT in_table 1901*e4b17023SJohn Marino = TEST_HARD_REG_BIT (hard_regs_in_table, regno); 1902*e4b17023SJohn Marino unsigned int endregno = END_HARD_REGNO (x); 1903*e4b17023SJohn Marino unsigned int tregno, tendregno, rn; 1904*e4b17023SJohn Marino struct table_elt *p, *next; 1905*e4b17023SJohn Marino 1906*e4b17023SJohn Marino CLEAR_HARD_REG_BIT (hard_regs_in_table, regno); 1907*e4b17023SJohn Marino 1908*e4b17023SJohn Marino for (rn = regno + 1; rn < endregno; rn++) 1909*e4b17023SJohn Marino { 1910*e4b17023SJohn Marino in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn); 1911*e4b17023SJohn Marino CLEAR_HARD_REG_BIT (hard_regs_in_table, rn); 1912*e4b17023SJohn Marino delete_reg_equiv (rn); 1913*e4b17023SJohn Marino REG_TICK (rn)++; 1914*e4b17023SJohn Marino SUBREG_TICKED (rn) = -1; 1915*e4b17023SJohn Marino } 1916*e4b17023SJohn Marino 1917*e4b17023SJohn Marino if (in_table) 1918*e4b17023SJohn Marino for (hash = 0; hash < HASH_SIZE; hash++) 1919*e4b17023SJohn Marino for (p = table[hash]; p; p = next) 1920*e4b17023SJohn Marino { 1921*e4b17023SJohn Marino next = p->next_same_hash; 1922*e4b17023SJohn Marino 1923*e4b17023SJohn Marino if (!REG_P (p->exp) 1924*e4b17023SJohn Marino || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER) 1925*e4b17023SJohn Marino continue; 1926*e4b17023SJohn Marino 1927*e4b17023SJohn Marino tregno = REGNO (p->exp); 1928*e4b17023SJohn Marino tendregno = END_HARD_REGNO (p->exp); 1929*e4b17023SJohn Marino if (tendregno > regno && tregno < endregno) 1930*e4b17023SJohn Marino remove_from_table (p, hash); 1931*e4b17023SJohn Marino } 1932*e4b17023SJohn Marino } 1933*e4b17023SJohn Marino } 1934*e4b17023SJohn Marino return; 1935*e4b17023SJohn Marino 1936*e4b17023SJohn Marino case SUBREG: 1937*e4b17023SJohn Marino invalidate (SUBREG_REG (x), VOIDmode); 1938*e4b17023SJohn Marino return; 1939*e4b17023SJohn Marino 1940*e4b17023SJohn Marino case PARALLEL: 1941*e4b17023SJohn Marino for (i = XVECLEN (x, 0) - 1; i >= 0; --i) 1942*e4b17023SJohn Marino invalidate (XVECEXP (x, 0, i), VOIDmode); 1943*e4b17023SJohn Marino return; 1944*e4b17023SJohn Marino 1945*e4b17023SJohn Marino case EXPR_LIST: 1946*e4b17023SJohn Marino /* This is part of a disjoint return value; extract the location in 1947*e4b17023SJohn Marino question ignoring the offset. */ 1948*e4b17023SJohn Marino invalidate (XEXP (x, 0), VOIDmode); 1949*e4b17023SJohn Marino return; 1950*e4b17023SJohn Marino 1951*e4b17023SJohn Marino case MEM: 1952*e4b17023SJohn Marino addr = canon_rtx (get_addr (XEXP (x, 0))); 1953*e4b17023SJohn Marino /* Calculate the canonical version of X here so that 1954*e4b17023SJohn Marino true_dependence doesn't generate new RTL for X on each call. */ 1955*e4b17023SJohn Marino x = canon_rtx (x); 1956*e4b17023SJohn Marino 1957*e4b17023SJohn Marino /* Remove all hash table elements that refer to overlapping pieces of 1958*e4b17023SJohn Marino memory. 
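   As an illustration (the base register and offsets are invented): after
   a store to (mem:SI (plus:SI (reg:SI 100) (const_int 4))), an entry for
   (mem:DI (plus:SI (reg:SI 100) (const_int 4))) overlaps and is removed,
   while an entry for (mem:SI (plus:SI (reg:SI 100) (const_int 8))) does
   not overlap and can usually be kept, since canon_true_dependence can
   tell apart constant offsets from the same base.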
*/ 1959*e4b17023SJohn Marino if (full_mode == VOIDmode) 1960*e4b17023SJohn Marino full_mode = GET_MODE (x); 1961*e4b17023SJohn Marino 1962*e4b17023SJohn Marino for (i = 0; i < HASH_SIZE; i++) 1963*e4b17023SJohn Marino { 1964*e4b17023SJohn Marino struct table_elt *next; 1965*e4b17023SJohn Marino 1966*e4b17023SJohn Marino for (p = table[i]; p; p = next) 1967*e4b17023SJohn Marino { 1968*e4b17023SJohn Marino next = p->next_same_hash; 1969*e4b17023SJohn Marino if (p->in_memory) 1970*e4b17023SJohn Marino { 1971*e4b17023SJohn Marino struct check_dependence_data d; 1972*e4b17023SJohn Marino 1973*e4b17023SJohn Marino /* Just canonicalize the expression once; 1974*e4b17023SJohn Marino otherwise each time we call invalidate 1975*e4b17023SJohn Marino true_dependence will canonicalize the 1976*e4b17023SJohn Marino expression again. */ 1977*e4b17023SJohn Marino if (!p->canon_exp) 1978*e4b17023SJohn Marino p->canon_exp = canon_rtx (p->exp); 1979*e4b17023SJohn Marino d.exp = x; 1980*e4b17023SJohn Marino d.addr = addr; 1981*e4b17023SJohn Marino d.mode = full_mode; 1982*e4b17023SJohn Marino if (for_each_rtx (&p->canon_exp, check_dependence, &d)) 1983*e4b17023SJohn Marino remove_from_table (p, i); 1984*e4b17023SJohn Marino } 1985*e4b17023SJohn Marino } 1986*e4b17023SJohn Marino } 1987*e4b17023SJohn Marino return; 1988*e4b17023SJohn Marino 1989*e4b17023SJohn Marino default: 1990*e4b17023SJohn Marino gcc_unreachable (); 1991*e4b17023SJohn Marino } 1992*e4b17023SJohn Marino } 1993*e4b17023SJohn Marino 1994*e4b17023SJohn Marino /* Remove all expressions that refer to register REGNO, 1995*e4b17023SJohn Marino since they are already invalid, and we are about to 1996*e4b17023SJohn Marino mark that register valid again and don't want the old 1997*e4b17023SJohn Marino expressions to reappear as valid. */ 1998*e4b17023SJohn Marino 1999*e4b17023SJohn Marino static void 2000*e4b17023SJohn Marino remove_invalid_refs (unsigned int regno) 2001*e4b17023SJohn Marino { 2002*e4b17023SJohn Marino unsigned int i; 2003*e4b17023SJohn Marino struct table_elt *p, *next; 2004*e4b17023SJohn Marino 2005*e4b17023SJohn Marino for (i = 0; i < HASH_SIZE; i++) 2006*e4b17023SJohn Marino for (p = table[i]; p; p = next) 2007*e4b17023SJohn Marino { 2008*e4b17023SJohn Marino next = p->next_same_hash; 2009*e4b17023SJohn Marino if (!REG_P (p->exp) 2010*e4b17023SJohn Marino && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0)) 2011*e4b17023SJohn Marino remove_from_table (p, i); 2012*e4b17023SJohn Marino } 2013*e4b17023SJohn Marino } 2014*e4b17023SJohn Marino 2015*e4b17023SJohn Marino /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET, 2016*e4b17023SJohn Marino and mode MODE. 
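   For example, after a store to (subreg:SI (reg:DI 100) 0) only bytes
   0..3 of pseudo 100 are affected: an entry such as
   (subreg:SI (reg:DI 100) 0), or anything else referring to those bytes,
   is removed, while an entry built around (subreg:SI (reg:DI 100) 4),
   which touches only bytes 4..7, is allowed to stay.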
*/ 2017*e4b17023SJohn Marino static void 2018*e4b17023SJohn Marino remove_invalid_subreg_refs (unsigned int regno, unsigned int offset, 2019*e4b17023SJohn Marino enum machine_mode mode) 2020*e4b17023SJohn Marino { 2021*e4b17023SJohn Marino unsigned int i; 2022*e4b17023SJohn Marino struct table_elt *p, *next; 2023*e4b17023SJohn Marino unsigned int end = offset + (GET_MODE_SIZE (mode) - 1); 2024*e4b17023SJohn Marino 2025*e4b17023SJohn Marino for (i = 0; i < HASH_SIZE; i++) 2026*e4b17023SJohn Marino for (p = table[i]; p; p = next) 2027*e4b17023SJohn Marino { 2028*e4b17023SJohn Marino rtx exp = p->exp; 2029*e4b17023SJohn Marino next = p->next_same_hash; 2030*e4b17023SJohn Marino 2031*e4b17023SJohn Marino if (!REG_P (exp) 2032*e4b17023SJohn Marino && (GET_CODE (exp) != SUBREG 2033*e4b17023SJohn Marino || !REG_P (SUBREG_REG (exp)) 2034*e4b17023SJohn Marino || REGNO (SUBREG_REG (exp)) != regno 2035*e4b17023SJohn Marino || (((SUBREG_BYTE (exp) 2036*e4b17023SJohn Marino + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset) 2037*e4b17023SJohn Marino && SUBREG_BYTE (exp) <= end)) 2038*e4b17023SJohn Marino && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0)) 2039*e4b17023SJohn Marino remove_from_table (p, i); 2040*e4b17023SJohn Marino } 2041*e4b17023SJohn Marino } 2042*e4b17023SJohn Marino 2043*e4b17023SJohn Marino /* Recompute the hash codes of any valid entries in the hash table that 2044*e4b17023SJohn Marino reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG. 2045*e4b17023SJohn Marino 2046*e4b17023SJohn Marino This is called when we make a jump equivalence. */ 2047*e4b17023SJohn Marino 2048*e4b17023SJohn Marino static void 2049*e4b17023SJohn Marino rehash_using_reg (rtx x) 2050*e4b17023SJohn Marino { 2051*e4b17023SJohn Marino unsigned int i; 2052*e4b17023SJohn Marino struct table_elt *p, *next; 2053*e4b17023SJohn Marino unsigned hash; 2054*e4b17023SJohn Marino 2055*e4b17023SJohn Marino if (GET_CODE (x) == SUBREG) 2056*e4b17023SJohn Marino x = SUBREG_REG (x); 2057*e4b17023SJohn Marino 2058*e4b17023SJohn Marino /* If X is not a register or if the register is known not to be in any 2059*e4b17023SJohn Marino valid entries in the table, we have no work to do. */ 2060*e4b17023SJohn Marino 2061*e4b17023SJohn Marino if (!REG_P (x) 2062*e4b17023SJohn Marino || REG_IN_TABLE (REGNO (x)) < 0 2063*e4b17023SJohn Marino || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x))) 2064*e4b17023SJohn Marino return; 2065*e4b17023SJohn Marino 2066*e4b17023SJohn Marino /* Scan all hash chains looking for valid entries that mention X. 2067*e4b17023SJohn Marino If we find one and it is in the wrong hash chain, move it. 
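   An entry can be in the wrong chain because the hash of an expression
   that mentions a pseudo depends on that pseudo's quantity number.  For
   instance, if (plus:SI (reg:SI 100) (const_int 4)) was hashed while
   pseudo 100 had no quantity and record_jump_equiv has just assigned it
   one, the expression now hashes differently and is relinked below into
   the bucket for its new hash code.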
*/ 2068*e4b17023SJohn Marino 2069*e4b17023SJohn Marino for (i = 0; i < HASH_SIZE; i++) 2070*e4b17023SJohn Marino for (p = table[i]; p; p = next) 2071*e4b17023SJohn Marino { 2072*e4b17023SJohn Marino next = p->next_same_hash; 2073*e4b17023SJohn Marino if (reg_mentioned_p (x, p->exp) 2074*e4b17023SJohn Marino && exp_equiv_p (p->exp, p->exp, 1, false) 2075*e4b17023SJohn Marino && i != (hash = SAFE_HASH (p->exp, p->mode))) 2076*e4b17023SJohn Marino { 2077*e4b17023SJohn Marino if (p->next_same_hash) 2078*e4b17023SJohn Marino p->next_same_hash->prev_same_hash = p->prev_same_hash; 2079*e4b17023SJohn Marino 2080*e4b17023SJohn Marino if (p->prev_same_hash) 2081*e4b17023SJohn Marino p->prev_same_hash->next_same_hash = p->next_same_hash; 2082*e4b17023SJohn Marino else 2083*e4b17023SJohn Marino table[i] = p->next_same_hash; 2084*e4b17023SJohn Marino 2085*e4b17023SJohn Marino p->next_same_hash = table[hash]; 2086*e4b17023SJohn Marino p->prev_same_hash = 0; 2087*e4b17023SJohn Marino if (table[hash]) 2088*e4b17023SJohn Marino table[hash]->prev_same_hash = p; 2089*e4b17023SJohn Marino table[hash] = p; 2090*e4b17023SJohn Marino } 2091*e4b17023SJohn Marino } 2092*e4b17023SJohn Marino } 2093*e4b17023SJohn Marino 2094*e4b17023SJohn Marino /* Remove from the hash table any expression that is a call-clobbered 2095*e4b17023SJohn Marino register. Also update their TICK values. */ 2096*e4b17023SJohn Marino 2097*e4b17023SJohn Marino static void 2098*e4b17023SJohn Marino invalidate_for_call (void) 2099*e4b17023SJohn Marino { 2100*e4b17023SJohn Marino unsigned int regno, endregno; 2101*e4b17023SJohn Marino unsigned int i; 2102*e4b17023SJohn Marino unsigned hash; 2103*e4b17023SJohn Marino struct table_elt *p, *next; 2104*e4b17023SJohn Marino int in_table = 0; 2105*e4b17023SJohn Marino 2106*e4b17023SJohn Marino /* Go through all the hard registers. For each that is clobbered in 2107*e4b17023SJohn Marino a CALL_INSN, remove the register from quantity chains and update 2108*e4b17023SJohn Marino reg_tick if defined. Also see if any of these registers is currently 2109*e4b17023SJohn Marino in the table. */ 2110*e4b17023SJohn Marino 2111*e4b17023SJohn Marino for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) 2112*e4b17023SJohn Marino if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)) 2113*e4b17023SJohn Marino { 2114*e4b17023SJohn Marino delete_reg_equiv (regno); 2115*e4b17023SJohn Marino if (REG_TICK (regno) >= 0) 2116*e4b17023SJohn Marino { 2117*e4b17023SJohn Marino REG_TICK (regno)++; 2118*e4b17023SJohn Marino SUBREG_TICKED (regno) = -1; 2119*e4b17023SJohn Marino } 2120*e4b17023SJohn Marino 2121*e4b17023SJohn Marino in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0); 2122*e4b17023SJohn Marino } 2123*e4b17023SJohn Marino 2124*e4b17023SJohn Marino /* In the case where we have no call-clobbered hard registers in the 2125*e4b17023SJohn Marino table, we are done. Otherwise, scan the table and remove any 2126*e4b17023SJohn Marino entry that overlaps a call-clobbered register. 
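   For example, on a target where hard register 0 is call-clobbered and
   hard register 3 is call-saved (the numbers are only illustrative), an
   entry for (reg:SI 0) is removed here when a CALL_INSN is processed,
   while entries for (reg:SI 3) and for pseudo registers are left alone.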
*/ 2127*e4b17023SJohn Marino 2128*e4b17023SJohn Marino if (in_table) 2129*e4b17023SJohn Marino for (hash = 0; hash < HASH_SIZE; hash++) 2130*e4b17023SJohn Marino for (p = table[hash]; p; p = next) 2131*e4b17023SJohn Marino { 2132*e4b17023SJohn Marino next = p->next_same_hash; 2133*e4b17023SJohn Marino 2134*e4b17023SJohn Marino if (!REG_P (p->exp) 2135*e4b17023SJohn Marino || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER) 2136*e4b17023SJohn Marino continue; 2137*e4b17023SJohn Marino 2138*e4b17023SJohn Marino regno = REGNO (p->exp); 2139*e4b17023SJohn Marino endregno = END_HARD_REGNO (p->exp); 2140*e4b17023SJohn Marino 2141*e4b17023SJohn Marino for (i = regno; i < endregno; i++) 2142*e4b17023SJohn Marino if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)) 2143*e4b17023SJohn Marino { 2144*e4b17023SJohn Marino remove_from_table (p, hash); 2145*e4b17023SJohn Marino break; 2146*e4b17023SJohn Marino } 2147*e4b17023SJohn Marino } 2148*e4b17023SJohn Marino } 2149*e4b17023SJohn Marino 2150*e4b17023SJohn Marino /* Given an expression X of type CONST, 2151*e4b17023SJohn Marino and ELT which is its table entry (or 0 if it 2152*e4b17023SJohn Marino is not in the hash table), 2153*e4b17023SJohn Marino return an alternate expression for X as a register plus integer. 2154*e4b17023SJohn Marino If none can be found, return 0. */ 2155*e4b17023SJohn Marino 2156*e4b17023SJohn Marino static rtx 2157*e4b17023SJohn Marino use_related_value (rtx x, struct table_elt *elt) 2158*e4b17023SJohn Marino { 2159*e4b17023SJohn Marino struct table_elt *relt = 0; 2160*e4b17023SJohn Marino struct table_elt *p, *q; 2161*e4b17023SJohn Marino HOST_WIDE_INT offset; 2162*e4b17023SJohn Marino 2163*e4b17023SJohn Marino /* First, is there anything related known? 2164*e4b17023SJohn Marino If we have a table element, we can tell from that. 2165*e4b17023SJohn Marino Otherwise, must look it up. */ 2166*e4b17023SJohn Marino 2167*e4b17023SJohn Marino if (elt != 0 && elt->related_value != 0) 2168*e4b17023SJohn Marino relt = elt; 2169*e4b17023SJohn Marino else if (elt == 0 && GET_CODE (x) == CONST) 2170*e4b17023SJohn Marino { 2171*e4b17023SJohn Marino rtx subexp = get_related_value (x); 2172*e4b17023SJohn Marino if (subexp != 0) 2173*e4b17023SJohn Marino relt = lookup (subexp, 2174*e4b17023SJohn Marino SAFE_HASH (subexp, GET_MODE (subexp)), 2175*e4b17023SJohn Marino GET_MODE (subexp)); 2176*e4b17023SJohn Marino } 2177*e4b17023SJohn Marino 2178*e4b17023SJohn Marino if (relt == 0) 2179*e4b17023SJohn Marino return 0; 2180*e4b17023SJohn Marino 2181*e4b17023SJohn Marino /* Search all related table entries for one that has an 2182*e4b17023SJohn Marino equivalent register. */ 2183*e4b17023SJohn Marino 2184*e4b17023SJohn Marino p = relt; 2185*e4b17023SJohn Marino while (1) 2186*e4b17023SJohn Marino { 2187*e4b17023SJohn Marino /* This loop is strange in that it is executed in two different cases. 2188*e4b17023SJohn Marino The first is when X is already in the table. Then it is searching 2189*e4b17023SJohn Marino the RELATED_VALUE list of X's class (RELT). The second case is when 2190*e4b17023SJohn Marino X is not in the table. Then RELT points to a class for the related 2191*e4b17023SJohn Marino value. 2192*e4b17023SJohn Marino 2193*e4b17023SJohn Marino Ensure that, whatever case we are in, that we ignore classes that have 2194*e4b17023SJohn Marino the same value as X. 
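     A purely invented illustration: if X is (const (plus (symbol_ref "tbl")
     (const_int 16))) and the table already records that (const (plus (symbol_ref
     "tbl") (const_int 4))) is equivalent to (reg:SI 77), the walk below reaches
     that class, Q stops at the REG entry, and the caller returns
     (plus:SI (reg:SI 77) (const_int 12)) via plus_constant, the integer terms
     differing by 12.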
*/ 2195*e4b17023SJohn Marino 2196*e4b17023SJohn Marino if (rtx_equal_p (x, p->exp)) 2197*e4b17023SJohn Marino q = 0; 2198*e4b17023SJohn Marino else 2199*e4b17023SJohn Marino for (q = p->first_same_value; q; q = q->next_same_value) 2200*e4b17023SJohn Marino if (REG_P (q->exp)) 2201*e4b17023SJohn Marino break; 2202*e4b17023SJohn Marino 2203*e4b17023SJohn Marino if (q) 2204*e4b17023SJohn Marino break; 2205*e4b17023SJohn Marino 2206*e4b17023SJohn Marino p = p->related_value; 2207*e4b17023SJohn Marino 2208*e4b17023SJohn Marino /* We went all the way around, so there is nothing to be found. 2209*e4b17023SJohn Marino Alternatively, perhaps RELT was in the table for some other reason 2210*e4b17023SJohn Marino and it has no related values recorded. */ 2211*e4b17023SJohn Marino if (p == relt || p == 0) 2212*e4b17023SJohn Marino break; 2213*e4b17023SJohn Marino } 2214*e4b17023SJohn Marino 2215*e4b17023SJohn Marino if (q == 0) 2216*e4b17023SJohn Marino return 0; 2217*e4b17023SJohn Marino 2218*e4b17023SJohn Marino offset = (get_integer_term (x) - get_integer_term (p->exp)); 2219*e4b17023SJohn Marino /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */ 2220*e4b17023SJohn Marino return plus_constant (q->exp, offset); 2221*e4b17023SJohn Marino } 2222*e4b17023SJohn Marino 2223*e4b17023SJohn Marino 2224*e4b17023SJohn Marino /* Hash a string. Just add its bytes up. */ 2225*e4b17023SJohn Marino static inline unsigned 2226*e4b17023SJohn Marino hash_rtx_string (const char *ps) 2227*e4b17023SJohn Marino { 2228*e4b17023SJohn Marino unsigned hash = 0; 2229*e4b17023SJohn Marino const unsigned char *p = (const unsigned char *) ps; 2230*e4b17023SJohn Marino 2231*e4b17023SJohn Marino if (p) 2232*e4b17023SJohn Marino while (*p) 2233*e4b17023SJohn Marino hash += *p++; 2234*e4b17023SJohn Marino 2235*e4b17023SJohn Marino return hash; 2236*e4b17023SJohn Marino } 2237*e4b17023SJohn Marino 2238*e4b17023SJohn Marino /* Same as hash_rtx, but call CB on each rtx if it is not NULL. 2239*e4b17023SJohn Marino When the callback returns true, we continue with the new rtx. */ 2240*e4b17023SJohn Marino 2241*e4b17023SJohn Marino unsigned 2242*e4b17023SJohn Marino hash_rtx_cb (const_rtx x, enum machine_mode mode, 2243*e4b17023SJohn Marino int *do_not_record_p, int *hash_arg_in_memory_p, 2244*e4b17023SJohn Marino bool have_reg_qty, hash_rtx_callback_function cb) 2245*e4b17023SJohn Marino { 2246*e4b17023SJohn Marino int i, j; 2247*e4b17023SJohn Marino unsigned hash = 0; 2248*e4b17023SJohn Marino enum rtx_code code; 2249*e4b17023SJohn Marino const char *fmt; 2250*e4b17023SJohn Marino enum machine_mode newmode; 2251*e4b17023SJohn Marino rtx newx; 2252*e4b17023SJohn Marino 2253*e4b17023SJohn Marino /* Used to turn recursion into iteration. We can't rely on GCC's 2254*e4b17023SJohn Marino tail-recursion elimination since we need to keep accumulating values 2255*e4b17023SJohn Marino in HASH. */ 2256*e4b17023SJohn Marino repeat: 2257*e4b17023SJohn Marino if (x == 0) 2258*e4b17023SJohn Marino return hash; 2259*e4b17023SJohn Marino 2260*e4b17023SJohn Marino /* Invoke the callback first.
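     A hypothetical callback (illustration only; the exact typedef of
     hash_rtx_callback_function lives elsewhere in the tree, so treat the
     signature as approximate) might look like:

         static int
         zap_scratch_cb (const_rtx x, enum machine_mode mode,
                         rtx *newx, enum machine_mode *newmode)
         {
           if (GET_CODE (x) == SCRATCH)
             {
               *newx = const0_rtx;       [hash all SCRATCHes alike]
               *newmode = mode;
               return 1;
             }
           return 0;
         }

     When the callback returns nonzero, the code below hashes the replacement
     (*NEWX, *NEWMODE) instead of X and returns.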
*/ 2261*e4b17023SJohn Marino if (cb != NULL 2262*e4b17023SJohn Marino && ((*cb) (x, mode, &newx, &newmode))) 2263*e4b17023SJohn Marino { 2264*e4b17023SJohn Marino hash += hash_rtx_cb (newx, newmode, do_not_record_p, 2265*e4b17023SJohn Marino hash_arg_in_memory_p, have_reg_qty, cb); 2266*e4b17023SJohn Marino return hash; 2267*e4b17023SJohn Marino } 2268*e4b17023SJohn Marino 2269*e4b17023SJohn Marino code = GET_CODE (x); 2270*e4b17023SJohn Marino switch (code) 2271*e4b17023SJohn Marino { 2272*e4b17023SJohn Marino case REG: 2273*e4b17023SJohn Marino { 2274*e4b17023SJohn Marino unsigned int regno = REGNO (x); 2275*e4b17023SJohn Marino 2276*e4b17023SJohn Marino if (do_not_record_p && !reload_completed) 2277*e4b17023SJohn Marino { 2278*e4b17023SJohn Marino /* On some machines, we can't record any non-fixed hard register, 2279*e4b17023SJohn Marino because extending its life will cause reload problems. We 2280*e4b17023SJohn Marino consider ap, fp, sp, gp to be fixed for this purpose. 2281*e4b17023SJohn Marino 2282*e4b17023SJohn Marino We also consider CCmode registers to be fixed for this purpose; 2283*e4b17023SJohn Marino failure to do so leads to failure to simplify 0<100 type of 2284*e4b17023SJohn Marino conditionals. 2285*e4b17023SJohn Marino 2286*e4b17023SJohn Marino On all machines, we can't record any global registers. 2287*e4b17023SJohn Marino Nor should we record any register that is in a small 2288*e4b17023SJohn Marino class, as defined by TARGET_CLASS_LIKELY_SPILLED_P. */ 2289*e4b17023SJohn Marino bool record; 2290*e4b17023SJohn Marino 2291*e4b17023SJohn Marino if (regno >= FIRST_PSEUDO_REGISTER) 2292*e4b17023SJohn Marino record = true; 2293*e4b17023SJohn Marino else if (x == frame_pointer_rtx 2294*e4b17023SJohn Marino || x == hard_frame_pointer_rtx 2295*e4b17023SJohn Marino || x == arg_pointer_rtx 2296*e4b17023SJohn Marino || x == stack_pointer_rtx 2297*e4b17023SJohn Marino || x == pic_offset_table_rtx) 2298*e4b17023SJohn Marino record = true; 2299*e4b17023SJohn Marino else if (global_regs[regno]) 2300*e4b17023SJohn Marino record = false; 2301*e4b17023SJohn Marino else if (fixed_regs[regno]) 2302*e4b17023SJohn Marino record = true; 2303*e4b17023SJohn Marino else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC) 2304*e4b17023SJohn Marino record = true; 2305*e4b17023SJohn Marino else if (targetm.small_register_classes_for_mode_p (GET_MODE (x))) 2306*e4b17023SJohn Marino record = false; 2307*e4b17023SJohn Marino else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno))) 2308*e4b17023SJohn Marino record = false; 2309*e4b17023SJohn Marino else 2310*e4b17023SJohn Marino record = true; 2311*e4b17023SJohn Marino 2312*e4b17023SJohn Marino if (!record) 2313*e4b17023SJohn Marino { 2314*e4b17023SJohn Marino *do_not_record_p = 1; 2315*e4b17023SJohn Marino return 0; 2316*e4b17023SJohn Marino } 2317*e4b17023SJohn Marino } 2318*e4b17023SJohn Marino 2319*e4b17023SJohn Marino hash += ((unsigned int) REG << 7); 2320*e4b17023SJohn Marino hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno); 2321*e4b17023SJohn Marino return hash; 2322*e4b17023SJohn Marino } 2323*e4b17023SJohn Marino 2324*e4b17023SJohn Marino /* We handle SUBREG of a REG specially because the underlying 2325*e4b17023SJohn Marino reg changes its hash value with every value change; we don't 2326*e4b17023SJohn Marino want to have to forget unrelated subregs when one subreg changes. 
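     For example (register number invented), the two word-sized pieces of a
     DImode pseudo hash as

         (subreg:SI (reg:DI 100) 0)  ->  ((unsigned) SUBREG << 7) + 100 + 0
         (subreg:SI (reg:DI 100) 4)  ->  ((unsigned) SUBREG << 7) + 100 + 1

     (assuming UNITS_PER_WORD is 4), i.e. the raw register number is used rather
     than REG_QTY, so giving one word a new value does not force entries for the
     other word to be rehashed or forgotten.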
*/ 2327*e4b17023SJohn Marino case SUBREG: 2328*e4b17023SJohn Marino { 2329*e4b17023SJohn Marino if (REG_P (SUBREG_REG (x))) 2330*e4b17023SJohn Marino { 2331*e4b17023SJohn Marino hash += (((unsigned int) SUBREG << 7) 2332*e4b17023SJohn Marino + REGNO (SUBREG_REG (x)) 2333*e4b17023SJohn Marino + (SUBREG_BYTE (x) / UNITS_PER_WORD)); 2334*e4b17023SJohn Marino return hash; 2335*e4b17023SJohn Marino } 2336*e4b17023SJohn Marino break; 2337*e4b17023SJohn Marino } 2338*e4b17023SJohn Marino 2339*e4b17023SJohn Marino case CONST_INT: 2340*e4b17023SJohn Marino hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode 2341*e4b17023SJohn Marino + (unsigned int) INTVAL (x)); 2342*e4b17023SJohn Marino return hash; 2343*e4b17023SJohn Marino 2344*e4b17023SJohn Marino case CONST_DOUBLE: 2345*e4b17023SJohn Marino /* This is like the general case, except that it only counts 2346*e4b17023SJohn Marino the integers representing the constant. */ 2347*e4b17023SJohn Marino hash += (unsigned int) code + (unsigned int) GET_MODE (x); 2348*e4b17023SJohn Marino if (GET_MODE (x) != VOIDmode) 2349*e4b17023SJohn Marino hash += real_hash (CONST_DOUBLE_REAL_VALUE (x)); 2350*e4b17023SJohn Marino else 2351*e4b17023SJohn Marino hash += ((unsigned int) CONST_DOUBLE_LOW (x) 2352*e4b17023SJohn Marino + (unsigned int) CONST_DOUBLE_HIGH (x)); 2353*e4b17023SJohn Marino return hash; 2354*e4b17023SJohn Marino 2355*e4b17023SJohn Marino case CONST_FIXED: 2356*e4b17023SJohn Marino hash += (unsigned int) code + (unsigned int) GET_MODE (x); 2357*e4b17023SJohn Marino hash += fixed_hash (CONST_FIXED_VALUE (x)); 2358*e4b17023SJohn Marino return hash; 2359*e4b17023SJohn Marino 2360*e4b17023SJohn Marino case CONST_VECTOR: 2361*e4b17023SJohn Marino { 2362*e4b17023SJohn Marino int units; 2363*e4b17023SJohn Marino rtx elt; 2364*e4b17023SJohn Marino 2365*e4b17023SJohn Marino units = CONST_VECTOR_NUNITS (x); 2366*e4b17023SJohn Marino 2367*e4b17023SJohn Marino for (i = 0; i < units; ++i) 2368*e4b17023SJohn Marino { 2369*e4b17023SJohn Marino elt = CONST_VECTOR_ELT (x, i); 2370*e4b17023SJohn Marino hash += hash_rtx_cb (elt, GET_MODE (elt), 2371*e4b17023SJohn Marino do_not_record_p, hash_arg_in_memory_p, 2372*e4b17023SJohn Marino have_reg_qty, cb); 2373*e4b17023SJohn Marino } 2374*e4b17023SJohn Marino 2375*e4b17023SJohn Marino return hash; 2376*e4b17023SJohn Marino } 2377*e4b17023SJohn Marino 2378*e4b17023SJohn Marino /* Assume there is only one rtx object for any given label. */ 2379*e4b17023SJohn Marino case LABEL_REF: 2380*e4b17023SJohn Marino /* We don't hash on the address of the CODE_LABEL to avoid bootstrap 2381*e4b17023SJohn Marino differences and differences between each stage's debugging dumps. */ 2382*e4b17023SJohn Marino hash += (((unsigned int) LABEL_REF << 7) 2383*e4b17023SJohn Marino + CODE_LABEL_NUMBER (XEXP (x, 0))); 2384*e4b17023SJohn Marino return hash; 2385*e4b17023SJohn Marino 2386*e4b17023SJohn Marino case SYMBOL_REF: 2387*e4b17023SJohn Marino { 2388*e4b17023SJohn Marino /* Don't hash on the symbol's address to avoid bootstrap differences. 2389*e4b17023SJohn Marino Different hash values may cause expressions to be recorded in 2390*e4b17023SJohn Marino different orders and thus different registers to be used in the 2391*e4b17023SJohn Marino final assembler. This also avoids differences in the dump files 2392*e4b17023SJohn Marino between various stages. 
*/ 2393*e4b17023SJohn Marino unsigned int h = 0; 2394*e4b17023SJohn Marino const unsigned char *p = (const unsigned char *) XSTR (x, 0); 2395*e4b17023SJohn Marino 2396*e4b17023SJohn Marino while (*p) 2397*e4b17023SJohn Marino h += (h << 7) + *p++; /* ??? revisit */ 2398*e4b17023SJohn Marino 2399*e4b17023SJohn Marino hash += ((unsigned int) SYMBOL_REF << 7) + h; 2400*e4b17023SJohn Marino return hash; 2401*e4b17023SJohn Marino } 2402*e4b17023SJohn Marino 2403*e4b17023SJohn Marino case MEM: 2404*e4b17023SJohn Marino /* We don't record if marked volatile or if BLKmode since we don't 2405*e4b17023SJohn Marino know the size of the move. */ 2406*e4b17023SJohn Marino if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)) 2407*e4b17023SJohn Marino { 2408*e4b17023SJohn Marino *do_not_record_p = 1; 2409*e4b17023SJohn Marino return 0; 2410*e4b17023SJohn Marino } 2411*e4b17023SJohn Marino if (hash_arg_in_memory_p && !MEM_READONLY_P (x)) 2412*e4b17023SJohn Marino *hash_arg_in_memory_p = 1; 2413*e4b17023SJohn Marino 2414*e4b17023SJohn Marino /* Now that we have already found this special case, 2415*e4b17023SJohn Marino might as well speed it up as much as possible. */ 2416*e4b17023SJohn Marino hash += (unsigned) MEM; 2417*e4b17023SJohn Marino x = XEXP (x, 0); 2418*e4b17023SJohn Marino goto repeat; 2419*e4b17023SJohn Marino 2420*e4b17023SJohn Marino case USE: 2421*e4b17023SJohn Marino /* A USE that mentions non-volatile memory needs special 2422*e4b17023SJohn Marino handling since the MEM may be BLKmode which normally 2423*e4b17023SJohn Marino prevents an entry from being made. Pure calls are 2424*e4b17023SJohn Marino marked by a USE which mentions BLKmode memory. 2425*e4b17023SJohn Marino See calls.c:emit_call_1. */ 2426*e4b17023SJohn Marino if (MEM_P (XEXP (x, 0)) 2427*e4b17023SJohn Marino && ! MEM_VOLATILE_P (XEXP (x, 0))) 2428*e4b17023SJohn Marino { 2429*e4b17023SJohn Marino hash += (unsigned) USE; 2430*e4b17023SJohn Marino x = XEXP (x, 0); 2431*e4b17023SJohn Marino 2432*e4b17023SJohn Marino if (hash_arg_in_memory_p && !MEM_READONLY_P (x)) 2433*e4b17023SJohn Marino *hash_arg_in_memory_p = 1; 2434*e4b17023SJohn Marino 2435*e4b17023SJohn Marino /* Now that we have already found this special case, 2436*e4b17023SJohn Marino might as well speed it up as much as possible. 
*/ 2437*e4b17023SJohn Marino hash += (unsigned) MEM; 2438*e4b17023SJohn Marino x = XEXP (x, 0); 2439*e4b17023SJohn Marino goto repeat; 2440*e4b17023SJohn Marino } 2441*e4b17023SJohn Marino break; 2442*e4b17023SJohn Marino 2443*e4b17023SJohn Marino case PRE_DEC: 2444*e4b17023SJohn Marino case PRE_INC: 2445*e4b17023SJohn Marino case POST_DEC: 2446*e4b17023SJohn Marino case POST_INC: 2447*e4b17023SJohn Marino case PRE_MODIFY: 2448*e4b17023SJohn Marino case POST_MODIFY: 2449*e4b17023SJohn Marino case PC: 2450*e4b17023SJohn Marino case CC0: 2451*e4b17023SJohn Marino case CALL: 2452*e4b17023SJohn Marino case UNSPEC_VOLATILE: 2453*e4b17023SJohn Marino if (do_not_record_p) { 2454*e4b17023SJohn Marino *do_not_record_p = 1; 2455*e4b17023SJohn Marino return 0; 2456*e4b17023SJohn Marino } 2457*e4b17023SJohn Marino else 2458*e4b17023SJohn Marino return hash; 2459*e4b17023SJohn Marino break; 2460*e4b17023SJohn Marino 2461*e4b17023SJohn Marino case ASM_OPERANDS: 2462*e4b17023SJohn Marino if (do_not_record_p && MEM_VOLATILE_P (x)) 2463*e4b17023SJohn Marino { 2464*e4b17023SJohn Marino *do_not_record_p = 1; 2465*e4b17023SJohn Marino return 0; 2466*e4b17023SJohn Marino } 2467*e4b17023SJohn Marino else 2468*e4b17023SJohn Marino { 2469*e4b17023SJohn Marino /* We don't want to take the filename and line into account. */ 2470*e4b17023SJohn Marino hash += (unsigned) code + (unsigned) GET_MODE (x) 2471*e4b17023SJohn Marino + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x)) 2472*e4b17023SJohn Marino + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x)) 2473*e4b17023SJohn Marino + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x); 2474*e4b17023SJohn Marino 2475*e4b17023SJohn Marino if (ASM_OPERANDS_INPUT_LENGTH (x)) 2476*e4b17023SJohn Marino { 2477*e4b17023SJohn Marino for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++) 2478*e4b17023SJohn Marino { 2479*e4b17023SJohn Marino hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i), 2480*e4b17023SJohn Marino GET_MODE (ASM_OPERANDS_INPUT (x, i)), 2481*e4b17023SJohn Marino do_not_record_p, hash_arg_in_memory_p, 2482*e4b17023SJohn Marino have_reg_qty, cb) 2483*e4b17023SJohn Marino + hash_rtx_string 2484*e4b17023SJohn Marino (ASM_OPERANDS_INPUT_CONSTRAINT (x, i))); 2485*e4b17023SJohn Marino } 2486*e4b17023SJohn Marino 2487*e4b17023SJohn Marino hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0)); 2488*e4b17023SJohn Marino x = ASM_OPERANDS_INPUT (x, 0); 2489*e4b17023SJohn Marino mode = GET_MODE (x); 2490*e4b17023SJohn Marino goto repeat; 2491*e4b17023SJohn Marino } 2492*e4b17023SJohn Marino 2493*e4b17023SJohn Marino return hash; 2494*e4b17023SJohn Marino } 2495*e4b17023SJohn Marino break; 2496*e4b17023SJohn Marino 2497*e4b17023SJohn Marino default: 2498*e4b17023SJohn Marino break; 2499*e4b17023SJohn Marino } 2500*e4b17023SJohn Marino 2501*e4b17023SJohn Marino i = GET_RTX_LENGTH (code) - 1; 2502*e4b17023SJohn Marino hash += (unsigned) code + (unsigned) GET_MODE (x); 2503*e4b17023SJohn Marino fmt = GET_RTX_FORMAT (code); 2504*e4b17023SJohn Marino for (; i >= 0; i--) 2505*e4b17023SJohn Marino { 2506*e4b17023SJohn Marino switch (fmt[i]) 2507*e4b17023SJohn Marino { 2508*e4b17023SJohn Marino case 'e': 2509*e4b17023SJohn Marino /* If we are about to do the last recursive call 2510*e4b17023SJohn Marino needed at this level, change it into iteration. 2511*e4b17023SJohn Marino This function is called enough to be worth it. 
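     For instance (operands invented), hashing (plus:SI (reg:SI 60)
     (mem:SI (reg:SI 61))) hashes operand 1 through the recursive call below and
     then simply sets X to operand 0 and jumps back to `repeat' rather than
     recursing a second time.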
*/ 2512*e4b17023SJohn Marino if (i == 0) 2513*e4b17023SJohn Marino { 2514*e4b17023SJohn Marino x = XEXP (x, i); 2515*e4b17023SJohn Marino goto repeat; 2516*e4b17023SJohn Marino } 2517*e4b17023SJohn Marino 2518*e4b17023SJohn Marino hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p, 2519*e4b17023SJohn Marino hash_arg_in_memory_p, 2520*e4b17023SJohn Marino have_reg_qty, cb); 2521*e4b17023SJohn Marino break; 2522*e4b17023SJohn Marino 2523*e4b17023SJohn Marino case 'E': 2524*e4b17023SJohn Marino for (j = 0; j < XVECLEN (x, i); j++) 2525*e4b17023SJohn Marino hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p, 2526*e4b17023SJohn Marino hash_arg_in_memory_p, 2527*e4b17023SJohn Marino have_reg_qty, cb); 2528*e4b17023SJohn Marino break; 2529*e4b17023SJohn Marino 2530*e4b17023SJohn Marino case 's': 2531*e4b17023SJohn Marino hash += hash_rtx_string (XSTR (x, i)); 2532*e4b17023SJohn Marino break; 2533*e4b17023SJohn Marino 2534*e4b17023SJohn Marino case 'i': 2535*e4b17023SJohn Marino hash += (unsigned int) XINT (x, i); 2536*e4b17023SJohn Marino break; 2537*e4b17023SJohn Marino 2538*e4b17023SJohn Marino case '0': case 't': 2539*e4b17023SJohn Marino /* Unused. */ 2540*e4b17023SJohn Marino break; 2541*e4b17023SJohn Marino 2542*e4b17023SJohn Marino default: 2543*e4b17023SJohn Marino gcc_unreachable (); 2544*e4b17023SJohn Marino } 2545*e4b17023SJohn Marino } 2546*e4b17023SJohn Marino 2547*e4b17023SJohn Marino return hash; 2548*e4b17023SJohn Marino } 2549*e4b17023SJohn Marino 2550*e4b17023SJohn Marino /* Hash an rtx. We are careful to make sure the value is never negative. 2551*e4b17023SJohn Marino Equivalent registers hash identically. 2552*e4b17023SJohn Marino MODE is used in hashing for CONST_INTs only; 2553*e4b17023SJohn Marino otherwise the mode of X is used. 2554*e4b17023SJohn Marino 2555*e4b17023SJohn Marino Store 1 in DO_NOT_RECORD_P if any subexpression is volatile. 2556*e4b17023SJohn Marino 2557*e4b17023SJohn Marino If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains 2558*e4b17023SJohn Marino a MEM rtx which does not have the RTX_UNCHANGING_P bit set. 2559*e4b17023SJohn Marino 2560*e4b17023SJohn Marino Note that cse_insn knows that the hash code of a MEM expression 2561*e4b17023SJohn Marino is just (int) MEM plus the hash code of the address. */ 2562*e4b17023SJohn Marino 2563*e4b17023SJohn Marino unsigned 2564*e4b17023SJohn Marino hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p, 2565*e4b17023SJohn Marino int *hash_arg_in_memory_p, bool have_reg_qty) 2566*e4b17023SJohn Marino { 2567*e4b17023SJohn Marino return hash_rtx_cb (x, mode, do_not_record_p, 2568*e4b17023SJohn Marino hash_arg_in_memory_p, have_reg_qty, NULL); 2569*e4b17023SJohn Marino } 2570*e4b17023SJohn Marino 2571*e4b17023SJohn Marino /* Hash an rtx X for cse via hash_rtx. 2572*e4b17023SJohn Marino Stores 1 in do_not_record if any subexpression is volatile. 2573*e4b17023SJohn Marino Stores 1 in hash_arg_in_memory if X contains a mem rtx which 2574*e4b17023SJohn Marino does not have the RTX_UNCHANGING_P bit set. */ 2575*e4b17023SJohn Marino 2576*e4b17023SJohn Marino static inline unsigned 2577*e4b17023SJohn Marino canon_hash (rtx x, enum machine_mode mode) 2578*e4b17023SJohn Marino { 2579*e4b17023SJohn Marino return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true); 2580*e4b17023SJohn Marino } 2581*e4b17023SJohn Marino 2582*e4b17023SJohn Marino /* Like canon_hash but with no side effects, i.e. do_not_record 2583*e4b17023SJohn Marino and hash_arg_in_memory are not changed. 
*/ 2584*e4b17023SJohn Marino 2585*e4b17023SJohn Marino static inline unsigned 2586*e4b17023SJohn Marino safe_hash (rtx x, enum machine_mode mode) 2587*e4b17023SJohn Marino { 2588*e4b17023SJohn Marino int dummy_do_not_record; 2589*e4b17023SJohn Marino return hash_rtx (x, mode, &dummy_do_not_record, NULL, true); 2590*e4b17023SJohn Marino } 2591*e4b17023SJohn Marino 2592*e4b17023SJohn Marino /* Return 1 iff X and Y would canonicalize into the same thing, 2593*e4b17023SJohn Marino without actually constructing the canonicalization of either one. 2594*e4b17023SJohn Marino If VALIDATE is nonzero, 2595*e4b17023SJohn Marino we assume X is an expression being processed from the rtl 2596*e4b17023SJohn Marino and Y was found in the hash table. We check register refs 2597*e4b17023SJohn Marino in Y for being marked as valid. 2598*e4b17023SJohn Marino 2599*e4b17023SJohn Marino If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */ 2600*e4b17023SJohn Marino 2601*e4b17023SJohn Marino int 2602*e4b17023SJohn Marino exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse) 2603*e4b17023SJohn Marino { 2604*e4b17023SJohn Marino int i, j; 2605*e4b17023SJohn Marino enum rtx_code code; 2606*e4b17023SJohn Marino const char *fmt; 2607*e4b17023SJohn Marino 2608*e4b17023SJohn Marino /* Note: it is incorrect to assume an expression is equivalent to itself 2609*e4b17023SJohn Marino if VALIDATE is nonzero. */ 2610*e4b17023SJohn Marino if (x == y && !validate) 2611*e4b17023SJohn Marino return 1; 2612*e4b17023SJohn Marino 2613*e4b17023SJohn Marino if (x == 0 || y == 0) 2614*e4b17023SJohn Marino return x == y; 2615*e4b17023SJohn Marino 2616*e4b17023SJohn Marino code = GET_CODE (x); 2617*e4b17023SJohn Marino if (code != GET_CODE (y)) 2618*e4b17023SJohn Marino return 0; 2619*e4b17023SJohn Marino 2620*e4b17023SJohn Marino /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */ 2621*e4b17023SJohn Marino if (GET_MODE (x) != GET_MODE (y)) 2622*e4b17023SJohn Marino return 0; 2623*e4b17023SJohn Marino 2624*e4b17023SJohn Marino /* MEMs referring to different address spaces are not equivalent. */ 2625*e4b17023SJohn Marino if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y)) 2626*e4b17023SJohn Marino return 0; 2627*e4b17023SJohn Marino 2628*e4b17023SJohn Marino switch (code) 2629*e4b17023SJohn Marino { 2630*e4b17023SJohn Marino case PC: 2631*e4b17023SJohn Marino case CC0: 2632*e4b17023SJohn Marino case CONST_INT: 2633*e4b17023SJohn Marino case CONST_DOUBLE: 2634*e4b17023SJohn Marino case CONST_FIXED: 2635*e4b17023SJohn Marino return x == y; 2636*e4b17023SJohn Marino 2637*e4b17023SJohn Marino case LABEL_REF: 2638*e4b17023SJohn Marino return XEXP (x, 0) == XEXP (y, 0); 2639*e4b17023SJohn Marino 2640*e4b17023SJohn Marino case SYMBOL_REF: 2641*e4b17023SJohn Marino return XSTR (x, 0) == XSTR (y, 0); 2642*e4b17023SJohn Marino 2643*e4b17023SJohn Marino case REG: 2644*e4b17023SJohn Marino if (for_gcse) 2645*e4b17023SJohn Marino return REGNO (x) == REGNO (y); 2646*e4b17023SJohn Marino else 2647*e4b17023SJohn Marino { 2648*e4b17023SJohn Marino unsigned int regno = REGNO (y); 2649*e4b17023SJohn Marino unsigned int i; 2650*e4b17023SJohn Marino unsigned int endregno = END_REGNO (y); 2651*e4b17023SJohn Marino 2652*e4b17023SJohn Marino /* If the quantities are not the same, the expressions are not 2653*e4b17023SJohn Marino equivalent. If they are the same and we are not to validate, they 2654*e4b17023SJohn Marino are equivalent. Otherwise, ensure all regs are up-to-date.
*/ 2655*e4b17023SJohn Marino 2656*e4b17023SJohn Marino if (REG_QTY (REGNO (x)) != REG_QTY (regno)) 2657*e4b17023SJohn Marino return 0; 2658*e4b17023SJohn Marino 2659*e4b17023SJohn Marino if (! validate) 2660*e4b17023SJohn Marino return 1; 2661*e4b17023SJohn Marino 2662*e4b17023SJohn Marino for (i = regno; i < endregno; i++) 2663*e4b17023SJohn Marino if (REG_IN_TABLE (i) != REG_TICK (i)) 2664*e4b17023SJohn Marino return 0; 2665*e4b17023SJohn Marino 2666*e4b17023SJohn Marino return 1; 2667*e4b17023SJohn Marino } 2668*e4b17023SJohn Marino 2669*e4b17023SJohn Marino case MEM: 2670*e4b17023SJohn Marino if (for_gcse) 2671*e4b17023SJohn Marino { 2672*e4b17023SJohn Marino /* A volatile mem should not be considered equivalent to any 2673*e4b17023SJohn Marino other. */ 2674*e4b17023SJohn Marino if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y)) 2675*e4b17023SJohn Marino return 0; 2676*e4b17023SJohn Marino 2677*e4b17023SJohn Marino /* Can't merge two expressions in different alias sets, since we 2678*e4b17023SJohn Marino can decide that the expression is transparent in a block when 2679*e4b17023SJohn Marino it isn't, due to it being set with the different alias set. 2680*e4b17023SJohn Marino 2681*e4b17023SJohn Marino Also, can't merge two expressions with different MEM_ATTRS. 2682*e4b17023SJohn Marino They could e.g. be two different entities allocated into the 2683*e4b17023SJohn Marino same space on the stack (see e.g. PR25130). In that case, the 2684*e4b17023SJohn Marino MEM addresses can be the same, even though the two MEMs are 2685*e4b17023SJohn Marino absolutely not equivalent. 2686*e4b17023SJohn Marino 2687*e4b17023SJohn Marino But because really all MEM attributes should be the same for 2688*e4b17023SJohn Marino equivalent MEMs, we just use the invariant that MEMs that have 2689*e4b17023SJohn Marino the same attributes share the same mem_attrs data structure. */ 2690*e4b17023SJohn Marino if (MEM_ATTRS (x) != MEM_ATTRS (y)) 2691*e4b17023SJohn Marino return 0; 2692*e4b17023SJohn Marino } 2693*e4b17023SJohn Marino break; 2694*e4b17023SJohn Marino 2695*e4b17023SJohn Marino /* For commutative operations, check both orders. */ 2696*e4b17023SJohn Marino case PLUS: 2697*e4b17023SJohn Marino case MULT: 2698*e4b17023SJohn Marino case AND: 2699*e4b17023SJohn Marino case IOR: 2700*e4b17023SJohn Marino case XOR: 2701*e4b17023SJohn Marino case NE: 2702*e4b17023SJohn Marino case EQ: 2703*e4b17023SJohn Marino return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), 2704*e4b17023SJohn Marino validate, for_gcse) 2705*e4b17023SJohn Marino && exp_equiv_p (XEXP (x, 1), XEXP (y, 1), 2706*e4b17023SJohn Marino validate, for_gcse)) 2707*e4b17023SJohn Marino || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1), 2708*e4b17023SJohn Marino validate, for_gcse) 2709*e4b17023SJohn Marino && exp_equiv_p (XEXP (x, 1), XEXP (y, 0), 2710*e4b17023SJohn Marino validate, for_gcse))); 2711*e4b17023SJohn Marino 2712*e4b17023SJohn Marino case ASM_OPERANDS: 2713*e4b17023SJohn Marino /* We don't use the generic code below because we want to 2714*e4b17023SJohn Marino disregard filename and line numbers. */ 2715*e4b17023SJohn Marino 2716*e4b17023SJohn Marino /* A volatile asm isn't equivalent to any other. 
*/ 2717*e4b17023SJohn Marino if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y)) 2718*e4b17023SJohn Marino return 0; 2719*e4b17023SJohn Marino 2720*e4b17023SJohn Marino if (GET_MODE (x) != GET_MODE (y) 2721*e4b17023SJohn Marino || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y)) 2722*e4b17023SJohn Marino || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x), 2723*e4b17023SJohn Marino ASM_OPERANDS_OUTPUT_CONSTRAINT (y)) 2724*e4b17023SJohn Marino || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y) 2725*e4b17023SJohn Marino || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y)) 2726*e4b17023SJohn Marino return 0; 2727*e4b17023SJohn Marino 2728*e4b17023SJohn Marino if (ASM_OPERANDS_INPUT_LENGTH (x)) 2729*e4b17023SJohn Marino { 2730*e4b17023SJohn Marino for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--) 2731*e4b17023SJohn Marino if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i), 2732*e4b17023SJohn Marino ASM_OPERANDS_INPUT (y, i), 2733*e4b17023SJohn Marino validate, for_gcse) 2734*e4b17023SJohn Marino || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i), 2735*e4b17023SJohn Marino ASM_OPERANDS_INPUT_CONSTRAINT (y, i))) 2736*e4b17023SJohn Marino return 0; 2737*e4b17023SJohn Marino } 2738*e4b17023SJohn Marino 2739*e4b17023SJohn Marino return 1; 2740*e4b17023SJohn Marino 2741*e4b17023SJohn Marino default: 2742*e4b17023SJohn Marino break; 2743*e4b17023SJohn Marino } 2744*e4b17023SJohn Marino 2745*e4b17023SJohn Marino /* Compare the elements. If any pair of corresponding elements 2746*e4b17023SJohn Marino fail to match, return 0 for the whole thing. */ 2747*e4b17023SJohn Marino 2748*e4b17023SJohn Marino fmt = GET_RTX_FORMAT (code); 2749*e4b17023SJohn Marino for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 2750*e4b17023SJohn Marino { 2751*e4b17023SJohn Marino switch (fmt[i]) 2752*e4b17023SJohn Marino { 2753*e4b17023SJohn Marino case 'e': 2754*e4b17023SJohn Marino if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), 2755*e4b17023SJohn Marino validate, for_gcse)) 2756*e4b17023SJohn Marino return 0; 2757*e4b17023SJohn Marino break; 2758*e4b17023SJohn Marino 2759*e4b17023SJohn Marino case 'E': 2760*e4b17023SJohn Marino if (XVECLEN (x, i) != XVECLEN (y, i)) 2761*e4b17023SJohn Marino return 0; 2762*e4b17023SJohn Marino for (j = 0; j < XVECLEN (x, i); j++) 2763*e4b17023SJohn Marino if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j), 2764*e4b17023SJohn Marino validate, for_gcse)) 2765*e4b17023SJohn Marino return 0; 2766*e4b17023SJohn Marino break; 2767*e4b17023SJohn Marino 2768*e4b17023SJohn Marino case 's': 2769*e4b17023SJohn Marino if (strcmp (XSTR (x, i), XSTR (y, i))) 2770*e4b17023SJohn Marino return 0; 2771*e4b17023SJohn Marino break; 2772*e4b17023SJohn Marino 2773*e4b17023SJohn Marino case 'i': 2774*e4b17023SJohn Marino if (XINT (x, i) != XINT (y, i)) 2775*e4b17023SJohn Marino return 0; 2776*e4b17023SJohn Marino break; 2777*e4b17023SJohn Marino 2778*e4b17023SJohn Marino case 'w': 2779*e4b17023SJohn Marino if (XWINT (x, i) != XWINT (y, i)) 2780*e4b17023SJohn Marino return 0; 2781*e4b17023SJohn Marino break; 2782*e4b17023SJohn Marino 2783*e4b17023SJohn Marino case '0': 2784*e4b17023SJohn Marino case 't': 2785*e4b17023SJohn Marino break; 2786*e4b17023SJohn Marino 2787*e4b17023SJohn Marino default: 2788*e4b17023SJohn Marino gcc_unreachable (); 2789*e4b17023SJohn Marino } 2790*e4b17023SJohn Marino } 2791*e4b17023SJohn Marino 2792*e4b17023SJohn Marino return 1; 2793*e4b17023SJohn Marino } 2794*e4b17023SJohn Marino 2795*e4b17023SJohn Marino /* Subroutine of canon_reg. 
Pass *XLOC through canon_reg, and validate 2796*e4b17023SJohn Marino the result if necessary. INSN is as for canon_reg. */ 2797*e4b17023SJohn Marino 2798*e4b17023SJohn Marino static void 2799*e4b17023SJohn Marino validate_canon_reg (rtx *xloc, rtx insn) 2800*e4b17023SJohn Marino { 2801*e4b17023SJohn Marino if (*xloc) 2802*e4b17023SJohn Marino { 2803*e4b17023SJohn Marino rtx new_rtx = canon_reg (*xloc, insn); 2804*e4b17023SJohn Marino 2805*e4b17023SJohn Marino /* If replacing pseudo with hard reg or vice versa, ensure the 2806*e4b17023SJohn Marino insn remains valid. Likewise if the insn has MATCH_DUPs. */ 2807*e4b17023SJohn Marino gcc_assert (insn && new_rtx); 2808*e4b17023SJohn Marino validate_change (insn, xloc, new_rtx, 1); 2809*e4b17023SJohn Marino } 2810*e4b17023SJohn Marino } 2811*e4b17023SJohn Marino 2812*e4b17023SJohn Marino /* Canonicalize an expression: 2813*e4b17023SJohn Marino replace each register reference inside it 2814*e4b17023SJohn Marino with the "oldest" equivalent register. 2815*e4b17023SJohn Marino 2816*e4b17023SJohn Marino If INSN is nonzero validate_change is used to ensure that INSN remains valid 2817*e4b17023SJohn Marino after we make our substitution. The calls are made with IN_GROUP nonzero 2818*e4b17023SJohn Marino so apply_change_group must be called upon the outermost return from this 2819*e4b17023SJohn Marino function (unless INSN is zero). The result of apply_change_group can 2820*e4b17023SJohn Marino generally be discarded since the changes we are making are optional. */ 2821*e4b17023SJohn Marino 2822*e4b17023SJohn Marino static rtx 2823*e4b17023SJohn Marino canon_reg (rtx x, rtx insn) 2824*e4b17023SJohn Marino { 2825*e4b17023SJohn Marino int i; 2826*e4b17023SJohn Marino enum rtx_code code; 2827*e4b17023SJohn Marino const char *fmt; 2828*e4b17023SJohn Marino 2829*e4b17023SJohn Marino if (x == 0) 2830*e4b17023SJohn Marino return x; 2831*e4b17023SJohn Marino 2832*e4b17023SJohn Marino code = GET_CODE (x); 2833*e4b17023SJohn Marino switch (code) 2834*e4b17023SJohn Marino { 2835*e4b17023SJohn Marino case PC: 2836*e4b17023SJohn Marino case CC0: 2837*e4b17023SJohn Marino case CONST: 2838*e4b17023SJohn Marino case CONST_INT: 2839*e4b17023SJohn Marino case CONST_DOUBLE: 2840*e4b17023SJohn Marino case CONST_FIXED: 2841*e4b17023SJohn Marino case CONST_VECTOR: 2842*e4b17023SJohn Marino case SYMBOL_REF: 2843*e4b17023SJohn Marino case LABEL_REF: 2844*e4b17023SJohn Marino case ADDR_VEC: 2845*e4b17023SJohn Marino case ADDR_DIFF_VEC: 2846*e4b17023SJohn Marino return x; 2847*e4b17023SJohn Marino 2848*e4b17023SJohn Marino case REG: 2849*e4b17023SJohn Marino { 2850*e4b17023SJohn Marino int first; 2851*e4b17023SJohn Marino int q; 2852*e4b17023SJohn Marino struct qty_table_elem *ent; 2853*e4b17023SJohn Marino 2854*e4b17023SJohn Marino /* Never replace a hard reg, because hard regs can appear 2855*e4b17023SJohn Marino in more than one machine mode, and we must preserve the mode 2856*e4b17023SJohn Marino of each occurrence. Also, some hard regs appear in 2857*e4b17023SJohn Marino MEMs that are shared and mustn't be altered. Don't try to 2858*e4b17023SJohn Marino replace any reg that maps to a reg of class NO_REGS. */ 2859*e4b17023SJohn Marino if (REGNO (x) < FIRST_PSEUDO_REGISTER 2860*e4b17023SJohn Marino || ! 
REGNO_QTY_VALID_P (REGNO (x))) 2861*e4b17023SJohn Marino return x; 2862*e4b17023SJohn Marino 2863*e4b17023SJohn Marino q = REG_QTY (REGNO (x)); 2864*e4b17023SJohn Marino ent = &qty_table[q]; 2865*e4b17023SJohn Marino first = ent->first_reg; 2866*e4b17023SJohn Marino return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first] 2867*e4b17023SJohn Marino : REGNO_REG_CLASS (first) == NO_REGS ? x 2868*e4b17023SJohn Marino : gen_rtx_REG (ent->mode, first)); 2869*e4b17023SJohn Marino } 2870*e4b17023SJohn Marino 2871*e4b17023SJohn Marino default: 2872*e4b17023SJohn Marino break; 2873*e4b17023SJohn Marino } 2874*e4b17023SJohn Marino 2875*e4b17023SJohn Marino fmt = GET_RTX_FORMAT (code); 2876*e4b17023SJohn Marino for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 2877*e4b17023SJohn Marino { 2878*e4b17023SJohn Marino int j; 2879*e4b17023SJohn Marino 2880*e4b17023SJohn Marino if (fmt[i] == 'e') 2881*e4b17023SJohn Marino validate_canon_reg (&XEXP (x, i), insn); 2882*e4b17023SJohn Marino else if (fmt[i] == 'E') 2883*e4b17023SJohn Marino for (j = 0; j < XVECLEN (x, i); j++) 2884*e4b17023SJohn Marino validate_canon_reg (&XVECEXP (x, i, j), insn); 2885*e4b17023SJohn Marino } 2886*e4b17023SJohn Marino 2887*e4b17023SJohn Marino return x; 2888*e4b17023SJohn Marino } 2889*e4b17023SJohn Marino 2890*e4b17023SJohn Marino /* Given an operation (CODE, *PARG1, *PARG2), where CODE is a comparison 2891*e4b17023SJohn Marino operation (EQ, NE, GT, etc.), follow it back through the hash table and 2892*e4b17023SJohn Marino find what values are being compared. 2893*e4b17023SJohn Marino 2894*e4b17023SJohn Marino *PARG1 and *PARG2 are updated to contain the rtx representing the values 2895*e4b17023SJohn Marino actually being compared. For example, if *PARG1 was (cc0) and *PARG2 2896*e4b17023SJohn Marino was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were 2897*e4b17023SJohn Marino compared to produce cc0. 2898*e4b17023SJohn Marino 2899*e4b17023SJohn Marino The return value is the comparison operator and is either the code of 2900*e4b17023SJohn Marino the comparison found or the code corresponding to the inverse of the comparison. */ 2901*e4b17023SJohn Marino 2902*e4b17023SJohn Marino static enum rtx_code 2903*e4b17023SJohn Marino find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2, 2904*e4b17023SJohn Marino enum machine_mode *pmode1, enum machine_mode *pmode2) 2905*e4b17023SJohn Marino { 2906*e4b17023SJohn Marino rtx arg1, arg2; 2907*e4b17023SJohn Marino 2908*e4b17023SJohn Marino arg1 = *parg1, arg2 = *parg2; 2909*e4b17023SJohn Marino 2910*e4b17023SJohn Marino /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */ 2911*e4b17023SJohn Marino 2912*e4b17023SJohn Marino while (arg2 == CONST0_RTX (GET_MODE (arg1))) 2913*e4b17023SJohn Marino { 2914*e4b17023SJohn Marino /* Set nonzero when we find something of interest. */ 2915*e4b17023SJohn Marino rtx x = 0; 2916*e4b17023SJohn Marino int reverse_code = 0; 2917*e4b17023SJohn Marino struct table_elt *p = 0; 2918*e4b17023SJohn Marino 2919*e4b17023SJohn Marino /* If arg1 is a COMPARE, extract the comparison arguments from it. 2920*e4b17023SJohn Marino On machines with CC0, this is the only case that can occur, since 2921*e4b17023SJohn Marino fold_rtx will return the COMPARE or item being compared with zero 2922*e4b17023SJohn Marino when given CC0.
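     An invented example: with ARG1 == (compare:CC (reg:SI 90) (const_int 7))
     and ARG2 == (const_int 0), the test below sets X = ARG1, and the bottom of
     the loop then continues with ARG1 = (reg:SI 90) and ARG2 = (const_int 7),
     the values actually being compared.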
*/ 2923*e4b17023SJohn Marino 2924*e4b17023SJohn Marino if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx) 2925*e4b17023SJohn Marino x = arg1; 2926*e4b17023SJohn Marino 2927*e4b17023SJohn Marino /* If ARG1 is a comparison operator and CODE is testing for 2928*e4b17023SJohn Marino STORE_FLAG_VALUE, get the inner arguments. */ 2929*e4b17023SJohn Marino 2930*e4b17023SJohn Marino else if (COMPARISON_P (arg1)) 2931*e4b17023SJohn Marino { 2932*e4b17023SJohn Marino #ifdef FLOAT_STORE_FLAG_VALUE 2933*e4b17023SJohn Marino REAL_VALUE_TYPE fsfv; 2934*e4b17023SJohn Marino #endif 2935*e4b17023SJohn Marino 2936*e4b17023SJohn Marino if (code == NE 2937*e4b17023SJohn Marino || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT 2938*e4b17023SJohn Marino && code == LT && STORE_FLAG_VALUE == -1) 2939*e4b17023SJohn Marino #ifdef FLOAT_STORE_FLAG_VALUE 2940*e4b17023SJohn Marino || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1)) 2941*e4b17023SJohn Marino && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)), 2942*e4b17023SJohn Marino REAL_VALUE_NEGATIVE (fsfv))) 2943*e4b17023SJohn Marino #endif 2944*e4b17023SJohn Marino ) 2945*e4b17023SJohn Marino x = arg1; 2946*e4b17023SJohn Marino else if (code == EQ 2947*e4b17023SJohn Marino || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT 2948*e4b17023SJohn Marino && code == GE && STORE_FLAG_VALUE == -1) 2949*e4b17023SJohn Marino #ifdef FLOAT_STORE_FLAG_VALUE 2950*e4b17023SJohn Marino || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1)) 2951*e4b17023SJohn Marino && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)), 2952*e4b17023SJohn Marino REAL_VALUE_NEGATIVE (fsfv))) 2953*e4b17023SJohn Marino #endif 2954*e4b17023SJohn Marino ) 2955*e4b17023SJohn Marino x = arg1, reverse_code = 1; 2956*e4b17023SJohn Marino } 2957*e4b17023SJohn Marino 2958*e4b17023SJohn Marino /* ??? We could also check for 2959*e4b17023SJohn Marino 2960*e4b17023SJohn Marino (ne (and (eq (...) (const_int 1))) (const_int 0)) 2961*e4b17023SJohn Marino 2962*e4b17023SJohn Marino and related forms, but let's wait until we see them occurring. */ 2963*e4b17023SJohn Marino 2964*e4b17023SJohn Marino if (x == 0) 2965*e4b17023SJohn Marino /* Look up ARG1 in the hash table and see if it has an equivalence 2966*e4b17023SJohn Marino that lets us see what is being compared. */ 2967*e4b17023SJohn Marino p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1)); 2968*e4b17023SJohn Marino if (p) 2969*e4b17023SJohn Marino { 2970*e4b17023SJohn Marino p = p->first_same_value; 2971*e4b17023SJohn Marino 2972*e4b17023SJohn Marino /* If what we compare is already known to be constant, that is as 2973*e4b17023SJohn Marino good as it gets. 2974*e4b17023SJohn Marino We need to break the loop in this case, because otherwise we 2975*e4b17023SJohn Marino can have an infinite loop when looking at a reg that is known 2976*e4b17023SJohn Marino to be a constant which is the same as a comparison of a reg 2977*e4b17023SJohn Marino against zero which appears later in the insn stream, which in 2978*e4b17023SJohn Marino turn is constant and the same as the comparison of the first reg 2979*e4b17023SJohn Marino against zero... 
*/ 2980*e4b17023SJohn Marino if (p->is_const) 2981*e4b17023SJohn Marino break; 2982*e4b17023SJohn Marino } 2983*e4b17023SJohn Marino 2984*e4b17023SJohn Marino for (; p; p = p->next_same_value) 2985*e4b17023SJohn Marino { 2986*e4b17023SJohn Marino enum machine_mode inner_mode = GET_MODE (p->exp); 2987*e4b17023SJohn Marino #ifdef FLOAT_STORE_FLAG_VALUE 2988*e4b17023SJohn Marino REAL_VALUE_TYPE fsfv; 2989*e4b17023SJohn Marino #endif 2990*e4b17023SJohn Marino 2991*e4b17023SJohn Marino /* If the entry isn't valid, skip it. */ 2992*e4b17023SJohn Marino if (! exp_equiv_p (p->exp, p->exp, 1, false)) 2993*e4b17023SJohn Marino continue; 2994*e4b17023SJohn Marino 2995*e4b17023SJohn Marino /* If it's the same comparison we're already looking at, skip it. */ 2996*e4b17023SJohn Marino if (COMPARISON_P (p->exp) 2997*e4b17023SJohn Marino && XEXP (p->exp, 0) == arg1 2998*e4b17023SJohn Marino && XEXP (p->exp, 1) == arg2) 2999*e4b17023SJohn Marino continue; 3000*e4b17023SJohn Marino 3001*e4b17023SJohn Marino if (GET_CODE (p->exp) == COMPARE 3002*e4b17023SJohn Marino /* Another possibility is that this machine has a compare insn 3003*e4b17023SJohn Marino that includes the comparison code. In that case, ARG1 would 3004*e4b17023SJohn Marino be equivalent to a comparison operation that would set ARG1 to 3005*e4b17023SJohn Marino either STORE_FLAG_VALUE or zero. If this is an NE operation, 3006*e4b17023SJohn Marino ORIG_CODE is the actual comparison being done; if it is an EQ, 3007*e4b17023SJohn Marino we must reverse ORIG_CODE. On machines with a negative value 3008*e4b17023SJohn Marino for STORE_FLAG_VALUE, also look at LT and GE operations. */ 3009*e4b17023SJohn Marino || ((code == NE 3010*e4b17023SJohn Marino || (code == LT 3011*e4b17023SJohn Marino && val_signbit_known_set_p (inner_mode, 3012*e4b17023SJohn Marino STORE_FLAG_VALUE)) 3013*e4b17023SJohn Marino #ifdef FLOAT_STORE_FLAG_VALUE 3014*e4b17023SJohn Marino || (code == LT 3015*e4b17023SJohn Marino && SCALAR_FLOAT_MODE_P (inner_mode) 3016*e4b17023SJohn Marino && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)), 3017*e4b17023SJohn Marino REAL_VALUE_NEGATIVE (fsfv))) 3018*e4b17023SJohn Marino #endif 3019*e4b17023SJohn Marino ) 3020*e4b17023SJohn Marino && COMPARISON_P (p->exp))) 3021*e4b17023SJohn Marino { 3022*e4b17023SJohn Marino x = p->exp; 3023*e4b17023SJohn Marino break; 3024*e4b17023SJohn Marino } 3025*e4b17023SJohn Marino else if ((code == EQ 3026*e4b17023SJohn Marino || (code == GE 3027*e4b17023SJohn Marino && val_signbit_known_set_p (inner_mode, 3028*e4b17023SJohn Marino STORE_FLAG_VALUE)) 3029*e4b17023SJohn Marino #ifdef FLOAT_STORE_FLAG_VALUE 3030*e4b17023SJohn Marino || (code == GE 3031*e4b17023SJohn Marino && SCALAR_FLOAT_MODE_P (inner_mode) 3032*e4b17023SJohn Marino && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)), 3033*e4b17023SJohn Marino REAL_VALUE_NEGATIVE (fsfv))) 3034*e4b17023SJohn Marino #endif 3035*e4b17023SJohn Marino ) 3036*e4b17023SJohn Marino && COMPARISON_P (p->exp)) 3037*e4b17023SJohn Marino { 3038*e4b17023SJohn Marino reverse_code = 1; 3039*e4b17023SJohn Marino x = p->exp; 3040*e4b17023SJohn Marino break; 3041*e4b17023SJohn Marino } 3042*e4b17023SJohn Marino 3043*e4b17023SJohn Marino /* If this is a non-trapping address, e.g. fp + constant, the 3044*e4b17023SJohn Marino equivalent is a better operand since it may let us predict 3045*e4b17023SJohn Marino the value of the comparison.
*/ 3046*e4b17023SJohn Marino else if (!rtx_addr_can_trap_p (p->exp)) 3047*e4b17023SJohn Marino { 3048*e4b17023SJohn Marino arg1 = p->exp; 3049*e4b17023SJohn Marino continue; 3050*e4b17023SJohn Marino } 3051*e4b17023SJohn Marino } 3052*e4b17023SJohn Marino 3053*e4b17023SJohn Marino /* If we didn't find a useful equivalence for ARG1, we are done. 3054*e4b17023SJohn Marino Otherwise, set up for the next iteration. */ 3055*e4b17023SJohn Marino if (x == 0) 3056*e4b17023SJohn Marino break; 3057*e4b17023SJohn Marino 3058*e4b17023SJohn Marino /* If we need to reverse the comparison, make sure that that is 3059*e4b17023SJohn Marino possible -- we can't necessarily infer the value of GE from LT 3060*e4b17023SJohn Marino with floating-point operands. */ 3061*e4b17023SJohn Marino if (reverse_code) 3062*e4b17023SJohn Marino { 3063*e4b17023SJohn Marino enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX); 3064*e4b17023SJohn Marino if (reversed == UNKNOWN) 3065*e4b17023SJohn Marino break; 3066*e4b17023SJohn Marino else 3067*e4b17023SJohn Marino code = reversed; 3068*e4b17023SJohn Marino } 3069*e4b17023SJohn Marino else if (COMPARISON_P (x)) 3070*e4b17023SJohn Marino code = GET_CODE (x); 3071*e4b17023SJohn Marino arg1 = XEXP (x, 0), arg2 = XEXP (x, 1); 3072*e4b17023SJohn Marino } 3073*e4b17023SJohn Marino 3074*e4b17023SJohn Marino /* Return our results. Return the modes from before fold_rtx 3075*e4b17023SJohn Marino because fold_rtx might produce const_int, and then it's too late. */ 3076*e4b17023SJohn Marino *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2); 3077*e4b17023SJohn Marino *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0); 3078*e4b17023SJohn Marino 3079*e4b17023SJohn Marino return code; 3080*e4b17023SJohn Marino } 3081*e4b17023SJohn Marino 3082*e4b17023SJohn Marino /* If X is a nontrivial arithmetic operation on an argument for which 3083*e4b17023SJohn Marino a constant value can be determined, return the result of operating 3084*e4b17023SJohn Marino on that value, as a constant. Otherwise, return X, possibly with 3085*e4b17023SJohn Marino one or more operands changed to a forward-propagated constant. 3086*e4b17023SJohn Marino 3087*e4b17023SJohn Marino If X is a register whose contents are known, we do NOT return 3088*e4b17023SJohn Marino those contents here; equiv_constant is called to perform that task. 3089*e4b17023SJohn Marino For SUBREGs and MEMs, we do that both here and in equiv_constant. 3090*e4b17023SJohn Marino 3091*e4b17023SJohn Marino INSN is the insn that we may be modifying. If it is 0, make a copy 3092*e4b17023SJohn Marino of X before modifying it. */ 3093*e4b17023SJohn Marino 3094*e4b17023SJohn Marino static rtx 3095*e4b17023SJohn Marino fold_rtx (rtx x, rtx insn) 3096*e4b17023SJohn Marino { 3097*e4b17023SJohn Marino enum rtx_code code; 3098*e4b17023SJohn Marino enum machine_mode mode; 3099*e4b17023SJohn Marino const char *fmt; 3100*e4b17023SJohn Marino int i; 3101*e4b17023SJohn Marino rtx new_rtx = 0; 3102*e4b17023SJohn Marino int changed = 0; 3103*e4b17023SJohn Marino 3104*e4b17023SJohn Marino /* Operands of X. */ 3105*e4b17023SJohn Marino rtx folded_arg0; 3106*e4b17023SJohn Marino rtx folded_arg1; 3107*e4b17023SJohn Marino 3108*e4b17023SJohn Marino /* Constant equivalents of first three operands of X; 3109*e4b17023SJohn Marino 0 when no such equivalent is known. 
*/ 3110*e4b17023SJohn Marino rtx const_arg0; 3111*e4b17023SJohn Marino rtx const_arg1; 3112*e4b17023SJohn Marino rtx const_arg2; 3113*e4b17023SJohn Marino 3114*e4b17023SJohn Marino /* The mode of the first operand of X. We need this for sign and zero 3115*e4b17023SJohn Marino extends. */ 3116*e4b17023SJohn Marino enum machine_mode mode_arg0; 3117*e4b17023SJohn Marino 3118*e4b17023SJohn Marino if (x == 0) 3119*e4b17023SJohn Marino return x; 3120*e4b17023SJohn Marino 3121*e4b17023SJohn Marino /* Try to perform some initial simplifications on X. */ 3122*e4b17023SJohn Marino code = GET_CODE (x); 3123*e4b17023SJohn Marino switch (code) 3124*e4b17023SJohn Marino { 3125*e4b17023SJohn Marino case MEM: 3126*e4b17023SJohn Marino case SUBREG: 3127*e4b17023SJohn Marino if ((new_rtx = equiv_constant (x)) != NULL_RTX) 3128*e4b17023SJohn Marino return new_rtx; 3129*e4b17023SJohn Marino return x; 3130*e4b17023SJohn Marino 3131*e4b17023SJohn Marino case CONST: 3132*e4b17023SJohn Marino case CONST_INT: 3133*e4b17023SJohn Marino case CONST_DOUBLE: 3134*e4b17023SJohn Marino case CONST_FIXED: 3135*e4b17023SJohn Marino case CONST_VECTOR: 3136*e4b17023SJohn Marino case SYMBOL_REF: 3137*e4b17023SJohn Marino case LABEL_REF: 3138*e4b17023SJohn Marino case REG: 3139*e4b17023SJohn Marino case PC: 3140*e4b17023SJohn Marino /* No use simplifying an EXPR_LIST 3141*e4b17023SJohn Marino since they are used only for lists of args 3142*e4b17023SJohn Marino in a function call's REG_EQUAL note. */ 3143*e4b17023SJohn Marino case EXPR_LIST: 3144*e4b17023SJohn Marino return x; 3145*e4b17023SJohn Marino 3146*e4b17023SJohn Marino #ifdef HAVE_cc0 3147*e4b17023SJohn Marino case CC0: 3148*e4b17023SJohn Marino return prev_insn_cc0; 3149*e4b17023SJohn Marino #endif 3150*e4b17023SJohn Marino 3151*e4b17023SJohn Marino case ASM_OPERANDS: 3152*e4b17023SJohn Marino if (insn) 3153*e4b17023SJohn Marino { 3154*e4b17023SJohn Marino for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--) 3155*e4b17023SJohn Marino validate_change (insn, &ASM_OPERANDS_INPUT (x, i), 3156*e4b17023SJohn Marino fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0); 3157*e4b17023SJohn Marino } 3158*e4b17023SJohn Marino return x; 3159*e4b17023SJohn Marino 3160*e4b17023SJohn Marino #ifdef NO_FUNCTION_CSE 3161*e4b17023SJohn Marino case CALL: 3162*e4b17023SJohn Marino if (CONSTANT_P (XEXP (XEXP (x, 0), 0))) 3163*e4b17023SJohn Marino return x; 3164*e4b17023SJohn Marino break; 3165*e4b17023SJohn Marino #endif 3166*e4b17023SJohn Marino 3167*e4b17023SJohn Marino /* Anything else goes through the loop below. */ 3168*e4b17023SJohn Marino default: 3169*e4b17023SJohn Marino break; 3170*e4b17023SJohn Marino } 3171*e4b17023SJohn Marino 3172*e4b17023SJohn Marino mode = GET_MODE (x); 3173*e4b17023SJohn Marino const_arg0 = 0; 3174*e4b17023SJohn Marino const_arg1 = 0; 3175*e4b17023SJohn Marino const_arg2 = 0; 3176*e4b17023SJohn Marino mode_arg0 = VOIDmode; 3177*e4b17023SJohn Marino 3178*e4b17023SJohn Marino /* Try folding our operands. 3179*e4b17023SJohn Marino Then see which ones have constant values known. 
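     For illustration (values invented): if X is (plus:SI (reg:SI 70)
     (const_int 4)) and register 70 is currently known to hold (const_int 8),
     the loop below leaves FOLDED_ARG0 as the register, sets CONST_ARG0 to
     (const_int 8) via equiv_constant, and sets CONST_ARG1 to (const_int 4);
     the arithmetic cases further down can then fold the whole expression to a
     constant.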
*/ 3180*e4b17023SJohn Marino 3181*e4b17023SJohn Marino fmt = GET_RTX_FORMAT (code); 3182*e4b17023SJohn Marino for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 3183*e4b17023SJohn Marino if (fmt[i] == 'e') 3184*e4b17023SJohn Marino { 3185*e4b17023SJohn Marino rtx folded_arg = XEXP (x, i), const_arg; 3186*e4b17023SJohn Marino enum machine_mode mode_arg = GET_MODE (folded_arg); 3187*e4b17023SJohn Marino 3188*e4b17023SJohn Marino switch (GET_CODE (folded_arg)) 3189*e4b17023SJohn Marino { 3190*e4b17023SJohn Marino case MEM: 3191*e4b17023SJohn Marino case REG: 3192*e4b17023SJohn Marino case SUBREG: 3193*e4b17023SJohn Marino const_arg = equiv_constant (folded_arg); 3194*e4b17023SJohn Marino break; 3195*e4b17023SJohn Marino 3196*e4b17023SJohn Marino case CONST: 3197*e4b17023SJohn Marino case CONST_INT: 3198*e4b17023SJohn Marino case SYMBOL_REF: 3199*e4b17023SJohn Marino case LABEL_REF: 3200*e4b17023SJohn Marino case CONST_DOUBLE: 3201*e4b17023SJohn Marino case CONST_FIXED: 3202*e4b17023SJohn Marino case CONST_VECTOR: 3203*e4b17023SJohn Marino const_arg = folded_arg; 3204*e4b17023SJohn Marino break; 3205*e4b17023SJohn Marino 3206*e4b17023SJohn Marino #ifdef HAVE_cc0 3207*e4b17023SJohn Marino case CC0: 3208*e4b17023SJohn Marino folded_arg = prev_insn_cc0; 3209*e4b17023SJohn Marino mode_arg = prev_insn_cc0_mode; 3210*e4b17023SJohn Marino const_arg = equiv_constant (folded_arg); 3211*e4b17023SJohn Marino break; 3212*e4b17023SJohn Marino #endif 3213*e4b17023SJohn Marino 3214*e4b17023SJohn Marino default: 3215*e4b17023SJohn Marino folded_arg = fold_rtx (folded_arg, insn); 3216*e4b17023SJohn Marino const_arg = equiv_constant (folded_arg); 3217*e4b17023SJohn Marino break; 3218*e4b17023SJohn Marino } 3219*e4b17023SJohn Marino 3220*e4b17023SJohn Marino /* For the first three operands, see if the operand 3221*e4b17023SJohn Marino is constant or equivalent to a constant. */ 3222*e4b17023SJohn Marino switch (i) 3223*e4b17023SJohn Marino { 3224*e4b17023SJohn Marino case 0: 3225*e4b17023SJohn Marino folded_arg0 = folded_arg; 3226*e4b17023SJohn Marino const_arg0 = const_arg; 3227*e4b17023SJohn Marino mode_arg0 = mode_arg; 3228*e4b17023SJohn Marino break; 3229*e4b17023SJohn Marino case 1: 3230*e4b17023SJohn Marino folded_arg1 = folded_arg; 3231*e4b17023SJohn Marino const_arg1 = const_arg; 3232*e4b17023SJohn Marino break; 3233*e4b17023SJohn Marino case 2: 3234*e4b17023SJohn Marino const_arg2 = const_arg; 3235*e4b17023SJohn Marino break; 3236*e4b17023SJohn Marino } 3237*e4b17023SJohn Marino 3238*e4b17023SJohn Marino /* Pick the least expensive of the argument and an equivalent constant 3239*e4b17023SJohn Marino argument. */ 3240*e4b17023SJohn Marino if (const_arg != 0 3241*e4b17023SJohn Marino && const_arg != folded_arg 3242*e4b17023SJohn Marino && COST_IN (const_arg, code, i) <= COST_IN (folded_arg, code, i) 3243*e4b17023SJohn Marino 3244*e4b17023SJohn Marino /* It's not safe to substitute the operand of a conversion 3245*e4b17023SJohn Marino operator with a constant, as the conversion's identity 3246*e4b17023SJohn Marino depends upon the mode of its operand. This optimization 3247*e4b17023SJohn Marino is handled by the call to simplify_unary_operation. 
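     (Invented example: rewriting (zero_extend:DI (reg:SI 70)) as
     (zero_extend:DI (const_int 5)) would lose the SImode of the operand, since
     a CONST_INT carries VOIDmode; the operand is therefore left alone and
     simplify_unary_operation is told MODE_ARG0 explicitly below.)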
*/ 3248*e4b17023SJohn Marino && (GET_RTX_CLASS (code) != RTX_UNARY 3249*e4b17023SJohn Marino || GET_MODE (const_arg) == mode_arg0 3250*e4b17023SJohn Marino || (code != ZERO_EXTEND 3251*e4b17023SJohn Marino && code != SIGN_EXTEND 3252*e4b17023SJohn Marino && code != TRUNCATE 3253*e4b17023SJohn Marino && code != FLOAT_TRUNCATE 3254*e4b17023SJohn Marino && code != FLOAT_EXTEND 3255*e4b17023SJohn Marino && code != FLOAT 3256*e4b17023SJohn Marino && code != FIX 3257*e4b17023SJohn Marino && code != UNSIGNED_FLOAT 3258*e4b17023SJohn Marino && code != UNSIGNED_FIX))) 3259*e4b17023SJohn Marino folded_arg = const_arg; 3260*e4b17023SJohn Marino 3261*e4b17023SJohn Marino if (folded_arg == XEXP (x, i)) 3262*e4b17023SJohn Marino continue; 3263*e4b17023SJohn Marino 3264*e4b17023SJohn Marino if (insn == NULL_RTX && !changed) 3265*e4b17023SJohn Marino x = copy_rtx (x); 3266*e4b17023SJohn Marino changed = 1; 3267*e4b17023SJohn Marino validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1); 3268*e4b17023SJohn Marino } 3269*e4b17023SJohn Marino 3270*e4b17023SJohn Marino if (changed) 3271*e4b17023SJohn Marino { 3272*e4b17023SJohn Marino /* Canonicalize X if necessary, and keep const_argN and folded_argN 3273*e4b17023SJohn Marino consistent with the order in X. */ 3274*e4b17023SJohn Marino if (canonicalize_change_group (insn, x)) 3275*e4b17023SJohn Marino { 3276*e4b17023SJohn Marino rtx tem; 3277*e4b17023SJohn Marino tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem; 3278*e4b17023SJohn Marino tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem; 3279*e4b17023SJohn Marino } 3280*e4b17023SJohn Marino 3281*e4b17023SJohn Marino apply_change_group (); 3282*e4b17023SJohn Marino } 3283*e4b17023SJohn Marino 3284*e4b17023SJohn Marino /* If X is an arithmetic operation, see if we can simplify it. */ 3285*e4b17023SJohn Marino 3286*e4b17023SJohn Marino switch (GET_RTX_CLASS (code)) 3287*e4b17023SJohn Marino { 3288*e4b17023SJohn Marino case RTX_UNARY: 3289*e4b17023SJohn Marino { 3290*e4b17023SJohn Marino /* We can't simplify extension ops unless we know the 3291*e4b17023SJohn Marino original mode. */ 3292*e4b17023SJohn Marino if ((code == ZERO_EXTEND || code == SIGN_EXTEND) 3293*e4b17023SJohn Marino && mode_arg0 == VOIDmode) 3294*e4b17023SJohn Marino break; 3295*e4b17023SJohn Marino 3296*e4b17023SJohn Marino new_rtx = simplify_unary_operation (code, mode, 3297*e4b17023SJohn Marino const_arg0 ? const_arg0 : folded_arg0, 3298*e4b17023SJohn Marino mode_arg0); 3299*e4b17023SJohn Marino } 3300*e4b17023SJohn Marino break; 3301*e4b17023SJohn Marino 3302*e4b17023SJohn Marino case RTX_COMPARE: 3303*e4b17023SJohn Marino case RTX_COMM_COMPARE: 3304*e4b17023SJohn Marino /* See what items are actually being compared and set FOLDED_ARG[01] 3305*e4b17023SJohn Marino to those values and CODE to the actual comparison code. If any are 3306*e4b17023SJohn Marino constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't 3307*e4b17023SJohn Marino do anything if both operands are already known to be constant. */ 3308*e4b17023SJohn Marino 3309*e4b17023SJohn Marino /* ??? Vector mode comparisons are not supported yet. 
*/ 3310*e4b17023SJohn Marino if (VECTOR_MODE_P (mode)) 3311*e4b17023SJohn Marino break; 3312*e4b17023SJohn Marino 3313*e4b17023SJohn Marino if (const_arg0 == 0 || const_arg1 == 0) 3314*e4b17023SJohn Marino { 3315*e4b17023SJohn Marino struct table_elt *p0, *p1; 3316*e4b17023SJohn Marino rtx true_rtx, false_rtx; 3317*e4b17023SJohn Marino enum machine_mode mode_arg1; 3318*e4b17023SJohn Marino 3319*e4b17023SJohn Marino if (SCALAR_FLOAT_MODE_P (mode)) 3320*e4b17023SJohn Marino { 3321*e4b17023SJohn Marino #ifdef FLOAT_STORE_FLAG_VALUE 3322*e4b17023SJohn Marino true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE 3323*e4b17023SJohn Marino (FLOAT_STORE_FLAG_VALUE (mode), mode)); 3324*e4b17023SJohn Marino #else 3325*e4b17023SJohn Marino true_rtx = NULL_RTX; 3326*e4b17023SJohn Marino #endif 3327*e4b17023SJohn Marino false_rtx = CONST0_RTX (mode); 3328*e4b17023SJohn Marino } 3329*e4b17023SJohn Marino else 3330*e4b17023SJohn Marino { 3331*e4b17023SJohn Marino true_rtx = const_true_rtx; 3332*e4b17023SJohn Marino false_rtx = const0_rtx; 3333*e4b17023SJohn Marino } 3334*e4b17023SJohn Marino 3335*e4b17023SJohn Marino code = find_comparison_args (code, &folded_arg0, &folded_arg1, 3336*e4b17023SJohn Marino &mode_arg0, &mode_arg1); 3337*e4b17023SJohn Marino 3338*e4b17023SJohn Marino /* If the mode is VOIDmode or a MODE_CC mode, we don't know 3339*e4b17023SJohn Marino what kinds of things are being compared, so we can't do 3340*e4b17023SJohn Marino anything with this comparison. */ 3341*e4b17023SJohn Marino 3342*e4b17023SJohn Marino if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC) 3343*e4b17023SJohn Marino break; 3344*e4b17023SJohn Marino 3345*e4b17023SJohn Marino const_arg0 = equiv_constant (folded_arg0); 3346*e4b17023SJohn Marino const_arg1 = equiv_constant (folded_arg1); 3347*e4b17023SJohn Marino 3348*e4b17023SJohn Marino /* If we do not now have two constants being compared, see 3349*e4b17023SJohn Marino if we can nevertheless deduce some things about the 3350*e4b17023SJohn Marino comparison. */ 3351*e4b17023SJohn Marino if (const_arg0 == 0 || const_arg1 == 0) 3352*e4b17023SJohn Marino { 3353*e4b17023SJohn Marino if (const_arg1 != NULL) 3354*e4b17023SJohn Marino { 3355*e4b17023SJohn Marino rtx cheapest_simplification; 3356*e4b17023SJohn Marino int cheapest_cost; 3357*e4b17023SJohn Marino rtx simp_result; 3358*e4b17023SJohn Marino struct table_elt *p; 3359*e4b17023SJohn Marino 3360*e4b17023SJohn Marino /* See if we can find an equivalent of folded_arg0 3361*e4b17023SJohn Marino that gets us a cheaper expression, possibly a 3362*e4b17023SJohn Marino constant through simplifications. */ 3363*e4b17023SJohn Marino p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0), 3364*e4b17023SJohn Marino mode_arg0); 3365*e4b17023SJohn Marino 3366*e4b17023SJohn Marino if (p != NULL) 3367*e4b17023SJohn Marino { 3368*e4b17023SJohn Marino cheapest_simplification = x; 3369*e4b17023SJohn Marino cheapest_cost = COST (x); 3370*e4b17023SJohn Marino 3371*e4b17023SJohn Marino for (p = p->first_same_value; p != NULL; p = p->next_same_value) 3372*e4b17023SJohn Marino { 3373*e4b17023SJohn Marino int cost; 3374*e4b17023SJohn Marino 3375*e4b17023SJohn Marino /* If the entry isn't valid, skip it. */ 3376*e4b17023SJohn Marino if (! exp_equiv_p (p->exp, p->exp, 1, false)) 3377*e4b17023SJohn Marino continue; 3378*e4b17023SJohn Marino 3379*e4b17023SJohn Marino /* Try to simplify using this equivalence. 
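	     For instance (an editorial sketch with made-up register
	     numbers): if folded_arg0 is (reg 68) and its equivalence class
	     also contains (and:SI (reg 70) (const_int 1)), then a comparison
	     such as (eq ... (const_int 2)) might simplify all the way to
	     const0_rtx, which is cheaper than the original comparison
	     against (reg 68).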
*/ 3380*e4b17023SJohn Marino simp_result 3381*e4b17023SJohn Marino = simplify_relational_operation (code, mode, 3382*e4b17023SJohn Marino mode_arg0, 3383*e4b17023SJohn Marino p->exp, 3384*e4b17023SJohn Marino const_arg1); 3385*e4b17023SJohn Marino 3386*e4b17023SJohn Marino if (simp_result == NULL) 3387*e4b17023SJohn Marino continue; 3388*e4b17023SJohn Marino 3389*e4b17023SJohn Marino cost = COST (simp_result); 3390*e4b17023SJohn Marino if (cost < cheapest_cost) 3391*e4b17023SJohn Marino { 3392*e4b17023SJohn Marino cheapest_cost = cost; 3393*e4b17023SJohn Marino cheapest_simplification = simp_result; 3394*e4b17023SJohn Marino } 3395*e4b17023SJohn Marino } 3396*e4b17023SJohn Marino 3397*e4b17023SJohn Marino /* If we have a cheaper expression now, use that 3398*e4b17023SJohn Marino and try folding it further, from the top. */ 3399*e4b17023SJohn Marino if (cheapest_simplification != x) 3400*e4b17023SJohn Marino return fold_rtx (copy_rtx (cheapest_simplification), 3401*e4b17023SJohn Marino insn); 3402*e4b17023SJohn Marino } 3403*e4b17023SJohn Marino } 3404*e4b17023SJohn Marino 3405*e4b17023SJohn Marino /* See if the two operands are the same. */ 3406*e4b17023SJohn Marino 3407*e4b17023SJohn Marino if ((REG_P (folded_arg0) 3408*e4b17023SJohn Marino && REG_P (folded_arg1) 3409*e4b17023SJohn Marino && (REG_QTY (REGNO (folded_arg0)) 3410*e4b17023SJohn Marino == REG_QTY (REGNO (folded_arg1)))) 3411*e4b17023SJohn Marino || ((p0 = lookup (folded_arg0, 3412*e4b17023SJohn Marino SAFE_HASH (folded_arg0, mode_arg0), 3413*e4b17023SJohn Marino mode_arg0)) 3414*e4b17023SJohn Marino && (p1 = lookup (folded_arg1, 3415*e4b17023SJohn Marino SAFE_HASH (folded_arg1, mode_arg0), 3416*e4b17023SJohn Marino mode_arg0)) 3417*e4b17023SJohn Marino && p0->first_same_value == p1->first_same_value)) 3418*e4b17023SJohn Marino folded_arg1 = folded_arg0; 3419*e4b17023SJohn Marino 3420*e4b17023SJohn Marino /* If FOLDED_ARG0 is a register, see if the comparison we are 3421*e4b17023SJohn Marino doing now is either the same as we did before or the reverse 3422*e4b17023SJohn Marino (we only check the reverse if not floating-point). */ 3423*e4b17023SJohn Marino else if (REG_P (folded_arg0)) 3424*e4b17023SJohn Marino { 3425*e4b17023SJohn Marino int qty = REG_QTY (REGNO (folded_arg0)); 3426*e4b17023SJohn Marino 3427*e4b17023SJohn Marino if (REGNO_QTY_VALID_P (REGNO (folded_arg0))) 3428*e4b17023SJohn Marino { 3429*e4b17023SJohn Marino struct qty_table_elem *ent = &qty_table[qty]; 3430*e4b17023SJohn Marino 3431*e4b17023SJohn Marino if ((comparison_dominates_p (ent->comparison_code, code) 3432*e4b17023SJohn Marino || (! 
FLOAT_MODE_P (mode_arg0) 3433*e4b17023SJohn Marino && comparison_dominates_p (ent->comparison_code, 3434*e4b17023SJohn Marino reverse_condition (code)))) 3435*e4b17023SJohn Marino && (rtx_equal_p (ent->comparison_const, folded_arg1) 3436*e4b17023SJohn Marino || (const_arg1 3437*e4b17023SJohn Marino && rtx_equal_p (ent->comparison_const, 3438*e4b17023SJohn Marino const_arg1)) 3439*e4b17023SJohn Marino || (REG_P (folded_arg1) 3440*e4b17023SJohn Marino && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty)))) 3441*e4b17023SJohn Marino { 3442*e4b17023SJohn Marino if (comparison_dominates_p (ent->comparison_code, code)) 3443*e4b17023SJohn Marino { 3444*e4b17023SJohn Marino if (true_rtx) 3445*e4b17023SJohn Marino return true_rtx; 3446*e4b17023SJohn Marino else 3447*e4b17023SJohn Marino break; 3448*e4b17023SJohn Marino } 3449*e4b17023SJohn Marino else 3450*e4b17023SJohn Marino return false_rtx; 3451*e4b17023SJohn Marino } 3452*e4b17023SJohn Marino } 3453*e4b17023SJohn Marino } 3454*e4b17023SJohn Marino } 3455*e4b17023SJohn Marino } 3456*e4b17023SJohn Marino 3457*e4b17023SJohn Marino /* If we are comparing against zero, see if the first operand is 3458*e4b17023SJohn Marino equivalent to an IOR with a constant. If so, we may be able to 3459*e4b17023SJohn Marino determine the result of this comparison. */ 3460*e4b17023SJohn Marino if (const_arg1 == const0_rtx && !const_arg0) 3461*e4b17023SJohn Marino { 3462*e4b17023SJohn Marino rtx y = lookup_as_function (folded_arg0, IOR); 3463*e4b17023SJohn Marino rtx inner_const; 3464*e4b17023SJohn Marino 3465*e4b17023SJohn Marino if (y != 0 3466*e4b17023SJohn Marino && (inner_const = equiv_constant (XEXP (y, 1))) != 0 3467*e4b17023SJohn Marino && CONST_INT_P (inner_const) 3468*e4b17023SJohn Marino && INTVAL (inner_const) != 0) 3469*e4b17023SJohn Marino folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const); 3470*e4b17023SJohn Marino } 3471*e4b17023SJohn Marino 3472*e4b17023SJohn Marino { 3473*e4b17023SJohn Marino rtx op0 = const_arg0 ? const_arg0 : folded_arg0; 3474*e4b17023SJohn Marino rtx op1 = const_arg1 ? const_arg1 : folded_arg1; 3475*e4b17023SJohn Marino new_rtx = simplify_relational_operation (code, mode, mode_arg0, op0, op1); 3476*e4b17023SJohn Marino } 3477*e4b17023SJohn Marino break; 3478*e4b17023SJohn Marino 3479*e4b17023SJohn Marino case RTX_BIN_ARITH: 3480*e4b17023SJohn Marino case RTX_COMM_ARITH: 3481*e4b17023SJohn Marino switch (code) 3482*e4b17023SJohn Marino { 3483*e4b17023SJohn Marino case PLUS: 3484*e4b17023SJohn Marino /* If the second operand is a LABEL_REF, see if the first is a MINUS 3485*e4b17023SJohn Marino with that LABEL_REF as its second operand. If so, the result is 3486*e4b17023SJohn Marino the first operand of that MINUS. This handles switches with an 3487*e4b17023SJohn Marino ADDR_DIFF_VEC table. */ 3488*e4b17023SJohn Marino if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF) 3489*e4b17023SJohn Marino { 3490*e4b17023SJohn Marino rtx y 3491*e4b17023SJohn Marino = GET_CODE (folded_arg0) == MINUS ? folded_arg0 3492*e4b17023SJohn Marino : lookup_as_function (folded_arg0, MINUS); 3493*e4b17023SJohn Marino 3494*e4b17023SJohn Marino if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF 3495*e4b17023SJohn Marino && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0)) 3496*e4b17023SJohn Marino return XEXP (y, 0); 3497*e4b17023SJohn Marino 3498*e4b17023SJohn Marino /* Now try for a CONST of a MINUS like the above. */ 3499*e4b17023SJohn Marino if ((y = (GET_CODE (folded_arg0) == CONST ? 
folded_arg0
			 : lookup_as_function (folded_arg0, CONST))) != 0
		  && GET_CODE (XEXP (y, 0)) == MINUS
		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
		return XEXP (XEXP (y, 0), 0);
	    }

	  /* Likewise if the operands are in the other order.  */
	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
	    {
	      rtx y
		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
		  : lookup_as_function (folded_arg1, MINUS);

	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
		return XEXP (y, 0);

	      /* Now try for a CONST of a MINUS like the above.  */
	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
			: lookup_as_function (folded_arg1, CONST))) != 0
		  && GET_CODE (XEXP (y, 0)) == MINUS
		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
		return XEXP (XEXP (y, 0), 0);
	    }

	  /* If the second operand is a register equivalent to a negative
	     CONST_INT, see if we can find a register equivalent to the
	     positive constant.  Make a MINUS if so.  Don't do this for
	     a non-negative constant since we might then alternate between
	     choosing positive and negative constants.  Having the positive
	     constant previously used is the more common case.  Be sure
	     the resulting constant is non-negative; if const_arg1 were
	     the smallest negative number this would overflow: depending
	     on the mode, this would either just be the same value (and
	     hence not save anything) or be incorrect.  */
	  if (const_arg1 != 0 && CONST_INT_P (const_arg1)
	      && INTVAL (const_arg1) < 0
	      /* This used to test

		 -INTVAL (const_arg1) >= 0

		 But the Sun V5.0 compilers mis-compiled that test.  So
		 instead we test for the problematic value in a more direct
		 manner and hope the Sun compilers get it correct.
*/ 3546*e4b17023SJohn Marino && INTVAL (const_arg1) != 3547*e4b17023SJohn Marino ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)) 3548*e4b17023SJohn Marino && REG_P (folded_arg1)) 3549*e4b17023SJohn Marino { 3550*e4b17023SJohn Marino rtx new_const = GEN_INT (-INTVAL (const_arg1)); 3551*e4b17023SJohn Marino struct table_elt *p 3552*e4b17023SJohn Marino = lookup (new_const, SAFE_HASH (new_const, mode), mode); 3553*e4b17023SJohn Marino 3554*e4b17023SJohn Marino if (p) 3555*e4b17023SJohn Marino for (p = p->first_same_value; p; p = p->next_same_value) 3556*e4b17023SJohn Marino if (REG_P (p->exp)) 3557*e4b17023SJohn Marino return simplify_gen_binary (MINUS, mode, folded_arg0, 3558*e4b17023SJohn Marino canon_reg (p->exp, NULL_RTX)); 3559*e4b17023SJohn Marino } 3560*e4b17023SJohn Marino goto from_plus; 3561*e4b17023SJohn Marino 3562*e4b17023SJohn Marino case MINUS: 3563*e4b17023SJohn Marino /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2). 3564*e4b17023SJohn Marino If so, produce (PLUS Z C2-C). */ 3565*e4b17023SJohn Marino if (const_arg1 != 0 && CONST_INT_P (const_arg1)) 3566*e4b17023SJohn Marino { 3567*e4b17023SJohn Marino rtx y = lookup_as_function (XEXP (x, 0), PLUS); 3568*e4b17023SJohn Marino if (y && CONST_INT_P (XEXP (y, 1))) 3569*e4b17023SJohn Marino return fold_rtx (plus_constant (copy_rtx (y), 3570*e4b17023SJohn Marino -INTVAL (const_arg1)), 3571*e4b17023SJohn Marino NULL_RTX); 3572*e4b17023SJohn Marino } 3573*e4b17023SJohn Marino 3574*e4b17023SJohn Marino /* Fall through. */ 3575*e4b17023SJohn Marino 3576*e4b17023SJohn Marino from_plus: 3577*e4b17023SJohn Marino case SMIN: case SMAX: case UMIN: case UMAX: 3578*e4b17023SJohn Marino case IOR: case AND: case XOR: 3579*e4b17023SJohn Marino case MULT: 3580*e4b17023SJohn Marino case ASHIFT: case LSHIFTRT: case ASHIFTRT: 3581*e4b17023SJohn Marino /* If we have (<op> <reg> <const_int>) for an associative OP and REG 3582*e4b17023SJohn Marino is known to be of similar form, we may be able to replace the 3583*e4b17023SJohn Marino operation with a combined operation. This may eliminate the 3584*e4b17023SJohn Marino intermediate operation if every use is simplified in this way. 3585*e4b17023SJohn Marino Note that the similar optimization done by combine.c only works 3586*e4b17023SJohn Marino if the intermediate operation's result has only one reference. 
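	     As a concrete illustration (editorial, register numbers made
	     up): if (reg 65) is recorded as equivalent to
	     (plus:SI (reg 70) (const_int 4)), then
	     (plus:SI (reg 65) (const_int 8)) can be rewritten as
	     (plus:SI (reg 70) (const_int 12)), composing the two constants
	     with associate_code below.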
*/ 3587*e4b17023SJohn Marino 3588*e4b17023SJohn Marino if (REG_P (folded_arg0) 3589*e4b17023SJohn Marino && const_arg1 && CONST_INT_P (const_arg1)) 3590*e4b17023SJohn Marino { 3591*e4b17023SJohn Marino int is_shift 3592*e4b17023SJohn Marino = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT); 3593*e4b17023SJohn Marino rtx y, inner_const, new_const; 3594*e4b17023SJohn Marino rtx canon_const_arg1 = const_arg1; 3595*e4b17023SJohn Marino enum rtx_code associate_code; 3596*e4b17023SJohn Marino 3597*e4b17023SJohn Marino if (is_shift 3598*e4b17023SJohn Marino && (INTVAL (const_arg1) >= GET_MODE_PRECISION (mode) 3599*e4b17023SJohn Marino || INTVAL (const_arg1) < 0)) 3600*e4b17023SJohn Marino { 3601*e4b17023SJohn Marino if (SHIFT_COUNT_TRUNCATED) 3602*e4b17023SJohn Marino canon_const_arg1 = GEN_INT (INTVAL (const_arg1) 3603*e4b17023SJohn Marino & (GET_MODE_BITSIZE (mode) 3604*e4b17023SJohn Marino - 1)); 3605*e4b17023SJohn Marino else 3606*e4b17023SJohn Marino break; 3607*e4b17023SJohn Marino } 3608*e4b17023SJohn Marino 3609*e4b17023SJohn Marino y = lookup_as_function (folded_arg0, code); 3610*e4b17023SJohn Marino if (y == 0) 3611*e4b17023SJohn Marino break; 3612*e4b17023SJohn Marino 3613*e4b17023SJohn Marino /* If we have compiled a statement like 3614*e4b17023SJohn Marino "if (x == (x & mask1))", and now are looking at 3615*e4b17023SJohn Marino "x & mask2", we will have a case where the first operand 3616*e4b17023SJohn Marino of Y is the same as our first operand. Unless we detect 3617*e4b17023SJohn Marino this case, an infinite loop will result. */ 3618*e4b17023SJohn Marino if (XEXP (y, 0) == folded_arg0) 3619*e4b17023SJohn Marino break; 3620*e4b17023SJohn Marino 3621*e4b17023SJohn Marino inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0)); 3622*e4b17023SJohn Marino if (!inner_const || !CONST_INT_P (inner_const)) 3623*e4b17023SJohn Marino break; 3624*e4b17023SJohn Marino 3625*e4b17023SJohn Marino /* Don't associate these operations if they are a PLUS with the 3626*e4b17023SJohn Marino same constant and it is a power of two. These might be doable 3627*e4b17023SJohn Marino with a pre- or post-increment. Similarly for two subtracts of 3628*e4b17023SJohn Marino identical powers of two with post decrement. */ 3629*e4b17023SJohn Marino 3630*e4b17023SJohn Marino if (code == PLUS && const_arg1 == inner_const 3631*e4b17023SJohn Marino && ((HAVE_PRE_INCREMENT 3632*e4b17023SJohn Marino && exact_log2 (INTVAL (const_arg1)) >= 0) 3633*e4b17023SJohn Marino || (HAVE_POST_INCREMENT 3634*e4b17023SJohn Marino && exact_log2 (INTVAL (const_arg1)) >= 0) 3635*e4b17023SJohn Marino || (HAVE_PRE_DECREMENT 3636*e4b17023SJohn Marino && exact_log2 (- INTVAL (const_arg1)) >= 0) 3637*e4b17023SJohn Marino || (HAVE_POST_DECREMENT 3638*e4b17023SJohn Marino && exact_log2 (- INTVAL (const_arg1)) >= 0))) 3639*e4b17023SJohn Marino break; 3640*e4b17023SJohn Marino 3641*e4b17023SJohn Marino /* ??? Vector mode shifts by scalar 3642*e4b17023SJohn Marino shift operand are not supported yet. 
*/
	      if (is_shift && VECTOR_MODE_P (mode))
		break;

	      if (is_shift
		  && (INTVAL (inner_const) >= GET_MODE_PRECISION (mode)
		      || INTVAL (inner_const) < 0))
		{
		  if (SHIFT_COUNT_TRUNCATED)
		    inner_const = GEN_INT (INTVAL (inner_const)
					   & (GET_MODE_BITSIZE (mode) - 1));
		  else
		    break;
		}

	      /* Compute the code used to compose the constants.  For example,
		 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */

	      associate_code = (is_shift || code == MINUS ? PLUS : code);

	      new_const = simplify_binary_operation (associate_code, mode,
						     canon_const_arg1,
						     inner_const);

	      if (new_const == 0)
		break;

	      /* If we are associating shift operations, don't let this
		 produce a shift of the size of the object or larger.
		 This could occur when we follow a sign-extend by a right
		 shift on a machine that does a sign-extend as a pair
		 of shifts.  */

	      if (is_shift
		  && CONST_INT_P (new_const)
		  && INTVAL (new_const) >= GET_MODE_PRECISION (mode))
		{
		  /* As an exception, we can turn an ASHIFTRT of this
		     form into a shift of the number of bits - 1.  */
		  if (code == ASHIFTRT)
		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
		  else if (!side_effects_p (XEXP (y, 0)))
		    return CONST0_RTX (mode);
		  else
		    break;
		}

	      y = copy_rtx (XEXP (y, 0));

	      /* If Y contains our first operand (the most common way this
		 can happen is if Y is a MEM), we would go into an infinite
		 loop if we tried to fold it.  So don't in that case.  */

	      if (! reg_mentioned_p (folded_arg0, y))
		y = fold_rtx (y, insn);

	      return simplify_gen_binary (code, mode, y, new_const);
	    }
	  break;

	case DIV: case UDIV:
	  /* ??? The associative optimization performed immediately above is
	     also possible for DIV and UDIV using associate_code of MULT.
	     However, we would need extra code to verify that the
	     multiplication does not overflow, that is, there is no overflow
	     in the calculation of new_const.
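	     For example (editorial note), in SImode
	     (udiv (udiv X (const_int 65536)) (const_int 65536)) must not be
	     collapsed into (udiv X (const_int 0)) just because
	     65536 * 65536 wraps to zero in 32 bits.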
*/ 3708*e4b17023SJohn Marino break; 3709*e4b17023SJohn Marino 3710*e4b17023SJohn Marino default: 3711*e4b17023SJohn Marino break; 3712*e4b17023SJohn Marino } 3713*e4b17023SJohn Marino 3714*e4b17023SJohn Marino new_rtx = simplify_binary_operation (code, mode, 3715*e4b17023SJohn Marino const_arg0 ? const_arg0 : folded_arg0, 3716*e4b17023SJohn Marino const_arg1 ? const_arg1 : folded_arg1); 3717*e4b17023SJohn Marino break; 3718*e4b17023SJohn Marino 3719*e4b17023SJohn Marino case RTX_OBJ: 3720*e4b17023SJohn Marino /* (lo_sum (high X) X) is simply X. */ 3721*e4b17023SJohn Marino if (code == LO_SUM && const_arg0 != 0 3722*e4b17023SJohn Marino && GET_CODE (const_arg0) == HIGH 3723*e4b17023SJohn Marino && rtx_equal_p (XEXP (const_arg0, 0), const_arg1)) 3724*e4b17023SJohn Marino return const_arg1; 3725*e4b17023SJohn Marino break; 3726*e4b17023SJohn Marino 3727*e4b17023SJohn Marino case RTX_TERNARY: 3728*e4b17023SJohn Marino case RTX_BITFIELD_OPS: 3729*e4b17023SJohn Marino new_rtx = simplify_ternary_operation (code, mode, mode_arg0, 3730*e4b17023SJohn Marino const_arg0 ? const_arg0 : folded_arg0, 3731*e4b17023SJohn Marino const_arg1 ? const_arg1 : folded_arg1, 3732*e4b17023SJohn Marino const_arg2 ? const_arg2 : XEXP (x, 2)); 3733*e4b17023SJohn Marino break; 3734*e4b17023SJohn Marino 3735*e4b17023SJohn Marino default: 3736*e4b17023SJohn Marino break; 3737*e4b17023SJohn Marino } 3738*e4b17023SJohn Marino 3739*e4b17023SJohn Marino return new_rtx ? new_rtx : x; 3740*e4b17023SJohn Marino } 3741*e4b17023SJohn Marino 3742*e4b17023SJohn Marino /* Return a constant value currently equivalent to X. 3743*e4b17023SJohn Marino Return 0 if we don't know one. */ 3744*e4b17023SJohn Marino 3745*e4b17023SJohn Marino static rtx 3746*e4b17023SJohn Marino equiv_constant (rtx x) 3747*e4b17023SJohn Marino { 3748*e4b17023SJohn Marino if (REG_P (x) 3749*e4b17023SJohn Marino && REGNO_QTY_VALID_P (REGNO (x))) 3750*e4b17023SJohn Marino { 3751*e4b17023SJohn Marino int x_q = REG_QTY (REGNO (x)); 3752*e4b17023SJohn Marino struct qty_table_elem *x_ent = &qty_table[x_q]; 3753*e4b17023SJohn Marino 3754*e4b17023SJohn Marino if (x_ent->const_rtx) 3755*e4b17023SJohn Marino x = gen_lowpart (GET_MODE (x), x_ent->const_rtx); 3756*e4b17023SJohn Marino } 3757*e4b17023SJohn Marino 3758*e4b17023SJohn Marino if (x == 0 || CONSTANT_P (x)) 3759*e4b17023SJohn Marino return x; 3760*e4b17023SJohn Marino 3761*e4b17023SJohn Marino if (GET_CODE (x) == SUBREG) 3762*e4b17023SJohn Marino { 3763*e4b17023SJohn Marino enum machine_mode mode = GET_MODE (x); 3764*e4b17023SJohn Marino enum machine_mode imode = GET_MODE (SUBREG_REG (x)); 3765*e4b17023SJohn Marino rtx new_rtx; 3766*e4b17023SJohn Marino 3767*e4b17023SJohn Marino /* See if we previously assigned a constant value to this SUBREG. */ 3768*e4b17023SJohn Marino if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0 3769*e4b17023SJohn Marino || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0 3770*e4b17023SJohn Marino || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0) 3771*e4b17023SJohn Marino return new_rtx; 3772*e4b17023SJohn Marino 3773*e4b17023SJohn Marino /* If we didn't and if doing so makes sense, see if we previously 3774*e4b17023SJohn Marino assigned a constant value to the enclosing word mode SUBREG. 
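	 For instance (editorial sketch, little-endian, 32-bit word_mode,
	 made-up register number): for (subreg:QI (reg:DI 70) 4) we look up
	 (subreg:SI (reg:DI 70) 4), and if that wider SUBREG is known to be
	 a CONST_INT we return its low part in QImode.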
*/ 3775*e4b17023SJohn Marino if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode) 3776*e4b17023SJohn Marino && GET_MODE_SIZE (word_mode) < GET_MODE_SIZE (imode)) 3777*e4b17023SJohn Marino { 3778*e4b17023SJohn Marino int byte = SUBREG_BYTE (x) - subreg_lowpart_offset (mode, word_mode); 3779*e4b17023SJohn Marino if (byte >= 0 && (byte % UNITS_PER_WORD) == 0) 3780*e4b17023SJohn Marino { 3781*e4b17023SJohn Marino rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte); 3782*e4b17023SJohn Marino new_rtx = lookup_as_function (y, CONST_INT); 3783*e4b17023SJohn Marino if (new_rtx) 3784*e4b17023SJohn Marino return gen_lowpart (mode, new_rtx); 3785*e4b17023SJohn Marino } 3786*e4b17023SJohn Marino } 3787*e4b17023SJohn Marino 3788*e4b17023SJohn Marino /* Otherwise see if we already have a constant for the inner REG. */ 3789*e4b17023SJohn Marino if (REG_P (SUBREG_REG (x)) 3790*e4b17023SJohn Marino && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0) 3791*e4b17023SJohn Marino return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x)); 3792*e4b17023SJohn Marino 3793*e4b17023SJohn Marino return 0; 3794*e4b17023SJohn Marino } 3795*e4b17023SJohn Marino 3796*e4b17023SJohn Marino /* If X is a MEM, see if it is a constant-pool reference, or look it up in 3797*e4b17023SJohn Marino the hash table in case its value was seen before. */ 3798*e4b17023SJohn Marino 3799*e4b17023SJohn Marino if (MEM_P (x)) 3800*e4b17023SJohn Marino { 3801*e4b17023SJohn Marino struct table_elt *elt; 3802*e4b17023SJohn Marino 3803*e4b17023SJohn Marino x = avoid_constant_pool_reference (x); 3804*e4b17023SJohn Marino if (CONSTANT_P (x)) 3805*e4b17023SJohn Marino return x; 3806*e4b17023SJohn Marino 3807*e4b17023SJohn Marino elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x)); 3808*e4b17023SJohn Marino if (elt == 0) 3809*e4b17023SJohn Marino return 0; 3810*e4b17023SJohn Marino 3811*e4b17023SJohn Marino for (elt = elt->first_same_value; elt; elt = elt->next_same_value) 3812*e4b17023SJohn Marino if (elt->is_const && CONSTANT_P (elt->exp)) 3813*e4b17023SJohn Marino return elt->exp; 3814*e4b17023SJohn Marino } 3815*e4b17023SJohn Marino 3816*e4b17023SJohn Marino return 0; 3817*e4b17023SJohn Marino } 3818*e4b17023SJohn Marino 3819*e4b17023SJohn Marino /* Given INSN, a jump insn, TAKEN indicates if we are following the 3820*e4b17023SJohn Marino "taken" branch. 3821*e4b17023SJohn Marino 3822*e4b17023SJohn Marino In certain cases, this can cause us to add an equivalence. For example, 3823*e4b17023SJohn Marino if we are following the taken case of 3824*e4b17023SJohn Marino if (i == 2) 3825*e4b17023SJohn Marino we can add the fact that `i' and '2' are now equivalent. 3826*e4b17023SJohn Marino 3827*e4b17023SJohn Marino In any case, we can record that this comparison was passed. If the same 3828*e4b17023SJohn Marino comparison is seen later, we will know its value. */ 3829*e4b17023SJohn Marino 3830*e4b17023SJohn Marino static void 3831*e4b17023SJohn Marino record_jump_equiv (rtx insn, bool taken) 3832*e4b17023SJohn Marino { 3833*e4b17023SJohn Marino int cond_known_true; 3834*e4b17023SJohn Marino rtx op0, op1; 3835*e4b17023SJohn Marino rtx set; 3836*e4b17023SJohn Marino enum machine_mode mode, mode0, mode1; 3837*e4b17023SJohn Marino int reversed_nonequality = 0; 3838*e4b17023SJohn Marino enum rtx_code code; 3839*e4b17023SJohn Marino 3840*e4b17023SJohn Marino /* Ensure this is the right kind of insn. 
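     For a conditional jump, pc_set typically returns something of the form
     (set (pc) (if_then_else COND (label_ref L) (pc))), possibly with the
     two arms swapped, which is what the XEXP offsets below rely on
     (editorial note).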
*/ 3841*e4b17023SJohn Marino gcc_assert (any_condjump_p (insn)); 3842*e4b17023SJohn Marino 3843*e4b17023SJohn Marino set = pc_set (insn); 3844*e4b17023SJohn Marino 3845*e4b17023SJohn Marino /* See if this jump condition is known true or false. */ 3846*e4b17023SJohn Marino if (taken) 3847*e4b17023SJohn Marino cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx); 3848*e4b17023SJohn Marino else 3849*e4b17023SJohn Marino cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx); 3850*e4b17023SJohn Marino 3851*e4b17023SJohn Marino /* Get the type of comparison being done and the operands being compared. 3852*e4b17023SJohn Marino If we had to reverse a non-equality condition, record that fact so we 3853*e4b17023SJohn Marino know that it isn't valid for floating-point. */ 3854*e4b17023SJohn Marino code = GET_CODE (XEXP (SET_SRC (set), 0)); 3855*e4b17023SJohn Marino op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn); 3856*e4b17023SJohn Marino op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn); 3857*e4b17023SJohn Marino 3858*e4b17023SJohn Marino code = find_comparison_args (code, &op0, &op1, &mode0, &mode1); 3859*e4b17023SJohn Marino if (! cond_known_true) 3860*e4b17023SJohn Marino { 3861*e4b17023SJohn Marino code = reversed_comparison_code_parts (code, op0, op1, insn); 3862*e4b17023SJohn Marino 3863*e4b17023SJohn Marino /* Don't remember if we can't find the inverse. */ 3864*e4b17023SJohn Marino if (code == UNKNOWN) 3865*e4b17023SJohn Marino return; 3866*e4b17023SJohn Marino } 3867*e4b17023SJohn Marino 3868*e4b17023SJohn Marino /* The mode is the mode of the non-constant. */ 3869*e4b17023SJohn Marino mode = mode0; 3870*e4b17023SJohn Marino if (mode1 != VOIDmode) 3871*e4b17023SJohn Marino mode = mode1; 3872*e4b17023SJohn Marino 3873*e4b17023SJohn Marino record_jump_cond (code, mode, op0, op1, reversed_nonequality); 3874*e4b17023SJohn Marino } 3875*e4b17023SJohn Marino 3876*e4b17023SJohn Marino /* Yet another form of subreg creation. In this case, we want something in 3877*e4b17023SJohn Marino MODE, and we should assume OP has MODE iff it is naturally modeless. */ 3878*e4b17023SJohn Marino 3879*e4b17023SJohn Marino static rtx 3880*e4b17023SJohn Marino record_jump_cond_subreg (enum machine_mode mode, rtx op) 3881*e4b17023SJohn Marino { 3882*e4b17023SJohn Marino enum machine_mode op_mode = GET_MODE (op); 3883*e4b17023SJohn Marino if (op_mode == mode || op_mode == VOIDmode) 3884*e4b17023SJohn Marino return op; 3885*e4b17023SJohn Marino return lowpart_subreg (mode, op, op_mode); 3886*e4b17023SJohn Marino } 3887*e4b17023SJohn Marino 3888*e4b17023SJohn Marino /* We know that comparison CODE applied to OP0 and OP1 in MODE is true. 3889*e4b17023SJohn Marino REVERSED_NONEQUALITY is nonzero if CODE had to be swapped. 3890*e4b17023SJohn Marino Make any useful entries we can with that information. Called from 3891*e4b17023SJohn Marino above function and called recursively. 
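   For example (editorial, register numbers made up): learning that
   (subreg:DI (reg:SI 70) 0), a paradoxical SUBREG, equals (reg:DI 71)
   lets the code below also record that (reg:SI 70) equals the SImode
   low part of (reg:DI 71).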
*/ 3892*e4b17023SJohn Marino 3893*e4b17023SJohn Marino static void 3894*e4b17023SJohn Marino record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0, 3895*e4b17023SJohn Marino rtx op1, int reversed_nonequality) 3896*e4b17023SJohn Marino { 3897*e4b17023SJohn Marino unsigned op0_hash, op1_hash; 3898*e4b17023SJohn Marino int op0_in_memory, op1_in_memory; 3899*e4b17023SJohn Marino struct table_elt *op0_elt, *op1_elt; 3900*e4b17023SJohn Marino 3901*e4b17023SJohn Marino /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG, 3902*e4b17023SJohn Marino we know that they are also equal in the smaller mode (this is also 3903*e4b17023SJohn Marino true for all smaller modes whether or not there is a SUBREG, but 3904*e4b17023SJohn Marino is not worth testing for with no SUBREG). */ 3905*e4b17023SJohn Marino 3906*e4b17023SJohn Marino /* Note that GET_MODE (op0) may not equal MODE. */ 3907*e4b17023SJohn Marino if (code == EQ && paradoxical_subreg_p (op0)) 3908*e4b17023SJohn Marino { 3909*e4b17023SJohn Marino enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0)); 3910*e4b17023SJohn Marino rtx tem = record_jump_cond_subreg (inner_mode, op1); 3911*e4b17023SJohn Marino if (tem) 3912*e4b17023SJohn Marino record_jump_cond (code, mode, SUBREG_REG (op0), tem, 3913*e4b17023SJohn Marino reversed_nonequality); 3914*e4b17023SJohn Marino } 3915*e4b17023SJohn Marino 3916*e4b17023SJohn Marino if (code == EQ && paradoxical_subreg_p (op1)) 3917*e4b17023SJohn Marino { 3918*e4b17023SJohn Marino enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1)); 3919*e4b17023SJohn Marino rtx tem = record_jump_cond_subreg (inner_mode, op0); 3920*e4b17023SJohn Marino if (tem) 3921*e4b17023SJohn Marino record_jump_cond (code, mode, SUBREG_REG (op1), tem, 3922*e4b17023SJohn Marino reversed_nonequality); 3923*e4b17023SJohn Marino } 3924*e4b17023SJohn Marino 3925*e4b17023SJohn Marino /* Similarly, if this is an NE comparison, and either is a SUBREG 3926*e4b17023SJohn Marino making a smaller mode, we know the whole thing is also NE. */ 3927*e4b17023SJohn Marino 3928*e4b17023SJohn Marino /* Note that GET_MODE (op0) may not equal MODE; 3929*e4b17023SJohn Marino if we test MODE instead, we can get an infinite recursion 3930*e4b17023SJohn Marino alternating between two modes each wider than MODE. 
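   As an illustration of the NE case handled next (editorial, made-up
   register number): if (subreg:QI (reg:SI 70) 0) is known to differ from
   (const_int 5), then (reg:SI 70) itself cannot be 5 either, since equal
   SImode values would have equal low QImode parts.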
*/ 3931*e4b17023SJohn Marino 3932*e4b17023SJohn Marino if (code == NE && GET_CODE (op0) == SUBREG 3933*e4b17023SJohn Marino && subreg_lowpart_p (op0) 3934*e4b17023SJohn Marino && (GET_MODE_SIZE (GET_MODE (op0)) 3935*e4b17023SJohn Marino < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))) 3936*e4b17023SJohn Marino { 3937*e4b17023SJohn Marino enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0)); 3938*e4b17023SJohn Marino rtx tem = record_jump_cond_subreg (inner_mode, op1); 3939*e4b17023SJohn Marino if (tem) 3940*e4b17023SJohn Marino record_jump_cond (code, mode, SUBREG_REG (op0), tem, 3941*e4b17023SJohn Marino reversed_nonequality); 3942*e4b17023SJohn Marino } 3943*e4b17023SJohn Marino 3944*e4b17023SJohn Marino if (code == NE && GET_CODE (op1) == SUBREG 3945*e4b17023SJohn Marino && subreg_lowpart_p (op1) 3946*e4b17023SJohn Marino && (GET_MODE_SIZE (GET_MODE (op1)) 3947*e4b17023SJohn Marino < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1))))) 3948*e4b17023SJohn Marino { 3949*e4b17023SJohn Marino enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1)); 3950*e4b17023SJohn Marino rtx tem = record_jump_cond_subreg (inner_mode, op0); 3951*e4b17023SJohn Marino if (tem) 3952*e4b17023SJohn Marino record_jump_cond (code, mode, SUBREG_REG (op1), tem, 3953*e4b17023SJohn Marino reversed_nonequality); 3954*e4b17023SJohn Marino } 3955*e4b17023SJohn Marino 3956*e4b17023SJohn Marino /* Hash both operands. */ 3957*e4b17023SJohn Marino 3958*e4b17023SJohn Marino do_not_record = 0; 3959*e4b17023SJohn Marino hash_arg_in_memory = 0; 3960*e4b17023SJohn Marino op0_hash = HASH (op0, mode); 3961*e4b17023SJohn Marino op0_in_memory = hash_arg_in_memory; 3962*e4b17023SJohn Marino 3963*e4b17023SJohn Marino if (do_not_record) 3964*e4b17023SJohn Marino return; 3965*e4b17023SJohn Marino 3966*e4b17023SJohn Marino do_not_record = 0; 3967*e4b17023SJohn Marino hash_arg_in_memory = 0; 3968*e4b17023SJohn Marino op1_hash = HASH (op1, mode); 3969*e4b17023SJohn Marino op1_in_memory = hash_arg_in_memory; 3970*e4b17023SJohn Marino 3971*e4b17023SJohn Marino if (do_not_record) 3972*e4b17023SJohn Marino return; 3973*e4b17023SJohn Marino 3974*e4b17023SJohn Marino /* Look up both operands. */ 3975*e4b17023SJohn Marino op0_elt = lookup (op0, op0_hash, mode); 3976*e4b17023SJohn Marino op1_elt = lookup (op1, op1_hash, mode); 3977*e4b17023SJohn Marino 3978*e4b17023SJohn Marino /* If both operands are already equivalent or if they are not in the 3979*e4b17023SJohn Marino table but are identical, do nothing. */ 3980*e4b17023SJohn Marino if ((op0_elt != 0 && op1_elt != 0 3981*e4b17023SJohn Marino && op0_elt->first_same_value == op1_elt->first_same_value) 3982*e4b17023SJohn Marino || op0 == op1 || rtx_equal_p (op0, op1)) 3983*e4b17023SJohn Marino return; 3984*e4b17023SJohn Marino 3985*e4b17023SJohn Marino /* If we aren't setting two things equal all we can do is save this 3986*e4b17023SJohn Marino comparison. Similarly if this is floating-point. In the latter 3987*e4b17023SJohn Marino case, OP1 might be zero and both -0.0 and 0.0 are equal to it. 3988*e4b17023SJohn Marino If we record the equality, we might inadvertently delete code 3989*e4b17023SJohn Marino whose intent was to change -0 to +0. 
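   In that situation the code below records, on OP0's quantity, just the
   comparison code together with either the constant OP1 or OP1's quantity.
   For instance (editorial, made-up register number), after a taken
   (lt:SI (reg 70) (const_int 10)) branch, a later identical comparison in
   the same extended basic block can fold to true, and its reverse to
   false, via comparison_dominates_p in fold_rtx.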
*/

  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
    {
      struct qty_table_elem *ent;
      int qty;

      /* If we reversed a floating-point comparison, if OP0 is not a
	 register, or if OP1 is neither a register nor a constant, we can't
	 do anything.  */

      if (!REG_P (op1))
	op1 = equiv_constant (op1);

      if ((reversed_nonequality && FLOAT_MODE_P (mode))
	  || !REG_P (op0) || op1 == 0)
	return;

      /* Put OP0 in the hash table if it isn't already.  This gives it a
	 new quantity number.  */
      if (op0_elt == 0)
	{
	  if (insert_regs (op0, NULL, 0))
	    {
	      rehash_using_reg (op0);
	      op0_hash = HASH (op0, mode);

	      /* If OP0 is contained in OP1, this changes its hash code
		 as well.  Faster to rehash than to check, except
		 for the simple case of a constant.  */
	      if (! CONSTANT_P (op1))
		op1_hash = HASH (op1, mode);
	    }

	  op0_elt = insert (op0, NULL, op0_hash, mode);
	  op0_elt->in_memory = op0_in_memory;
	}

      qty = REG_QTY (REGNO (op0));
      ent = &qty_table[qty];

      ent->comparison_code = code;
      if (REG_P (op1))
	{
	  /* Look it up again--in case op0 and op1 are the same.  */
	  op1_elt = lookup (op1, op1_hash, mode);

	  /* Put OP1 in the hash table so it gets a new quantity number.  */
	  if (op1_elt == 0)
	    {
	      if (insert_regs (op1, NULL, 0))
		{
		  rehash_using_reg (op1);
		  op1_hash = HASH (op1, mode);
		}

	      op1_elt = insert (op1, NULL, op1_hash, mode);
	      op1_elt->in_memory = op1_in_memory;
	    }

	  ent->comparison_const = NULL_RTX;
	  ent->comparison_qty = REG_QTY (REGNO (op1));
	}
      else
	{
	  ent->comparison_const = op1;
	  ent->comparison_qty = -1;
	}

      return;
    }

  /* If either side is still missing an equivalence, make it now,
     then merge the equivalences.  */

  if (op0_elt == 0)
    {
      if (insert_regs (op0, NULL, 0))
	{
	  rehash_using_reg (op0);
	  op0_hash = HASH (op0, mode);
	}

      op0_elt = insert (op0, NULL, op0_hash, mode);
      op0_elt->in_memory = op0_in_memory;
    }

  if (op1_elt == 0)
    {
      if (insert_regs (op1, NULL, 0))
	{
	  rehash_using_reg (op1);
	  op1_hash = HASH (op1, mode);
	}

      op1_elt = insert (op1, NULL, op1_hash, mode);
      op1_elt->in_memory = op1_in_memory;
    }

  merge_equiv_classes (op0_elt, op1_elt);
}

/* CSE processing for one instruction.
   First simplify sources and addresses of all assignments
   in the instruction, using previously-computed equivalent values.
   Then install the new sources and destinations in the table
   of available values.  */

/* Data on one SET contained in the instruction.  */

struct set
{
  /* The SET rtx itself.  */
  rtx rtl;
  /* The SET_SRC of the rtx (the original value, if it is changing).  */
  rtx src;
  /* The hash-table element for the SET_SRC of the SET.  */
  struct table_elt *src_elt;
  /* Hash value for the SET_SRC.  */
  unsigned src_hash;
  /* Hash value for the SET_DEST.  */
  unsigned dest_hash;
  /* The SET_DEST, with SUBREG, etc., stripped.  */
  rtx inner_dest;
  /* Nonzero if the SET_SRC is in memory.  */
  char src_in_memory;
  /* Nonzero if the SET_SRC contains something
     whose value cannot be predicted and understood.  */
  char src_volatile;
  /* Original machine mode, in case it becomes a CONST_INT.
     The size of this field should match the size of the mode
     field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  /* A constant equivalent for SET_SRC, if any.  */
  rtx src_const;
  /* Hash value of constant equivalent for SET_SRC.  */
  unsigned src_const_hash;
  /* Table entry for constant equivalent for SET_SRC, if any.  */
  struct table_elt *src_const_elt;
  /* Table entry for the destination address.
*/ 4129*e4b17023SJohn Marino struct table_elt *dest_addr_elt; 4130*e4b17023SJohn Marino }; 4131*e4b17023SJohn Marino 4132*e4b17023SJohn Marino static void 4133*e4b17023SJohn Marino cse_insn (rtx insn) 4134*e4b17023SJohn Marino { 4135*e4b17023SJohn Marino rtx x = PATTERN (insn); 4136*e4b17023SJohn Marino int i; 4137*e4b17023SJohn Marino rtx tem; 4138*e4b17023SJohn Marino int n_sets = 0; 4139*e4b17023SJohn Marino 4140*e4b17023SJohn Marino rtx src_eqv = 0; 4141*e4b17023SJohn Marino struct table_elt *src_eqv_elt = 0; 4142*e4b17023SJohn Marino int src_eqv_volatile = 0; 4143*e4b17023SJohn Marino int src_eqv_in_memory = 0; 4144*e4b17023SJohn Marino unsigned src_eqv_hash = 0; 4145*e4b17023SJohn Marino 4146*e4b17023SJohn Marino struct set *sets = (struct set *) 0; 4147*e4b17023SJohn Marino 4148*e4b17023SJohn Marino this_insn = insn; 4149*e4b17023SJohn Marino #ifdef HAVE_cc0 4150*e4b17023SJohn Marino /* Records what this insn does to set CC0. */ 4151*e4b17023SJohn Marino this_insn_cc0 = 0; 4152*e4b17023SJohn Marino this_insn_cc0_mode = VOIDmode; 4153*e4b17023SJohn Marino #endif 4154*e4b17023SJohn Marino 4155*e4b17023SJohn Marino /* Find all the SETs and CLOBBERs in this instruction. 4156*e4b17023SJohn Marino Record all the SETs in the array `set' and count them. 4157*e4b17023SJohn Marino Also determine whether there is a CLOBBER that invalidates 4158*e4b17023SJohn Marino all memory references, or all references at varying addresses. */ 4159*e4b17023SJohn Marino 4160*e4b17023SJohn Marino if (CALL_P (insn)) 4161*e4b17023SJohn Marino { 4162*e4b17023SJohn Marino for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1)) 4163*e4b17023SJohn Marino { 4164*e4b17023SJohn Marino if (GET_CODE (XEXP (tem, 0)) == CLOBBER) 4165*e4b17023SJohn Marino invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode); 4166*e4b17023SJohn Marino XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn); 4167*e4b17023SJohn Marino } 4168*e4b17023SJohn Marino } 4169*e4b17023SJohn Marino 4170*e4b17023SJohn Marino if (GET_CODE (x) == SET) 4171*e4b17023SJohn Marino { 4172*e4b17023SJohn Marino sets = XALLOCA (struct set); 4173*e4b17023SJohn Marino sets[0].rtl = x; 4174*e4b17023SJohn Marino 4175*e4b17023SJohn Marino /* Ignore SETs that are unconditional jumps. 4176*e4b17023SJohn Marino They never need cse processing, so this does not hurt. 4177*e4b17023SJohn Marino The reason is not efficiency but rather 4178*e4b17023SJohn Marino so that we can test at the end for instructions 4179*e4b17023SJohn Marino that have been simplified to unconditional jumps 4180*e4b17023SJohn Marino and not be misled by unchanged instructions 4181*e4b17023SJohn Marino that were unconditional jumps to begin with. */ 4182*e4b17023SJohn Marino if (SET_DEST (x) == pc_rtx 4183*e4b17023SJohn Marino && GET_CODE (SET_SRC (x)) == LABEL_REF) 4184*e4b17023SJohn Marino ; 4185*e4b17023SJohn Marino 4186*e4b17023SJohn Marino /* Don't count call-insns, (set (reg 0) (call ...)), as a set. 4187*e4b17023SJohn Marino The hard function value register is used only once, to copy to 4188*e4b17023SJohn Marino someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)! 4189*e4b17023SJohn Marino Ensure we invalidate the destination register. On the 80386 no 4190*e4b17023SJohn Marino other code would invalidate it since it is a fixed_reg. 4191*e4b17023SJohn Marino We need not check the return of apply_change_group; see canon_reg. 
*/ 4192*e4b17023SJohn Marino 4193*e4b17023SJohn Marino else if (GET_CODE (SET_SRC (x)) == CALL) 4194*e4b17023SJohn Marino { 4195*e4b17023SJohn Marino canon_reg (SET_SRC (x), insn); 4196*e4b17023SJohn Marino apply_change_group (); 4197*e4b17023SJohn Marino fold_rtx (SET_SRC (x), insn); 4198*e4b17023SJohn Marino invalidate (SET_DEST (x), VOIDmode); 4199*e4b17023SJohn Marino } 4200*e4b17023SJohn Marino else 4201*e4b17023SJohn Marino n_sets = 1; 4202*e4b17023SJohn Marino } 4203*e4b17023SJohn Marino else if (GET_CODE (x) == PARALLEL) 4204*e4b17023SJohn Marino { 4205*e4b17023SJohn Marino int lim = XVECLEN (x, 0); 4206*e4b17023SJohn Marino 4207*e4b17023SJohn Marino sets = XALLOCAVEC (struct set, lim); 4208*e4b17023SJohn Marino 4209*e4b17023SJohn Marino /* Find all regs explicitly clobbered in this insn, 4210*e4b17023SJohn Marino and ensure they are not replaced with any other regs 4211*e4b17023SJohn Marino elsewhere in this insn. 4212*e4b17023SJohn Marino When a reg that is clobbered is also used for input, 4213*e4b17023SJohn Marino we should presume that that is for a reason, 4214*e4b17023SJohn Marino and we should not substitute some other register 4215*e4b17023SJohn Marino which is not supposed to be clobbered. 4216*e4b17023SJohn Marino Therefore, this loop cannot be merged into the one below 4217*e4b17023SJohn Marino because a CALL may precede a CLOBBER and refer to the 4218*e4b17023SJohn Marino value clobbered. We must not let a canonicalization do 4219*e4b17023SJohn Marino anything in that case. */ 4220*e4b17023SJohn Marino for (i = 0; i < lim; i++) 4221*e4b17023SJohn Marino { 4222*e4b17023SJohn Marino rtx y = XVECEXP (x, 0, i); 4223*e4b17023SJohn Marino if (GET_CODE (y) == CLOBBER) 4224*e4b17023SJohn Marino { 4225*e4b17023SJohn Marino rtx clobbered = XEXP (y, 0); 4226*e4b17023SJohn Marino 4227*e4b17023SJohn Marino if (REG_P (clobbered) 4228*e4b17023SJohn Marino || GET_CODE (clobbered) == SUBREG) 4229*e4b17023SJohn Marino invalidate (clobbered, VOIDmode); 4230*e4b17023SJohn Marino else if (GET_CODE (clobbered) == STRICT_LOW_PART 4231*e4b17023SJohn Marino || GET_CODE (clobbered) == ZERO_EXTRACT) 4232*e4b17023SJohn Marino invalidate (XEXP (clobbered, 0), GET_MODE (clobbered)); 4233*e4b17023SJohn Marino } 4234*e4b17023SJohn Marino } 4235*e4b17023SJohn Marino 4236*e4b17023SJohn Marino for (i = 0; i < lim; i++) 4237*e4b17023SJohn Marino { 4238*e4b17023SJohn Marino rtx y = XVECEXP (x, 0, i); 4239*e4b17023SJohn Marino if (GET_CODE (y) == SET) 4240*e4b17023SJohn Marino { 4241*e4b17023SJohn Marino /* As above, we ignore unconditional jumps and call-insns and 4242*e4b17023SJohn Marino ignore the result of apply_change_group. */ 4243*e4b17023SJohn Marino if (GET_CODE (SET_SRC (y)) == CALL) 4244*e4b17023SJohn Marino { 4245*e4b17023SJohn Marino canon_reg (SET_SRC (y), insn); 4246*e4b17023SJohn Marino apply_change_group (); 4247*e4b17023SJohn Marino fold_rtx (SET_SRC (y), insn); 4248*e4b17023SJohn Marino invalidate (SET_DEST (y), VOIDmode); 4249*e4b17023SJohn Marino } 4250*e4b17023SJohn Marino else if (SET_DEST (y) == pc_rtx 4251*e4b17023SJohn Marino && GET_CODE (SET_SRC (y)) == LABEL_REF) 4252*e4b17023SJohn Marino ; 4253*e4b17023SJohn Marino else 4254*e4b17023SJohn Marino sets[n_sets++].rtl = y; 4255*e4b17023SJohn Marino } 4256*e4b17023SJohn Marino else if (GET_CODE (y) == CLOBBER) 4257*e4b17023SJohn Marino { 4258*e4b17023SJohn Marino /* If we clobber memory, canon the address. 
4259*e4b17023SJohn Marino This does nothing when a register is clobbered 4260*e4b17023SJohn Marino because we have already invalidated the reg. */ 4261*e4b17023SJohn Marino if (MEM_P (XEXP (y, 0))) 4262*e4b17023SJohn Marino canon_reg (XEXP (y, 0), insn); 4263*e4b17023SJohn Marino } 4264*e4b17023SJohn Marino else if (GET_CODE (y) == USE 4265*e4b17023SJohn Marino && ! (REG_P (XEXP (y, 0)) 4266*e4b17023SJohn Marino && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER)) 4267*e4b17023SJohn Marino canon_reg (y, insn); 4268*e4b17023SJohn Marino else if (GET_CODE (y) == CALL) 4269*e4b17023SJohn Marino { 4270*e4b17023SJohn Marino /* The result of apply_change_group can be ignored; see 4271*e4b17023SJohn Marino canon_reg. */ 4272*e4b17023SJohn Marino canon_reg (y, insn); 4273*e4b17023SJohn Marino apply_change_group (); 4274*e4b17023SJohn Marino fold_rtx (y, insn); 4275*e4b17023SJohn Marino } 4276*e4b17023SJohn Marino } 4277*e4b17023SJohn Marino } 4278*e4b17023SJohn Marino else if (GET_CODE (x) == CLOBBER) 4279*e4b17023SJohn Marino { 4280*e4b17023SJohn Marino if (MEM_P (XEXP (x, 0))) 4281*e4b17023SJohn Marino canon_reg (XEXP (x, 0), insn); 4282*e4b17023SJohn Marino } 4283*e4b17023SJohn Marino /* Canonicalize a USE of a pseudo register or memory location. */ 4284*e4b17023SJohn Marino else if (GET_CODE (x) == USE 4285*e4b17023SJohn Marino && ! (REG_P (XEXP (x, 0)) 4286*e4b17023SJohn Marino && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)) 4287*e4b17023SJohn Marino canon_reg (x, insn); 4288*e4b17023SJohn Marino else if (GET_CODE (x) == ASM_OPERANDS) 4289*e4b17023SJohn Marino { 4290*e4b17023SJohn Marino for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--) 4291*e4b17023SJohn Marino { 4292*e4b17023SJohn Marino rtx input = ASM_OPERANDS_INPUT (x, i); 4293*e4b17023SJohn Marino if (!(REG_P (input) && REGNO (input) < FIRST_PSEUDO_REGISTER)) 4294*e4b17023SJohn Marino { 4295*e4b17023SJohn Marino input = canon_reg (input, insn); 4296*e4b17023SJohn Marino validate_change (insn, &ASM_OPERANDS_INPUT (x, i), input, 1); 4297*e4b17023SJohn Marino } 4298*e4b17023SJohn Marino } 4299*e4b17023SJohn Marino } 4300*e4b17023SJohn Marino else if (GET_CODE (x) == CALL) 4301*e4b17023SJohn Marino { 4302*e4b17023SJohn Marino /* The result of apply_change_group can be ignored; see canon_reg. */ 4303*e4b17023SJohn Marino canon_reg (x, insn); 4304*e4b17023SJohn Marino apply_change_group (); 4305*e4b17023SJohn Marino fold_rtx (x, insn); 4306*e4b17023SJohn Marino } 4307*e4b17023SJohn Marino else if (DEBUG_INSN_P (insn)) 4308*e4b17023SJohn Marino canon_reg (PATTERN (insn), insn); 4309*e4b17023SJohn Marino 4310*e4b17023SJohn Marino /* Store the equivalent value in SRC_EQV, if different, or if the DEST 4311*e4b17023SJohn Marino is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV 4312*e4b17023SJohn Marino is handled specially for this case, and if it isn't set, then there will 4313*e4b17023SJohn Marino be no equivalence for the destination. */ 4314*e4b17023SJohn Marino if (n_sets == 1 && REG_NOTES (insn) != 0 4315*e4b17023SJohn Marino && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0 4316*e4b17023SJohn Marino && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl)) 4317*e4b17023SJohn Marino || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART)) 4318*e4b17023SJohn Marino { 4319*e4b17023SJohn Marino /* The result of apply_change_group can be ignored; see canon_reg. 
*/ 4320*e4b17023SJohn Marino canon_reg (XEXP (tem, 0), insn); 4321*e4b17023SJohn Marino apply_change_group (); 4322*e4b17023SJohn Marino src_eqv = fold_rtx (XEXP (tem, 0), insn); 4323*e4b17023SJohn Marino XEXP (tem, 0) = copy_rtx (src_eqv); 4324*e4b17023SJohn Marino df_notes_rescan (insn); 4325*e4b17023SJohn Marino } 4326*e4b17023SJohn Marino 4327*e4b17023SJohn Marino /* Canonicalize sources and addresses of destinations. 4328*e4b17023SJohn Marino We do this in a separate pass to avoid problems when a MATCH_DUP is 4329*e4b17023SJohn Marino present in the insn pattern. In that case, we want to ensure that 4330*e4b17023SJohn Marino we don't break the duplicate nature of the pattern. So we will replace 4331*e4b17023SJohn Marino both operands at the same time. Otherwise, we would fail to find an 4332*e4b17023SJohn Marino equivalent substitution in the loop calling validate_change below. 4333*e4b17023SJohn Marino 4334*e4b17023SJohn Marino We used to suppress canonicalization of DEST if it appears in SRC, 4335*e4b17023SJohn Marino but we don't do this any more. */ 4336*e4b17023SJohn Marino 4337*e4b17023SJohn Marino for (i = 0; i < n_sets; i++) 4338*e4b17023SJohn Marino { 4339*e4b17023SJohn Marino rtx dest = SET_DEST (sets[i].rtl); 4340*e4b17023SJohn Marino rtx src = SET_SRC (sets[i].rtl); 4341*e4b17023SJohn Marino rtx new_rtx = canon_reg (src, insn); 4342*e4b17023SJohn Marino 4343*e4b17023SJohn Marino validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1); 4344*e4b17023SJohn Marino 4345*e4b17023SJohn Marino if (GET_CODE (dest) == ZERO_EXTRACT) 4346*e4b17023SJohn Marino { 4347*e4b17023SJohn Marino validate_change (insn, &XEXP (dest, 1), 4348*e4b17023SJohn Marino canon_reg (XEXP (dest, 1), insn), 1); 4349*e4b17023SJohn Marino validate_change (insn, &XEXP (dest, 2), 4350*e4b17023SJohn Marino canon_reg (XEXP (dest, 2), insn), 1); 4351*e4b17023SJohn Marino } 4352*e4b17023SJohn Marino 4353*e4b17023SJohn Marino while (GET_CODE (dest) == SUBREG 4354*e4b17023SJohn Marino || GET_CODE (dest) == ZERO_EXTRACT 4355*e4b17023SJohn Marino || GET_CODE (dest) == STRICT_LOW_PART) 4356*e4b17023SJohn Marino dest = XEXP (dest, 0); 4357*e4b17023SJohn Marino 4358*e4b17023SJohn Marino if (MEM_P (dest)) 4359*e4b17023SJohn Marino canon_reg (dest, insn); 4360*e4b17023SJohn Marino } 4361*e4b17023SJohn Marino 4362*e4b17023SJohn Marino /* Now that we have done all the replacements, we can apply the change 4363*e4b17023SJohn Marino group and see if they all work. Note that this will cause some 4364*e4b17023SJohn Marino canonicalizations that would have worked individually not to be applied 4365*e4b17023SJohn Marino because some other canonicalization didn't work, but this should not 4366*e4b17023SJohn Marino occur often. 4367*e4b17023SJohn Marino 4368*e4b17023SJohn Marino The result of apply_change_group can be ignored; see canon_reg. */ 4369*e4b17023SJohn Marino 4370*e4b17023SJohn Marino apply_change_group (); 4371*e4b17023SJohn Marino 4372*e4b17023SJohn Marino /* Set sets[i].src_elt to the class each source belongs to. 4373*e4b17023SJohn Marino Detect assignments from or to volatile things 4374*e4b17023SJohn Marino and set set[i] to zero so they will be ignored 4375*e4b17023SJohn Marino in the rest of this function. 4376*e4b17023SJohn Marino 4377*e4b17023SJohn Marino Nothing in this loop changes the hash table or the register chains. 
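   Roughly speaking (editorial summary), the loop gathers several candidate
   forms of each source -- the source itself, the REG_EQUAL value, the
   folded form, a related form, and existing table equivalents -- and the
   cheaper of them, by rtx cost and register cost, is preferred when
   substituting.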
*/
4378*e4b17023SJohn Marino
4379*e4b17023SJohn Marino   for (i = 0; i < n_sets; i++)
4380*e4b17023SJohn Marino     {
4381*e4b17023SJohn Marino       bool repeat = false;
4382*e4b17023SJohn Marino       rtx src, dest;
4383*e4b17023SJohn Marino       rtx src_folded;
4384*e4b17023SJohn Marino       struct table_elt *elt = 0, *p;
4385*e4b17023SJohn Marino       enum machine_mode mode;
4386*e4b17023SJohn Marino       rtx src_eqv_here;
4387*e4b17023SJohn Marino       rtx src_const = 0;
4388*e4b17023SJohn Marino       rtx src_related = 0;
4389*e4b17023SJohn Marino       bool src_related_is_const_anchor = false;
4390*e4b17023SJohn Marino       struct table_elt *src_const_elt = 0;
4391*e4b17023SJohn Marino       int src_cost = MAX_COST;
4392*e4b17023SJohn Marino       int src_eqv_cost = MAX_COST;
4393*e4b17023SJohn Marino       int src_folded_cost = MAX_COST;
4394*e4b17023SJohn Marino       int src_related_cost = MAX_COST;
4395*e4b17023SJohn Marino       int src_elt_cost = MAX_COST;
4396*e4b17023SJohn Marino       int src_regcost = MAX_COST;
4397*e4b17023SJohn Marino       int src_eqv_regcost = MAX_COST;
4398*e4b17023SJohn Marino       int src_folded_regcost = MAX_COST;
4399*e4b17023SJohn Marino       int src_related_regcost = MAX_COST;
4400*e4b17023SJohn Marino       int src_elt_regcost = MAX_COST;
4401*e4b17023SJohn Marino       /* Set nonzero if we need to call force_const_mem on the
4402*e4b17023SJohn Marino          contents of src_folded before using it. */
4403*e4b17023SJohn Marino       int src_folded_force_flag = 0;
4404*e4b17023SJohn Marino
4405*e4b17023SJohn Marino       dest = SET_DEST (sets[i].rtl);
4406*e4b17023SJohn Marino       src = SET_SRC (sets[i].rtl);
4407*e4b17023SJohn Marino
4408*e4b17023SJohn Marino       /* If SRC is a constant that has no machine mode,
4409*e4b17023SJohn Marino          hash it with the destination's machine mode.
4410*e4b17023SJohn Marino          This way we can keep different modes separate. */
4411*e4b17023SJohn Marino
4412*e4b17023SJohn Marino       mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4413*e4b17023SJohn Marino       sets[i].mode = mode;
4414*e4b17023SJohn Marino
4415*e4b17023SJohn Marino       if (src_eqv)
4416*e4b17023SJohn Marino         {
4417*e4b17023SJohn Marino           enum machine_mode eqvmode = mode;
4418*e4b17023SJohn Marino           if (GET_CODE (dest) == STRICT_LOW_PART)
4419*e4b17023SJohn Marino             eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4420*e4b17023SJohn Marino           do_not_record = 0;
4421*e4b17023SJohn Marino           hash_arg_in_memory = 0;
4422*e4b17023SJohn Marino           src_eqv_hash = HASH (src_eqv, eqvmode);
4423*e4b17023SJohn Marino
4424*e4b17023SJohn Marino           /* Find the equivalence class for the equivalent expression. */
4425*e4b17023SJohn Marino
4426*e4b17023SJohn Marino           if (!do_not_record)
4427*e4b17023SJohn Marino             src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4428*e4b17023SJohn Marino
4429*e4b17023SJohn Marino           src_eqv_volatile = do_not_record;
4430*e4b17023SJohn Marino           src_eqv_in_memory = hash_arg_in_memory;
4431*e4b17023SJohn Marino         }
4432*e4b17023SJohn Marino
4433*e4b17023SJohn Marino       /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4434*e4b17023SJohn Marino          value of the INNER register, not the destination. So it is not
4435*e4b17023SJohn Marino          a valid substitution for the source. But save it for later. */
4436*e4b17023SJohn Marino       if (GET_CODE (dest) == STRICT_LOW_PART)
4437*e4b17023SJohn Marino         src_eqv_here = 0;
4438*e4b17023SJohn Marino       else
4439*e4b17023SJohn Marino         src_eqv_here = src_eqv;
4440*e4b17023SJohn Marino
4441*e4b17023SJohn Marino       /* Simplify any foldable subexpressions in SRC. Then get the fully-
4442*e4b17023SJohn Marino          simplified result, which may not necessarily be valid.
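         The folded form is only a candidate: it still has to win the cost
         comparison below and pass validate_unshare_change before it can
         actually replace SRC in the insn.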
*/ 4443*e4b17023SJohn Marino src_folded = fold_rtx (src, insn); 4444*e4b17023SJohn Marino 4445*e4b17023SJohn Marino #if 0 4446*e4b17023SJohn Marino /* ??? This caused bad code to be generated for the m68k port with -O2. 4447*e4b17023SJohn Marino Suppose src is (CONST_INT -1), and that after truncation src_folded 4448*e4b17023SJohn Marino is (CONST_INT 3). Suppose src_folded is then used for src_const. 4449*e4b17023SJohn Marino At the end we will add src and src_const to the same equivalence 4450*e4b17023SJohn Marino class. We now have 3 and -1 on the same equivalence class. This 4451*e4b17023SJohn Marino causes later instructions to be mis-optimized. */ 4452*e4b17023SJohn Marino /* If storing a constant in a bitfield, pre-truncate the constant 4453*e4b17023SJohn Marino so we will be able to record it later. */ 4454*e4b17023SJohn Marino if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT) 4455*e4b17023SJohn Marino { 4456*e4b17023SJohn Marino rtx width = XEXP (SET_DEST (sets[i].rtl), 1); 4457*e4b17023SJohn Marino 4458*e4b17023SJohn Marino if (CONST_INT_P (src) 4459*e4b17023SJohn Marino && CONST_INT_P (width) 4460*e4b17023SJohn Marino && INTVAL (width) < HOST_BITS_PER_WIDE_INT 4461*e4b17023SJohn Marino && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width)))) 4462*e4b17023SJohn Marino src_folded 4463*e4b17023SJohn Marino = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1 4464*e4b17023SJohn Marino << INTVAL (width)) - 1)); 4465*e4b17023SJohn Marino } 4466*e4b17023SJohn Marino #endif 4467*e4b17023SJohn Marino 4468*e4b17023SJohn Marino /* Compute SRC's hash code, and also notice if it 4469*e4b17023SJohn Marino should not be recorded at all. In that case, 4470*e4b17023SJohn Marino prevent any further processing of this assignment. */ 4471*e4b17023SJohn Marino do_not_record = 0; 4472*e4b17023SJohn Marino hash_arg_in_memory = 0; 4473*e4b17023SJohn Marino 4474*e4b17023SJohn Marino sets[i].src = src; 4475*e4b17023SJohn Marino sets[i].src_hash = HASH (src, mode); 4476*e4b17023SJohn Marino sets[i].src_volatile = do_not_record; 4477*e4b17023SJohn Marino sets[i].src_in_memory = hash_arg_in_memory; 4478*e4b17023SJohn Marino 4479*e4b17023SJohn Marino /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is 4480*e4b17023SJohn Marino a pseudo, do not record SRC. Using SRC as a replacement for 4481*e4b17023SJohn Marino anything else will be incorrect in that situation. Note that 4482*e4b17023SJohn Marino this usually occurs only for stack slots, in which case all the 4483*e4b17023SJohn Marino RTL would be referring to SRC, so we don't lose any optimization 4484*e4b17023SJohn Marino opportunities by not having SRC in the hash table. */ 4485*e4b17023SJohn Marino 4486*e4b17023SJohn Marino if (MEM_P (src) 4487*e4b17023SJohn Marino && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0 4488*e4b17023SJohn Marino && REG_P (dest) 4489*e4b17023SJohn Marino && REGNO (dest) >= FIRST_PSEUDO_REGISTER) 4490*e4b17023SJohn Marino sets[i].src_volatile = 1; 4491*e4b17023SJohn Marino 4492*e4b17023SJohn Marino #if 0 4493*e4b17023SJohn Marino /* It is no longer clear why we used to do this, but it doesn't 4494*e4b17023SJohn Marino appear to still be needed. So let's try without it since this 4495*e4b17023SJohn Marino code hurts cse'ing widened ops. */ 4496*e4b17023SJohn Marino /* If source is a paradoxical subreg (such as QI treated as an SI), 4497*e4b17023SJohn Marino treat it as volatile. 
It may do the work of an SI in one context 4498*e4b17023SJohn Marino where the extra bits are not being used, but cannot replace an SI 4499*e4b17023SJohn Marino in general. */ 4500*e4b17023SJohn Marino if (paradoxical_subreg_p (src)) 4501*e4b17023SJohn Marino sets[i].src_volatile = 1; 4502*e4b17023SJohn Marino #endif 4503*e4b17023SJohn Marino 4504*e4b17023SJohn Marino /* Locate all possible equivalent forms for SRC. Try to replace 4505*e4b17023SJohn Marino SRC in the insn with each cheaper equivalent. 4506*e4b17023SJohn Marino 4507*e4b17023SJohn Marino We have the following types of equivalents: SRC itself, a folded 4508*e4b17023SJohn Marino version, a value given in a REG_EQUAL note, or a value related 4509*e4b17023SJohn Marino to a constant. 4510*e4b17023SJohn Marino 4511*e4b17023SJohn Marino Each of these equivalents may be part of an additional class 4512*e4b17023SJohn Marino of equivalents (if more than one is in the table, they must be in 4513*e4b17023SJohn Marino the same class; we check for this). 4514*e4b17023SJohn Marino 4515*e4b17023SJohn Marino If the source is volatile, we don't do any table lookups. 4516*e4b17023SJohn Marino 4517*e4b17023SJohn Marino We note any constant equivalent for possible later use in a 4518*e4b17023SJohn Marino REG_NOTE. */ 4519*e4b17023SJohn Marino 4520*e4b17023SJohn Marino if (!sets[i].src_volatile) 4521*e4b17023SJohn Marino elt = lookup (src, sets[i].src_hash, mode); 4522*e4b17023SJohn Marino 4523*e4b17023SJohn Marino sets[i].src_elt = elt; 4524*e4b17023SJohn Marino 4525*e4b17023SJohn Marino if (elt && src_eqv_here && src_eqv_elt) 4526*e4b17023SJohn Marino { 4527*e4b17023SJohn Marino if (elt->first_same_value != src_eqv_elt->first_same_value) 4528*e4b17023SJohn Marino { 4529*e4b17023SJohn Marino /* The REG_EQUAL is indicating that two formerly distinct 4530*e4b17023SJohn Marino classes are now equivalent. So merge them. */ 4531*e4b17023SJohn Marino merge_equiv_classes (elt, src_eqv_elt); 4532*e4b17023SJohn Marino src_eqv_hash = HASH (src_eqv, elt->mode); 4533*e4b17023SJohn Marino src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode); 4534*e4b17023SJohn Marino } 4535*e4b17023SJohn Marino 4536*e4b17023SJohn Marino src_eqv_here = 0; 4537*e4b17023SJohn Marino } 4538*e4b17023SJohn Marino 4539*e4b17023SJohn Marino else if (src_eqv_elt) 4540*e4b17023SJohn Marino elt = src_eqv_elt; 4541*e4b17023SJohn Marino 4542*e4b17023SJohn Marino /* Try to find a constant somewhere and record it in `src_const'. 4543*e4b17023SJohn Marino Record its table element, if any, in `src_const_elt'. Look in 4544*e4b17023SJohn Marino any known equivalences first. (If the constant is not in the 4545*e4b17023SJohn Marino table, also set `sets[i].src_const_hash'). */ 4546*e4b17023SJohn Marino if (elt) 4547*e4b17023SJohn Marino for (p = elt->first_same_value; p; p = p->next_same_value) 4548*e4b17023SJohn Marino if (p->is_const) 4549*e4b17023SJohn Marino { 4550*e4b17023SJohn Marino src_const = p->exp; 4551*e4b17023SJohn Marino src_const_elt = elt; 4552*e4b17023SJohn Marino break; 4553*e4b17023SJohn Marino } 4554*e4b17023SJohn Marino 4555*e4b17023SJohn Marino if (src_const == 0 4556*e4b17023SJohn Marino && (CONSTANT_P (src_folded) 4557*e4b17023SJohn Marino /* Consider (minus (label_ref L1) (label_ref L2)) as 4558*e4b17023SJohn Marino "constant" here so we will record it. This allows us 4559*e4b17023SJohn Marino to fold switch statements when an ADDR_DIFF_VEC is used. 
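                    Without this, src_const would stay zero for such a
                    label difference and no equivalence would be recorded
                    for the dispatch-table entry.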
*/ 4560*e4b17023SJohn Marino || (GET_CODE (src_folded) == MINUS 4561*e4b17023SJohn Marino && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF 4562*e4b17023SJohn Marino && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF))) 4563*e4b17023SJohn Marino src_const = src_folded, src_const_elt = elt; 4564*e4b17023SJohn Marino else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here)) 4565*e4b17023SJohn Marino src_const = src_eqv_here, src_const_elt = src_eqv_elt; 4566*e4b17023SJohn Marino 4567*e4b17023SJohn Marino /* If we don't know if the constant is in the table, get its 4568*e4b17023SJohn Marino hash code and look it up. */ 4569*e4b17023SJohn Marino if (src_const && src_const_elt == 0) 4570*e4b17023SJohn Marino { 4571*e4b17023SJohn Marino sets[i].src_const_hash = HASH (src_const, mode); 4572*e4b17023SJohn Marino src_const_elt = lookup (src_const, sets[i].src_const_hash, mode); 4573*e4b17023SJohn Marino } 4574*e4b17023SJohn Marino 4575*e4b17023SJohn Marino sets[i].src_const = src_const; 4576*e4b17023SJohn Marino sets[i].src_const_elt = src_const_elt; 4577*e4b17023SJohn Marino 4578*e4b17023SJohn Marino /* If the constant and our source are both in the table, mark them as 4579*e4b17023SJohn Marino equivalent. Otherwise, if a constant is in the table but the source 4580*e4b17023SJohn Marino isn't, set ELT to it. */ 4581*e4b17023SJohn Marino if (src_const_elt && elt 4582*e4b17023SJohn Marino && src_const_elt->first_same_value != elt->first_same_value) 4583*e4b17023SJohn Marino merge_equiv_classes (elt, src_const_elt); 4584*e4b17023SJohn Marino else if (src_const_elt && elt == 0) 4585*e4b17023SJohn Marino elt = src_const_elt; 4586*e4b17023SJohn Marino 4587*e4b17023SJohn Marino /* See if there is a register linearly related to a constant 4588*e4b17023SJohn Marino equivalent of SRC. */ 4589*e4b17023SJohn Marino if (src_const 4590*e4b17023SJohn Marino && (GET_CODE (src_const) == CONST 4591*e4b17023SJohn Marino || (src_const_elt && src_const_elt->related_value != 0))) 4592*e4b17023SJohn Marino { 4593*e4b17023SJohn Marino src_related = use_related_value (src_const, src_const_elt); 4594*e4b17023SJohn Marino if (src_related) 4595*e4b17023SJohn Marino { 4596*e4b17023SJohn Marino struct table_elt *src_related_elt 4597*e4b17023SJohn Marino = lookup (src_related, HASH (src_related, mode), mode); 4598*e4b17023SJohn Marino if (src_related_elt && elt) 4599*e4b17023SJohn Marino { 4600*e4b17023SJohn Marino if (elt->first_same_value 4601*e4b17023SJohn Marino != src_related_elt->first_same_value) 4602*e4b17023SJohn Marino /* This can occur when we previously saw a CONST 4603*e4b17023SJohn Marino involving a SYMBOL_REF and then see the SYMBOL_REF 4604*e4b17023SJohn Marino twice. Merge the involved classes. */ 4605*e4b17023SJohn Marino merge_equiv_classes (elt, src_related_elt); 4606*e4b17023SJohn Marino 4607*e4b17023SJohn Marino src_related = 0; 4608*e4b17023SJohn Marino src_related_elt = 0; 4609*e4b17023SJohn Marino } 4610*e4b17023SJohn Marino else if (src_related_elt && elt == 0) 4611*e4b17023SJohn Marino elt = src_related_elt; 4612*e4b17023SJohn Marino } 4613*e4b17023SJohn Marino } 4614*e4b17023SJohn Marino 4615*e4b17023SJohn Marino /* See if we have a CONST_INT that is already in a register in a 4616*e4b17023SJohn Marino wider mode. 
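         We hash the constant in each wider integer mode up to a word; if
         some register in that class already holds the value, gen_lowpart of
         that register becomes another candidate in src_related. For
         instance, if (reg:SI 200) is known to hold (const_int 7) and we now
         need (const_int 7) in QImode, a lowpart of (reg:SI 200) can be used
         (the register number here is only illustrative).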
*/ 4617*e4b17023SJohn Marino 4618*e4b17023SJohn Marino if (src_const && src_related == 0 && CONST_INT_P (src_const) 4619*e4b17023SJohn Marino && GET_MODE_CLASS (mode) == MODE_INT 4620*e4b17023SJohn Marino && GET_MODE_PRECISION (mode) < BITS_PER_WORD) 4621*e4b17023SJohn Marino { 4622*e4b17023SJohn Marino enum machine_mode wider_mode; 4623*e4b17023SJohn Marino 4624*e4b17023SJohn Marino for (wider_mode = GET_MODE_WIDER_MODE (mode); 4625*e4b17023SJohn Marino wider_mode != VOIDmode 4626*e4b17023SJohn Marino && GET_MODE_PRECISION (wider_mode) <= BITS_PER_WORD 4627*e4b17023SJohn Marino && src_related == 0; 4628*e4b17023SJohn Marino wider_mode = GET_MODE_WIDER_MODE (wider_mode)) 4629*e4b17023SJohn Marino { 4630*e4b17023SJohn Marino struct table_elt *const_elt 4631*e4b17023SJohn Marino = lookup (src_const, HASH (src_const, wider_mode), wider_mode); 4632*e4b17023SJohn Marino 4633*e4b17023SJohn Marino if (const_elt == 0) 4634*e4b17023SJohn Marino continue; 4635*e4b17023SJohn Marino 4636*e4b17023SJohn Marino for (const_elt = const_elt->first_same_value; 4637*e4b17023SJohn Marino const_elt; const_elt = const_elt->next_same_value) 4638*e4b17023SJohn Marino if (REG_P (const_elt->exp)) 4639*e4b17023SJohn Marino { 4640*e4b17023SJohn Marino src_related = gen_lowpart (mode, const_elt->exp); 4641*e4b17023SJohn Marino break; 4642*e4b17023SJohn Marino } 4643*e4b17023SJohn Marino } 4644*e4b17023SJohn Marino } 4645*e4b17023SJohn Marino 4646*e4b17023SJohn Marino /* Another possibility is that we have an AND with a constant in 4647*e4b17023SJohn Marino a mode narrower than a word. If so, it might have been generated 4648*e4b17023SJohn Marino as part of an "if" which would narrow the AND. If we already 4649*e4b17023SJohn Marino have done the AND in a wider mode, we can use a SUBREG of that 4650*e4b17023SJohn Marino value. */ 4651*e4b17023SJohn Marino 4652*e4b17023SJohn Marino if (flag_expensive_optimizations && ! 
src_related 4653*e4b17023SJohn Marino && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1)) 4654*e4b17023SJohn Marino && GET_MODE_SIZE (mode) < UNITS_PER_WORD) 4655*e4b17023SJohn Marino { 4656*e4b17023SJohn Marino enum machine_mode tmode; 4657*e4b17023SJohn Marino rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1)); 4658*e4b17023SJohn Marino 4659*e4b17023SJohn Marino for (tmode = GET_MODE_WIDER_MODE (mode); 4660*e4b17023SJohn Marino GET_MODE_SIZE (tmode) <= UNITS_PER_WORD; 4661*e4b17023SJohn Marino tmode = GET_MODE_WIDER_MODE (tmode)) 4662*e4b17023SJohn Marino { 4663*e4b17023SJohn Marino rtx inner = gen_lowpart (tmode, XEXP (src, 0)); 4664*e4b17023SJohn Marino struct table_elt *larger_elt; 4665*e4b17023SJohn Marino 4666*e4b17023SJohn Marino if (inner) 4667*e4b17023SJohn Marino { 4668*e4b17023SJohn Marino PUT_MODE (new_and, tmode); 4669*e4b17023SJohn Marino XEXP (new_and, 0) = inner; 4670*e4b17023SJohn Marino larger_elt = lookup (new_and, HASH (new_and, tmode), tmode); 4671*e4b17023SJohn Marino if (larger_elt == 0) 4672*e4b17023SJohn Marino continue; 4673*e4b17023SJohn Marino 4674*e4b17023SJohn Marino for (larger_elt = larger_elt->first_same_value; 4675*e4b17023SJohn Marino larger_elt; larger_elt = larger_elt->next_same_value) 4676*e4b17023SJohn Marino if (REG_P (larger_elt->exp)) 4677*e4b17023SJohn Marino { 4678*e4b17023SJohn Marino src_related 4679*e4b17023SJohn Marino = gen_lowpart (mode, larger_elt->exp); 4680*e4b17023SJohn Marino break; 4681*e4b17023SJohn Marino } 4682*e4b17023SJohn Marino 4683*e4b17023SJohn Marino if (src_related) 4684*e4b17023SJohn Marino break; 4685*e4b17023SJohn Marino } 4686*e4b17023SJohn Marino } 4687*e4b17023SJohn Marino } 4688*e4b17023SJohn Marino 4689*e4b17023SJohn Marino #ifdef LOAD_EXTEND_OP 4690*e4b17023SJohn Marino /* See if a MEM has already been loaded with a widening operation; 4691*e4b17023SJohn Marino if it has, we can use a subreg of that. Many CISC machines 4692*e4b17023SJohn Marino also have such operations, but this is only likely to be 4693*e4b17023SJohn Marino beneficial on these machines. */ 4694*e4b17023SJohn Marino 4695*e4b17023SJohn Marino if (flag_expensive_optimizations && src_related == 0 4696*e4b17023SJohn Marino && (GET_MODE_SIZE (mode) < UNITS_PER_WORD) 4697*e4b17023SJohn Marino && GET_MODE_CLASS (mode) == MODE_INT 4698*e4b17023SJohn Marino && MEM_P (src) && ! do_not_record 4699*e4b17023SJohn Marino && LOAD_EXTEND_OP (mode) != UNKNOWN) 4700*e4b17023SJohn Marino { 4701*e4b17023SJohn Marino struct rtx_def memory_extend_buf; 4702*e4b17023SJohn Marino rtx memory_extend_rtx = &memory_extend_buf; 4703*e4b17023SJohn Marino enum machine_mode tmode; 4704*e4b17023SJohn Marino 4705*e4b17023SJohn Marino /* Set what we are trying to extend and the operation it might 4706*e4b17023SJohn Marino have been extended with. 
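             The lookup below uses a scratch rtx built on the stack whose
             code is the target's LOAD_EXTEND_OP for MODE and whose operand
             is the MEM being loaded; each wider mode is then searched for a
             register already holding that extended value.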
*/ 4707*e4b17023SJohn Marino memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx)); 4708*e4b17023SJohn Marino PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode)); 4709*e4b17023SJohn Marino XEXP (memory_extend_rtx, 0) = src; 4710*e4b17023SJohn Marino 4711*e4b17023SJohn Marino for (tmode = GET_MODE_WIDER_MODE (mode); 4712*e4b17023SJohn Marino GET_MODE_SIZE (tmode) <= UNITS_PER_WORD; 4713*e4b17023SJohn Marino tmode = GET_MODE_WIDER_MODE (tmode)) 4714*e4b17023SJohn Marino { 4715*e4b17023SJohn Marino struct table_elt *larger_elt; 4716*e4b17023SJohn Marino 4717*e4b17023SJohn Marino PUT_MODE (memory_extend_rtx, tmode); 4718*e4b17023SJohn Marino larger_elt = lookup (memory_extend_rtx, 4719*e4b17023SJohn Marino HASH (memory_extend_rtx, tmode), tmode); 4720*e4b17023SJohn Marino if (larger_elt == 0) 4721*e4b17023SJohn Marino continue; 4722*e4b17023SJohn Marino 4723*e4b17023SJohn Marino for (larger_elt = larger_elt->first_same_value; 4724*e4b17023SJohn Marino larger_elt; larger_elt = larger_elt->next_same_value) 4725*e4b17023SJohn Marino if (REG_P (larger_elt->exp)) 4726*e4b17023SJohn Marino { 4727*e4b17023SJohn Marino src_related = gen_lowpart (mode, larger_elt->exp); 4728*e4b17023SJohn Marino break; 4729*e4b17023SJohn Marino } 4730*e4b17023SJohn Marino 4731*e4b17023SJohn Marino if (src_related) 4732*e4b17023SJohn Marino break; 4733*e4b17023SJohn Marino } 4734*e4b17023SJohn Marino } 4735*e4b17023SJohn Marino #endif /* LOAD_EXTEND_OP */ 4736*e4b17023SJohn Marino 4737*e4b17023SJohn Marino /* Try to express the constant using a register+offset expression 4738*e4b17023SJohn Marino derived from a constant anchor. */ 4739*e4b17023SJohn Marino 4740*e4b17023SJohn Marino if (targetm.const_anchor 4741*e4b17023SJohn Marino && !src_related 4742*e4b17023SJohn Marino && src_const 4743*e4b17023SJohn Marino && GET_CODE (src_const) == CONST_INT) 4744*e4b17023SJohn Marino { 4745*e4b17023SJohn Marino src_related = try_const_anchors (src_const, mode); 4746*e4b17023SJohn Marino src_related_is_const_anchor = src_related != NULL_RTX; 4747*e4b17023SJohn Marino } 4748*e4b17023SJohn Marino 4749*e4b17023SJohn Marino 4750*e4b17023SJohn Marino if (src == src_folded) 4751*e4b17023SJohn Marino src_folded = 0; 4752*e4b17023SJohn Marino 4753*e4b17023SJohn Marino /* At this point, ELT, if nonzero, points to a class of expressions 4754*e4b17023SJohn Marino equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED, 4755*e4b17023SJohn Marino and SRC_RELATED, if nonzero, each contain additional equivalent 4756*e4b17023SJohn Marino expressions. Prune these latter expressions by deleting expressions 4757*e4b17023SJohn Marino already in the equivalence class. 4758*e4b17023SJohn Marino 4759*e4b17023SJohn Marino Check for an equivalent identical to the destination. If found, 4760*e4b17023SJohn Marino this is the preferred equivalent since it will likely lead to 4761*e4b17023SJohn Marino elimination of the insn. Indicate this by placing it in 4762*e4b17023SJohn Marino `src_related'. */ 4763*e4b17023SJohn Marino 4764*e4b17023SJohn Marino if (elt) 4765*e4b17023SJohn Marino elt = elt->first_same_value; 4766*e4b17023SJohn Marino for (p = elt; p; p = p->next_same_value) 4767*e4b17023SJohn Marino { 4768*e4b17023SJohn Marino enum rtx_code code = GET_CODE (p->exp); 4769*e4b17023SJohn Marino 4770*e4b17023SJohn Marino /* If the expression is not valid, ignore it. Then we do not 4771*e4b17023SJohn Marino have to check for validity below. 
In most cases, we can use
4772*e4b17023SJohn Marino              `rtx_equal_p', since canonicalization has already been done. */
4773*e4b17023SJohn Marino           if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
4774*e4b17023SJohn Marino             continue;
4775*e4b17023SJohn Marino
4776*e4b17023SJohn Marino           /* Also skip paradoxical subregs, unless that's what we're
4777*e4b17023SJohn Marino              looking for. */
4778*e4b17023SJohn Marino           if (paradoxical_subreg_p (p->exp)
4779*e4b17023SJohn Marino               && ! (src != 0
4780*e4b17023SJohn Marino                     && GET_CODE (src) == SUBREG
4781*e4b17023SJohn Marino                     && GET_MODE (src) == GET_MODE (p->exp)
4782*e4b17023SJohn Marino                     && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4783*e4b17023SJohn Marino                         < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
4784*e4b17023SJohn Marino             continue;
4785*e4b17023SJohn Marino
4786*e4b17023SJohn Marino           if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
4787*e4b17023SJohn Marino             src = 0;
4788*e4b17023SJohn Marino           else if (src_folded && GET_CODE (src_folded) == code
4789*e4b17023SJohn Marino                    && rtx_equal_p (src_folded, p->exp))
4790*e4b17023SJohn Marino             src_folded = 0;
4791*e4b17023SJohn Marino           else if (src_eqv_here && GET_CODE (src_eqv_here) == code
4792*e4b17023SJohn Marino                    && rtx_equal_p (src_eqv_here, p->exp))
4793*e4b17023SJohn Marino             src_eqv_here = 0;
4794*e4b17023SJohn Marino           else if (src_related && GET_CODE (src_related) == code
4795*e4b17023SJohn Marino                    && rtx_equal_p (src_related, p->exp))
4796*e4b17023SJohn Marino             src_related = 0;
4797*e4b17023SJohn Marino
4798*e4b17023SJohn Marino           /* If this is the same as the destination of the insn, we want
4799*e4b17023SJohn Marino              to prefer it. Copy it to src_related. The code below will
4800*e4b17023SJohn Marino              then give it a negative cost. */
4801*e4b17023SJohn Marino           if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
4802*e4b17023SJohn Marino             src_related = dest;
4803*e4b17023SJohn Marino         }
4804*e4b17023SJohn Marino
4805*e4b17023SJohn Marino       /* Find the cheapest valid equivalent, trying all the available
4806*e4b17023SJohn Marino          possibilities. Prefer items not in the hash table to ones
4807*e4b17023SJohn Marino          that are when they are equal cost. Note that we can never
4808*e4b17023SJohn Marino          worsen an insn as the current contents will also succeed.
4809*e4b17023SJohn Marino          If we find an equivalent identical to the destination, use it as best,
4810*e4b17023SJohn Marino          since this insn will probably be eliminated in that case.
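         Two costs are tracked for every candidate: a full rtx cost and an
         approximate register cost from approx_reg_cost; the preferable
         calls below compare both. A candidate identical to the destination
         gets cost -1 so that it always wins.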
*/ 4811*e4b17023SJohn Marino if (src) 4812*e4b17023SJohn Marino { 4813*e4b17023SJohn Marino if (rtx_equal_p (src, dest)) 4814*e4b17023SJohn Marino src_cost = src_regcost = -1; 4815*e4b17023SJohn Marino else 4816*e4b17023SJohn Marino { 4817*e4b17023SJohn Marino src_cost = COST (src); 4818*e4b17023SJohn Marino src_regcost = approx_reg_cost (src); 4819*e4b17023SJohn Marino } 4820*e4b17023SJohn Marino } 4821*e4b17023SJohn Marino 4822*e4b17023SJohn Marino if (src_eqv_here) 4823*e4b17023SJohn Marino { 4824*e4b17023SJohn Marino if (rtx_equal_p (src_eqv_here, dest)) 4825*e4b17023SJohn Marino src_eqv_cost = src_eqv_regcost = -1; 4826*e4b17023SJohn Marino else 4827*e4b17023SJohn Marino { 4828*e4b17023SJohn Marino src_eqv_cost = COST (src_eqv_here); 4829*e4b17023SJohn Marino src_eqv_regcost = approx_reg_cost (src_eqv_here); 4830*e4b17023SJohn Marino } 4831*e4b17023SJohn Marino } 4832*e4b17023SJohn Marino 4833*e4b17023SJohn Marino if (src_folded) 4834*e4b17023SJohn Marino { 4835*e4b17023SJohn Marino if (rtx_equal_p (src_folded, dest)) 4836*e4b17023SJohn Marino src_folded_cost = src_folded_regcost = -1; 4837*e4b17023SJohn Marino else 4838*e4b17023SJohn Marino { 4839*e4b17023SJohn Marino src_folded_cost = COST (src_folded); 4840*e4b17023SJohn Marino src_folded_regcost = approx_reg_cost (src_folded); 4841*e4b17023SJohn Marino } 4842*e4b17023SJohn Marino } 4843*e4b17023SJohn Marino 4844*e4b17023SJohn Marino if (src_related) 4845*e4b17023SJohn Marino { 4846*e4b17023SJohn Marino if (rtx_equal_p (src_related, dest)) 4847*e4b17023SJohn Marino src_related_cost = src_related_regcost = -1; 4848*e4b17023SJohn Marino else 4849*e4b17023SJohn Marino { 4850*e4b17023SJohn Marino src_related_cost = COST (src_related); 4851*e4b17023SJohn Marino src_related_regcost = approx_reg_cost (src_related); 4852*e4b17023SJohn Marino 4853*e4b17023SJohn Marino /* If a const-anchor is used to synthesize a constant that 4854*e4b17023SJohn Marino normally requires multiple instructions then slightly prefer 4855*e4b17023SJohn Marino it over the original sequence. These instructions are likely 4856*e4b17023SJohn Marino to become redundant now. We can't compare against the cost 4857*e4b17023SJohn Marino of src_eqv_here because, on MIPS for example, multi-insn 4858*e4b17023SJohn Marino constants have zero cost; they are assumed to be hoisted from 4859*e4b17023SJohn Marino loops. */ 4860*e4b17023SJohn Marino if (src_related_is_const_anchor 4861*e4b17023SJohn Marino && src_related_cost == src_cost 4862*e4b17023SJohn Marino && src_eqv_here) 4863*e4b17023SJohn Marino src_related_cost--; 4864*e4b17023SJohn Marino } 4865*e4b17023SJohn Marino } 4866*e4b17023SJohn Marino 4867*e4b17023SJohn Marino /* If this was an indirect jump insn, a known label will really be 4868*e4b17023SJohn Marino cheaper even though it looks more expensive. */ 4869*e4b17023SJohn Marino if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF) 4870*e4b17023SJohn Marino src_folded = src_const, src_folded_cost = src_folded_regcost = -1; 4871*e4b17023SJohn Marino 4872*e4b17023SJohn Marino /* Terminate loop when replacement made. This must terminate since 4873*e4b17023SJohn Marino the current contents will be tested and will always be valid. */ 4874*e4b17023SJohn Marino while (1) 4875*e4b17023SJohn Marino { 4876*e4b17023SJohn Marino rtx trial; 4877*e4b17023SJohn Marino 4878*e4b17023SJohn Marino /* Skip invalid entries. */ 4879*e4b17023SJohn Marino while (elt && !REG_P (elt->exp) 4880*e4b17023SJohn Marino && ! 
exp_equiv_p (elt->exp, elt->exp, 1, false)) 4881*e4b17023SJohn Marino elt = elt->next_same_value; 4882*e4b17023SJohn Marino 4883*e4b17023SJohn Marino /* A paradoxical subreg would be bad here: it'll be the right 4884*e4b17023SJohn Marino size, but later may be adjusted so that the upper bits aren't 4885*e4b17023SJohn Marino what we want. So reject it. */ 4886*e4b17023SJohn Marino if (elt != 0 4887*e4b17023SJohn Marino && paradoxical_subreg_p (elt->exp) 4888*e4b17023SJohn Marino /* It is okay, though, if the rtx we're trying to match 4889*e4b17023SJohn Marino will ignore any of the bits we can't predict. */ 4890*e4b17023SJohn Marino && ! (src != 0 4891*e4b17023SJohn Marino && GET_CODE (src) == SUBREG 4892*e4b17023SJohn Marino && GET_MODE (src) == GET_MODE (elt->exp) 4893*e4b17023SJohn Marino && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))) 4894*e4b17023SJohn Marino < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp)))))) 4895*e4b17023SJohn Marino { 4896*e4b17023SJohn Marino elt = elt->next_same_value; 4897*e4b17023SJohn Marino continue; 4898*e4b17023SJohn Marino } 4899*e4b17023SJohn Marino 4900*e4b17023SJohn Marino if (elt) 4901*e4b17023SJohn Marino { 4902*e4b17023SJohn Marino src_elt_cost = elt->cost; 4903*e4b17023SJohn Marino src_elt_regcost = elt->regcost; 4904*e4b17023SJohn Marino } 4905*e4b17023SJohn Marino 4906*e4b17023SJohn Marino /* Find cheapest and skip it for the next time. For items 4907*e4b17023SJohn Marino of equal cost, use this order: 4908*e4b17023SJohn Marino src_folded, src, src_eqv, src_related and hash table entry. */ 4909*e4b17023SJohn Marino if (src_folded 4910*e4b17023SJohn Marino && preferable (src_folded_cost, src_folded_regcost, 4911*e4b17023SJohn Marino src_cost, src_regcost) <= 0 4912*e4b17023SJohn Marino && preferable (src_folded_cost, src_folded_regcost, 4913*e4b17023SJohn Marino src_eqv_cost, src_eqv_regcost) <= 0 4914*e4b17023SJohn Marino && preferable (src_folded_cost, src_folded_regcost, 4915*e4b17023SJohn Marino src_related_cost, src_related_regcost) <= 0 4916*e4b17023SJohn Marino && preferable (src_folded_cost, src_folded_regcost, 4917*e4b17023SJohn Marino src_elt_cost, src_elt_regcost) <= 0) 4918*e4b17023SJohn Marino { 4919*e4b17023SJohn Marino trial = src_folded, src_folded_cost = MAX_COST; 4920*e4b17023SJohn Marino if (src_folded_force_flag) 4921*e4b17023SJohn Marino { 4922*e4b17023SJohn Marino rtx forced = force_const_mem (mode, trial); 4923*e4b17023SJohn Marino if (forced) 4924*e4b17023SJohn Marino trial = forced; 4925*e4b17023SJohn Marino } 4926*e4b17023SJohn Marino } 4927*e4b17023SJohn Marino else if (src 4928*e4b17023SJohn Marino && preferable (src_cost, src_regcost, 4929*e4b17023SJohn Marino src_eqv_cost, src_eqv_regcost) <= 0 4930*e4b17023SJohn Marino && preferable (src_cost, src_regcost, 4931*e4b17023SJohn Marino src_related_cost, src_related_regcost) <= 0 4932*e4b17023SJohn Marino && preferable (src_cost, src_regcost, 4933*e4b17023SJohn Marino src_elt_cost, src_elt_regcost) <= 0) 4934*e4b17023SJohn Marino trial = src, src_cost = MAX_COST; 4935*e4b17023SJohn Marino else if (src_eqv_here 4936*e4b17023SJohn Marino && preferable (src_eqv_cost, src_eqv_regcost, 4937*e4b17023SJohn Marino src_related_cost, src_related_regcost) <= 0 4938*e4b17023SJohn Marino && preferable (src_eqv_cost, src_eqv_regcost, 4939*e4b17023SJohn Marino src_elt_cost, src_elt_regcost) <= 0) 4940*e4b17023SJohn Marino trial = src_eqv_here, src_eqv_cost = MAX_COST; 4941*e4b17023SJohn Marino else if (src_related 4942*e4b17023SJohn Marino && preferable (src_related_cost, 
src_related_regcost, 4943*e4b17023SJohn Marino src_elt_cost, src_elt_regcost) <= 0) 4944*e4b17023SJohn Marino trial = src_related, src_related_cost = MAX_COST; 4945*e4b17023SJohn Marino else 4946*e4b17023SJohn Marino { 4947*e4b17023SJohn Marino trial = elt->exp; 4948*e4b17023SJohn Marino elt = elt->next_same_value; 4949*e4b17023SJohn Marino src_elt_cost = MAX_COST; 4950*e4b17023SJohn Marino } 4951*e4b17023SJohn Marino 4952*e4b17023SJohn Marino /* Avoid creation of overlapping memory moves. */ 4953*e4b17023SJohn Marino if (MEM_P (trial) && MEM_P (SET_DEST (sets[i].rtl))) 4954*e4b17023SJohn Marino { 4955*e4b17023SJohn Marino rtx src, dest; 4956*e4b17023SJohn Marino 4957*e4b17023SJohn Marino /* BLKmode moves are not handled by cse anyway. */ 4958*e4b17023SJohn Marino if (GET_MODE (trial) == BLKmode) 4959*e4b17023SJohn Marino break; 4960*e4b17023SJohn Marino 4961*e4b17023SJohn Marino src = canon_rtx (trial); 4962*e4b17023SJohn Marino dest = canon_rtx (SET_DEST (sets[i].rtl)); 4963*e4b17023SJohn Marino 4964*e4b17023SJohn Marino if (!MEM_P (src) || !MEM_P (dest) 4965*e4b17023SJohn Marino || !nonoverlapping_memrefs_p (src, dest, false)) 4966*e4b17023SJohn Marino break; 4967*e4b17023SJohn Marino } 4968*e4b17023SJohn Marino 4969*e4b17023SJohn Marino /* Try to optimize 4970*e4b17023SJohn Marino (set (reg:M N) (const_int A)) 4971*e4b17023SJohn Marino (set (reg:M2 O) (const_int B)) 4972*e4b17023SJohn Marino (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D)) 4973*e4b17023SJohn Marino (reg:M2 O)). */ 4974*e4b17023SJohn Marino if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT 4975*e4b17023SJohn Marino && CONST_INT_P (trial) 4976*e4b17023SJohn Marino && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1)) 4977*e4b17023SJohn Marino && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2)) 4978*e4b17023SJohn Marino && REG_P (XEXP (SET_DEST (sets[i].rtl), 0)) 4979*e4b17023SJohn Marino && (GET_MODE_PRECISION (GET_MODE (SET_DEST (sets[i].rtl))) 4980*e4b17023SJohn Marino >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))) 4981*e4b17023SJohn Marino && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)) 4982*e4b17023SJohn Marino + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2)) 4983*e4b17023SJohn Marino <= HOST_BITS_PER_WIDE_INT)) 4984*e4b17023SJohn Marino { 4985*e4b17023SJohn Marino rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0); 4986*e4b17023SJohn Marino rtx width = XEXP (SET_DEST (sets[i].rtl), 1); 4987*e4b17023SJohn Marino rtx pos = XEXP (SET_DEST (sets[i].rtl), 2); 4988*e4b17023SJohn Marino unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg)); 4989*e4b17023SJohn Marino struct table_elt *dest_elt 4990*e4b17023SJohn Marino = lookup (dest_reg, dest_hash, GET_MODE (dest_reg)); 4991*e4b17023SJohn Marino rtx dest_cst = NULL; 4992*e4b17023SJohn Marino 4993*e4b17023SJohn Marino if (dest_elt) 4994*e4b17023SJohn Marino for (p = dest_elt->first_same_value; p; p = p->next_same_value) 4995*e4b17023SJohn Marino if (p->is_const && CONST_INT_P (p->exp)) 4996*e4b17023SJohn Marino { 4997*e4b17023SJohn Marino dest_cst = p->exp; 4998*e4b17023SJohn Marino break; 4999*e4b17023SJohn Marino } 5000*e4b17023SJohn Marino if (dest_cst) 5001*e4b17023SJohn Marino { 5002*e4b17023SJohn Marino HOST_WIDE_INT val = INTVAL (dest_cst); 5003*e4b17023SJohn Marino HOST_WIDE_INT mask; 5004*e4b17023SJohn Marino unsigned int shift; 5005*e4b17023SJohn Marino if (BITS_BIG_ENDIAN) 5006*e4b17023SJohn Marino shift = GET_MODE_PRECISION (GET_MODE (dest_reg)) 5007*e4b17023SJohn Marino - INTVAL (pos) - INTVAL (width); 5008*e4b17023SJohn Marino else 
5009*e4b17023SJohn Marino shift = INTVAL (pos); 5010*e4b17023SJohn Marino if (INTVAL (width) == HOST_BITS_PER_WIDE_INT) 5011*e4b17023SJohn Marino mask = ~(HOST_WIDE_INT) 0; 5012*e4b17023SJohn Marino else 5013*e4b17023SJohn Marino mask = ((HOST_WIDE_INT) 1 << INTVAL (width)) - 1; 5014*e4b17023SJohn Marino val &= ~(mask << shift); 5015*e4b17023SJohn Marino val |= (INTVAL (trial) & mask) << shift; 5016*e4b17023SJohn Marino val = trunc_int_for_mode (val, GET_MODE (dest_reg)); 5017*e4b17023SJohn Marino validate_unshare_change (insn, &SET_DEST (sets[i].rtl), 5018*e4b17023SJohn Marino dest_reg, 1); 5019*e4b17023SJohn Marino validate_unshare_change (insn, &SET_SRC (sets[i].rtl), 5020*e4b17023SJohn Marino GEN_INT (val), 1); 5021*e4b17023SJohn Marino if (apply_change_group ()) 5022*e4b17023SJohn Marino { 5023*e4b17023SJohn Marino rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX); 5024*e4b17023SJohn Marino if (note) 5025*e4b17023SJohn Marino { 5026*e4b17023SJohn Marino remove_note (insn, note); 5027*e4b17023SJohn Marino df_notes_rescan (insn); 5028*e4b17023SJohn Marino } 5029*e4b17023SJohn Marino src_eqv = NULL_RTX; 5030*e4b17023SJohn Marino src_eqv_elt = NULL; 5031*e4b17023SJohn Marino src_eqv_volatile = 0; 5032*e4b17023SJohn Marino src_eqv_in_memory = 0; 5033*e4b17023SJohn Marino src_eqv_hash = 0; 5034*e4b17023SJohn Marino repeat = true; 5035*e4b17023SJohn Marino break; 5036*e4b17023SJohn Marino } 5037*e4b17023SJohn Marino } 5038*e4b17023SJohn Marino } 5039*e4b17023SJohn Marino 5040*e4b17023SJohn Marino /* We don't normally have an insn matching (set (pc) (pc)), so 5041*e4b17023SJohn Marino check for this separately here. We will delete such an 5042*e4b17023SJohn Marino insn below. 5043*e4b17023SJohn Marino 5044*e4b17023SJohn Marino For other cases such as a table jump or conditional jump 5045*e4b17023SJohn Marino where we know the ultimate target, go ahead and replace the 5046*e4b17023SJohn Marino operand. While that may not make a valid insn, we will 5047*e4b17023SJohn Marino reemit the jump below (and also insert any necessary 5048*e4b17023SJohn Marino barriers). */ 5049*e4b17023SJohn Marino if (n_sets == 1 && dest == pc_rtx 5050*e4b17023SJohn Marino && (trial == pc_rtx 5051*e4b17023SJohn Marino || (GET_CODE (trial) == LABEL_REF 5052*e4b17023SJohn Marino && ! condjump_p (insn)))) 5053*e4b17023SJohn Marino { 5054*e4b17023SJohn Marino /* Don't substitute non-local labels, this confuses CFG. */ 5055*e4b17023SJohn Marino if (GET_CODE (trial) == LABEL_REF 5056*e4b17023SJohn Marino && LABEL_REF_NONLOCAL_P (trial)) 5057*e4b17023SJohn Marino continue; 5058*e4b17023SJohn Marino 5059*e4b17023SJohn Marino SET_SRC (sets[i].rtl) = trial; 5060*e4b17023SJohn Marino cse_jumps_altered = true; 5061*e4b17023SJohn Marino break; 5062*e4b17023SJohn Marino } 5063*e4b17023SJohn Marino 5064*e4b17023SJohn Marino /* Reject certain invalid forms of CONST that we create. */ 5065*e4b17023SJohn Marino else if (CONSTANT_P (trial) 5066*e4b17023SJohn Marino && GET_CODE (trial) == CONST 5067*e4b17023SJohn Marino /* Reject cases that will cause decode_rtx_const to 5068*e4b17023SJohn Marino die. On the alpha when simplifying a switch, we 5069*e4b17023SJohn Marino get (const (truncate (minus (label_ref) 5070*e4b17023SJohn Marino (label_ref)))). */ 5071*e4b17023SJohn Marino && (GET_CODE (XEXP (trial, 0)) == TRUNCATE 5072*e4b17023SJohn Marino /* Likewise on IA-64, except without the 5073*e4b17023SJohn Marino truncate. 
*/ 5074*e4b17023SJohn Marino || (GET_CODE (XEXP (trial, 0)) == MINUS 5075*e4b17023SJohn Marino && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF 5076*e4b17023SJohn Marino && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF))) 5077*e4b17023SJohn Marino /* Do nothing for this case. */ 5078*e4b17023SJohn Marino ; 5079*e4b17023SJohn Marino 5080*e4b17023SJohn Marino /* Look for a substitution that makes a valid insn. */ 5081*e4b17023SJohn Marino else if (validate_unshare_change 5082*e4b17023SJohn Marino (insn, &SET_SRC (sets[i].rtl), trial, 0)) 5083*e4b17023SJohn Marino { 5084*e4b17023SJohn Marino rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn); 5085*e4b17023SJohn Marino 5086*e4b17023SJohn Marino /* The result of apply_change_group can be ignored; see 5087*e4b17023SJohn Marino canon_reg. */ 5088*e4b17023SJohn Marino 5089*e4b17023SJohn Marino validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1); 5090*e4b17023SJohn Marino apply_change_group (); 5091*e4b17023SJohn Marino 5092*e4b17023SJohn Marino break; 5093*e4b17023SJohn Marino } 5094*e4b17023SJohn Marino 5095*e4b17023SJohn Marino /* If we previously found constant pool entries for 5096*e4b17023SJohn Marino constants and this is a constant, try making a 5097*e4b17023SJohn Marino pool entry. Put it in src_folded unless we already have done 5098*e4b17023SJohn Marino this since that is where it likely came from. */ 5099*e4b17023SJohn Marino 5100*e4b17023SJohn Marino else if (constant_pool_entries_cost 5101*e4b17023SJohn Marino && CONSTANT_P (trial) 5102*e4b17023SJohn Marino && (src_folded == 0 5103*e4b17023SJohn Marino || (!MEM_P (src_folded) 5104*e4b17023SJohn Marino && ! src_folded_force_flag)) 5105*e4b17023SJohn Marino && GET_MODE_CLASS (mode) != MODE_CC 5106*e4b17023SJohn Marino && mode != VOIDmode) 5107*e4b17023SJohn Marino { 5108*e4b17023SJohn Marino src_folded_force_flag = 1; 5109*e4b17023SJohn Marino src_folded = trial; 5110*e4b17023SJohn Marino src_folded_cost = constant_pool_entries_cost; 5111*e4b17023SJohn Marino src_folded_regcost = constant_pool_entries_regcost; 5112*e4b17023SJohn Marino } 5113*e4b17023SJohn Marino } 5114*e4b17023SJohn Marino 5115*e4b17023SJohn Marino /* If we changed the insn too much, handle this set from scratch. */ 5116*e4b17023SJohn Marino if (repeat) 5117*e4b17023SJohn Marino { 5118*e4b17023SJohn Marino i--; 5119*e4b17023SJohn Marino continue; 5120*e4b17023SJohn Marino } 5121*e4b17023SJohn Marino 5122*e4b17023SJohn Marino src = SET_SRC (sets[i].rtl); 5123*e4b17023SJohn Marino 5124*e4b17023SJohn Marino /* In general, it is good to have a SET with SET_SRC == SET_DEST. 5125*e4b17023SJohn Marino However, there is an important exception: If both are registers 5126*e4b17023SJohn Marino that are not the head of their equivalence class, replace SET_SRC 5127*e4b17023SJohn Marino with the head of the class. If we do not do this, we will have 5128*e4b17023SJohn Marino both registers live over a portion of the basic block. This way, 5129*e4b17023SJohn Marino their lifetimes will likely abut instead of overlapping. 
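     For instance, if pseudos 120 and 130 share a quantity whose first_reg
     is 120, a set of 130 from 130 is rewritten as a copy from 120, the head
     of the class.  (The register numbers here are only illustrative.)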
*/ 5130*e4b17023SJohn Marino if (REG_P (dest) 5131*e4b17023SJohn Marino && REGNO_QTY_VALID_P (REGNO (dest))) 5132*e4b17023SJohn Marino { 5133*e4b17023SJohn Marino int dest_q = REG_QTY (REGNO (dest)); 5134*e4b17023SJohn Marino struct qty_table_elem *dest_ent = &qty_table[dest_q]; 5135*e4b17023SJohn Marino 5136*e4b17023SJohn Marino if (dest_ent->mode == GET_MODE (dest) 5137*e4b17023SJohn Marino && dest_ent->first_reg != REGNO (dest) 5138*e4b17023SJohn Marino && REG_P (src) && REGNO (src) == REGNO (dest) 5139*e4b17023SJohn Marino /* Don't do this if the original insn had a hard reg as 5140*e4b17023SJohn Marino SET_SRC or SET_DEST. */ 5141*e4b17023SJohn Marino && (!REG_P (sets[i].src) 5142*e4b17023SJohn Marino || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER) 5143*e4b17023SJohn Marino && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)) 5144*e4b17023SJohn Marino /* We can't call canon_reg here because it won't do anything if 5145*e4b17023SJohn Marino SRC is a hard register. */ 5146*e4b17023SJohn Marino { 5147*e4b17023SJohn Marino int src_q = REG_QTY (REGNO (src)); 5148*e4b17023SJohn Marino struct qty_table_elem *src_ent = &qty_table[src_q]; 5149*e4b17023SJohn Marino int first = src_ent->first_reg; 5150*e4b17023SJohn Marino rtx new_src 5151*e4b17023SJohn Marino = (first >= FIRST_PSEUDO_REGISTER 5152*e4b17023SJohn Marino ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first)); 5153*e4b17023SJohn Marino 5154*e4b17023SJohn Marino /* We must use validate-change even for this, because this 5155*e4b17023SJohn Marino might be a special no-op instruction, suitable only to 5156*e4b17023SJohn Marino tag notes onto. */ 5157*e4b17023SJohn Marino if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0)) 5158*e4b17023SJohn Marino { 5159*e4b17023SJohn Marino src = new_src; 5160*e4b17023SJohn Marino /* If we had a constant that is cheaper than what we are now 5161*e4b17023SJohn Marino setting SRC to, use that constant. We ignored it when we 5162*e4b17023SJohn Marino thought we could make this into a no-op. */ 5163*e4b17023SJohn Marino if (src_const && COST (src_const) < COST (src) 5164*e4b17023SJohn Marino && validate_change (insn, &SET_SRC (sets[i].rtl), 5165*e4b17023SJohn Marino src_const, 0)) 5166*e4b17023SJohn Marino src = src_const; 5167*e4b17023SJohn Marino } 5168*e4b17023SJohn Marino } 5169*e4b17023SJohn Marino } 5170*e4b17023SJohn Marino 5171*e4b17023SJohn Marino /* If we made a change, recompute SRC values. */ 5172*e4b17023SJohn Marino if (src != sets[i].src) 5173*e4b17023SJohn Marino { 5174*e4b17023SJohn Marino do_not_record = 0; 5175*e4b17023SJohn Marino hash_arg_in_memory = 0; 5176*e4b17023SJohn Marino sets[i].src = src; 5177*e4b17023SJohn Marino sets[i].src_hash = HASH (src, mode); 5178*e4b17023SJohn Marino sets[i].src_volatile = do_not_record; 5179*e4b17023SJohn Marino sets[i].src_in_memory = hash_arg_in_memory; 5180*e4b17023SJohn Marino sets[i].src_elt = lookup (src, sets[i].src_hash, mode); 5181*e4b17023SJohn Marino } 5182*e4b17023SJohn Marino 5183*e4b17023SJohn Marino /* If this is a single SET, we are setting a register, and we have an 5184*e4b17023SJohn Marino equivalent constant, we want to add a REG_NOTE. We don't want 5185*e4b17023SJohn Marino to write a REG_EQUAL note for a constant pseudo since verifying that 5186*e4b17023SJohn Marino that pseudo hasn't been eliminated is a pain. Such a note also 5187*e4b17023SJohn Marino won't help anything. 
5188*e4b17023SJohn Marino 5189*e4b17023SJohn Marino Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF))) 5190*e4b17023SJohn Marino which can be created for a reference to a compile time computable 5191*e4b17023SJohn Marino entry in a jump table. */ 5192*e4b17023SJohn Marino 5193*e4b17023SJohn Marino if (n_sets == 1 && src_const && REG_P (dest) 5194*e4b17023SJohn Marino && !REG_P (src_const) 5195*e4b17023SJohn Marino && ! (GET_CODE (src_const) == CONST 5196*e4b17023SJohn Marino && GET_CODE (XEXP (src_const, 0)) == MINUS 5197*e4b17023SJohn Marino && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF 5198*e4b17023SJohn Marino && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF)) 5199*e4b17023SJohn Marino { 5200*e4b17023SJohn Marino /* We only want a REG_EQUAL note if src_const != src. */ 5201*e4b17023SJohn Marino if (! rtx_equal_p (src, src_const)) 5202*e4b17023SJohn Marino { 5203*e4b17023SJohn Marino /* Make sure that the rtx is not shared. */ 5204*e4b17023SJohn Marino src_const = copy_rtx (src_const); 5205*e4b17023SJohn Marino 5206*e4b17023SJohn Marino /* Record the actual constant value in a REG_EQUAL note, 5207*e4b17023SJohn Marino making a new one if one does not already exist. */ 5208*e4b17023SJohn Marino set_unique_reg_note (insn, REG_EQUAL, src_const); 5209*e4b17023SJohn Marino df_notes_rescan (insn); 5210*e4b17023SJohn Marino } 5211*e4b17023SJohn Marino } 5212*e4b17023SJohn Marino 5213*e4b17023SJohn Marino /* Now deal with the destination. */ 5214*e4b17023SJohn Marino do_not_record = 0; 5215*e4b17023SJohn Marino 5216*e4b17023SJohn Marino /* Look within any ZERO_EXTRACT to the MEM or REG within it. */ 5217*e4b17023SJohn Marino while (GET_CODE (dest) == SUBREG 5218*e4b17023SJohn Marino || GET_CODE (dest) == ZERO_EXTRACT 5219*e4b17023SJohn Marino || GET_CODE (dest) == STRICT_LOW_PART) 5220*e4b17023SJohn Marino dest = XEXP (dest, 0); 5221*e4b17023SJohn Marino 5222*e4b17023SJohn Marino sets[i].inner_dest = dest; 5223*e4b17023SJohn Marino 5224*e4b17023SJohn Marino if (MEM_P (dest)) 5225*e4b17023SJohn Marino { 5226*e4b17023SJohn Marino #ifdef PUSH_ROUNDING 5227*e4b17023SJohn Marino /* Stack pushes invalidate the stack pointer. */ 5228*e4b17023SJohn Marino rtx addr = XEXP (dest, 0); 5229*e4b17023SJohn Marino if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC 5230*e4b17023SJohn Marino && XEXP (addr, 0) == stack_pointer_rtx) 5231*e4b17023SJohn Marino invalidate (stack_pointer_rtx, VOIDmode); 5232*e4b17023SJohn Marino #endif 5233*e4b17023SJohn Marino dest = fold_rtx (dest, insn); 5234*e4b17023SJohn Marino } 5235*e4b17023SJohn Marino 5236*e4b17023SJohn Marino /* Compute the hash code of the destination now, 5237*e4b17023SJohn Marino before the effects of this instruction are recorded, 5238*e4b17023SJohn Marino since the register values used in the address computation 5239*e4b17023SJohn Marino are those before this instruction. */ 5240*e4b17023SJohn Marino sets[i].dest_hash = HASH (dest, mode); 5241*e4b17023SJohn Marino 5242*e4b17023SJohn Marino /* Don't enter a bit-field in the hash table 5243*e4b17023SJohn Marino because the value in it after the store 5244*e4b17023SJohn Marino may not equal what was stored, due to truncation. 
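     For example, storing the constant 5 into a 2-bit ZERO_EXTRACT leaves
     the field holding 1, so recording 5 as the field's value would be
     wrong.  The check below therefore records the value only when it is a
     constant that fits in the field's width.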
*/ 5245*e4b17023SJohn Marino 5246*e4b17023SJohn Marino if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT) 5247*e4b17023SJohn Marino { 5248*e4b17023SJohn Marino rtx width = XEXP (SET_DEST (sets[i].rtl), 1); 5249*e4b17023SJohn Marino 5250*e4b17023SJohn Marino if (src_const != 0 && CONST_INT_P (src_const) 5251*e4b17023SJohn Marino && CONST_INT_P (width) 5252*e4b17023SJohn Marino && INTVAL (width) < HOST_BITS_PER_WIDE_INT 5253*e4b17023SJohn Marino && ! (INTVAL (src_const) 5254*e4b17023SJohn Marino & ((HOST_WIDE_INT) (-1) << INTVAL (width)))) 5255*e4b17023SJohn Marino /* Exception: if the value is constant, 5256*e4b17023SJohn Marino and it won't be truncated, record it. */ 5257*e4b17023SJohn Marino ; 5258*e4b17023SJohn Marino else 5259*e4b17023SJohn Marino { 5260*e4b17023SJohn Marino /* This is chosen so that the destination will be invalidated 5261*e4b17023SJohn Marino but no new value will be recorded. 5262*e4b17023SJohn Marino We must invalidate because sometimes constant 5263*e4b17023SJohn Marino values can be recorded for bitfields. */ 5264*e4b17023SJohn Marino sets[i].src_elt = 0; 5265*e4b17023SJohn Marino sets[i].src_volatile = 1; 5266*e4b17023SJohn Marino src_eqv = 0; 5267*e4b17023SJohn Marino src_eqv_elt = 0; 5268*e4b17023SJohn Marino } 5269*e4b17023SJohn Marino } 5270*e4b17023SJohn Marino 5271*e4b17023SJohn Marino /* If only one set in a JUMP_INSN and it is now a no-op, we can delete 5272*e4b17023SJohn Marino the insn. */ 5273*e4b17023SJohn Marino else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx) 5274*e4b17023SJohn Marino { 5275*e4b17023SJohn Marino /* One less use of the label this insn used to jump to. */ 5276*e4b17023SJohn Marino delete_insn_and_edges (insn); 5277*e4b17023SJohn Marino cse_jumps_altered = true; 5278*e4b17023SJohn Marino /* No more processing for this set. */ 5279*e4b17023SJohn Marino sets[i].rtl = 0; 5280*e4b17023SJohn Marino } 5281*e4b17023SJohn Marino 5282*e4b17023SJohn Marino /* If this SET is now setting PC to a label, we know it used to 5283*e4b17023SJohn Marino be a conditional or computed branch. */ 5284*e4b17023SJohn Marino else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF 5285*e4b17023SJohn Marino && !LABEL_REF_NONLOCAL_P (src)) 5286*e4b17023SJohn Marino { 5287*e4b17023SJohn Marino /* We reemit the jump in as many cases as possible just in 5288*e4b17023SJohn Marino case the form of an unconditional jump is significantly 5289*e4b17023SJohn Marino different than a computed jump or conditional jump. 5290*e4b17023SJohn Marino 5291*e4b17023SJohn Marino If this insn has multiple sets, then reemitting the 5292*e4b17023SJohn Marino jump is nontrivial. So instead we just force rerecognition 5293*e4b17023SJohn Marino and hope for the best. */ 5294*e4b17023SJohn Marino if (n_sets == 1) 5295*e4b17023SJohn Marino { 5296*e4b17023SJohn Marino rtx new_rtx, note; 5297*e4b17023SJohn Marino 5298*e4b17023SJohn Marino new_rtx = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn); 5299*e4b17023SJohn Marino JUMP_LABEL (new_rtx) = XEXP (src, 0); 5300*e4b17023SJohn Marino LABEL_NUSES (XEXP (src, 0))++; 5301*e4b17023SJohn Marino 5302*e4b17023SJohn Marino /* Make sure to copy over REG_NON_LOCAL_GOTO. 
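                 If the old jump carried this note, the replacement jump
                 still represents the same non-local goto, so the note is
                 moved over to it.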
*/ 5303*e4b17023SJohn Marino note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0); 5304*e4b17023SJohn Marino if (note) 5305*e4b17023SJohn Marino { 5306*e4b17023SJohn Marino XEXP (note, 1) = NULL_RTX; 5307*e4b17023SJohn Marino REG_NOTES (new_rtx) = note; 5308*e4b17023SJohn Marino } 5309*e4b17023SJohn Marino 5310*e4b17023SJohn Marino delete_insn_and_edges (insn); 5311*e4b17023SJohn Marino insn = new_rtx; 5312*e4b17023SJohn Marino } 5313*e4b17023SJohn Marino else 5314*e4b17023SJohn Marino INSN_CODE (insn) = -1; 5315*e4b17023SJohn Marino 5316*e4b17023SJohn Marino /* Do not bother deleting any unreachable code, let jump do it. */ 5317*e4b17023SJohn Marino cse_jumps_altered = true; 5318*e4b17023SJohn Marino sets[i].rtl = 0; 5319*e4b17023SJohn Marino } 5320*e4b17023SJohn Marino 5321*e4b17023SJohn Marino /* If destination is volatile, invalidate it and then do no further 5322*e4b17023SJohn Marino processing for this assignment. */ 5323*e4b17023SJohn Marino 5324*e4b17023SJohn Marino else if (do_not_record) 5325*e4b17023SJohn Marino { 5326*e4b17023SJohn Marino if (REG_P (dest) || GET_CODE (dest) == SUBREG) 5327*e4b17023SJohn Marino invalidate (dest, VOIDmode); 5328*e4b17023SJohn Marino else if (MEM_P (dest)) 5329*e4b17023SJohn Marino invalidate (dest, VOIDmode); 5330*e4b17023SJohn Marino else if (GET_CODE (dest) == STRICT_LOW_PART 5331*e4b17023SJohn Marino || GET_CODE (dest) == ZERO_EXTRACT) 5332*e4b17023SJohn Marino invalidate (XEXP (dest, 0), GET_MODE (dest)); 5333*e4b17023SJohn Marino sets[i].rtl = 0; 5334*e4b17023SJohn Marino } 5335*e4b17023SJohn Marino 5336*e4b17023SJohn Marino if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl)) 5337*e4b17023SJohn Marino sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode); 5338*e4b17023SJohn Marino 5339*e4b17023SJohn Marino #ifdef HAVE_cc0 5340*e4b17023SJohn Marino /* If setting CC0, record what it was set to, or a constant, if it 5341*e4b17023SJohn Marino is equivalent to a constant. If it is being set to a floating-point 5342*e4b17023SJohn Marino value, make a COMPARE with the appropriate constant of 0. If we 5343*e4b17023SJohn Marino don't do this, later code can interpret this as a test against 5344*e4b17023SJohn Marino const0_rtx, which can cause problems if we try to put it into an 5345*e4b17023SJohn Marino insn as a floating-point operand. */ 5346*e4b17023SJohn Marino if (dest == cc0_rtx) 5347*e4b17023SJohn Marino { 5348*e4b17023SJohn Marino this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src; 5349*e4b17023SJohn Marino this_insn_cc0_mode = mode; 5350*e4b17023SJohn Marino if (FLOAT_MODE_P (mode)) 5351*e4b17023SJohn Marino this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0, 5352*e4b17023SJohn Marino CONST0_RTX (mode)); 5353*e4b17023SJohn Marino } 5354*e4b17023SJohn Marino #endif 5355*e4b17023SJohn Marino } 5356*e4b17023SJohn Marino 5357*e4b17023SJohn Marino /* Now enter all non-volatile source expressions in the hash table 5358*e4b17023SJohn Marino if they are not already present. 5359*e4b17023SJohn Marino Record their equivalence classes in src_elt. 5360*e4b17023SJohn Marino This way we can insert the corresponding destinations into 5361*e4b17023SJohn Marino the same classes even if the actual sources are no longer in them 5362*e4b17023SJohn Marino (having been invalidated). */ 5363*e4b17023SJohn Marino 5364*e4b17023SJohn Marino if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile 5365*e4b17023SJohn Marino && ! 
rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl))) 5366*e4b17023SJohn Marino { 5367*e4b17023SJohn Marino struct table_elt *elt; 5368*e4b17023SJohn Marino struct table_elt *classp = sets[0].src_elt; 5369*e4b17023SJohn Marino rtx dest = SET_DEST (sets[0].rtl); 5370*e4b17023SJohn Marino enum machine_mode eqvmode = GET_MODE (dest); 5371*e4b17023SJohn Marino 5372*e4b17023SJohn Marino if (GET_CODE (dest) == STRICT_LOW_PART) 5373*e4b17023SJohn Marino { 5374*e4b17023SJohn Marino eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0))); 5375*e4b17023SJohn Marino classp = 0; 5376*e4b17023SJohn Marino } 5377*e4b17023SJohn Marino if (insert_regs (src_eqv, classp, 0)) 5378*e4b17023SJohn Marino { 5379*e4b17023SJohn Marino rehash_using_reg (src_eqv); 5380*e4b17023SJohn Marino src_eqv_hash = HASH (src_eqv, eqvmode); 5381*e4b17023SJohn Marino } 5382*e4b17023SJohn Marino elt = insert (src_eqv, classp, src_eqv_hash, eqvmode); 5383*e4b17023SJohn Marino elt->in_memory = src_eqv_in_memory; 5384*e4b17023SJohn Marino src_eqv_elt = elt; 5385*e4b17023SJohn Marino 5386*e4b17023SJohn Marino /* Check to see if src_eqv_elt is the same as a set source which 5387*e4b17023SJohn Marino does not yet have an elt, and if so set the elt of the set source 5388*e4b17023SJohn Marino to src_eqv_elt. */ 5389*e4b17023SJohn Marino for (i = 0; i < n_sets; i++) 5390*e4b17023SJohn Marino if (sets[i].rtl && sets[i].src_elt == 0 5391*e4b17023SJohn Marino && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv)) 5392*e4b17023SJohn Marino sets[i].src_elt = src_eqv_elt; 5393*e4b17023SJohn Marino } 5394*e4b17023SJohn Marino 5395*e4b17023SJohn Marino for (i = 0; i < n_sets; i++) 5396*e4b17023SJohn Marino if (sets[i].rtl && ! sets[i].src_volatile 5397*e4b17023SJohn Marino && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl))) 5398*e4b17023SJohn Marino { 5399*e4b17023SJohn Marino if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART) 5400*e4b17023SJohn Marino { 5401*e4b17023SJohn Marino /* REG_EQUAL in setting a STRICT_LOW_PART 5402*e4b17023SJohn Marino gives an equivalent for the entire destination register, 5403*e4b17023SJohn Marino not just for the subreg being stored in now. 5404*e4b17023SJohn Marino This is a more interesting equivalence, so we arrange later 5405*e4b17023SJohn Marino to treat the entire reg as the destination. */ 5406*e4b17023SJohn Marino sets[i].src_elt = src_eqv_elt; 5407*e4b17023SJohn Marino sets[i].src_hash = src_eqv_hash; 5408*e4b17023SJohn Marino } 5409*e4b17023SJohn Marino else 5410*e4b17023SJohn Marino { 5411*e4b17023SJohn Marino /* Insert source and constant equivalent into hash table, if not 5412*e4b17023SJohn Marino already present. */ 5413*e4b17023SJohn Marino struct table_elt *classp = src_eqv_elt; 5414*e4b17023SJohn Marino rtx src = sets[i].src; 5415*e4b17023SJohn Marino rtx dest = SET_DEST (sets[i].rtl); 5416*e4b17023SJohn Marino enum machine_mode mode 5417*e4b17023SJohn Marino = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src); 5418*e4b17023SJohn Marino 5419*e4b17023SJohn Marino /* It's possible that we have a source value known to be 5420*e4b17023SJohn Marino constant but don't have a REG_EQUAL note on the insn. 5421*e4b17023SJohn Marino Lack of a note will mean src_eqv_elt will be NULL. This 5422*e4b17023SJohn Marino can happen where we've generated a SUBREG to access a 5423*e4b17023SJohn Marino CONST_INT that is already in a register in a wider mode. 5424*e4b17023SJohn Marino Ensure that the source expression is put in the proper 5425*e4b17023SJohn Marino constant class. 
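               Falling back to src_const_elt as the class below achieves
               that; the source itself is then inserted into the class if it
               has no element yet.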
*/ 5426*e4b17023SJohn Marino if (!classp) 5427*e4b17023SJohn Marino classp = sets[i].src_const_elt; 5428*e4b17023SJohn Marino 5429*e4b17023SJohn Marino if (sets[i].src_elt == 0) 5430*e4b17023SJohn Marino { 5431*e4b17023SJohn Marino struct table_elt *elt; 5432*e4b17023SJohn Marino 5433*e4b17023SJohn Marino /* Note that these insert_regs calls cannot remove 5434*e4b17023SJohn Marino any of the src_elt's, because they would have failed to 5435*e4b17023SJohn Marino match if not still valid. */ 5436*e4b17023SJohn Marino if (insert_regs (src, classp, 0)) 5437*e4b17023SJohn Marino { 5438*e4b17023SJohn Marino rehash_using_reg (src); 5439*e4b17023SJohn Marino sets[i].src_hash = HASH (src, mode); 5440*e4b17023SJohn Marino } 5441*e4b17023SJohn Marino elt = insert (src, classp, sets[i].src_hash, mode); 5442*e4b17023SJohn Marino elt->in_memory = sets[i].src_in_memory; 5443*e4b17023SJohn Marino sets[i].src_elt = classp = elt; 5444*e4b17023SJohn Marino } 5445*e4b17023SJohn Marino if (sets[i].src_const && sets[i].src_const_elt == 0 5446*e4b17023SJohn Marino && src != sets[i].src_const 5447*e4b17023SJohn Marino && ! rtx_equal_p (sets[i].src_const, src)) 5448*e4b17023SJohn Marino sets[i].src_elt = insert (sets[i].src_const, classp, 5449*e4b17023SJohn Marino sets[i].src_const_hash, mode); 5450*e4b17023SJohn Marino } 5451*e4b17023SJohn Marino } 5452*e4b17023SJohn Marino else if (sets[i].src_elt == 0) 5453*e4b17023SJohn Marino /* If we did not insert the source into the hash table (e.g., it was 5454*e4b17023SJohn Marino volatile), note the equivalence class for the REG_EQUAL value, if any, 5455*e4b17023SJohn Marino so that the destination goes into that class. */ 5456*e4b17023SJohn Marino sets[i].src_elt = src_eqv_elt; 5457*e4b17023SJohn Marino 5458*e4b17023SJohn Marino /* Record destination addresses in the hash table. This allows us to 5459*e4b17023SJohn Marino check if they are invalidated by other sets. */ 5460*e4b17023SJohn Marino for (i = 0; i < n_sets; i++) 5461*e4b17023SJohn Marino { 5462*e4b17023SJohn Marino if (sets[i].rtl) 5463*e4b17023SJohn Marino { 5464*e4b17023SJohn Marino rtx x = sets[i].inner_dest; 5465*e4b17023SJohn Marino struct table_elt *elt; 5466*e4b17023SJohn Marino enum machine_mode mode; 5467*e4b17023SJohn Marino unsigned hash; 5468*e4b17023SJohn Marino 5469*e4b17023SJohn Marino if (MEM_P (x)) 5470*e4b17023SJohn Marino { 5471*e4b17023SJohn Marino x = XEXP (x, 0); 5472*e4b17023SJohn Marino mode = GET_MODE (x); 5473*e4b17023SJohn Marino hash = HASH (x, mode); 5474*e4b17023SJohn Marino elt = lookup (x, hash, mode); 5475*e4b17023SJohn Marino if (!elt) 5476*e4b17023SJohn Marino { 5477*e4b17023SJohn Marino if (insert_regs (x, NULL, 0)) 5478*e4b17023SJohn Marino { 5479*e4b17023SJohn Marino rtx dest = SET_DEST (sets[i].rtl); 5480*e4b17023SJohn Marino 5481*e4b17023SJohn Marino rehash_using_reg (x); 5482*e4b17023SJohn Marino hash = HASH (x, mode); 5483*e4b17023SJohn Marino sets[i].dest_hash = HASH (dest, GET_MODE (dest)); 5484*e4b17023SJohn Marino } 5485*e4b17023SJohn Marino elt = insert (x, NULL, hash, mode); 5486*e4b17023SJohn Marino } 5487*e4b17023SJohn Marino 5488*e4b17023SJohn Marino sets[i].dest_addr_elt = elt; 5489*e4b17023SJohn Marino } 5490*e4b17023SJohn Marino else 5491*e4b17023SJohn Marino sets[i].dest_addr_elt = NULL; 5492*e4b17023SJohn Marino } 5493*e4b17023SJohn Marino } 5494*e4b17023SJohn Marino 5495*e4b17023SJohn Marino invalidate_from_clobbers (x); 5496*e4b17023SJohn Marino 5497*e4b17023SJohn Marino /* Some registers are invalidated by subroutine calls. 
Memory is 5498*e4b17023SJohn Marino invalidated by non-constant calls. */ 5499*e4b17023SJohn Marino 5500*e4b17023SJohn Marino if (CALL_P (insn)) 5501*e4b17023SJohn Marino { 5502*e4b17023SJohn Marino if (!(RTL_CONST_OR_PURE_CALL_P (insn))) 5503*e4b17023SJohn Marino invalidate_memory (); 5504*e4b17023SJohn Marino invalidate_for_call (); 5505*e4b17023SJohn Marino } 5506*e4b17023SJohn Marino 5507*e4b17023SJohn Marino /* Now invalidate everything set by this instruction. 5508*e4b17023SJohn Marino If a SUBREG or other funny destination is being set, 5509*e4b17023SJohn Marino sets[i].rtl is still nonzero, so here we invalidate the reg 5510*e4b17023SJohn Marino a part of which is being set. */ 5511*e4b17023SJohn Marino 5512*e4b17023SJohn Marino for (i = 0; i < n_sets; i++) 5513*e4b17023SJohn Marino if (sets[i].rtl) 5514*e4b17023SJohn Marino { 5515*e4b17023SJohn Marino /* We can't use the inner dest, because the mode associated with 5516*e4b17023SJohn Marino a ZERO_EXTRACT is significant. */ 5517*e4b17023SJohn Marino rtx dest = SET_DEST (sets[i].rtl); 5518*e4b17023SJohn Marino 5519*e4b17023SJohn Marino /* Needed for registers to remove the register from its 5520*e4b17023SJohn Marino previous quantity's chain. 5521*e4b17023SJohn Marino Needed for memory if this is a nonvarying address, unless 5522*e4b17023SJohn Marino we have just done an invalidate_memory that covers even those. */ 5523*e4b17023SJohn Marino if (REG_P (dest) || GET_CODE (dest) == SUBREG) 5524*e4b17023SJohn Marino invalidate (dest, VOIDmode); 5525*e4b17023SJohn Marino else if (MEM_P (dest)) 5526*e4b17023SJohn Marino invalidate (dest, VOIDmode); 5527*e4b17023SJohn Marino else if (GET_CODE (dest) == STRICT_LOW_PART 5528*e4b17023SJohn Marino || GET_CODE (dest) == ZERO_EXTRACT) 5529*e4b17023SJohn Marino invalidate (XEXP (dest, 0), GET_MODE (dest)); 5530*e4b17023SJohn Marino } 5531*e4b17023SJohn Marino 5532*e4b17023SJohn Marino /* A volatile ASM invalidates everything. */ 5533*e4b17023SJohn Marino if (NONJUMP_INSN_P (insn) 5534*e4b17023SJohn Marino && GET_CODE (PATTERN (insn)) == ASM_OPERANDS 5535*e4b17023SJohn Marino && MEM_VOLATILE_P (PATTERN (insn))) 5536*e4b17023SJohn Marino flush_hash_table (); 5537*e4b17023SJohn Marino 5538*e4b17023SJohn Marino /* Don't cse over a call to setjmp; on some machines (eg VAX) 5539*e4b17023SJohn Marino the regs restored by the longjmp come from a later time 5540*e4b17023SJohn Marino than the setjmp. */ 5541*e4b17023SJohn Marino if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL)) 5542*e4b17023SJohn Marino { 5543*e4b17023SJohn Marino flush_hash_table (); 5544*e4b17023SJohn Marino goto done; 5545*e4b17023SJohn Marino } 5546*e4b17023SJohn Marino 5547*e4b17023SJohn Marino /* Make sure registers mentioned in destinations 5548*e4b17023SJohn Marino are safe for use in an expression to be inserted. 5549*e4b17023SJohn Marino This removes from the hash table 5550*e4b17023SJohn Marino any invalid entry that refers to one of these registers. 5551*e4b17023SJohn Marino 5552*e4b17023SJohn Marino We don't care about the return value from mention_regs because 5553*e4b17023SJohn Marino we are going to hash the SET_DEST values unconditionally. 
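   The removal itself is easier to picture on a toy table in which every
   entry records the register numbers its expression mentions.  A minimal,
   self-contained sketch of that idea follows; the sketch_ names are
   invented for illustration and are not GCC functions, and the real table
   is additionally chained by hash value and by equivalence class.

     #define SKETCH_TABLE_SIZE 64
     #define SKETCH_MAX_REGS 4

     struct sketch_entry
     {
       int in_use;                    // nonzero if this slot holds an entry
       int n_regs;                    // how many registers the expression uses
       int regs[SKETCH_MAX_REGS];     // which register numbers it mentions
     };

     static struct sketch_entry sketch_table[SKETCH_TABLE_SIZE];

     // Drop every entry that mentions register REGNO, mimicking the effect
     // of remove_invalid_refs when that register's value changes.
     static void
     sketch_invalidate_reg (int regno)
     {
       int i, j;

       for (i = 0; i < SKETCH_TABLE_SIZE; i++)
         {
           if (!sketch_table[i].in_use)
             continue;
           for (j = 0; j < sketch_table[i].n_regs; j++)
             if (sketch_table[i].regs[j] == regno)
               {
                 sketch_table[i].in_use = 0;
                 break;
               }
         }
     }

   Once a destination register has been stored into, every cached expression
   that mentioned it is stale in exactly this sense, which is why the code
   below, for register destinations, walks each register covered by the
   destination and removes any stale references to it.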
*/ 5554*e4b17023SJohn Marino 5555*e4b17023SJohn Marino for (i = 0; i < n_sets; i++) 5556*e4b17023SJohn Marino { 5557*e4b17023SJohn Marino if (sets[i].rtl) 5558*e4b17023SJohn Marino { 5559*e4b17023SJohn Marino rtx x = SET_DEST (sets[i].rtl); 5560*e4b17023SJohn Marino 5561*e4b17023SJohn Marino if (!REG_P (x)) 5562*e4b17023SJohn Marino mention_regs (x); 5563*e4b17023SJohn Marino else 5564*e4b17023SJohn Marino { 5565*e4b17023SJohn Marino /* We used to rely on all references to a register becoming 5566*e4b17023SJohn Marino inaccessible when a register changes to a new quantity, 5567*e4b17023SJohn Marino since that changes the hash code. However, that is not 5568*e4b17023SJohn Marino safe, since after HASH_SIZE new quantities we get a 5569*e4b17023SJohn Marino hash 'collision' of a register with its own invalid 5570*e4b17023SJohn Marino entries. And since SUBREGs have been changed not to 5571*e4b17023SJohn Marino change their hash code with the hash code of the register, 5572*e4b17023SJohn Marino it wouldn't work any longer at all. So we have to check 5573*e4b17023SJohn Marino for any invalid references lying around now. 5574*e4b17023SJohn Marino This code is similar to the REG case in mention_regs, 5575*e4b17023SJohn Marino but it knows that reg_tick has been incremented, and 5576*e4b17023SJohn Marino it leaves reg_in_table as -1 . */ 5577*e4b17023SJohn Marino unsigned int regno = REGNO (x); 5578*e4b17023SJohn Marino unsigned int endregno = END_REGNO (x); 5579*e4b17023SJohn Marino unsigned int i; 5580*e4b17023SJohn Marino 5581*e4b17023SJohn Marino for (i = regno; i < endregno; i++) 5582*e4b17023SJohn Marino { 5583*e4b17023SJohn Marino if (REG_IN_TABLE (i) >= 0) 5584*e4b17023SJohn Marino { 5585*e4b17023SJohn Marino remove_invalid_refs (i); 5586*e4b17023SJohn Marino REG_IN_TABLE (i) = -1; 5587*e4b17023SJohn Marino } 5588*e4b17023SJohn Marino } 5589*e4b17023SJohn Marino } 5590*e4b17023SJohn Marino } 5591*e4b17023SJohn Marino } 5592*e4b17023SJohn Marino 5593*e4b17023SJohn Marino /* We may have just removed some of the src_elt's from the hash table. 5594*e4b17023SJohn Marino So replace each one with the current head of the same class. 5595*e4b17023SJohn Marino Also check if destination addresses have been removed. */ 5596*e4b17023SJohn Marino 5597*e4b17023SJohn Marino for (i = 0; i < n_sets; i++) 5598*e4b17023SJohn Marino if (sets[i].rtl) 5599*e4b17023SJohn Marino { 5600*e4b17023SJohn Marino if (sets[i].dest_addr_elt 5601*e4b17023SJohn Marino && sets[i].dest_addr_elt->first_same_value == 0) 5602*e4b17023SJohn Marino { 5603*e4b17023SJohn Marino /* The elt was removed, which means this destination is not 5604*e4b17023SJohn Marino valid after this instruction. */ 5605*e4b17023SJohn Marino sets[i].rtl = NULL_RTX; 5606*e4b17023SJohn Marino } 5607*e4b17023SJohn Marino else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0) 5608*e4b17023SJohn Marino /* If elt was removed, find current head of same class, 5609*e4b17023SJohn Marino or 0 if nothing remains of that class. */ 5610*e4b17023SJohn Marino { 5611*e4b17023SJohn Marino struct table_elt *elt = sets[i].src_elt; 5612*e4b17023SJohn Marino 5613*e4b17023SJohn Marino while (elt && elt->prev_same_value) 5614*e4b17023SJohn Marino elt = elt->prev_same_value; 5615*e4b17023SJohn Marino 5616*e4b17023SJohn Marino while (elt && elt->first_same_value == 0) 5617*e4b17023SJohn Marino elt = elt->next_same_value; 5618*e4b17023SJohn Marino sets[i].src_elt = elt ? 
elt->first_same_value : 0; 5619*e4b17023SJohn Marino } 5620*e4b17023SJohn Marino } 5621*e4b17023SJohn Marino 5622*e4b17023SJohn Marino /* Now insert the destinations into their equivalence classes. */ 5623*e4b17023SJohn Marino 5624*e4b17023SJohn Marino for (i = 0; i < n_sets; i++) 5625*e4b17023SJohn Marino if (sets[i].rtl) 5626*e4b17023SJohn Marino { 5627*e4b17023SJohn Marino rtx dest = SET_DEST (sets[i].rtl); 5628*e4b17023SJohn Marino struct table_elt *elt; 5629*e4b17023SJohn Marino 5630*e4b17023SJohn Marino /* Don't record value if we are not supposed to risk allocating 5631*e4b17023SJohn Marino floating-point values in registers that might be wider than 5632*e4b17023SJohn Marino memory. */ 5633*e4b17023SJohn Marino if ((flag_float_store 5634*e4b17023SJohn Marino && MEM_P (dest) 5635*e4b17023SJohn Marino && FLOAT_MODE_P (GET_MODE (dest))) 5636*e4b17023SJohn Marino /* Don't record BLKmode values, because we don't know the 5637*e4b17023SJohn Marino size of it, and can't be sure that other BLKmode values 5638*e4b17023SJohn Marino have the same or smaller size. */ 5639*e4b17023SJohn Marino || GET_MODE (dest) == BLKmode 5640*e4b17023SJohn Marino /* If we didn't put a REG_EQUAL value or a source into the hash 5641*e4b17023SJohn Marino table, there is no point is recording DEST. */ 5642*e4b17023SJohn Marino || sets[i].src_elt == 0 5643*e4b17023SJohn Marino /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND 5644*e4b17023SJohn Marino or SIGN_EXTEND, don't record DEST since it can cause 5645*e4b17023SJohn Marino some tracking to be wrong. 5646*e4b17023SJohn Marino 5647*e4b17023SJohn Marino ??? Think about this more later. */ 5648*e4b17023SJohn Marino || (paradoxical_subreg_p (dest) 5649*e4b17023SJohn Marino && (GET_CODE (sets[i].src) == SIGN_EXTEND 5650*e4b17023SJohn Marino || GET_CODE (sets[i].src) == ZERO_EXTEND))) 5651*e4b17023SJohn Marino continue; 5652*e4b17023SJohn Marino 5653*e4b17023SJohn Marino /* STRICT_LOW_PART isn't part of the value BEING set, 5654*e4b17023SJohn Marino and neither is the SUBREG inside it. 5655*e4b17023SJohn Marino Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */ 5656*e4b17023SJohn Marino if (GET_CODE (dest) == STRICT_LOW_PART) 5657*e4b17023SJohn Marino dest = SUBREG_REG (XEXP (dest, 0)); 5658*e4b17023SJohn Marino 5659*e4b17023SJohn Marino if (REG_P (dest) || GET_CODE (dest) == SUBREG) 5660*e4b17023SJohn Marino /* Registers must also be inserted into chains for quantities. */ 5661*e4b17023SJohn Marino if (insert_regs (dest, sets[i].src_elt, 1)) 5662*e4b17023SJohn Marino { 5663*e4b17023SJohn Marino /* If `insert_regs' changes something, the hash code must be 5664*e4b17023SJohn Marino recalculated. */ 5665*e4b17023SJohn Marino rehash_using_reg (dest); 5666*e4b17023SJohn Marino sets[i].dest_hash = HASH (dest, GET_MODE (dest)); 5667*e4b17023SJohn Marino } 5668*e4b17023SJohn Marino 5669*e4b17023SJohn Marino elt = insert (dest, sets[i].src_elt, 5670*e4b17023SJohn Marino sets[i].dest_hash, GET_MODE (dest)); 5671*e4b17023SJohn Marino 5672*e4b17023SJohn Marino /* If this is a constant, insert the constant anchors with the 5673*e4b17023SJohn Marino equivalent register-offset expressions using register DEST. 
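   The anchoring itself is plain arithmetic: the target supplies an anchor
   unit A (a power of two) through targetm.const_anchor, the constant is
   split into a nearby multiple of A plus a small offset, and the multiples
   are what get entered as register-plus-offset equivalences.  A minimal
   sketch of that split, with an invented sketch_ name (the real work,
   including the second anchor just above the constant, is done by
   insert_const_anchors):

     // Split N as N == anchor + offset, where anchor is a multiple of A
     // and 0 <= offset < A.  A must be a power of two.
     static void
     sketch_split_const (long n, long a, long *anchor, long *offset)
     {
       *anchor = n & -a;
       *offset = n - *anchor;
     }

   For example, with A = 0x1000 and N = 0x12345 this yields anchor 0x12000
   and offset 0x345; if DEST is known to hold 0x12345, roughly speaking the
   table learns that 0x12000 equals DEST minus 0x345, so a later constant
   near N can be formed from DEST with one small addition instead of a full
   constant load.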
*/ 5674*e4b17023SJohn Marino if (targetm.const_anchor 5675*e4b17023SJohn Marino && REG_P (dest) 5676*e4b17023SJohn Marino && SCALAR_INT_MODE_P (GET_MODE (dest)) 5677*e4b17023SJohn Marino && GET_CODE (sets[i].src_elt->exp) == CONST_INT) 5678*e4b17023SJohn Marino insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest)); 5679*e4b17023SJohn Marino 5680*e4b17023SJohn Marino elt->in_memory = (MEM_P (sets[i].inner_dest) 5681*e4b17023SJohn Marino && !MEM_READONLY_P (sets[i].inner_dest)); 5682*e4b17023SJohn Marino 5683*e4b17023SJohn Marino /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no 5684*e4b17023SJohn Marino narrower than M2, and both M1 and M2 are the same number of words, 5685*e4b17023SJohn Marino we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so 5686*e4b17023SJohn Marino make that equivalence as well. 5687*e4b17023SJohn Marino 5688*e4b17023SJohn Marino However, BAR may have equivalences for which gen_lowpart 5689*e4b17023SJohn Marino will produce a simpler value than gen_lowpart applied to 5690*e4b17023SJohn Marino BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all 5691*e4b17023SJohn Marino BAR's equivalences. If we don't get a simplified form, make 5692*e4b17023SJohn Marino the SUBREG. It will not be used in an equivalence, but will 5693*e4b17023SJohn Marino cause two similar assignments to be detected. 5694*e4b17023SJohn Marino 5695*e4b17023SJohn Marino Note the loop below will find SUBREG_REG (DEST) since we have 5696*e4b17023SJohn Marino already entered SRC and DEST of the SET in the table. */ 5697*e4b17023SJohn Marino 5698*e4b17023SJohn Marino if (GET_CODE (dest) == SUBREG 5699*e4b17023SJohn Marino && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1) 5700*e4b17023SJohn Marino / UNITS_PER_WORD) 5701*e4b17023SJohn Marino == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD) 5702*e4b17023SJohn Marino && (GET_MODE_SIZE (GET_MODE (dest)) 5703*e4b17023SJohn Marino >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))) 5704*e4b17023SJohn Marino && sets[i].src_elt != 0) 5705*e4b17023SJohn Marino { 5706*e4b17023SJohn Marino enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest)); 5707*e4b17023SJohn Marino struct table_elt *elt, *classp = 0; 5708*e4b17023SJohn Marino 5709*e4b17023SJohn Marino for (elt = sets[i].src_elt->first_same_value; elt; 5710*e4b17023SJohn Marino elt = elt->next_same_value) 5711*e4b17023SJohn Marino { 5712*e4b17023SJohn Marino rtx new_src = 0; 5713*e4b17023SJohn Marino unsigned src_hash; 5714*e4b17023SJohn Marino struct table_elt *src_elt; 5715*e4b17023SJohn Marino int byte = 0; 5716*e4b17023SJohn Marino 5717*e4b17023SJohn Marino /* Ignore invalid entries. */ 5718*e4b17023SJohn Marino if (!REG_P (elt->exp) 5719*e4b17023SJohn Marino && ! exp_equiv_p (elt->exp, elt->exp, 1, false)) 5720*e4b17023SJohn Marino continue; 5721*e4b17023SJohn Marino 5722*e4b17023SJohn Marino /* We may have already been playing subreg games. If the 5723*e4b17023SJohn Marino mode is already correct for the destination, use it. */ 5724*e4b17023SJohn Marino if (GET_MODE (elt->exp) == new_mode) 5725*e4b17023SJohn Marino new_src = elt->exp; 5726*e4b17023SJohn Marino else 5727*e4b17023SJohn Marino { 5728*e4b17023SJohn Marino /* Calculate big endian correction for the SUBREG_BYTE. 5729*e4b17023SJohn Marino We have already checked that M1 (GET_MODE (dest)) 5730*e4b17023SJohn Marino is not narrower than M2 (new_mode). 
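   Written out in byte sizes, the correction is just the size difference
   between the two modes.  A tiny self-contained sketch (sketch_lowpart_byte
   is an invented name, not a GCC function):

     // Byte offset of the low part of an OUTER_SIZE-byte value when it is
     // viewed in a mode of INNER_SIZE bytes, INNER_SIZE <= OUTER_SIZE.
     static int
     sketch_lowpart_byte (int outer_size, int inner_size, int big_endian)
     {
       return big_endian ? outer_size - inner_size : 0;
     }

   For an 8-byte DImode value viewed as 4-byte SImode this gives byte 4 on a
   big-endian target and byte 0 on a little-endian one, which is the value
   passed as the SUBREG_BYTE to simplify_gen_subreg below.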
*/ 5731*e4b17023SJohn Marino if (BYTES_BIG_ENDIAN) 5732*e4b17023SJohn Marino byte = (GET_MODE_SIZE (GET_MODE (dest)) 5733*e4b17023SJohn Marino - GET_MODE_SIZE (new_mode)); 5734*e4b17023SJohn Marino 5735*e4b17023SJohn Marino new_src = simplify_gen_subreg (new_mode, elt->exp, 5736*e4b17023SJohn Marino GET_MODE (dest), byte); 5737*e4b17023SJohn Marino } 5738*e4b17023SJohn Marino 5739*e4b17023SJohn Marino /* The call to simplify_gen_subreg fails if the value 5740*e4b17023SJohn Marino is VOIDmode, yet we can't do any simplification, e.g. 5741*e4b17023SJohn Marino for EXPR_LISTs denoting function call results. 5742*e4b17023SJohn Marino It is invalid to construct a SUBREG with a VOIDmode 5743*e4b17023SJohn Marino SUBREG_REG, hence a zero new_src means we can't do 5744*e4b17023SJohn Marino this substitution. */ 5745*e4b17023SJohn Marino if (! new_src) 5746*e4b17023SJohn Marino continue; 5747*e4b17023SJohn Marino 5748*e4b17023SJohn Marino src_hash = HASH (new_src, new_mode); 5749*e4b17023SJohn Marino src_elt = lookup (new_src, src_hash, new_mode); 5750*e4b17023SJohn Marino 5751*e4b17023SJohn Marino /* Put the new source in the hash table if it isn't 5752*e4b17023SJohn Marino already. */ 5753*e4b17023SJohn Marino if (src_elt == 0) 5754*e4b17023SJohn Marino { 5755*e4b17023SJohn Marino if (insert_regs (new_src, classp, 0)) 5756*e4b17023SJohn Marino { 5757*e4b17023SJohn Marino rehash_using_reg (new_src); 5758*e4b17023SJohn Marino src_hash = HASH (new_src, new_mode); 5759*e4b17023SJohn Marino } 5760*e4b17023SJohn Marino src_elt = insert (new_src, classp, src_hash, new_mode); 5761*e4b17023SJohn Marino src_elt->in_memory = elt->in_memory; 5762*e4b17023SJohn Marino } 5763*e4b17023SJohn Marino else if (classp && classp != src_elt->first_same_value) 5764*e4b17023SJohn Marino /* Show that two things that we've seen before are 5765*e4b17023SJohn Marino actually the same. */ 5766*e4b17023SJohn Marino merge_equiv_classes (src_elt, classp); 5767*e4b17023SJohn Marino 5768*e4b17023SJohn Marino classp = src_elt->first_same_value; 5769*e4b17023SJohn Marino /* Ignore invalid entries. */ 5770*e4b17023SJohn Marino while (classp 5771*e4b17023SJohn Marino && !REG_P (classp->exp) 5772*e4b17023SJohn Marino && ! exp_equiv_p (classp->exp, classp->exp, 1, false)) 5773*e4b17023SJohn Marino classp = classp->next_same_value; 5774*e4b17023SJohn Marino } 5775*e4b17023SJohn Marino } 5776*e4b17023SJohn Marino } 5777*e4b17023SJohn Marino 5778*e4b17023SJohn Marino /* Special handling for (set REG0 REG1) where REG0 is the 5779*e4b17023SJohn Marino "cheapest", cheaper than REG1. After cse, REG1 will probably not 5780*e4b17023SJohn Marino be used in the sequel, so (if easily done) change this insn to 5781*e4b17023SJohn Marino (set REG1 REG0) and replace REG1 with REG0 in the previous insn 5782*e4b17023SJohn Marino that computed their value. Then REG1 will become a dead store 5783*e4b17023SJohn Marino and won't cloud the situation for later optimizations. 5784*e4b17023SJohn Marino 5785*e4b17023SJohn Marino Do not make this change if REG1 is a hard register, because it will 5786*e4b17023SJohn Marino then be used in the sequel and we may be changing a two-operand insn 5787*e4b17023SJohn Marino into a three-operand insn. 5788*e4b17023SJohn Marino 5789*e4b17023SJohn Marino Also do not do this if we are operating on a copy of INSN.
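   A concrete picture, using invented pseudo-register numbers (the insn
   being scanned is the copy, and the insn above it computed the value):

     before:  (set (reg:SI 200) (plus:SI (reg:SI 100) (reg:SI 101)))
              (set (reg:SI 150) (reg:SI 200))

     after:   (set (reg:SI 150) (plus:SI (reg:SI 100) (reg:SI 101)))
              (set (reg:SI 200) (reg:SI 150))

   Here REG0 is (reg:SI 150), the cheaper register, and REG1 is
   (reg:SI 200); after the change the second insn merely copies into
   (reg:SI 200), which later passes can delete as a dead store if
   (reg:SI 200) is never read again.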
*/ 5790*e4b17023SJohn Marino 5791*e4b17023SJohn Marino if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl)) 5792*e4b17023SJohn Marino && NEXT_INSN (PREV_INSN (insn)) == insn 5793*e4b17023SJohn Marino && REG_P (SET_SRC (sets[0].rtl)) 5794*e4b17023SJohn Marino && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER 5795*e4b17023SJohn Marino && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))) 5796*e4b17023SJohn Marino { 5797*e4b17023SJohn Marino int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl))); 5798*e4b17023SJohn Marino struct qty_table_elem *src_ent = &qty_table[src_q]; 5799*e4b17023SJohn Marino 5800*e4b17023SJohn Marino if (src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl))) 5801*e4b17023SJohn Marino { 5802*e4b17023SJohn Marino /* Scan for the previous nonnote insn, but stop at a basic 5803*e4b17023SJohn Marino block boundary. */ 5804*e4b17023SJohn Marino rtx prev = insn; 5805*e4b17023SJohn Marino rtx bb_head = BB_HEAD (BLOCK_FOR_INSN (insn)); 5806*e4b17023SJohn Marino do 5807*e4b17023SJohn Marino { 5808*e4b17023SJohn Marino prev = PREV_INSN (prev); 5809*e4b17023SJohn Marino } 5810*e4b17023SJohn Marino while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev))); 5811*e4b17023SJohn Marino 5812*e4b17023SJohn Marino /* Do not swap the registers around if the previous instruction 5813*e4b17023SJohn Marino attaches a REG_EQUIV note to REG1. 5814*e4b17023SJohn Marino 5815*e4b17023SJohn Marino ??? It's not entirely clear whether we can transfer a REG_EQUIV 5816*e4b17023SJohn Marino from the pseudo that originally shadowed an incoming argument 5817*e4b17023SJohn Marino to another register. Some uses of REG_EQUIV might rely on it 5818*e4b17023SJohn Marino being attached to REG1 rather than REG2. 5819*e4b17023SJohn Marino 5820*e4b17023SJohn Marino This section previously turned the REG_EQUIV into a REG_EQUAL 5821*e4b17023SJohn Marino note. We cannot do that because REG_EQUIV may provide an 5822*e4b17023SJohn Marino uninitialized stack slot when REG_PARM_STACK_SPACE is used. */ 5823*e4b17023SJohn Marino if (NONJUMP_INSN_P (prev) 5824*e4b17023SJohn Marino && GET_CODE (PATTERN (prev)) == SET 5825*e4b17023SJohn Marino && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl) 5826*e4b17023SJohn Marino && ! find_reg_note (prev, REG_EQUIV, NULL_RTX)) 5827*e4b17023SJohn Marino { 5828*e4b17023SJohn Marino rtx dest = SET_DEST (sets[0].rtl); 5829*e4b17023SJohn Marino rtx src = SET_SRC (sets[0].rtl); 5830*e4b17023SJohn Marino rtx note; 5831*e4b17023SJohn Marino 5832*e4b17023SJohn Marino validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1); 5833*e4b17023SJohn Marino validate_change (insn, &SET_DEST (sets[0].rtl), src, 1); 5834*e4b17023SJohn Marino validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1); 5835*e4b17023SJohn Marino apply_change_group (); 5836*e4b17023SJohn Marino 5837*e4b17023SJohn Marino /* If INSN has a REG_EQUAL note, and this note mentions 5838*e4b17023SJohn Marino REG0, then we must delete it, because the value in 5839*e4b17023SJohn Marino REG0 has changed. If the note's value is REG1, we must 5840*e4b17023SJohn Marino also delete it because that is now this insn's dest. 
*/ 5841*e4b17023SJohn Marino note = find_reg_note (insn, REG_EQUAL, NULL_RTX); 5842*e4b17023SJohn Marino if (note != 0 5843*e4b17023SJohn Marino && (reg_mentioned_p (dest, XEXP (note, 0)) 5844*e4b17023SJohn Marino || rtx_equal_p (src, XEXP (note, 0)))) 5845*e4b17023SJohn Marino remove_note (insn, note); 5846*e4b17023SJohn Marino } 5847*e4b17023SJohn Marino } 5848*e4b17023SJohn Marino } 5849*e4b17023SJohn Marino 5850*e4b17023SJohn Marino done:; 5851*e4b17023SJohn Marino } 5852*e4b17023SJohn Marino 5853*e4b17023SJohn Marino /* Remove from the hash table all expressions that reference memory. */ 5854*e4b17023SJohn Marino 5855*e4b17023SJohn Marino static void 5856*e4b17023SJohn Marino invalidate_memory (void) 5857*e4b17023SJohn Marino { 5858*e4b17023SJohn Marino int i; 5859*e4b17023SJohn Marino struct table_elt *p, *next; 5860*e4b17023SJohn Marino 5861*e4b17023SJohn Marino for (i = 0; i < HASH_SIZE; i++) 5862*e4b17023SJohn Marino for (p = table[i]; p; p = next) 5863*e4b17023SJohn Marino { 5864*e4b17023SJohn Marino next = p->next_same_hash; 5865*e4b17023SJohn Marino if (p->in_memory) 5866*e4b17023SJohn Marino remove_from_table (p, i); 5867*e4b17023SJohn Marino } 5868*e4b17023SJohn Marino } 5869*e4b17023SJohn Marino 5870*e4b17023SJohn Marino /* Perform invalidation on the basis of everything about an insn 5871*e4b17023SJohn Marino except for invalidating the actual places that are SET in it. 5872*e4b17023SJohn Marino This includes the places CLOBBERed, and anything that might 5873*e4b17023SJohn Marino alias with something that is SET or CLOBBERed. 5874*e4b17023SJohn Marino 5875*e4b17023SJohn Marino X is the pattern of the insn. */ 5876*e4b17023SJohn Marino 5877*e4b17023SJohn Marino static void 5878*e4b17023SJohn Marino invalidate_from_clobbers (rtx x) 5879*e4b17023SJohn Marino { 5880*e4b17023SJohn Marino if (GET_CODE (x) == CLOBBER) 5881*e4b17023SJohn Marino { 5882*e4b17023SJohn Marino rtx ref = XEXP (x, 0); 5883*e4b17023SJohn Marino if (ref) 5884*e4b17023SJohn Marino { 5885*e4b17023SJohn Marino if (REG_P (ref) || GET_CODE (ref) == SUBREG 5886*e4b17023SJohn Marino || MEM_P (ref)) 5887*e4b17023SJohn Marino invalidate (ref, VOIDmode); 5888*e4b17023SJohn Marino else if (GET_CODE (ref) == STRICT_LOW_PART 5889*e4b17023SJohn Marino || GET_CODE (ref) == ZERO_EXTRACT) 5890*e4b17023SJohn Marino invalidate (XEXP (ref, 0), GET_MODE (ref)); 5891*e4b17023SJohn Marino } 5892*e4b17023SJohn Marino } 5893*e4b17023SJohn Marino else if (GET_CODE (x) == PARALLEL) 5894*e4b17023SJohn Marino { 5895*e4b17023SJohn Marino int i; 5896*e4b17023SJohn Marino for (i = XVECLEN (x, 0) - 1; i >= 0; i--) 5897*e4b17023SJohn Marino { 5898*e4b17023SJohn Marino rtx y = XVECEXP (x, 0, i); 5899*e4b17023SJohn Marino if (GET_CODE (y) == CLOBBER) 5900*e4b17023SJohn Marino { 5901*e4b17023SJohn Marino rtx ref = XEXP (y, 0); 5902*e4b17023SJohn Marino if (REG_P (ref) || GET_CODE (ref) == SUBREG 5903*e4b17023SJohn Marino || MEM_P (ref)) 5904*e4b17023SJohn Marino invalidate (ref, VOIDmode); 5905*e4b17023SJohn Marino else if (GET_CODE (ref) == STRICT_LOW_PART 5906*e4b17023SJohn Marino || GET_CODE (ref) == ZERO_EXTRACT) 5907*e4b17023SJohn Marino invalidate (XEXP (ref, 0), GET_MODE (ref)); 5908*e4b17023SJohn Marino } 5909*e4b17023SJohn Marino } 5910*e4b17023SJohn Marino } 5911*e4b17023SJohn Marino } 5912*e4b17023SJohn Marino 5913*e4b17023SJohn Marino /* Process X, part of the REG_NOTES of an insn. 
Look at any REG_EQUAL notes 5914*e4b17023SJohn Marino and replace any registers in them with either an equivalent constant 5915*e4b17023SJohn Marino or the canonical form of the register. If we are inside an address, 5916*e4b17023SJohn Marino only do this if the address remains valid. 5917*e4b17023SJohn Marino 5918*e4b17023SJohn Marino OBJECT is 0 except when within a MEM in which case it is the MEM. 5919*e4b17023SJohn Marino 5920*e4b17023SJohn Marino Return the replacement for X. */ 5921*e4b17023SJohn Marino 5922*e4b17023SJohn Marino static rtx 5923*e4b17023SJohn Marino cse_process_notes_1 (rtx x, rtx object, bool *changed) 5924*e4b17023SJohn Marino { 5925*e4b17023SJohn Marino enum rtx_code code = GET_CODE (x); 5926*e4b17023SJohn Marino const char *fmt = GET_RTX_FORMAT (code); 5927*e4b17023SJohn Marino int i; 5928*e4b17023SJohn Marino 5929*e4b17023SJohn Marino switch (code) 5930*e4b17023SJohn Marino { 5931*e4b17023SJohn Marino case CONST_INT: 5932*e4b17023SJohn Marino case CONST: 5933*e4b17023SJohn Marino case SYMBOL_REF: 5934*e4b17023SJohn Marino case LABEL_REF: 5935*e4b17023SJohn Marino case CONST_DOUBLE: 5936*e4b17023SJohn Marino case CONST_FIXED: 5937*e4b17023SJohn Marino case CONST_VECTOR: 5938*e4b17023SJohn Marino case PC: 5939*e4b17023SJohn Marino case CC0: 5940*e4b17023SJohn Marino case LO_SUM: 5941*e4b17023SJohn Marino return x; 5942*e4b17023SJohn Marino 5943*e4b17023SJohn Marino case MEM: 5944*e4b17023SJohn Marino validate_change (x, &XEXP (x, 0), 5945*e4b17023SJohn Marino cse_process_notes (XEXP (x, 0), x, changed), 0); 5946*e4b17023SJohn Marino return x; 5947*e4b17023SJohn Marino 5948*e4b17023SJohn Marino case EXPR_LIST: 5949*e4b17023SJohn Marino case INSN_LIST: 5950*e4b17023SJohn Marino if (REG_NOTE_KIND (x) == REG_EQUAL) 5951*e4b17023SJohn Marino XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed); 5952*e4b17023SJohn Marino if (XEXP (x, 1)) 5953*e4b17023SJohn Marino XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed); 5954*e4b17023SJohn Marino return x; 5955*e4b17023SJohn Marino 5956*e4b17023SJohn Marino case SIGN_EXTEND: 5957*e4b17023SJohn Marino case ZERO_EXTEND: 5958*e4b17023SJohn Marino case SUBREG: 5959*e4b17023SJohn Marino { 5960*e4b17023SJohn Marino rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed); 5961*e4b17023SJohn Marino /* We don't substitute VOIDmode constants into these rtx, 5962*e4b17023SJohn Marino since they would impede folding. */ 5963*e4b17023SJohn Marino if (GET_MODE (new_rtx) != VOIDmode) 5964*e4b17023SJohn Marino validate_change (object, &XEXP (x, 0), new_rtx, 0); 5965*e4b17023SJohn Marino return x; 5966*e4b17023SJohn Marino } 5967*e4b17023SJohn Marino 5968*e4b17023SJohn Marino case REG: 5969*e4b17023SJohn Marino i = REG_QTY (REGNO (x)); 5970*e4b17023SJohn Marino 5971*e4b17023SJohn Marino /* Return a constant or a constant register. 
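   The lookup follows the same pattern as this toy model of the quantity
   table, in which each register maps to a quantity and a quantity may have
   a known constant attached; the sketch_ names are illustrative only and
   stand in for REG_QTY, qty_table and const_rtx.

     struct sketch_qty
     {
       int has_const;          // nonzero if a constant value is known
       long const_value;       // the known constant, when has_const is set
     };

     static struct sketch_qty sketch_qty_table[128];
     static int sketch_reg_qty[512];   // quantity per register, negative if none

     // Return 1 and store the constant if register REGNO is currently known
     // to hold one, otherwise return 0.
     static int
     sketch_reg_constant (int regno, long *value)
     {
       int q = sketch_reg_qty[regno];

       if (q < 0 || !sketch_qty_table[q].has_const)
         return 0;
       *value = sketch_qty_table[q].const_value;
       return 1;
     }

   When no constant or constant register is known, the note is instead
   rewritten to use the canonical register for the quantity, which is what
   the call to canon_reg below does.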
*/ 5972*e4b17023SJohn Marino if (REGNO_QTY_VALID_P (REGNO (x))) 5973*e4b17023SJohn Marino { 5974*e4b17023SJohn Marino struct qty_table_elem *ent = &qty_table[i]; 5975*e4b17023SJohn Marino 5976*e4b17023SJohn Marino if (ent->const_rtx != NULL_RTX 5977*e4b17023SJohn Marino && (CONSTANT_P (ent->const_rtx) 5978*e4b17023SJohn Marino || REG_P (ent->const_rtx))) 5979*e4b17023SJohn Marino { 5980*e4b17023SJohn Marino rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx); 5981*e4b17023SJohn Marino if (new_rtx) 5982*e4b17023SJohn Marino return copy_rtx (new_rtx); 5983*e4b17023SJohn Marino } 5984*e4b17023SJohn Marino } 5985*e4b17023SJohn Marino 5986*e4b17023SJohn Marino /* Otherwise, canonicalize this register. */ 5987*e4b17023SJohn Marino return canon_reg (x, NULL_RTX); 5988*e4b17023SJohn Marino 5989*e4b17023SJohn Marino default: 5990*e4b17023SJohn Marino break; 5991*e4b17023SJohn Marino } 5992*e4b17023SJohn Marino 5993*e4b17023SJohn Marino for (i = 0; i < GET_RTX_LENGTH (code); i++) 5994*e4b17023SJohn Marino if (fmt[i] == 'e') 5995*e4b17023SJohn Marino validate_change (object, &XEXP (x, i), 5996*e4b17023SJohn Marino cse_process_notes (XEXP (x, i), object, changed), 0); 5997*e4b17023SJohn Marino 5998*e4b17023SJohn Marino return x; 5999*e4b17023SJohn Marino } 6000*e4b17023SJohn Marino 6001*e4b17023SJohn Marino static rtx 6002*e4b17023SJohn Marino cse_process_notes (rtx x, rtx object, bool *changed) 6003*e4b17023SJohn Marino { 6004*e4b17023SJohn Marino rtx new_rtx = cse_process_notes_1 (x, object, changed); 6005*e4b17023SJohn Marino if (new_rtx != x) 6006*e4b17023SJohn Marino *changed = true; 6007*e4b17023SJohn Marino return new_rtx; 6008*e4b17023SJohn Marino } 6009*e4b17023SJohn Marino 6010*e4b17023SJohn Marino 6011*e4b17023SJohn Marino /* Find a path in the CFG, starting with FIRST_BB to perform CSE on. 6012*e4b17023SJohn Marino 6013*e4b17023SJohn Marino DATA is a pointer to a struct cse_basic_block_data, that is used to 6014*e4b17023SJohn Marino describe the path. 6015*e4b17023SJohn Marino It is filled with a queue of basic blocks, starting with FIRST_BB 6016*e4b17023SJohn Marino and following a trace through the CFG. 6017*e4b17023SJohn Marino 6018*e4b17023SJohn Marino If all paths starting at FIRST_BB have been followed, or no new path 6019*e4b17023SJohn Marino starting at FIRST_BB can be constructed, this function returns FALSE. 6020*e4b17023SJohn Marino Otherwise, DATA->path is filled and the function returns TRUE indicating 6021*e4b17023SJohn Marino that a path to follow was found. 6022*e4b17023SJohn Marino 6023*e4b17023SJohn Marino If FOLLOW_JUMPS is false, the maximum path length is 1 and the only 6024*e4b17023SJohn Marino block in the path will be FIRST_BB. */ 6025*e4b17023SJohn Marino 6026*e4b17023SJohn Marino static bool 6027*e4b17023SJohn Marino cse_find_path (basic_block first_bb, struct cse_basic_block_data *data, 6028*e4b17023SJohn Marino int follow_jumps) 6029*e4b17023SJohn Marino { 6030*e4b17023SJohn Marino basic_block bb; 6031*e4b17023SJohn Marino edge e; 6032*e4b17023SJohn Marino int path_size; 6033*e4b17023SJohn Marino 6034*e4b17023SJohn Marino SET_BIT (cse_visited_basic_blocks, first_bb->index); 6035*e4b17023SJohn Marino 6036*e4b17023SJohn Marino /* See if there is a previous path. */ 6037*e4b17023SJohn Marino path_size = data->path_size; 6038*e4b17023SJohn Marino 6039*e4b17023SJohn Marino /* There is a previous path. Make sure it started with FIRST_BB. 
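   Seen from a distance, each call to this function advances a depth-first
   enumeration of traces: the branch edge of a conditional jump is explored
   first, and on the next call the most recent jump still taken along its
   branch edge is redirected to its fallthrough edge.  A rough model of that
   bookkeeping, ignoring the single-predecessor and already-visited checks
   done below (sketch_next_path is an invented name):

     // DECISIONS[k] is 1 if the k-th conditional jump on the path went to
     // its branch edge, 0 if it went to the fallthrough edge.  Back up over
     // exhausted fallthrough choices, then flip the last branch choice.
     // Return 0 when every path from the root has been tried.
     static int
     sketch_next_path (int *decisions, int *len)
     {
       while (*len > 0 && decisions[*len - 1] == 0)
         (*len)--;
       if (*len == 0)
         return 0;
       decisions[*len - 1] = 0;
       return 1;
     }

   The code below does the same thing on data->path, except that a flipped
   decision is only kept when the fallthrough destination is still a valid
   continuation of the trace.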
*/ 6040*e4b17023SJohn Marino if (path_size) 6041*e4b17023SJohn Marino gcc_assert (data->path[0].bb == first_bb); 6042*e4b17023SJohn Marino 6043*e4b17023SJohn Marino /* There was only one basic block in the last path. Clear the path and 6044*e4b17023SJohn Marino return, so that paths starting at another basic block can be tried. */ 6045*e4b17023SJohn Marino if (path_size == 1) 6046*e4b17023SJohn Marino { 6047*e4b17023SJohn Marino path_size = 0; 6048*e4b17023SJohn Marino goto done; 6049*e4b17023SJohn Marino } 6050*e4b17023SJohn Marino 6051*e4b17023SJohn Marino /* If the path was empty from the beginning, construct a new path. */ 6052*e4b17023SJohn Marino if (path_size == 0) 6053*e4b17023SJohn Marino data->path[path_size++].bb = first_bb; 6054*e4b17023SJohn Marino else 6055*e4b17023SJohn Marino { 6056*e4b17023SJohn Marino /* Otherwise, path_size must be equal to or greater than 2, because 6057*e4b17023SJohn Marino a previous path exists that is at least two basic blocks long. 6058*e4b17023SJohn Marino 6059*e4b17023SJohn Marino Update the previous branch path, if any. If the last branch was 6060*e4b17023SJohn Marino previously along the branch edge, take the fallthrough edge now. */ 6061*e4b17023SJohn Marino while (path_size >= 2) 6062*e4b17023SJohn Marino { 6063*e4b17023SJohn Marino basic_block last_bb_in_path, previous_bb_in_path; 6064*e4b17023SJohn Marino edge e; 6065*e4b17023SJohn Marino 6066*e4b17023SJohn Marino --path_size; 6067*e4b17023SJohn Marino last_bb_in_path = data->path[path_size].bb; 6068*e4b17023SJohn Marino previous_bb_in_path = data->path[path_size - 1].bb; 6069*e4b17023SJohn Marino 6070*e4b17023SJohn Marino /* If we previously followed a path along the branch edge, try 6071*e4b17023SJohn Marino the fallthru edge now. */ 6072*e4b17023SJohn Marino if (EDGE_COUNT (previous_bb_in_path->succs) == 2 6073*e4b17023SJohn Marino && any_condjump_p (BB_END (previous_bb_in_path)) 6074*e4b17023SJohn Marino && (e = find_edge (previous_bb_in_path, last_bb_in_path)) 6075*e4b17023SJohn Marino && e == BRANCH_EDGE (previous_bb_in_path)) 6076*e4b17023SJohn Marino { 6077*e4b17023SJohn Marino bb = FALLTHRU_EDGE (previous_bb_in_path)->dest; 6078*e4b17023SJohn Marino if (bb != EXIT_BLOCK_PTR 6079*e4b17023SJohn Marino && single_pred_p (bb) 6080*e4b17023SJohn Marino /* We used to assert here that we would only see blocks 6081*e4b17023SJohn Marino that we have not visited yet. But we may end up 6082*e4b17023SJohn Marino visiting basic blocks twice if the CFG has changed 6083*e4b17023SJohn Marino in this run of cse_main, because when the CFG changes 6084*e4b17023SJohn Marino the topological sort of the CFG also changes. A basic 6085*e4b17023SJohn Marino block that previously had more than two predecessors 6086*e4b17023SJohn Marino may now have a single predecessor, and become part of 6087*e4b17023SJohn Marino a path that starts at another basic block. 6088*e4b17023SJohn Marino 6089*e4b17023SJohn Marino We still want to visit each basic block only once, so 6090*e4b17023SJohn Marino halt the path here if we have already visited BB.
*/ 6091*e4b17023SJohn Marino && !TEST_BIT (cse_visited_basic_blocks, bb->index)) 6092*e4b17023SJohn Marino { 6093*e4b17023SJohn Marino SET_BIT (cse_visited_basic_blocks, bb->index); 6094*e4b17023SJohn Marino data->path[path_size++].bb = bb; 6095*e4b17023SJohn Marino break; 6096*e4b17023SJohn Marino } 6097*e4b17023SJohn Marino } 6098*e4b17023SJohn Marino 6099*e4b17023SJohn Marino data->path[path_size].bb = NULL; 6100*e4b17023SJohn Marino } 6101*e4b17023SJohn Marino 6102*e4b17023SJohn Marino /* If only one block remains in the path, bail. */ 6103*e4b17023SJohn Marino if (path_size == 1) 6104*e4b17023SJohn Marino { 6105*e4b17023SJohn Marino path_size = 0; 6106*e4b17023SJohn Marino goto done; 6107*e4b17023SJohn Marino } 6108*e4b17023SJohn Marino } 6109*e4b17023SJohn Marino 6110*e4b17023SJohn Marino /* Extend the path if possible. */ 6111*e4b17023SJohn Marino if (follow_jumps) 6112*e4b17023SJohn Marino { 6113*e4b17023SJohn Marino bb = data->path[path_size - 1].bb; 6114*e4b17023SJohn Marino while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH)) 6115*e4b17023SJohn Marino { 6116*e4b17023SJohn Marino if (single_succ_p (bb)) 6117*e4b17023SJohn Marino e = single_succ_edge (bb); 6118*e4b17023SJohn Marino else if (EDGE_COUNT (bb->succs) == 2 6119*e4b17023SJohn Marino && any_condjump_p (BB_END (bb))) 6120*e4b17023SJohn Marino { 6121*e4b17023SJohn Marino /* First try to follow the branch. If that doesn't lead 6122*e4b17023SJohn Marino to a useful path, follow the fallthru edge. */ 6123*e4b17023SJohn Marino e = BRANCH_EDGE (bb); 6124*e4b17023SJohn Marino if (!single_pred_p (e->dest)) 6125*e4b17023SJohn Marino e = FALLTHRU_EDGE (bb); 6126*e4b17023SJohn Marino } 6127*e4b17023SJohn Marino else 6128*e4b17023SJohn Marino e = NULL; 6129*e4b17023SJohn Marino 6130*e4b17023SJohn Marino if (e 6131*e4b17023SJohn Marino && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label) 6132*e4b17023SJohn Marino && e->dest != EXIT_BLOCK_PTR 6133*e4b17023SJohn Marino && single_pred_p (e->dest) 6134*e4b17023SJohn Marino /* Avoid visiting basic blocks twice. The large comment 6135*e4b17023SJohn Marino above explains why this can happen. */ 6136*e4b17023SJohn Marino && !TEST_BIT (cse_visited_basic_blocks, e->dest->index)) 6137*e4b17023SJohn Marino { 6138*e4b17023SJohn Marino basic_block bb2 = e->dest; 6139*e4b17023SJohn Marino SET_BIT (cse_visited_basic_blocks, bb2->index); 6140*e4b17023SJohn Marino data->path[path_size++].bb = bb2; 6141*e4b17023SJohn Marino bb = bb2; 6142*e4b17023SJohn Marino } 6143*e4b17023SJohn Marino else 6144*e4b17023SJohn Marino bb = NULL; 6145*e4b17023SJohn Marino } 6146*e4b17023SJohn Marino } 6147*e4b17023SJohn Marino 6148*e4b17023SJohn Marino done: 6149*e4b17023SJohn Marino data->path_size = path_size; 6150*e4b17023SJohn Marino return path_size != 0; 6151*e4b17023SJohn Marino } 6152*e4b17023SJohn Marino 6153*e4b17023SJohn Marino /* Dump the path in DATA to file F. NSETS is the number of sets 6154*e4b17023SJohn Marino in the path. 
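   For a path of three blocks the result is a single line in the dump file,
   for instance (block numbers and set count invented for the example):

     ;; Following path with 12 sets: 2 3 5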
*/ 6155*e4b17023SJohn Marino 6156*e4b17023SJohn Marino static void 6157*e4b17023SJohn Marino cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f) 6158*e4b17023SJohn Marino { 6159*e4b17023SJohn Marino int path_entry; 6160*e4b17023SJohn Marino 6161*e4b17023SJohn Marino fprintf (f, ";; Following path with %d sets: ", nsets); 6162*e4b17023SJohn Marino for (path_entry = 0; path_entry < data->path_size; path_entry++) 6163*e4b17023SJohn Marino fprintf (f, "%d ", (data->path[path_entry].bb)->index); 6164*e4b17023SJohn Marino fputc ('\n', dump_file); 6165*e4b17023SJohn Marino fflush (f); 6166*e4b17023SJohn Marino } 6167*e4b17023SJohn Marino 6168*e4b17023SJohn Marino 6169*e4b17023SJohn Marino /* Return true if BB has exception handling successor edges. */ 6170*e4b17023SJohn Marino 6171*e4b17023SJohn Marino static bool 6172*e4b17023SJohn Marino have_eh_succ_edges (basic_block bb) 6173*e4b17023SJohn Marino { 6174*e4b17023SJohn Marino edge e; 6175*e4b17023SJohn Marino edge_iterator ei; 6176*e4b17023SJohn Marino 6177*e4b17023SJohn Marino FOR_EACH_EDGE (e, ei, bb->succs) 6178*e4b17023SJohn Marino if (e->flags & EDGE_EH) 6179*e4b17023SJohn Marino return true; 6180*e4b17023SJohn Marino 6181*e4b17023SJohn Marino return false; 6182*e4b17023SJohn Marino } 6183*e4b17023SJohn Marino 6184*e4b17023SJohn Marino 6185*e4b17023SJohn Marino /* Scan to the end of the path described by DATA. Return an estimate of 6186*e4b17023SJohn Marino the total number of SETs of all insns in the path. */ 6187*e4b17023SJohn Marino 6188*e4b17023SJohn Marino static void 6189*e4b17023SJohn Marino cse_prescan_path (struct cse_basic_block_data *data) 6190*e4b17023SJohn Marino { 6191*e4b17023SJohn Marino int nsets = 0; 6192*e4b17023SJohn Marino int path_size = data->path_size; 6193*e4b17023SJohn Marino int path_entry; 6194*e4b17023SJohn Marino 6195*e4b17023SJohn Marino /* Scan to end of each basic block in the path. */ 6196*e4b17023SJohn Marino for (path_entry = 0; path_entry < path_size; path_entry++) 6197*e4b17023SJohn Marino { 6198*e4b17023SJohn Marino basic_block bb; 6199*e4b17023SJohn Marino rtx insn; 6200*e4b17023SJohn Marino 6201*e4b17023SJohn Marino bb = data->path[path_entry].bb; 6202*e4b17023SJohn Marino 6203*e4b17023SJohn Marino FOR_BB_INSNS (bb, insn) 6204*e4b17023SJohn Marino { 6205*e4b17023SJohn Marino if (!INSN_P (insn)) 6206*e4b17023SJohn Marino continue; 6207*e4b17023SJohn Marino 6208*e4b17023SJohn Marino /* A PARALLEL can have lots of SETs in it, 6209*e4b17023SJohn Marino especially if it is really an ASM_OPERANDS. */ 6210*e4b17023SJohn Marino if (GET_CODE (PATTERN (insn)) == PARALLEL) 6211*e4b17023SJohn Marino nsets += XVECLEN (PATTERN (insn), 0); 6212*e4b17023SJohn Marino else 6213*e4b17023SJohn Marino nsets += 1; 6214*e4b17023SJohn Marino } 6215*e4b17023SJohn Marino } 6216*e4b17023SJohn Marino 6217*e4b17023SJohn Marino data->nsets = nsets; 6218*e4b17023SJohn Marino } 6219*e4b17023SJohn Marino 6220*e4b17023SJohn Marino /* Process a single extended basic block described by EBB_DATA. */ 6221*e4b17023SJohn Marino 6222*e4b17023SJohn Marino static void 6223*e4b17023SJohn Marino cse_extended_basic_block (struct cse_basic_block_data *ebb_data) 6224*e4b17023SJohn Marino { 6225*e4b17023SJohn Marino int path_size = ebb_data->path_size; 6226*e4b17023SJohn Marino int path_entry; 6227*e4b17023SJohn Marino int num_insns = 0; 6228*e4b17023SJohn Marino 6229*e4b17023SJohn Marino /* Allocate the space needed by qty_table. 
*/ 6230*e4b17023SJohn Marino qty_table = XNEWVEC (struct qty_table_elem, max_qty); 6231*e4b17023SJohn Marino 6232*e4b17023SJohn Marino new_basic_block (); 6233*e4b17023SJohn Marino cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb); 6234*e4b17023SJohn Marino cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb); 6235*e4b17023SJohn Marino for (path_entry = 0; path_entry < path_size; path_entry++) 6236*e4b17023SJohn Marino { 6237*e4b17023SJohn Marino basic_block bb; 6238*e4b17023SJohn Marino rtx insn; 6239*e4b17023SJohn Marino 6240*e4b17023SJohn Marino bb = ebb_data->path[path_entry].bb; 6241*e4b17023SJohn Marino 6242*e4b17023SJohn Marino /* Invalidate recorded information for eh regs if there is an EH 6243*e4b17023SJohn Marino edge pointing to that bb. */ 6244*e4b17023SJohn Marino if (bb_has_eh_pred (bb)) 6245*e4b17023SJohn Marino { 6246*e4b17023SJohn Marino df_ref *def_rec; 6247*e4b17023SJohn Marino 6248*e4b17023SJohn Marino for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++) 6249*e4b17023SJohn Marino { 6250*e4b17023SJohn Marino df_ref def = *def_rec; 6251*e4b17023SJohn Marino if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) 6252*e4b17023SJohn Marino invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def))); 6253*e4b17023SJohn Marino } 6254*e4b17023SJohn Marino } 6255*e4b17023SJohn Marino 6256*e4b17023SJohn Marino optimize_this_for_speed_p = optimize_bb_for_speed_p (bb); 6257*e4b17023SJohn Marino FOR_BB_INSNS (bb, insn) 6258*e4b17023SJohn Marino { 6259*e4b17023SJohn Marino /* If we have processed 1,000 insns, flush the hash table to 6260*e4b17023SJohn Marino avoid extreme quadratic behavior. We must not include NOTEs 6261*e4b17023SJohn Marino in the count since there may be more of them when generating 6262*e4b17023SJohn Marino debugging information. If we clear the table at different 6263*e4b17023SJohn Marino times, code generated with -g -O might be different than code 6264*e4b17023SJohn Marino generated with -O but not -g. 6265*e4b17023SJohn Marino 6266*e4b17023SJohn Marino FIXME: This is a real kludge and needs to be done some other 6267*e4b17023SJohn Marino way. */ 6268*e4b17023SJohn Marino if (NONDEBUG_INSN_P (insn) 6269*e4b17023SJohn Marino && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS)) 6270*e4b17023SJohn Marino { 6271*e4b17023SJohn Marino flush_hash_table (); 6272*e4b17023SJohn Marino num_insns = 0; 6273*e4b17023SJohn Marino } 6274*e4b17023SJohn Marino 6275*e4b17023SJohn Marino if (INSN_P (insn)) 6276*e4b17023SJohn Marino { 6277*e4b17023SJohn Marino /* Process notes first so we have all notes in canonical forms 6278*e4b17023SJohn Marino when looking for duplicate operations. */ 6279*e4b17023SJohn Marino if (REG_NOTES (insn)) 6280*e4b17023SJohn Marino { 6281*e4b17023SJohn Marino bool changed = false; 6282*e4b17023SJohn Marino REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), 6283*e4b17023SJohn Marino NULL_RTX, &changed); 6284*e4b17023SJohn Marino if (changed) 6285*e4b17023SJohn Marino df_notes_rescan (insn); 6286*e4b17023SJohn Marino } 6287*e4b17023SJohn Marino 6288*e4b17023SJohn Marino cse_insn (insn); 6289*e4b17023SJohn Marino 6290*e4b17023SJohn Marino /* If we haven't already found an insn where we added a LABEL_REF, 6291*e4b17023SJohn Marino check this one. 
*/ 6292*e4b17023SJohn Marino if (INSN_P (insn) && !recorded_label_ref 6293*e4b17023SJohn Marino && for_each_rtx (&PATTERN (insn), check_for_label_ref, 6294*e4b17023SJohn Marino (void *) insn)) 6295*e4b17023SJohn Marino recorded_label_ref = true; 6296*e4b17023SJohn Marino 6297*e4b17023SJohn Marino #ifdef HAVE_cc0 6298*e4b17023SJohn Marino if (NONDEBUG_INSN_P (insn)) 6299*e4b17023SJohn Marino { 6300*e4b17023SJohn Marino /* If the previous insn sets CC0 and this insn no 6301*e4b17023SJohn Marino longer references CC0, delete the previous insn. 6302*e4b17023SJohn Marino Here we use fact that nothing expects CC0 to be 6303*e4b17023SJohn Marino valid over an insn, which is true until the final 6304*e4b17023SJohn Marino pass. */ 6305*e4b17023SJohn Marino rtx prev_insn, tem; 6306*e4b17023SJohn Marino 6307*e4b17023SJohn Marino prev_insn = prev_nonnote_nondebug_insn (insn); 6308*e4b17023SJohn Marino if (prev_insn && NONJUMP_INSN_P (prev_insn) 6309*e4b17023SJohn Marino && (tem = single_set (prev_insn)) != NULL_RTX 6310*e4b17023SJohn Marino && SET_DEST (tem) == cc0_rtx 6311*e4b17023SJohn Marino && ! reg_mentioned_p (cc0_rtx, PATTERN (insn))) 6312*e4b17023SJohn Marino delete_insn (prev_insn); 6313*e4b17023SJohn Marino 6314*e4b17023SJohn Marino /* If this insn is not the last insn in the basic 6315*e4b17023SJohn Marino block, it will be PREV_INSN(insn) in the next 6316*e4b17023SJohn Marino iteration. If we recorded any CC0-related 6317*e4b17023SJohn Marino information for this insn, remember it. */ 6318*e4b17023SJohn Marino if (insn != BB_END (bb)) 6319*e4b17023SJohn Marino { 6320*e4b17023SJohn Marino prev_insn_cc0 = this_insn_cc0; 6321*e4b17023SJohn Marino prev_insn_cc0_mode = this_insn_cc0_mode; 6322*e4b17023SJohn Marino } 6323*e4b17023SJohn Marino } 6324*e4b17023SJohn Marino #endif 6325*e4b17023SJohn Marino } 6326*e4b17023SJohn Marino } 6327*e4b17023SJohn Marino 6328*e4b17023SJohn Marino /* With non-call exceptions, we are not always able to update 6329*e4b17023SJohn Marino the CFG properly inside cse_insn. So clean up possibly 6330*e4b17023SJohn Marino redundant EH edges here. */ 6331*e4b17023SJohn Marino if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb)) 6332*e4b17023SJohn Marino cse_cfg_altered |= purge_dead_edges (bb); 6333*e4b17023SJohn Marino 6334*e4b17023SJohn Marino /* If we changed a conditional jump, we may have terminated 6335*e4b17023SJohn Marino the path we are following. Check that by verifying that 6336*e4b17023SJohn Marino the edge we would take still exists. If the edge does 6337*e4b17023SJohn Marino not exist anymore, purge the remainder of the path. 6338*e4b17023SJohn Marino Note that this will cause us to return to the caller. */ 6339*e4b17023SJohn Marino if (path_entry < path_size - 1) 6340*e4b17023SJohn Marino { 6341*e4b17023SJohn Marino basic_block next_bb = ebb_data->path[path_entry + 1].bb; 6342*e4b17023SJohn Marino if (!find_edge (bb, next_bb)) 6343*e4b17023SJohn Marino { 6344*e4b17023SJohn Marino do 6345*e4b17023SJohn Marino { 6346*e4b17023SJohn Marino path_size--; 6347*e4b17023SJohn Marino 6348*e4b17023SJohn Marino /* If we truncate the path, we must also reset the 6349*e4b17023SJohn Marino visited bit on the remaining blocks in the path, 6350*e4b17023SJohn Marino or we will never visit them at all. 
*/ 6351*e4b17023SJohn Marino RESET_BIT (cse_visited_basic_blocks, 6352*e4b17023SJohn Marino ebb_data->path[path_size].bb->index); 6353*e4b17023SJohn Marino ebb_data->path[path_size].bb = NULL; 6354*e4b17023SJohn Marino } 6355*e4b17023SJohn Marino while (path_size - 1 != path_entry); 6356*e4b17023SJohn Marino ebb_data->path_size = path_size; 6357*e4b17023SJohn Marino } 6358*e4b17023SJohn Marino } 6359*e4b17023SJohn Marino 6360*e4b17023SJohn Marino /* If this is a conditional jump insn, record any known 6361*e4b17023SJohn Marino equivalences due to the condition being tested. */ 6362*e4b17023SJohn Marino insn = BB_END (bb); 6363*e4b17023SJohn Marino if (path_entry < path_size - 1 6364*e4b17023SJohn Marino && JUMP_P (insn) 6365*e4b17023SJohn Marino && single_set (insn) 6366*e4b17023SJohn Marino && any_condjump_p (insn)) 6367*e4b17023SJohn Marino { 6368*e4b17023SJohn Marino basic_block next_bb = ebb_data->path[path_entry + 1].bb; 6369*e4b17023SJohn Marino bool taken = (next_bb == BRANCH_EDGE (bb)->dest); 6370*e4b17023SJohn Marino record_jump_equiv (insn, taken); 6371*e4b17023SJohn Marino } 6372*e4b17023SJohn Marino 6373*e4b17023SJohn Marino #ifdef HAVE_cc0 6374*e4b17023SJohn Marino /* Clear the CC0-tracking related insns, they can't provide 6375*e4b17023SJohn Marino useful information across basic block boundaries. */ 6376*e4b17023SJohn Marino prev_insn_cc0 = 0; 6377*e4b17023SJohn Marino #endif 6378*e4b17023SJohn Marino } 6379*e4b17023SJohn Marino 6380*e4b17023SJohn Marino gcc_assert (next_qty <= max_qty); 6381*e4b17023SJohn Marino 6382*e4b17023SJohn Marino free (qty_table); 6383*e4b17023SJohn Marino } 6384*e4b17023SJohn Marino 6385*e4b17023SJohn Marino 6386*e4b17023SJohn Marino /* Perform cse on the instructions of a function. 6387*e4b17023SJohn Marino F is the first instruction. 6388*e4b17023SJohn Marino NREGS is one plus the highest pseudo-reg number used in the instruction. 6389*e4b17023SJohn Marino 6390*e4b17023SJohn Marino Return 2 if jump optimizations should be redone due to simplifications 6391*e4b17023SJohn Marino in conditional jump instructions. 6392*e4b17023SJohn Marino Return 1 if the CFG should be cleaned up because it has been modified. 6393*e4b17023SJohn Marino Return 0 otherwise. 
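   A caller is expected to map that status onto one of three follow-up
   actions.  A minimal sketch of the dispatch (the enum and function are
   invented for illustration; the real pass wrapper lives elsewhere):

     enum sketch_followup
     {
       SKETCH_NOTHING,      // status 0: no further work needed
       SKETCH_CLEANUP_CFG,  // status 1: clean up the modified CFG
       SKETCH_RERUN_JUMP    // status 2: rerun the jump optimizer
     };

     static enum sketch_followup
     sketch_after_cse (int cse_status)
     {
       if (cse_status == 2)
         return SKETCH_RERUN_JUMP;
       if (cse_status == 1)
         return SKETCH_CLEANUP_CFG;
       return SKETCH_NOTHING;
     }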
*/ 6394*e4b17023SJohn Marino 6395*e4b17023SJohn Marino int 6396*e4b17023SJohn Marino cse_main (rtx f ATTRIBUTE_UNUSED, int nregs) 6397*e4b17023SJohn Marino { 6398*e4b17023SJohn Marino struct cse_basic_block_data ebb_data; 6399*e4b17023SJohn Marino basic_block bb; 6400*e4b17023SJohn Marino int *rc_order = XNEWVEC (int, last_basic_block); 6401*e4b17023SJohn Marino int i, n_blocks; 6402*e4b17023SJohn Marino 6403*e4b17023SJohn Marino df_set_flags (DF_LR_RUN_DCE); 6404*e4b17023SJohn Marino df_analyze (); 6405*e4b17023SJohn Marino df_set_flags (DF_DEFER_INSN_RESCAN); 6406*e4b17023SJohn Marino 6407*e4b17023SJohn Marino reg_scan (get_insns (), max_reg_num ()); 6408*e4b17023SJohn Marino init_cse_reg_info (nregs); 6409*e4b17023SJohn Marino 6410*e4b17023SJohn Marino ebb_data.path = XNEWVEC (struct branch_path, 6411*e4b17023SJohn Marino PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH)); 6412*e4b17023SJohn Marino 6413*e4b17023SJohn Marino cse_cfg_altered = false; 6414*e4b17023SJohn Marino cse_jumps_altered = false; 6415*e4b17023SJohn Marino recorded_label_ref = false; 6416*e4b17023SJohn Marino constant_pool_entries_cost = 0; 6417*e4b17023SJohn Marino constant_pool_entries_regcost = 0; 6418*e4b17023SJohn Marino ebb_data.path_size = 0; 6419*e4b17023SJohn Marino ebb_data.nsets = 0; 6420*e4b17023SJohn Marino rtl_hooks = cse_rtl_hooks; 6421*e4b17023SJohn Marino 6422*e4b17023SJohn Marino init_recog (); 6423*e4b17023SJohn Marino init_alias_analysis (); 6424*e4b17023SJohn Marino 6425*e4b17023SJohn Marino reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs); 6426*e4b17023SJohn Marino 6427*e4b17023SJohn Marino /* Set up the table of already visited basic blocks. */ 6428*e4b17023SJohn Marino cse_visited_basic_blocks = sbitmap_alloc (last_basic_block); 6429*e4b17023SJohn Marino sbitmap_zero (cse_visited_basic_blocks); 6430*e4b17023SJohn Marino 6431*e4b17023SJohn Marino /* Loop over basic blocks in reverse completion order (RPO), 6432*e4b17023SJohn Marino excluding the ENTRY and EXIT blocks. */ 6433*e4b17023SJohn Marino n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false); 6434*e4b17023SJohn Marino i = 0; 6435*e4b17023SJohn Marino while (i < n_blocks) 6436*e4b17023SJohn Marino { 6437*e4b17023SJohn Marino /* Find the first block in the RPO queue that we have not yet 6438*e4b17023SJohn Marino processed before. */ 6439*e4b17023SJohn Marino do 6440*e4b17023SJohn Marino { 6441*e4b17023SJohn Marino bb = BASIC_BLOCK (rc_order[i++]); 6442*e4b17023SJohn Marino } 6443*e4b17023SJohn Marino while (TEST_BIT (cse_visited_basic_blocks, bb->index) 6444*e4b17023SJohn Marino && i < n_blocks); 6445*e4b17023SJohn Marino 6446*e4b17023SJohn Marino /* Find all paths starting with BB, and process them. */ 6447*e4b17023SJohn Marino while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps)) 6448*e4b17023SJohn Marino { 6449*e4b17023SJohn Marino /* Pre-scan the path. */ 6450*e4b17023SJohn Marino cse_prescan_path (&ebb_data); 6451*e4b17023SJohn Marino 6452*e4b17023SJohn Marino /* If this basic block has no sets, skip it. */ 6453*e4b17023SJohn Marino if (ebb_data.nsets == 0) 6454*e4b17023SJohn Marino continue; 6455*e4b17023SJohn Marino 6456*e4b17023SJohn Marino /* Get a reasonable estimate for the maximum number of qty's 6457*e4b17023SJohn Marino needed for this path. For this, we take the number of sets 6458*e4b17023SJohn Marino and multiply that by MAX_RECOG_OPERANDS. */ 6459*e4b17023SJohn Marino max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS; 6460*e4b17023SJohn Marino 6461*e4b17023SJohn Marino /* Dump the path we're about to process. 
*/ 6462*e4b17023SJohn Marino if (dump_file) 6463*e4b17023SJohn Marino cse_dump_path (&ebb_data, ebb_data.nsets, dump_file); 6464*e4b17023SJohn Marino 6465*e4b17023SJohn Marino cse_extended_basic_block (&ebb_data); 6466*e4b17023SJohn Marino } 6467*e4b17023SJohn Marino } 6468*e4b17023SJohn Marino 6469*e4b17023SJohn Marino /* Clean up. */ 6470*e4b17023SJohn Marino end_alias_analysis (); 6471*e4b17023SJohn Marino free (reg_eqv_table); 6472*e4b17023SJohn Marino free (ebb_data.path); 6473*e4b17023SJohn Marino sbitmap_free (cse_visited_basic_blocks); 6474*e4b17023SJohn Marino free (rc_order); 6475*e4b17023SJohn Marino rtl_hooks = general_rtl_hooks; 6476*e4b17023SJohn Marino 6477*e4b17023SJohn Marino if (cse_jumps_altered || recorded_label_ref) 6478*e4b17023SJohn Marino return 2; 6479*e4b17023SJohn Marino else if (cse_cfg_altered) 6480*e4b17023SJohn Marino return 1; 6481*e4b17023SJohn Marino else 6482*e4b17023SJohn Marino return 0; 6483*e4b17023SJohn Marino } 6484*e4b17023SJohn Marino 6485*e4b17023SJohn Marino /* Called via for_each_rtx to see if an insn is using a LABEL_REF for 6486*e4b17023SJohn Marino which there isn't a REG_LABEL_OPERAND note. 6487*e4b17023SJohn Marino Return one if so. DATA is the insn. */ 6488*e4b17023SJohn Marino 6489*e4b17023SJohn Marino static int 6490*e4b17023SJohn Marino check_for_label_ref (rtx *rtl, void *data) 6491*e4b17023SJohn Marino { 6492*e4b17023SJohn Marino rtx insn = (rtx) data; 6493*e4b17023SJohn Marino 6494*e4b17023SJohn Marino /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND 6495*e4b17023SJohn Marino note for it, we must rerun jump since it needs to place the note. If 6496*e4b17023SJohn Marino this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain, 6497*e4b17023SJohn Marino don't do this since no REG_LABEL_OPERAND will be added. */ 6498*e4b17023SJohn Marino return (GET_CODE (*rtl) == LABEL_REF 6499*e4b17023SJohn Marino && ! LABEL_REF_NONLOCAL_P (*rtl) 6500*e4b17023SJohn Marino && (!JUMP_P (insn) 6501*e4b17023SJohn Marino || !label_is_jump_target_p (XEXP (*rtl, 0), insn)) 6502*e4b17023SJohn Marino && LABEL_P (XEXP (*rtl, 0)) 6503*e4b17023SJohn Marino && INSN_UID (XEXP (*rtl, 0)) != 0 6504*e4b17023SJohn Marino && ! find_reg_note (insn, REG_LABEL_OPERAND, XEXP (*rtl, 0))); 6505*e4b17023SJohn Marino } 6506*e4b17023SJohn Marino 6507*e4b17023SJohn Marino /* Count the number of times registers are used (not set) in X. 6508*e4b17023SJohn Marino COUNTS is an array in which we accumulate the count, INCR is how much 6509*e4b17023SJohn Marino we count each register usage. 6510*e4b17023SJohn Marino 6511*e4b17023SJohn Marino Don't count a usage of DEST, which is the SET_DEST of a SET which 6512*e4b17023SJohn Marino contains X in its SET_SRC. This is because such a SET does not 6513*e4b17023SJohn Marino modify the liveness of DEST. 6514*e4b17023SJohn Marino DEST is set to pc_rtx for a trapping insn, or for an insn with side effects. 6515*e4b17023SJohn Marino We must then count uses of a SET_DEST regardless, because the insn can't be 6516*e4b17023SJohn Marino deleted here. 
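   The counting itself is an ordinary recursive walk.  A self-contained toy
   version over a miniature expression tree shows the one twist, namely that
   the register being assigned to is not itself counted as a use; the
   sketch_ types and names are invented and are far simpler than real RTL.

     struct sketch_expr
     {
       int is_reg;                     // leaf: a register number
       int regno;
       struct sketch_expr *op0;        // interior node: up to two operands
       struct sketch_expr *op1;
     };

     static void
     sketch_count_uses (const struct sketch_expr *x, int *counts,
                        int skip_regno, int incr)
     {
       if (x == 0)
         return;
       if (x->is_reg)
         {
           if (x->regno != skip_regno)
             counts[x->regno] += incr;
           return;
         }
       sketch_count_uses (x->op0, counts, skip_regno, incr);
       sketch_count_uses (x->op1, counts, skip_regno, incr);
     }

   For a set such as r3 = r1 + r3, counting with skip_regno equal to 3 adds
   INCR only to counts[1]; counts[3] stays untouched, mirroring how a use of
   the destination inside its own SET does not keep that register alive.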
*/ 6517*e4b17023SJohn Marino 6518*e4b17023SJohn Marino static void 6519*e4b17023SJohn Marino count_reg_usage (rtx x, int *counts, rtx dest, int incr) 6520*e4b17023SJohn Marino { 6521*e4b17023SJohn Marino enum rtx_code code; 6522*e4b17023SJohn Marino rtx note; 6523*e4b17023SJohn Marino const char *fmt; 6524*e4b17023SJohn Marino int i, j; 6525*e4b17023SJohn Marino 6526*e4b17023SJohn Marino if (x == 0) 6527*e4b17023SJohn Marino return; 6528*e4b17023SJohn Marino 6529*e4b17023SJohn Marino switch (code = GET_CODE (x)) 6530*e4b17023SJohn Marino { 6531*e4b17023SJohn Marino case REG: 6532*e4b17023SJohn Marino if (x != dest) 6533*e4b17023SJohn Marino counts[REGNO (x)] += incr; 6534*e4b17023SJohn Marino return; 6535*e4b17023SJohn Marino 6536*e4b17023SJohn Marino case PC: 6537*e4b17023SJohn Marino case CC0: 6538*e4b17023SJohn Marino case CONST: 6539*e4b17023SJohn Marino case CONST_INT: 6540*e4b17023SJohn Marino case CONST_DOUBLE: 6541*e4b17023SJohn Marino case CONST_FIXED: 6542*e4b17023SJohn Marino case CONST_VECTOR: 6543*e4b17023SJohn Marino case SYMBOL_REF: 6544*e4b17023SJohn Marino case LABEL_REF: 6545*e4b17023SJohn Marino return; 6546*e4b17023SJohn Marino 6547*e4b17023SJohn Marino case CLOBBER: 6548*e4b17023SJohn Marino /* If we are clobbering a MEM, mark any registers inside the address 6549*e4b17023SJohn Marino as being used. */ 6550*e4b17023SJohn Marino if (MEM_P (XEXP (x, 0))) 6551*e4b17023SJohn Marino count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr); 6552*e4b17023SJohn Marino return; 6553*e4b17023SJohn Marino 6554*e4b17023SJohn Marino case SET: 6555*e4b17023SJohn Marino /* Unless we are setting a REG, count everything in SET_DEST. */ 6556*e4b17023SJohn Marino if (!REG_P (SET_DEST (x))) 6557*e4b17023SJohn Marino count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr); 6558*e4b17023SJohn Marino count_reg_usage (SET_SRC (x), counts, 6559*e4b17023SJohn Marino dest ? dest : SET_DEST (x), 6560*e4b17023SJohn Marino incr); 6561*e4b17023SJohn Marino return; 6562*e4b17023SJohn Marino 6563*e4b17023SJohn Marino case DEBUG_INSN: 6564*e4b17023SJohn Marino return; 6565*e4b17023SJohn Marino 6566*e4b17023SJohn Marino case CALL_INSN: 6567*e4b17023SJohn Marino case INSN: 6568*e4b17023SJohn Marino case JUMP_INSN: 6569*e4b17023SJohn Marino /* We expect dest to be NULL_RTX here. If the insn may trap, 6570*e4b17023SJohn Marino or if it cannot be deleted due to side-effects, mark this fact 6571*e4b17023SJohn Marino by setting DEST to pc_rtx. */ 6572*e4b17023SJohn Marino if (insn_could_throw_p (x) || side_effects_p (PATTERN (x))) 6573*e4b17023SJohn Marino dest = pc_rtx; 6574*e4b17023SJohn Marino if (code == CALL_INSN) 6575*e4b17023SJohn Marino count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr); 6576*e4b17023SJohn Marino count_reg_usage (PATTERN (x), counts, dest, incr); 6577*e4b17023SJohn Marino 6578*e4b17023SJohn Marino /* Things used in a REG_EQUAL note aren't dead since loop may try to 6579*e4b17023SJohn Marino use them. */ 6580*e4b17023SJohn Marino 6581*e4b17023SJohn Marino note = find_reg_equal_equiv_note (x); 6582*e4b17023SJohn Marino if (note) 6583*e4b17023SJohn Marino { 6584*e4b17023SJohn Marino rtx eqv = XEXP (note, 0); 6585*e4b17023SJohn Marino 6586*e4b17023SJohn Marino if (GET_CODE (eqv) == EXPR_LIST) 6587*e4b17023SJohn Marino /* This REG_EQUAL note describes the result of a function call. 6588*e4b17023SJohn Marino Process all the arguments. 
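   (The note value is then an EXPR_LIST chain; operand 0 of each
   element is one argument expression, and each argument is counted
   with the same DEST as the insn itself, which is what the loop below
   does.)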
*/ 6589*e4b17023SJohn Marino do 6590*e4b17023SJohn Marino { 6591*e4b17023SJohn Marino count_reg_usage (XEXP (eqv, 0), counts, dest, incr); 6592*e4b17023SJohn Marino eqv = XEXP (eqv, 1); 6593*e4b17023SJohn Marino } 6594*e4b17023SJohn Marino while (eqv && GET_CODE (eqv) == EXPR_LIST); 6595*e4b17023SJohn Marino else 6596*e4b17023SJohn Marino count_reg_usage (eqv, counts, dest, incr); 6597*e4b17023SJohn Marino } 6598*e4b17023SJohn Marino return; 6599*e4b17023SJohn Marino 6600*e4b17023SJohn Marino case EXPR_LIST: 6601*e4b17023SJohn Marino if (REG_NOTE_KIND (x) == REG_EQUAL 6602*e4b17023SJohn Marino || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE) 6603*e4b17023SJohn Marino /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)), 6604*e4b17023SJohn Marino involving registers in the address. */ 6605*e4b17023SJohn Marino || GET_CODE (XEXP (x, 0)) == CLOBBER) 6606*e4b17023SJohn Marino count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr); 6607*e4b17023SJohn Marino 6608*e4b17023SJohn Marino count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr); 6609*e4b17023SJohn Marino return; 6610*e4b17023SJohn Marino 6611*e4b17023SJohn Marino case ASM_OPERANDS: 6612*e4b17023SJohn Marino /* Iterate over just the inputs, not the constraints as well. */ 6613*e4b17023SJohn Marino for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--) 6614*e4b17023SJohn Marino count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr); 6615*e4b17023SJohn Marino return; 6616*e4b17023SJohn Marino 6617*e4b17023SJohn Marino case INSN_LIST: 6618*e4b17023SJohn Marino gcc_unreachable (); 6619*e4b17023SJohn Marino 6620*e4b17023SJohn Marino default: 6621*e4b17023SJohn Marino break; 6622*e4b17023SJohn Marino } 6623*e4b17023SJohn Marino 6624*e4b17023SJohn Marino fmt = GET_RTX_FORMAT (code); 6625*e4b17023SJohn Marino for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 6626*e4b17023SJohn Marino { 6627*e4b17023SJohn Marino if (fmt[i] == 'e') 6628*e4b17023SJohn Marino count_reg_usage (XEXP (x, i), counts, dest, incr); 6629*e4b17023SJohn Marino else if (fmt[i] == 'E') 6630*e4b17023SJohn Marino for (j = XVECLEN (x, i) - 1; j >= 0; j--) 6631*e4b17023SJohn Marino count_reg_usage (XVECEXP (x, i, j), counts, dest, incr); 6632*e4b17023SJohn Marino } 6633*e4b17023SJohn Marino } 6634*e4b17023SJohn Marino 6635*e4b17023SJohn Marino /* Return true if X is a dead register. */ 6636*e4b17023SJohn Marino 6637*e4b17023SJohn Marino static inline int 6638*e4b17023SJohn Marino is_dead_reg (rtx x, int *counts) 6639*e4b17023SJohn Marino { 6640*e4b17023SJohn Marino return (REG_P (x) 6641*e4b17023SJohn Marino && REGNO (x) >= FIRST_PSEUDO_REGISTER 6642*e4b17023SJohn Marino && counts[REGNO (x)] == 0); 6643*e4b17023SJohn Marino } 6644*e4b17023SJohn Marino 6645*e4b17023SJohn Marino /* Return true if set is live. */ 6646*e4b17023SJohn Marino static bool 6647*e4b17023SJohn Marino set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. 
*/ 6648*e4b17023SJohn Marino int *counts) 6649*e4b17023SJohn Marino { 6650*e4b17023SJohn Marino #ifdef HAVE_cc0 6651*e4b17023SJohn Marino rtx tem; 6652*e4b17023SJohn Marino #endif 6653*e4b17023SJohn Marino 6654*e4b17023SJohn Marino if (set_noop_p (set)) 6655*e4b17023SJohn Marino ; 6656*e4b17023SJohn Marino 6657*e4b17023SJohn Marino #ifdef HAVE_cc0 6658*e4b17023SJohn Marino else if (GET_CODE (SET_DEST (set)) == CC0 6659*e4b17023SJohn Marino && !side_effects_p (SET_SRC (set)) 6660*e4b17023SJohn Marino && ((tem = next_nonnote_nondebug_insn (insn)) == NULL_RTX 6661*e4b17023SJohn Marino || !INSN_P (tem) 6662*e4b17023SJohn Marino || !reg_referenced_p (cc0_rtx, PATTERN (tem)))) 6663*e4b17023SJohn Marino return false; 6664*e4b17023SJohn Marino #endif 6665*e4b17023SJohn Marino else if (!is_dead_reg (SET_DEST (set), counts) 6666*e4b17023SJohn Marino || side_effects_p (SET_SRC (set))) 6667*e4b17023SJohn Marino return true; 6668*e4b17023SJohn Marino return false; 6669*e4b17023SJohn Marino } 6670*e4b17023SJohn Marino 6671*e4b17023SJohn Marino /* Return true if insn is live. */ 6672*e4b17023SJohn Marino 6673*e4b17023SJohn Marino static bool 6674*e4b17023SJohn Marino insn_live_p (rtx insn, int *counts) 6675*e4b17023SJohn Marino { 6676*e4b17023SJohn Marino int i; 6677*e4b17023SJohn Marino if (insn_could_throw_p (insn)) 6678*e4b17023SJohn Marino return true; 6679*e4b17023SJohn Marino else if (GET_CODE (PATTERN (insn)) == SET) 6680*e4b17023SJohn Marino return set_live_p (PATTERN (insn), insn, counts); 6681*e4b17023SJohn Marino else if (GET_CODE (PATTERN (insn)) == PARALLEL) 6682*e4b17023SJohn Marino { 6683*e4b17023SJohn Marino for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--) 6684*e4b17023SJohn Marino { 6685*e4b17023SJohn Marino rtx elt = XVECEXP (PATTERN (insn), 0, i); 6686*e4b17023SJohn Marino 6687*e4b17023SJohn Marino if (GET_CODE (elt) == SET) 6688*e4b17023SJohn Marino { 6689*e4b17023SJohn Marino if (set_live_p (elt, insn, counts)) 6690*e4b17023SJohn Marino return true; 6691*e4b17023SJohn Marino } 6692*e4b17023SJohn Marino else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE) 6693*e4b17023SJohn Marino return true; 6694*e4b17023SJohn Marino } 6695*e4b17023SJohn Marino return false; 6696*e4b17023SJohn Marino } 6697*e4b17023SJohn Marino else if (DEBUG_INSN_P (insn)) 6698*e4b17023SJohn Marino { 6699*e4b17023SJohn Marino rtx next; 6700*e4b17023SJohn Marino 6701*e4b17023SJohn Marino for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next)) 6702*e4b17023SJohn Marino if (NOTE_P (next)) 6703*e4b17023SJohn Marino continue; 6704*e4b17023SJohn Marino else if (!DEBUG_INSN_P (next)) 6705*e4b17023SJohn Marino return true; 6706*e4b17023SJohn Marino else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next)) 6707*e4b17023SJohn Marino return false; 6708*e4b17023SJohn Marino 6709*e4b17023SJohn Marino return true; 6710*e4b17023SJohn Marino } 6711*e4b17023SJohn Marino else 6712*e4b17023SJohn Marino return true; 6713*e4b17023SJohn Marino } 6714*e4b17023SJohn Marino 6715*e4b17023SJohn Marino /* Count the number of stores into pseudo. Callback for note_stores. 
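   Only pseudo registers are recorded; stores into hard registers are
   deliberately ignored, matching the FIRST_PSEUDO_REGISTER check in
   the body, because the dead-insn heuristics below reason only about
   pseudos.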
*/ 6716*e4b17023SJohn Marino 6717*e4b17023SJohn Marino static void 6718*e4b17023SJohn Marino count_stores (rtx x, const_rtx set ATTRIBUTE_UNUSED, void *data) 6719*e4b17023SJohn Marino { 6720*e4b17023SJohn Marino int *counts = (int *) data; 6721*e4b17023SJohn Marino if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER) 6722*e4b17023SJohn Marino counts[REGNO (x)]++; 6723*e4b17023SJohn Marino } 6724*e4b17023SJohn Marino 6725*e4b17023SJohn Marino struct dead_debug_insn_data 6726*e4b17023SJohn Marino { 6727*e4b17023SJohn Marino int *counts; 6728*e4b17023SJohn Marino rtx *replacements; 6729*e4b17023SJohn Marino bool seen_repl; 6730*e4b17023SJohn Marino }; 6731*e4b17023SJohn Marino 6732*e4b17023SJohn Marino /* Return if a DEBUG_INSN needs to be reset because some dead 6733*e4b17023SJohn Marino pseudo doesn't have a replacement. Callback for for_each_rtx. */ 6734*e4b17023SJohn Marino 6735*e4b17023SJohn Marino static int 6736*e4b17023SJohn Marino is_dead_debug_insn (rtx *loc, void *data) 6737*e4b17023SJohn Marino { 6738*e4b17023SJohn Marino rtx x = *loc; 6739*e4b17023SJohn Marino struct dead_debug_insn_data *ddid = (struct dead_debug_insn_data *) data; 6740*e4b17023SJohn Marino 6741*e4b17023SJohn Marino if (is_dead_reg (x, ddid->counts)) 6742*e4b17023SJohn Marino { 6743*e4b17023SJohn Marino if (ddid->replacements && ddid->replacements[REGNO (x)] != NULL_RTX) 6744*e4b17023SJohn Marino ddid->seen_repl = true; 6745*e4b17023SJohn Marino else 6746*e4b17023SJohn Marino return 1; 6747*e4b17023SJohn Marino } 6748*e4b17023SJohn Marino return 0; 6749*e4b17023SJohn Marino } 6750*e4b17023SJohn Marino 6751*e4b17023SJohn Marino /* Replace a dead pseudo in a DEBUG_INSN with replacement DEBUG_EXPR. 6752*e4b17023SJohn Marino Callback for simplify_replace_fn_rtx. */ 6753*e4b17023SJohn Marino 6754*e4b17023SJohn Marino static rtx 6755*e4b17023SJohn Marino replace_dead_reg (rtx x, const_rtx old_rtx ATTRIBUTE_UNUSED, void *data) 6756*e4b17023SJohn Marino { 6757*e4b17023SJohn Marino rtx *replacements = (rtx *) data; 6758*e4b17023SJohn Marino 6759*e4b17023SJohn Marino if (REG_P (x) 6760*e4b17023SJohn Marino && REGNO (x) >= FIRST_PSEUDO_REGISTER 6761*e4b17023SJohn Marino && replacements[REGNO (x)] != NULL_RTX) 6762*e4b17023SJohn Marino { 6763*e4b17023SJohn Marino if (GET_MODE (x) == GET_MODE (replacements[REGNO (x)])) 6764*e4b17023SJohn Marino return replacements[REGNO (x)]; 6765*e4b17023SJohn Marino return lowpart_subreg (GET_MODE (x), replacements[REGNO (x)], 6766*e4b17023SJohn Marino GET_MODE (replacements[REGNO (x)])); 6767*e4b17023SJohn Marino } 6768*e4b17023SJohn Marino return NULL_RTX; 6769*e4b17023SJohn Marino } 6770*e4b17023SJohn Marino 6771*e4b17023SJohn Marino /* Scan all the insns and delete any that are dead; i.e., they store a register 6772*e4b17023SJohn Marino that is never used or they copy a register to itself. 6773*e4b17023SJohn Marino 6774*e4b17023SJohn Marino This is used to remove insns made obviously dead by cse, loop or other 6775*e4b17023SJohn Marino optimizations. It improves the heuristics in loop since it won't try to 6776*e4b17023SJohn Marino move dead invariants out of loops or make givs for dead quantities. The 6777*e4b17023SJohn Marino remaining passes of the compilation are also sped up. 
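   Two minimal examples of what counts as trivially dead here
   (illustrative only, register numbers invented):

     (set (reg 120) (mult:SI (reg 121) (reg 122)))  where reg 120 has
                                                    no remaining uses
     (set (reg 123) (reg 123))                      a no-op self copy

   Either insn is deleted, provided it cannot trap and its SET_SRC has
   no side effects.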
*/ 6778*e4b17023SJohn Marino 6779*e4b17023SJohn Marino int 6780*e4b17023SJohn Marino delete_trivially_dead_insns (rtx insns, int nreg) 6781*e4b17023SJohn Marino { 6782*e4b17023SJohn Marino int *counts; 6783*e4b17023SJohn Marino rtx insn, prev; 6784*e4b17023SJohn Marino rtx *replacements = NULL; 6785*e4b17023SJohn Marino int ndead = 0; 6786*e4b17023SJohn Marino 6787*e4b17023SJohn Marino timevar_push (TV_DELETE_TRIVIALLY_DEAD); 6788*e4b17023SJohn Marino /* First count the number of times each register is used. */ 6789*e4b17023SJohn Marino if (MAY_HAVE_DEBUG_INSNS) 6790*e4b17023SJohn Marino { 6791*e4b17023SJohn Marino counts = XCNEWVEC (int, nreg * 3); 6792*e4b17023SJohn Marino for (insn = insns; insn; insn = NEXT_INSN (insn)) 6793*e4b17023SJohn Marino if (DEBUG_INSN_P (insn)) 6794*e4b17023SJohn Marino count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg, 6795*e4b17023SJohn Marino NULL_RTX, 1); 6796*e4b17023SJohn Marino else if (INSN_P (insn)) 6797*e4b17023SJohn Marino { 6798*e4b17023SJohn Marino count_reg_usage (insn, counts, NULL_RTX, 1); 6799*e4b17023SJohn Marino note_stores (PATTERN (insn), count_stores, counts + nreg * 2); 6800*e4b17023SJohn Marino } 6801*e4b17023SJohn Marino /* If there can be debug insns, COUNTS are 3 consecutive arrays. 6802*e4b17023SJohn Marino First one counts how many times each pseudo is used outside 6803*e4b17023SJohn Marino of debug insns, second counts how many times each pseudo is 6804*e4b17023SJohn Marino used in debug insns and third counts how many times a pseudo 6805*e4b17023SJohn Marino is stored. */ 6806*e4b17023SJohn Marino } 6807*e4b17023SJohn Marino else 6808*e4b17023SJohn Marino { 6809*e4b17023SJohn Marino counts = XCNEWVEC (int, nreg); 6810*e4b17023SJohn Marino for (insn = insns; insn; insn = NEXT_INSN (insn)) 6811*e4b17023SJohn Marino if (INSN_P (insn)) 6812*e4b17023SJohn Marino count_reg_usage (insn, counts, NULL_RTX, 1); 6813*e4b17023SJohn Marino /* If no debug insns can be present, COUNTS is just an array 6814*e4b17023SJohn Marino which counts how many times each pseudo is used. */ 6815*e4b17023SJohn Marino } 6816*e4b17023SJohn Marino /* Go from the last insn to the first and delete insns that only set unused 6817*e4b17023SJohn Marino registers or copy a register to itself. As we delete an insn, remove 6818*e4b17023SJohn Marino usage counts for registers it uses. 6819*e4b17023SJohn Marino 6820*e4b17023SJohn Marino The first jump optimization pass may leave a real insn as the last 6821*e4b17023SJohn Marino insn in the function. We must not skip that insn or we may end 6822*e4b17023SJohn Marino up deleting code that is not really dead. 6823*e4b17023SJohn Marino 6824*e4b17023SJohn Marino If some otherwise unused register is only used in DEBUG_INSNs, 6825*e4b17023SJohn Marino try to create a DEBUG_EXPR temporary and emit a DEBUG_INSN before 6826*e4b17023SJohn Marino the setter. Then go through DEBUG_INSNs and if a DEBUG_EXPR 6827*e4b17023SJohn Marino has been created for the unused register, replace it with 6828*e4b17023SJohn Marino the DEBUG_EXPR, otherwise reset the DEBUG_INSN. 
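   As a rough sketch of the bookkeeping described above (assuming
   MAY_HAVE_DEBUG_INSNS, so COUNTS really is three arrays of NREG
   entries each):

     counts[r]             non-debug uses of pseudo r
     counts[r + nreg]      uses of pseudo r inside DEBUG_INSNs
     counts[r + nreg * 2]  stores into pseudo r

   A pseudo gets the DEBUG_EXPR treatment when the first count is zero,
   the second is positive and the third is exactly one, subject to the
   further checks made in the loop below.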
*/ 6829*e4b17023SJohn Marino for (insn = get_last_insn (); insn; insn = prev) 6830*e4b17023SJohn Marino { 6831*e4b17023SJohn Marino int live_insn = 0; 6832*e4b17023SJohn Marino 6833*e4b17023SJohn Marino prev = PREV_INSN (insn); 6834*e4b17023SJohn Marino if (!INSN_P (insn)) 6835*e4b17023SJohn Marino continue; 6836*e4b17023SJohn Marino 6837*e4b17023SJohn Marino live_insn = insn_live_p (insn, counts); 6838*e4b17023SJohn Marino 6839*e4b17023SJohn Marino /* If this is a dead insn, delete it and show registers in it aren't 6840*e4b17023SJohn Marino being used. */ 6841*e4b17023SJohn Marino 6842*e4b17023SJohn Marino if (! live_insn && dbg_cnt (delete_trivial_dead)) 6843*e4b17023SJohn Marino { 6844*e4b17023SJohn Marino if (DEBUG_INSN_P (insn)) 6845*e4b17023SJohn Marino count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg, 6846*e4b17023SJohn Marino NULL_RTX, -1); 6847*e4b17023SJohn Marino else 6848*e4b17023SJohn Marino { 6849*e4b17023SJohn Marino rtx set; 6850*e4b17023SJohn Marino if (MAY_HAVE_DEBUG_INSNS 6851*e4b17023SJohn Marino && (set = single_set (insn)) != NULL_RTX 6852*e4b17023SJohn Marino && is_dead_reg (SET_DEST (set), counts) 6853*e4b17023SJohn Marino /* Used at least once in some DEBUG_INSN. */ 6854*e4b17023SJohn Marino && counts[REGNO (SET_DEST (set)) + nreg] > 0 6855*e4b17023SJohn Marino /* And set exactly once. */ 6856*e4b17023SJohn Marino && counts[REGNO (SET_DEST (set)) + nreg * 2] == 1 6857*e4b17023SJohn Marino && !side_effects_p (SET_SRC (set)) 6858*e4b17023SJohn Marino && asm_noperands (PATTERN (insn)) < 0) 6859*e4b17023SJohn Marino { 6860*e4b17023SJohn Marino rtx dval, bind; 6861*e4b17023SJohn Marino 6862*e4b17023SJohn Marino /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */ 6863*e4b17023SJohn Marino dval = make_debug_expr_from_rtl (SET_DEST (set)); 6864*e4b17023SJohn Marino 6865*e4b17023SJohn Marino /* Emit a debug bind insn before the insn in which 6866*e4b17023SJohn Marino reg dies. */ 6867*e4b17023SJohn Marino bind = gen_rtx_VAR_LOCATION (GET_MODE (SET_DEST (set)), 6868*e4b17023SJohn Marino DEBUG_EXPR_TREE_DECL (dval), 6869*e4b17023SJohn Marino SET_SRC (set), 6870*e4b17023SJohn Marino VAR_INIT_STATUS_INITIALIZED); 6871*e4b17023SJohn Marino count_reg_usage (bind, counts + nreg, NULL_RTX, 1); 6872*e4b17023SJohn Marino 6873*e4b17023SJohn Marino bind = emit_debug_insn_before (bind, insn); 6874*e4b17023SJohn Marino df_insn_rescan (bind); 6875*e4b17023SJohn Marino 6876*e4b17023SJohn Marino if (replacements == NULL) 6877*e4b17023SJohn Marino replacements = XCNEWVEC (rtx, nreg); 6878*e4b17023SJohn Marino replacements[REGNO (SET_DEST (set))] = dval; 6879*e4b17023SJohn Marino } 6880*e4b17023SJohn Marino 6881*e4b17023SJohn Marino count_reg_usage (insn, counts, NULL_RTX, -1); 6882*e4b17023SJohn Marino ndead++; 6883*e4b17023SJohn Marino } 6884*e4b17023SJohn Marino delete_insn_and_edges (insn); 6885*e4b17023SJohn Marino } 6886*e4b17023SJohn Marino } 6887*e4b17023SJohn Marino 6888*e4b17023SJohn Marino if (MAY_HAVE_DEBUG_INSNS) 6889*e4b17023SJohn Marino { 6890*e4b17023SJohn Marino struct dead_debug_insn_data ddid; 6891*e4b17023SJohn Marino ddid.counts = counts; 6892*e4b17023SJohn Marino ddid.replacements = replacements; 6893*e4b17023SJohn Marino for (insn = get_last_insn (); insn; insn = PREV_INSN (insn)) 6894*e4b17023SJohn Marino if (DEBUG_INSN_P (insn)) 6895*e4b17023SJohn Marino { 6896*e4b17023SJohn Marino /* If this debug insn references a dead register that wasn't replaced 6897*e4b17023SJohn Marino with an DEBUG_EXPR, reset the DEBUG_INSN. 
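   For example (illustration only), consider a location such as
   (plus:SI (reg 120) (const_int 4)) where reg 120 is dead: if a
   DEBUG_EXPR was created for reg 120 above, the register is simply
   rewritten to refer to it; otherwise the whole location is discarded
   and the DEBUG_INSN no longer carries a value.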
*/ 6898*e4b17023SJohn Marino ddid.seen_repl = false; 6899*e4b17023SJohn Marino if (for_each_rtx (&INSN_VAR_LOCATION_LOC (insn), 6900*e4b17023SJohn Marino is_dead_debug_insn, &ddid)) 6901*e4b17023SJohn Marino { 6902*e4b17023SJohn Marino INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC (); 6903*e4b17023SJohn Marino df_insn_rescan (insn); 6904*e4b17023SJohn Marino } 6905*e4b17023SJohn Marino else if (ddid.seen_repl) 6906*e4b17023SJohn Marino { 6907*e4b17023SJohn Marino INSN_VAR_LOCATION_LOC (insn) 6908*e4b17023SJohn Marino = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn), 6909*e4b17023SJohn Marino NULL_RTX, replace_dead_reg, 6910*e4b17023SJohn Marino replacements); 6911*e4b17023SJohn Marino df_insn_rescan (insn); 6912*e4b17023SJohn Marino } 6913*e4b17023SJohn Marino } 6914*e4b17023SJohn Marino free (replacements); 6915*e4b17023SJohn Marino } 6916*e4b17023SJohn Marino 6917*e4b17023SJohn Marino if (dump_file && ndead) 6918*e4b17023SJohn Marino fprintf (dump_file, "Deleted %i trivially dead insns\n", 6919*e4b17023SJohn Marino ndead); 6920*e4b17023SJohn Marino /* Clean up. */ 6921*e4b17023SJohn Marino free (counts); 6922*e4b17023SJohn Marino timevar_pop (TV_DELETE_TRIVIALLY_DEAD); 6923*e4b17023SJohn Marino return ndead; 6924*e4b17023SJohn Marino } 6925*e4b17023SJohn Marino 6926*e4b17023SJohn Marino /* This function is called via for_each_rtx. The argument, NEWREG, is 6927*e4b17023SJohn Marino a condition code register with the desired mode. If we are looking 6928*e4b17023SJohn Marino at the same register in a different mode, replace it with 6929*e4b17023SJohn Marino NEWREG. */ 6930*e4b17023SJohn Marino 6931*e4b17023SJohn Marino static int 6932*e4b17023SJohn Marino cse_change_cc_mode (rtx *loc, void *data) 6933*e4b17023SJohn Marino { 6934*e4b17023SJohn Marino struct change_cc_mode_args* args = (struct change_cc_mode_args*)data; 6935*e4b17023SJohn Marino 6936*e4b17023SJohn Marino if (*loc 6937*e4b17023SJohn Marino && REG_P (*loc) 6938*e4b17023SJohn Marino && REGNO (*loc) == REGNO (args->newreg) 6939*e4b17023SJohn Marino && GET_MODE (*loc) != GET_MODE (args->newreg)) 6940*e4b17023SJohn Marino { 6941*e4b17023SJohn Marino validate_change (args->insn, loc, args->newreg, 1); 6942*e4b17023SJohn Marino 6943*e4b17023SJohn Marino return -1; 6944*e4b17023SJohn Marino } 6945*e4b17023SJohn Marino return 0; 6946*e4b17023SJohn Marino } 6947*e4b17023SJohn Marino 6948*e4b17023SJohn Marino /* Change the mode of any reference to the register REGNO (NEWREG) to 6949*e4b17023SJohn Marino GET_MODE (NEWREG) in INSN. */ 6950*e4b17023SJohn Marino 6951*e4b17023SJohn Marino static void 6952*e4b17023SJohn Marino cse_change_cc_mode_insn (rtx insn, rtx newreg) 6953*e4b17023SJohn Marino { 6954*e4b17023SJohn Marino struct change_cc_mode_args args; 6955*e4b17023SJohn Marino int success; 6956*e4b17023SJohn Marino 6957*e4b17023SJohn Marino if (!INSN_P (insn)) 6958*e4b17023SJohn Marino return; 6959*e4b17023SJohn Marino 6960*e4b17023SJohn Marino args.insn = insn; 6961*e4b17023SJohn Marino args.newreg = newreg; 6962*e4b17023SJohn Marino 6963*e4b17023SJohn Marino for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args); 6964*e4b17023SJohn Marino for_each_rtx (®_NOTES (insn), cse_change_cc_mode, &args); 6965*e4b17023SJohn Marino 6966*e4b17023SJohn Marino /* If the following assertion was triggered, there is most probably 6967*e4b17023SJohn Marino something wrong with the cc_modes_compatible back end function. 
6968*e4b17023SJohn Marino CC modes only can be considered compatible if the insn - with the mode 6969*e4b17023SJohn Marino replaced by any of the compatible modes - can still be recognized. */ 6970*e4b17023SJohn Marino success = apply_change_group (); 6971*e4b17023SJohn Marino gcc_assert (success); 6972*e4b17023SJohn Marino } 6973*e4b17023SJohn Marino 6974*e4b17023SJohn Marino /* Change the mode of any reference to the register REGNO (NEWREG) to 6975*e4b17023SJohn Marino GET_MODE (NEWREG), starting at START. Stop before END. Stop at 6976*e4b17023SJohn Marino any instruction which modifies NEWREG. */ 6977*e4b17023SJohn Marino 6978*e4b17023SJohn Marino static void 6979*e4b17023SJohn Marino cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg) 6980*e4b17023SJohn Marino { 6981*e4b17023SJohn Marino rtx insn; 6982*e4b17023SJohn Marino 6983*e4b17023SJohn Marino for (insn = start; insn != end; insn = NEXT_INSN (insn)) 6984*e4b17023SJohn Marino { 6985*e4b17023SJohn Marino if (! INSN_P (insn)) 6986*e4b17023SJohn Marino continue; 6987*e4b17023SJohn Marino 6988*e4b17023SJohn Marino if (reg_set_p (newreg, insn)) 6989*e4b17023SJohn Marino return; 6990*e4b17023SJohn Marino 6991*e4b17023SJohn Marino cse_change_cc_mode_insn (insn, newreg); 6992*e4b17023SJohn Marino } 6993*e4b17023SJohn Marino } 6994*e4b17023SJohn Marino 6995*e4b17023SJohn Marino /* BB is a basic block which finishes with CC_REG as a condition code 6996*e4b17023SJohn Marino register which is set to CC_SRC. Look through the successors of BB 6997*e4b17023SJohn Marino to find blocks which have a single predecessor (i.e., this one), 6998*e4b17023SJohn Marino and look through those blocks for an assignment to CC_REG which is 6999*e4b17023SJohn Marino equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are 7000*e4b17023SJohn Marino permitted to change the mode of CC_SRC to a compatible mode. This 7001*e4b17023SJohn Marino returns VOIDmode if no equivalent assignments were found. 7002*e4b17023SJohn Marino Otherwise it returns the mode which CC_SRC should wind up with. 7003*e4b17023SJohn Marino ORIG_BB should be the same as BB in the outermost cse_cc_succs call, 7004*e4b17023SJohn Marino but is passed unmodified down to recursive calls in order to prevent 7005*e4b17023SJohn Marino endless recursion. 7006*e4b17023SJohn Marino 7007*e4b17023SJohn Marino The main complexity in this function is handling the mode issues. 7008*e4b17023SJohn Marino We may have more than one duplicate which we can eliminate, and we 7009*e4b17023SJohn Marino try to find a mode which will work for multiple duplicates. */ 7010*e4b17023SJohn Marino 7011*e4b17023SJohn Marino static enum machine_mode 7012*e4b17023SJohn Marino cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src, 7013*e4b17023SJohn Marino bool can_change_mode) 7014*e4b17023SJohn Marino { 7015*e4b17023SJohn Marino bool found_equiv; 7016*e4b17023SJohn Marino enum machine_mode mode; 7017*e4b17023SJohn Marino unsigned int insn_count; 7018*e4b17023SJohn Marino edge e; 7019*e4b17023SJohn Marino rtx insns[2]; 7020*e4b17023SJohn Marino enum machine_mode modes[2]; 7021*e4b17023SJohn Marino rtx last_insns[2]; 7022*e4b17023SJohn Marino unsigned int i; 7023*e4b17023SJohn Marino rtx newreg; 7024*e4b17023SJohn Marino edge_iterator ei; 7025*e4b17023SJohn Marino 7026*e4b17023SJohn Marino /* We expect to have two successors. Look at both before picking 7027*e4b17023SJohn Marino the final mode for the comparison. 
If we have more successors 7028*e4b17023SJohn Marino (i.e., some sort of table jump, although that seems unlikely), 7029*e4b17023SJohn Marino then we require all beyond the first two to use the same 7030*e4b17023SJohn Marino mode. */ 7031*e4b17023SJohn Marino 7032*e4b17023SJohn Marino found_equiv = false; 7033*e4b17023SJohn Marino mode = GET_MODE (cc_src); 7034*e4b17023SJohn Marino insn_count = 0; 7035*e4b17023SJohn Marino FOR_EACH_EDGE (e, ei, bb->succs) 7036*e4b17023SJohn Marino { 7037*e4b17023SJohn Marino rtx insn; 7038*e4b17023SJohn Marino rtx end; 7039*e4b17023SJohn Marino 7040*e4b17023SJohn Marino if (e->flags & EDGE_COMPLEX) 7041*e4b17023SJohn Marino continue; 7042*e4b17023SJohn Marino 7043*e4b17023SJohn Marino if (EDGE_COUNT (e->dest->preds) != 1 7044*e4b17023SJohn Marino || e->dest == EXIT_BLOCK_PTR 7045*e4b17023SJohn Marino /* Avoid endless recursion on unreachable blocks. */ 7046*e4b17023SJohn Marino || e->dest == orig_bb) 7047*e4b17023SJohn Marino continue; 7048*e4b17023SJohn Marino 7049*e4b17023SJohn Marino end = NEXT_INSN (BB_END (e->dest)); 7050*e4b17023SJohn Marino for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn)) 7051*e4b17023SJohn Marino { 7052*e4b17023SJohn Marino rtx set; 7053*e4b17023SJohn Marino 7054*e4b17023SJohn Marino if (! INSN_P (insn)) 7055*e4b17023SJohn Marino continue; 7056*e4b17023SJohn Marino 7057*e4b17023SJohn Marino /* If CC_SRC is modified, we have to stop looking for 7058*e4b17023SJohn Marino something which uses it. */ 7059*e4b17023SJohn Marino if (modified_in_p (cc_src, insn)) 7060*e4b17023SJohn Marino break; 7061*e4b17023SJohn Marino 7062*e4b17023SJohn Marino /* Check whether INSN sets CC_REG to CC_SRC. */ 7063*e4b17023SJohn Marino set = single_set (insn); 7064*e4b17023SJohn Marino if (set 7065*e4b17023SJohn Marino && REG_P (SET_DEST (set)) 7066*e4b17023SJohn Marino && REGNO (SET_DEST (set)) == REGNO (cc_reg)) 7067*e4b17023SJohn Marino { 7068*e4b17023SJohn Marino bool found; 7069*e4b17023SJohn Marino enum machine_mode set_mode; 7070*e4b17023SJohn Marino enum machine_mode comp_mode; 7071*e4b17023SJohn Marino 7072*e4b17023SJohn Marino found = false; 7073*e4b17023SJohn Marino set_mode = GET_MODE (SET_SRC (set)); 7074*e4b17023SJohn Marino comp_mode = set_mode; 7075*e4b17023SJohn Marino if (rtx_equal_p (cc_src, SET_SRC (set))) 7076*e4b17023SJohn Marino found = true; 7077*e4b17023SJohn Marino else if (GET_CODE (cc_src) == COMPARE 7078*e4b17023SJohn Marino && GET_CODE (SET_SRC (set)) == COMPARE 7079*e4b17023SJohn Marino && mode != set_mode 7080*e4b17023SJohn Marino && rtx_equal_p (XEXP (cc_src, 0), 7081*e4b17023SJohn Marino XEXP (SET_SRC (set), 0)) 7082*e4b17023SJohn Marino && rtx_equal_p (XEXP (cc_src, 1), 7083*e4b17023SJohn Marino XEXP (SET_SRC (set), 1))) 7084*e4b17023SJohn Marino 7085*e4b17023SJohn Marino { 7086*e4b17023SJohn Marino comp_mode = targetm.cc_modes_compatible (mode, set_mode); 7087*e4b17023SJohn Marino if (comp_mode != VOIDmode 7088*e4b17023SJohn Marino && (can_change_mode || comp_mode == mode)) 7089*e4b17023SJohn Marino found = true; 7090*e4b17023SJohn Marino } 7091*e4b17023SJohn Marino 7092*e4b17023SJohn Marino if (found) 7093*e4b17023SJohn Marino { 7094*e4b17023SJohn Marino found_equiv = true; 7095*e4b17023SJohn Marino if (insn_count < ARRAY_SIZE (insns)) 7096*e4b17023SJohn Marino { 7097*e4b17023SJohn Marino insns[insn_count] = insn; 7098*e4b17023SJohn Marino modes[insn_count] = set_mode; 7099*e4b17023SJohn Marino last_insns[insn_count] = end; 7100*e4b17023SJohn Marino ++insn_count; 7101*e4b17023SJohn Marino 
7102*e4b17023SJohn Marino if (mode != comp_mode) 7103*e4b17023SJohn Marino { 7104*e4b17023SJohn Marino gcc_assert (can_change_mode); 7105*e4b17023SJohn Marino mode = comp_mode; 7106*e4b17023SJohn Marino 7107*e4b17023SJohn Marino /* The modified insn will be re-recognized later. */ 7108*e4b17023SJohn Marino PUT_MODE (cc_src, mode); 7109*e4b17023SJohn Marino } 7110*e4b17023SJohn Marino } 7111*e4b17023SJohn Marino else 7112*e4b17023SJohn Marino { 7113*e4b17023SJohn Marino if (set_mode != mode) 7114*e4b17023SJohn Marino { 7115*e4b17023SJohn Marino /* We found a matching expression in the 7116*e4b17023SJohn Marino wrong mode, but we don't have room to 7117*e4b17023SJohn Marino store it in the array. Punt. This case 7118*e4b17023SJohn Marino should be rare. */ 7119*e4b17023SJohn Marino break; 7120*e4b17023SJohn Marino } 7121*e4b17023SJohn Marino /* INSN sets CC_REG to a value equal to CC_SRC 7122*e4b17023SJohn Marino with the right mode. We can simply delete 7123*e4b17023SJohn Marino it. */ 7124*e4b17023SJohn Marino delete_insn (insn); 7125*e4b17023SJohn Marino } 7126*e4b17023SJohn Marino 7127*e4b17023SJohn Marino /* We found an instruction to delete. Keep looking, 7128*e4b17023SJohn Marino in the hopes of finding a three-way jump. */ 7129*e4b17023SJohn Marino continue; 7130*e4b17023SJohn Marino } 7131*e4b17023SJohn Marino 7132*e4b17023SJohn Marino /* We found an instruction which sets the condition 7133*e4b17023SJohn Marino code, so don't look any farther. */ 7134*e4b17023SJohn Marino break; 7135*e4b17023SJohn Marino } 7136*e4b17023SJohn Marino 7137*e4b17023SJohn Marino /* If INSN sets CC_REG in some other way, don't look any 7138*e4b17023SJohn Marino farther. */ 7139*e4b17023SJohn Marino if (reg_set_p (cc_reg, insn)) 7140*e4b17023SJohn Marino break; 7141*e4b17023SJohn Marino } 7142*e4b17023SJohn Marino 7143*e4b17023SJohn Marino /* If we fell off the bottom of the block, we can keep looking 7144*e4b17023SJohn Marino through successors. We pass CAN_CHANGE_MODE as false because 7145*e4b17023SJohn Marino we aren't prepared to handle compatibility between the 7146*e4b17023SJohn Marino further blocks and this block. */ 7147*e4b17023SJohn Marino if (insn == end) 7148*e4b17023SJohn Marino { 7149*e4b17023SJohn Marino enum machine_mode submode; 7150*e4b17023SJohn Marino 7151*e4b17023SJohn Marino submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false); 7152*e4b17023SJohn Marino if (submode != VOIDmode) 7153*e4b17023SJohn Marino { 7154*e4b17023SJohn Marino gcc_assert (submode == mode); 7155*e4b17023SJohn Marino found_equiv = true; 7156*e4b17023SJohn Marino can_change_mode = false; 7157*e4b17023SJohn Marino } 7158*e4b17023SJohn Marino } 7159*e4b17023SJohn Marino } 7160*e4b17023SJohn Marino 7161*e4b17023SJohn Marino if (! found_equiv) 7162*e4b17023SJohn Marino return VOIDmode; 7163*e4b17023SJohn Marino 7164*e4b17023SJohn Marino /* Now INSN_COUNT is the number of instructions we found which set 7165*e4b17023SJohn Marino CC_REG to a value equivalent to CC_SRC. The instructions are in 7166*e4b17023SJohn Marino INSNS. The modes used by those instructions are in MODES. */ 7167*e4b17023SJohn Marino 7168*e4b17023SJohn Marino newreg = NULL_RTX; 7169*e4b17023SJohn Marino for (i = 0; i < insn_count; ++i) 7170*e4b17023SJohn Marino { 7171*e4b17023SJohn Marino if (modes[i] != mode) 7172*e4b17023SJohn Marino { 7173*e4b17023SJohn Marino /* We need to change the mode of CC_REG in INSNS[i] and 7174*e4b17023SJohn Marino subsequent instructions. */ 7175*e4b17023SJohn Marino if (! 
newreg) 7176*e4b17023SJohn Marino { 7177*e4b17023SJohn Marino if (GET_MODE (cc_reg) == mode) 7178*e4b17023SJohn Marino newreg = cc_reg; 7179*e4b17023SJohn Marino else 7180*e4b17023SJohn Marino newreg = gen_rtx_REG (mode, REGNO (cc_reg)); 7181*e4b17023SJohn Marino } 7182*e4b17023SJohn Marino cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i], 7183*e4b17023SJohn Marino newreg); 7184*e4b17023SJohn Marino } 7185*e4b17023SJohn Marino 7186*e4b17023SJohn Marino delete_insn_and_edges (insns[i]); 7187*e4b17023SJohn Marino } 7188*e4b17023SJohn Marino 7189*e4b17023SJohn Marino return mode; 7190*e4b17023SJohn Marino } 7191*e4b17023SJohn Marino 7192*e4b17023SJohn Marino /* If we have a fixed condition code register (or two), walk through 7193*e4b17023SJohn Marino the instructions and try to eliminate duplicate assignments. */ 7194*e4b17023SJohn Marino 7195*e4b17023SJohn Marino static void 7196*e4b17023SJohn Marino cse_condition_code_reg (void) 7197*e4b17023SJohn Marino { 7198*e4b17023SJohn Marino unsigned int cc_regno_1; 7199*e4b17023SJohn Marino unsigned int cc_regno_2; 7200*e4b17023SJohn Marino rtx cc_reg_1; 7201*e4b17023SJohn Marino rtx cc_reg_2; 7202*e4b17023SJohn Marino basic_block bb; 7203*e4b17023SJohn Marino 7204*e4b17023SJohn Marino if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2)) 7205*e4b17023SJohn Marino return; 7206*e4b17023SJohn Marino 7207*e4b17023SJohn Marino cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1); 7208*e4b17023SJohn Marino if (cc_regno_2 != INVALID_REGNUM) 7209*e4b17023SJohn Marino cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2); 7210*e4b17023SJohn Marino else 7211*e4b17023SJohn Marino cc_reg_2 = NULL_RTX; 7212*e4b17023SJohn Marino 7213*e4b17023SJohn Marino FOR_EACH_BB (bb) 7214*e4b17023SJohn Marino { 7215*e4b17023SJohn Marino rtx last_insn; 7216*e4b17023SJohn Marino rtx cc_reg; 7217*e4b17023SJohn Marino rtx insn; 7218*e4b17023SJohn Marino rtx cc_src_insn; 7219*e4b17023SJohn Marino rtx cc_src; 7220*e4b17023SJohn Marino enum machine_mode mode; 7221*e4b17023SJohn Marino enum machine_mode orig_mode; 7222*e4b17023SJohn Marino 7223*e4b17023SJohn Marino /* Look for blocks which end with a conditional jump based on a 7224*e4b17023SJohn Marino condition code register. Then look for the instruction which 7225*e4b17023SJohn Marino sets the condition code register. Then look through the 7226*e4b17023SJohn Marino successor blocks for instructions which set the condition 7227*e4b17023SJohn Marino code register to the same value. There are other possible 7228*e4b17023SJohn Marino uses of the condition code register, but these are by far the 7229*e4b17023SJohn Marino most common and the ones which we are most likely to be able 7230*e4b17023SJohn Marino to optimize. 
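   A sketch of the kind of redundancy this removes (illustrative only,
   register numbers invented):

     bb1:  (set (reg:CC 17) (compare:CC (reg 100) (reg 101)))
           conditional jump testing (reg:CC 17)
     bb2:  (set (reg:CC 17) (compare:CC (reg 100) (reg 101)))

   If bb2 is reached only from bb1 and none of the registers involved
   is modified in between, the second comparison can be deleted,
   possibly after switching the CC register to a mode compatible with
   both users.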
*/ 7231*e4b17023SJohn Marino 7232*e4b17023SJohn Marino last_insn = BB_END (bb); 7233*e4b17023SJohn Marino if (!JUMP_P (last_insn)) 7234*e4b17023SJohn Marino continue; 7235*e4b17023SJohn Marino 7236*e4b17023SJohn Marino if (reg_referenced_p (cc_reg_1, PATTERN (last_insn))) 7237*e4b17023SJohn Marino cc_reg = cc_reg_1; 7238*e4b17023SJohn Marino else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn))) 7239*e4b17023SJohn Marino cc_reg = cc_reg_2; 7240*e4b17023SJohn Marino else 7241*e4b17023SJohn Marino continue; 7242*e4b17023SJohn Marino 7243*e4b17023SJohn Marino cc_src_insn = NULL_RTX; 7244*e4b17023SJohn Marino cc_src = NULL_RTX; 7245*e4b17023SJohn Marino for (insn = PREV_INSN (last_insn); 7246*e4b17023SJohn Marino insn && insn != PREV_INSN (BB_HEAD (bb)); 7247*e4b17023SJohn Marino insn = PREV_INSN (insn)) 7248*e4b17023SJohn Marino { 7249*e4b17023SJohn Marino rtx set; 7250*e4b17023SJohn Marino 7251*e4b17023SJohn Marino if (! INSN_P (insn)) 7252*e4b17023SJohn Marino continue; 7253*e4b17023SJohn Marino set = single_set (insn); 7254*e4b17023SJohn Marino if (set 7255*e4b17023SJohn Marino && REG_P (SET_DEST (set)) 7256*e4b17023SJohn Marino && REGNO (SET_DEST (set)) == REGNO (cc_reg)) 7257*e4b17023SJohn Marino { 7258*e4b17023SJohn Marino cc_src_insn = insn; 7259*e4b17023SJohn Marino cc_src = SET_SRC (set); 7260*e4b17023SJohn Marino break; 7261*e4b17023SJohn Marino } 7262*e4b17023SJohn Marino else if (reg_set_p (cc_reg, insn)) 7263*e4b17023SJohn Marino break; 7264*e4b17023SJohn Marino } 7265*e4b17023SJohn Marino 7266*e4b17023SJohn Marino if (! cc_src_insn) 7267*e4b17023SJohn Marino continue; 7268*e4b17023SJohn Marino 7269*e4b17023SJohn Marino if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn))) 7270*e4b17023SJohn Marino continue; 7271*e4b17023SJohn Marino 7272*e4b17023SJohn Marino /* Now CC_REG is a condition code register used for a 7273*e4b17023SJohn Marino conditional jump at the end of the block, and CC_SRC, in 7274*e4b17023SJohn Marino CC_SRC_INSN, is the value to which that condition code 7275*e4b17023SJohn Marino register is set, and CC_SRC is still meaningful at the end of 7276*e4b17023SJohn Marino the basic block. */ 7277*e4b17023SJohn Marino 7278*e4b17023SJohn Marino orig_mode = GET_MODE (cc_src); 7279*e4b17023SJohn Marino mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true); 7280*e4b17023SJohn Marino if (mode != VOIDmode) 7281*e4b17023SJohn Marino { 7282*e4b17023SJohn Marino gcc_assert (mode == GET_MODE (cc_src)); 7283*e4b17023SJohn Marino if (mode != orig_mode) 7284*e4b17023SJohn Marino { 7285*e4b17023SJohn Marino rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg)); 7286*e4b17023SJohn Marino 7287*e4b17023SJohn Marino cse_change_cc_mode_insn (cc_src_insn, newreg); 7288*e4b17023SJohn Marino 7289*e4b17023SJohn Marino /* Do the same in the following insns that use the 7290*e4b17023SJohn Marino current value of CC_REG within BB. */ 7291*e4b17023SJohn Marino cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn), 7292*e4b17023SJohn Marino NEXT_INSN (last_insn), 7293*e4b17023SJohn Marino newreg); 7294*e4b17023SJohn Marino } 7295*e4b17023SJohn Marino } 7296*e4b17023SJohn Marino } 7297*e4b17023SJohn Marino } 7298*e4b17023SJohn Marino 7299*e4b17023SJohn Marino 7300*e4b17023SJohn Marino /* Perform common subexpression elimination. Nonzero value from 7301*e4b17023SJohn Marino `cse_main' means that jumps were simplified and some code may now 7302*e4b17023SJohn Marino be unreachable, so do jump optimization again. 
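   The convention used by the pass drivers below is:

     2   jumps or label references were simplified; rebuild the jump
         labels and clean up the CFG
     1   only the CFG was altered; a plain cleanup_cfg is enough
     0   nothing structural changed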
*/ 7303*e4b17023SJohn Marino static bool 7304*e4b17023SJohn Marino gate_handle_cse (void) 7305*e4b17023SJohn Marino { 7306*e4b17023SJohn Marino return optimize > 0; 7307*e4b17023SJohn Marino } 7308*e4b17023SJohn Marino 7309*e4b17023SJohn Marino static unsigned int 7310*e4b17023SJohn Marino rest_of_handle_cse (void) 7311*e4b17023SJohn Marino { 7312*e4b17023SJohn Marino int tem; 7313*e4b17023SJohn Marino 7314*e4b17023SJohn Marino if (dump_file) 7315*e4b17023SJohn Marino dump_flow_info (dump_file, dump_flags); 7316*e4b17023SJohn Marino 7317*e4b17023SJohn Marino tem = cse_main (get_insns (), max_reg_num ()); 7318*e4b17023SJohn Marino 7319*e4b17023SJohn Marino /* If we are not running more CSE passes, then we are no longer 7320*e4b17023SJohn Marino expecting CSE to be run. But always rerun it in a cheap mode. */ 7321*e4b17023SJohn Marino cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse; 7322*e4b17023SJohn Marino 7323*e4b17023SJohn Marino if (tem == 2) 7324*e4b17023SJohn Marino { 7325*e4b17023SJohn Marino timevar_push (TV_JUMP); 7326*e4b17023SJohn Marino rebuild_jump_labels (get_insns ()); 7327*e4b17023SJohn Marino cleanup_cfg (0); 7328*e4b17023SJohn Marino timevar_pop (TV_JUMP); 7329*e4b17023SJohn Marino } 7330*e4b17023SJohn Marino else if (tem == 1 || optimize > 1) 7331*e4b17023SJohn Marino cleanup_cfg (0); 7332*e4b17023SJohn Marino 7333*e4b17023SJohn Marino return 0; 7334*e4b17023SJohn Marino } 7335*e4b17023SJohn Marino 7336*e4b17023SJohn Marino struct rtl_opt_pass pass_cse = 7337*e4b17023SJohn Marino { 7338*e4b17023SJohn Marino { 7339*e4b17023SJohn Marino RTL_PASS, 7340*e4b17023SJohn Marino "cse1", /* name */ 7341*e4b17023SJohn Marino gate_handle_cse, /* gate */ 7342*e4b17023SJohn Marino rest_of_handle_cse, /* execute */ 7343*e4b17023SJohn Marino NULL, /* sub */ 7344*e4b17023SJohn Marino NULL, /* next */ 7345*e4b17023SJohn Marino 0, /* static_pass_number */ 7346*e4b17023SJohn Marino TV_CSE, /* tv_id */ 7347*e4b17023SJohn Marino 0, /* properties_required */ 7348*e4b17023SJohn Marino 0, /* properties_provided */ 7349*e4b17023SJohn Marino 0, /* properties_destroyed */ 7350*e4b17023SJohn Marino 0, /* todo_flags_start */ 7351*e4b17023SJohn Marino TODO_df_finish | TODO_verify_rtl_sharing | 7352*e4b17023SJohn Marino TODO_ggc_collect | 7353*e4b17023SJohn Marino TODO_verify_flow, /* todo_flags_finish */ 7354*e4b17023SJohn Marino } 7355*e4b17023SJohn Marino }; 7356*e4b17023SJohn Marino 7357*e4b17023SJohn Marino 7358*e4b17023SJohn Marino static bool 7359*e4b17023SJohn Marino gate_handle_cse2 (void) 7360*e4b17023SJohn Marino { 7361*e4b17023SJohn Marino return optimize > 0 && flag_rerun_cse_after_loop; 7362*e4b17023SJohn Marino } 7363*e4b17023SJohn Marino 7364*e4b17023SJohn Marino /* Run second CSE pass after loop optimizations. */ 7365*e4b17023SJohn Marino static unsigned int 7366*e4b17023SJohn Marino rest_of_handle_cse2 (void) 7367*e4b17023SJohn Marino { 7368*e4b17023SJohn Marino int tem; 7369*e4b17023SJohn Marino 7370*e4b17023SJohn Marino if (dump_file) 7371*e4b17023SJohn Marino dump_flow_info (dump_file, dump_flags); 7372*e4b17023SJohn Marino 7373*e4b17023SJohn Marino tem = cse_main (get_insns (), max_reg_num ()); 7374*e4b17023SJohn Marino 7375*e4b17023SJohn Marino /* Run a pass to eliminate duplicated assignments to condition code 7376*e4b17023SJohn Marino registers. We have to run this after bypass_jumps, because it 7377*e4b17023SJohn Marino makes it harder for that pass to determine whether a jump can be 7378*e4b17023SJohn Marino bypassed safely. 
*/ 7379*e4b17023SJohn Marino cse_condition_code_reg (); 7380*e4b17023SJohn Marino 7381*e4b17023SJohn Marino delete_trivially_dead_insns (get_insns (), max_reg_num ()); 7382*e4b17023SJohn Marino 7383*e4b17023SJohn Marino if (tem == 2) 7384*e4b17023SJohn Marino { 7385*e4b17023SJohn Marino timevar_push (TV_JUMP); 7386*e4b17023SJohn Marino rebuild_jump_labels (get_insns ()); 7387*e4b17023SJohn Marino cleanup_cfg (0); 7388*e4b17023SJohn Marino timevar_pop (TV_JUMP); 7389*e4b17023SJohn Marino } 7390*e4b17023SJohn Marino else if (tem == 1) 7391*e4b17023SJohn Marino cleanup_cfg (0); 7392*e4b17023SJohn Marino 7393*e4b17023SJohn Marino cse_not_expected = 1; 7394*e4b17023SJohn Marino return 0; 7395*e4b17023SJohn Marino } 7396*e4b17023SJohn Marino 7397*e4b17023SJohn Marino 7398*e4b17023SJohn Marino struct rtl_opt_pass pass_cse2 = 7399*e4b17023SJohn Marino { 7400*e4b17023SJohn Marino { 7401*e4b17023SJohn Marino RTL_PASS, 7402*e4b17023SJohn Marino "cse2", /* name */ 7403*e4b17023SJohn Marino gate_handle_cse2, /* gate */ 7404*e4b17023SJohn Marino rest_of_handle_cse2, /* execute */ 7405*e4b17023SJohn Marino NULL, /* sub */ 7406*e4b17023SJohn Marino NULL, /* next */ 7407*e4b17023SJohn Marino 0, /* static_pass_number */ 7408*e4b17023SJohn Marino TV_CSE2, /* tv_id */ 7409*e4b17023SJohn Marino 0, /* properties_required */ 7410*e4b17023SJohn Marino 0, /* properties_provided */ 7411*e4b17023SJohn Marino 0, /* properties_destroyed */ 7412*e4b17023SJohn Marino 0, /* todo_flags_start */ 7413*e4b17023SJohn Marino TODO_df_finish | TODO_verify_rtl_sharing | 7414*e4b17023SJohn Marino TODO_ggc_collect | 7415*e4b17023SJohn Marino TODO_verify_flow /* todo_flags_finish */ 7416*e4b17023SJohn Marino } 7417*e4b17023SJohn Marino }; 7418*e4b17023SJohn Marino 7419*e4b17023SJohn Marino static bool 7420*e4b17023SJohn Marino gate_handle_cse_after_global_opts (void) 7421*e4b17023SJohn Marino { 7422*e4b17023SJohn Marino return optimize > 0 && flag_rerun_cse_after_global_opts; 7423*e4b17023SJohn Marino } 7424*e4b17023SJohn Marino 7425*e4b17023SJohn Marino /* Run second CSE pass after loop optimizations. */ 7426*e4b17023SJohn Marino static unsigned int 7427*e4b17023SJohn Marino rest_of_handle_cse_after_global_opts (void) 7428*e4b17023SJohn Marino { 7429*e4b17023SJohn Marino int save_cfj; 7430*e4b17023SJohn Marino int tem; 7431*e4b17023SJohn Marino 7432*e4b17023SJohn Marino /* We only want to do local CSE, so don't follow jumps. */ 7433*e4b17023SJohn Marino save_cfj = flag_cse_follow_jumps; 7434*e4b17023SJohn Marino flag_cse_follow_jumps = 0; 7435*e4b17023SJohn Marino 7436*e4b17023SJohn Marino rebuild_jump_labels (get_insns ()); 7437*e4b17023SJohn Marino tem = cse_main (get_insns (), max_reg_num ()); 7438*e4b17023SJohn Marino purge_all_dead_edges (); 7439*e4b17023SJohn Marino delete_trivially_dead_insns (get_insns (), max_reg_num ()); 7440*e4b17023SJohn Marino 7441*e4b17023SJohn Marino cse_not_expected = !flag_rerun_cse_after_loop; 7442*e4b17023SJohn Marino 7443*e4b17023SJohn Marino /* If cse altered any jumps, rerun jump opts to clean things up. 
*/ 7444*e4b17023SJohn Marino if (tem == 2) 7445*e4b17023SJohn Marino { 7446*e4b17023SJohn Marino timevar_push (TV_JUMP); 7447*e4b17023SJohn Marino rebuild_jump_labels (get_insns ()); 7448*e4b17023SJohn Marino cleanup_cfg (0); 7449*e4b17023SJohn Marino timevar_pop (TV_JUMP); 7450*e4b17023SJohn Marino } 7451*e4b17023SJohn Marino else if (tem == 1) 7452*e4b17023SJohn Marino cleanup_cfg (0); 7453*e4b17023SJohn Marino 7454*e4b17023SJohn Marino flag_cse_follow_jumps = save_cfj; 7455*e4b17023SJohn Marino return 0; 7456*e4b17023SJohn Marino } 7457*e4b17023SJohn Marino 7458*e4b17023SJohn Marino struct rtl_opt_pass pass_cse_after_global_opts = 7459*e4b17023SJohn Marino { 7460*e4b17023SJohn Marino { 7461*e4b17023SJohn Marino RTL_PASS, 7462*e4b17023SJohn Marino "cse_local", /* name */ 7463*e4b17023SJohn Marino gate_handle_cse_after_global_opts, /* gate */ 7464*e4b17023SJohn Marino rest_of_handle_cse_after_global_opts, /* execute */ 7465*e4b17023SJohn Marino NULL, /* sub */ 7466*e4b17023SJohn Marino NULL, /* next */ 7467*e4b17023SJohn Marino 0, /* static_pass_number */ 7468*e4b17023SJohn Marino TV_CSE, /* tv_id */ 7469*e4b17023SJohn Marino 0, /* properties_required */ 7470*e4b17023SJohn Marino 0, /* properties_provided */ 7471*e4b17023SJohn Marino 0, /* properties_destroyed */ 7472*e4b17023SJohn Marino 0, /* todo_flags_start */ 7473*e4b17023SJohn Marino TODO_df_finish | TODO_verify_rtl_sharing | 7474*e4b17023SJohn Marino TODO_ggc_collect | 7475*e4b17023SJohn Marino TODO_verify_flow /* todo_flags_finish */ 7476*e4b17023SJohn Marino } 7477*e4b17023SJohn Marino }; 7478
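/* Summary of the pass entry points defined in this file, as gated by
   the gate functions above:

     pass_cse                    "cse1"       optimize > 0
     pass_cse2                   "cse2"       optimize > 0
                                              && flag_rerun_cse_after_loop
     pass_cse_after_global_opts  "cse_local"  optimize > 0
                                              && flag_rerun_cse_after_global_opts  */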