/* Search an insn for pseudo regs that must be in hard regs and are not.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains subroutines used only from the file reload1.c.
   It knows how to scan one insn for operands and values
   that need to be copied into registers to make valid code.
   It also finds other operands and values which are valid
   but for which equivalent values in registers exist and
   ought to be used instead.

   Before processing the first insn of the function, call `init_reload'.
   init_reload actually has to be called earlier anyway.

   To scan an insn, call `find_reloads'.  This does two things:
   1. sets up tables describing which values must be reloaded
   for this insn, and what kind of hard regs they must be reloaded into;
   2. optionally record the locations where those values appear in
   the data, so they can be replaced properly later.
   This is done only if the second arg to `find_reloads' is nonzero.

   The third arg to `find_reloads' specifies the number of levels
   of indirect addressing supported by the machine.  If it is zero,
   indirect addressing is not valid.  If it is one, (MEM (REG n))
   is valid even if (REG n) did not get a hard register; if it is two,
   (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
   hard register, and similarly for higher values.

   Then you must choose the hard regs to reload those pseudo regs into,
   and generate appropriate load insns before this insn and perhaps
   also store insns after this insn.  Set up the array `reload_reg_rtx'
   to contain the REG rtx's for the registers you used.  In some
   cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
   for certain reloads.  Then that tells you which register to use,
   so you do not need to allocate one.  But you still do need to add extra
   instructions to copy the value into and out of that register.

   Finally you must call `subst_reloads' to substitute the reload reg rtx's
   into the locations already recorded.

   NOTE SIDE EFFECTS:

   find_reloads can alter the operands of the instruction it is called on.

   1. Two operands of any sort may be interchanged, if they are in a
   commutative instruction.
   This happens only if find_reloads thinks the instruction will compile
   better that way.

   2. Pseudo-registers that are equivalent to constants are replaced
   with those constants if they are not in hard registers.

   1 happens every time find_reloads is called.
   2 happens only when REPLACE is 1, which is only when
   actually doing the reloads, not when just counting them.

   Using a reload register for several reloads in one insn:

   When an insn has reloads, it is considered as having three parts:
   the input reloads, the insn itself after reloading, and the output reloads.
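   (The input reloads are the load insns emitted before the insn, and the
   output reloads are the store insns emitted after it, as described above.)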
   Reloads of values used in memory addresses are often needed for only one part.

   When this is so, reload_when_needed records which part needs the reload.
   Two reloads for different parts of the insn can share the same reload
   register.

   When a reload is used for addresses in multiple parts, or when it is
   an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
   a register with any other reload.  */

#define REG_OK_STRICT

/* We do not enable this with ENABLE_CHECKING, since it is awfully slow.  */
#undef DEBUG_RELOAD

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "recog.h"
#include "df.h"
#include "reload.h"
#include "regs.h"
#include "addresses.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "function.h"
#include "params.h"
#include "target.h"
#include "ira.h"

/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && !targetm.cannot_force_const_mem (MODE, X))

/* True if C is a non-empty register class that has too few registers
   to be safely used as a reload target class.  */

static inline bool
small_register_class_p (reg_class_t rclass)
{
  return (reg_class_size [(int) rclass] == 1
          || (reg_class_size [(int) rclass] >= 1
              && targetm.class_likely_spilled_p (rclass)));
}


/* All reloads of the current insn are recorded here.  See reload.h for
   comments.  */
int n_reloads;
struct reload rld[MAX_RELOADS];

/* All the "earlyclobber" operands of the current insn
   are recorded here.  */
int n_earlyclobbers;
rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];

int reload_n_operands;

/* Replacing reloads.

   If `replace_reloads' is nonzero, then as each reload is recorded
   an entry is made for it in the table `replacements'.
   Then later `subst_reloads' can look through that table and
   perform all the replacements needed.  */

/* Nonzero means record the places to replace.  */
static int replace_reloads;

/* Each replacement is recorded with a structure like this.  */
struct replacement
{
  rtx *where;			/* Location to store in */
  int what;			/* which reload this is for */
  enum machine_mode mode;	/* mode it must have */
};

static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];

/* Number of replacements currently recorded.  */
static int n_replacements;

/* Used to track what is modified by an operand.  */
struct decomposition
{
  int reg_flag;		/* Nonzero if referencing a register.  */
  int safe;		/* Nonzero if this can't conflict with anything.  */
  rtx base;		/* Base address for MEM.  */
  HOST_WIDE_INT start;	/* Starting offset or register number.  */
  HOST_WIDE_INT end;	/* Ending offset or register number.  */
};

#ifdef SECONDARY_MEMORY_NEEDED

/* Save MEMs needed to copy from one class of registers to another.  One MEM
   is used per mode, but normally only one or two modes are ever used.

   We keep two versions, before and after register elimination.  The one
   after register elimination is recorded separately for each operand.  This
   is done in case the address is not valid, to be sure that we separately
   reload each.  */

static rtx secondary_memlocs[NUM_MACHINE_MODES];
static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
static int secondary_memlocs_elim_used = 0;
#endif

/* The instruction we are doing reloads for;
   so we can test whether a register dies in it.  */
static rtx this_insn;

/* Nonzero if this instruction is a user-specified asm with operands.  */
static int this_insn_is_asm;

/* If hard_regs_live_known is nonzero,
   we can tell which hard regs are currently live,
   at least enough to succeed in choosing dummy reloads.  */
static int hard_regs_live_known;

/* Indexed by hard reg number,
   element is nonnegative if hard reg has been spilled.
   This vector is passed to `find_reloads' as an argument
   and is not changed here.  */
static short *static_reload_reg_p;

/* Set to 1 in subst_reg_equivs if it changes anything.  */
static int subst_reg_equivs_changed;

/* On return from push_reload, holds the reload-number for the OUT
   operand, which can be different from the reload-number for the
   input operand.  */
static int output_reloadnum;

/* Compare two RTX's.  */
#define MATCHES(x, y) \
  (x == y || (x != 0 && (REG_P (x)				\
			 ? REG_P (y) && REGNO (x) == REGNO (y)	\
			 : rtx_equal_p (x, y) && ! side_effects_p (x))))

/* Indicates if two reload purposes are for similar enough things that we
   can merge their reloads.  */
#define MERGABLE_RELOADS(when1, when2, op1, op2) \
  ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER	\
   || ((when1) == (when2) && (op1) == (op2))		\
   || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
   || ((when1) == RELOAD_FOR_OPERAND_ADDRESS		\
       && (when2) == RELOAD_FOR_OPERAND_ADDRESS)	\
   || ((when1) == RELOAD_FOR_OTHER_ADDRESS		\
       && (when2) == RELOAD_FOR_OTHER_ADDRESS))

/* Nonzero if these two reload purposes produce RELOAD_OTHER when merged.  */
#define MERGE_TO_OTHER(when1, when2, op1, op2) \
  ((when1) != (when2)					\
   || ! ((op1) == (op2)					\
	 || (when1) == RELOAD_FOR_INPUT			\
	 || (when1) == RELOAD_FOR_OPERAND_ADDRESS	\
	 || (when1) == RELOAD_FOR_OTHER_ADDRESS))

/* If we are going to reload an address, compute the reload type to
   use.  */
#define ADDR_TYPE(type)					\
  ((type) == RELOAD_FOR_INPUT_ADDRESS			\
   ? RELOAD_FOR_INPADDR_ADDRESS				\
   : ((type) == RELOAD_FOR_OUTPUT_ADDRESS		\
      ? RELOAD_FOR_OUTADDR_ADDRESS			\
      : (type)))

static int push_secondary_reload (int, rtx, int, int, enum reg_class,
				  enum machine_mode, enum reload_type,
				  enum insn_code *, secondary_reload_info *);
static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
					int, unsigned int);
static void push_replacement (rtx *, int, enum machine_mode);
static void dup_replacements (rtx *, rtx *);
static void combine_reloads (void);
static int find_reusable_reload (rtx *, rtx, enum reg_class,
				 enum reload_type, int, int);
static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
			      enum machine_mode, reg_class_t, int, int);
static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
static struct decomposition decompose (rtx);
static int immune_p (rtx, rtx, struct decomposition);
static bool alternative_allows_const_pool_ref (rtx, const char *, int);
static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
				int *);
static rtx make_memloc (rtx, int);
static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
					      addr_space_t, rtx *);
static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
				 int, enum reload_type, int, rtx);
static rtx subst_reg_equivs (rtx, rtx);
static rtx subst_indexed_address (rtx);
static void update_auto_inc_notes (rtx, int, int);
static int find_reloads_address_1 (enum machine_mode, addr_space_t, rtx, int,
				   enum rtx_code, enum rtx_code, rtx *,
				   int, enum reload_type, int, rtx);
static void find_reloads_address_part (rtx, rtx *, enum reg_class,
				       enum machine_mode, int,
				       enum reload_type, int);
static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
					int, rtx, int *);
static void copy_replacements_1 (rtx *, rtx *, int);
static int find_inc_amount (rtx, rtx);
static int refers_to_mem_for_reload_p (rtx);
static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
					 rtx, rtx *);

/* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
   list yet.  */

static void
push_reg_equiv_alt_mem (int regno, rtx mem)
{
  rtx it;

  for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
    if (rtx_equal_p (XEXP (it, 0), mem))
      return;

  reg_equiv_alt_mem_list (regno)
    = alloc_EXPR_LIST (REG_EQUIV, mem,
		       reg_equiv_alt_mem_list (regno));
}

/* Determine if any secondary reloads are needed for loading (if IN_P is
   nonzero) or storing (if IN_P is zero) X to or from a reload register of
   register class RELOAD_CLASS in mode RELOAD_MODE.  If secondary reloads
   are needed, push them.

   Return the reload number of the secondary reload we made, or -1 if
   we didn't need one.  *PICODE is set to the insn_code to use if we do
   need a secondary reload.  */

static int
push_secondary_reload (int in_p, rtx x, int opnum, int optional,
		       enum reg_class reload_class,
		       enum machine_mode reload_mode, enum reload_type type,
		       enum insn_code *picode, secondary_reload_info *prev_sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class scratch_class;
  enum machine_mode mode = reload_mode;
  enum insn_code icode = CODE_FOR_nothing;
  enum insn_code t_icode = CODE_FOR_nothing;
  enum reload_type secondary_type;
  int s_reload, t_reload = -1;
  const char *scratch_constraint;
  char letter;
  secondary_reload_info sri;

  if (type == RELOAD_FOR_INPUT_ADDRESS
      || type == RELOAD_FOR_OUTPUT_ADDRESS
      || type == RELOAD_FOR_INPADDR_ADDRESS
      || type == RELOAD_FOR_OUTADDR_ADDRESS)
    secondary_type = type;
  else
    secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;

  *picode = CODE_FOR_nothing;

  /* If X is a paradoxical SUBREG, use the inner value to determine both the
     mode and object being reloaded.  */
  if (paradoxical_subreg_p (x))
    {
      x = SUBREG_REG (x);
      reload_mode = GET_MODE (x);
    }

  /* If X is a pseudo-register that has an equivalent MEM (actually, if it
     is still a pseudo-register by now, it *must* have an equivalent MEM
     but we don't want to assume that), use that equivalent when seeing if
     a secondary reload is needed since whether or not a reload is needed
     might be sensitive to the form of the MEM.  */

  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_equiv_mem (REGNO (x)))
    x = reg_equiv_mem (REGNO (x));

  sri.icode = CODE_FOR_nothing;
  sri.prev_sri = prev_sri;
  rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
						      reload_mode, &sri);
  icode = (enum insn_code) sri.icode;

  /* If we don't need any secondary registers, done.  */
  if (rclass == NO_REGS && icode == CODE_FOR_nothing)
    return -1;

  if (rclass != NO_REGS)
    t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
				      reload_mode, type, &t_icode, &sri);

  /* If we will be using an insn, the secondary reload is for a
     scratch register.  */

  if (icode != CODE_FOR_nothing)
    {
      /* If IN_P is nonzero, the reload register will be the output in
	 operand 0.  If IN_P is zero, the reload register will be the input
	 in operand 1.  Outputs should have an initial "=", which we must
	 skip.  */

      /* ??? It would be useful to be able to handle only two, or more than
	 three, operands, but for now we can only handle the case of having
	 exactly three: output, input and one temp/scratch.  */
      gcc_assert (insn_data[(int) icode].n_operands == 3);

      /* ??? We currently have no way to represent a reload that needs
	 an icode to reload from an intermediate tertiary reload register.
	 We should probably have a new field in struct reload to tag a
	 chain of scratch operand reloads onto.  */
      gcc_assert (rclass == NO_REGS);

      scratch_constraint = insn_data[(int) icode].operand[2].constraint;
      gcc_assert (*scratch_constraint == '=');
      scratch_constraint++;
      if (*scratch_constraint == '&')
	scratch_constraint++;
      letter = *scratch_constraint;
      scratch_class = (letter == 'r' ? GENERAL_REGS
		       : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
						    scratch_constraint));

      rclass = scratch_class;
      mode = insn_data[(int) icode].operand[2].mode;
    }

  /* This case isn't valid, so fail.  Reload is allowed to use the same
     register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
     in the case of a secondary register, we actually need two different
     registers for correct code.  We fail here to prevent the possibility of
     silently generating incorrect code later.

     The convention is that secondary input reloads are valid only if the
     secondary_class is different from class.  If you have such a case, you
     cannot use secondary reloads; you must work around the problem some
     other way.

     Allow this when a reload_in/out pattern is being used.  I.e. assume
     that the generated code handles this case.  */

  gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
	      || t_icode != CODE_FOR_nothing);

  /* See if we can reuse an existing secondary reload.  */
  for (s_reload = 0; s_reload < n_reloads; s_reload++)
    if (rld[s_reload].secondary_p
	&& (reg_class_subset_p (rclass, rld[s_reload].rclass)
	    || reg_class_subset_p (rld[s_reload].rclass, rclass))
	&& ((in_p && rld[s_reload].inmode == mode)
	    || (! in_p && rld[s_reload].outmode == mode))
	&& ((in_p && rld[s_reload].secondary_in_reload == t_reload)
	    || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
	&& ((in_p && rld[s_reload].secondary_in_icode == t_icode)
	    || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
			     opnum, rld[s_reload].opnum))
      {
	if (in_p)
	  rld[s_reload].inmode = mode;
	if (! in_p)
	  rld[s_reload].outmode = mode;

	if (reg_class_subset_p (rclass, rld[s_reload].rclass))
	  rld[s_reload].rclass = rclass;

	rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
	rld[s_reload].optional &= optional;
	rld[s_reload].secondary_p = 1;
	if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
			    opnum, rld[s_reload].opnum))
	  rld[s_reload].when_needed = RELOAD_OTHER;

	break;
      }

  if (s_reload == n_reloads)
    {
#ifdef SECONDARY_MEMORY_NEEDED
      /* If we need a memory location to copy between the two reload regs,
	 set it up now.  Note that we do the input case before making
	 the reload and the output case after.  This is due to the
	 way reloads are output.  */

      if (in_p && icode == CODE_FOR_nothing
	  && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
	{
	  get_secondary_mem (x, reload_mode, opnum, type);

	  /* We may have just added new reloads.  Make sure we add
	     the new reload at the end.  */
	  s_reload = n_reloads;
	}
#endif

      /* We need to make a new secondary reload for this register class.  */
      rld[s_reload].in = rld[s_reload].out = 0;
      rld[s_reload].rclass = rclass;

      rld[s_reload].inmode = in_p ? mode : VOIDmode;
      rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
      rld[s_reload].reg_rtx = 0;
      rld[s_reload].optional = optional;
      rld[s_reload].inc = 0;
      /* Maybe we could combine these, but it seems too tricky.  */
      rld[s_reload].nocombine = 1;
      rld[s_reload].in_reg = 0;
      rld[s_reload].out_reg = 0;
      rld[s_reload].opnum = opnum;
      rld[s_reload].when_needed = secondary_type;
      rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
      rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
      rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_out_icode
	= ! in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_p = 1;

      n_reloads++;

#ifdef SECONDARY_MEMORY_NEEDED
      if (! in_p && icode == CODE_FOR_nothing
	  && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
	get_secondary_mem (x, mode, opnum, type);
#endif
    }

  *picode = icode;
  return s_reload;
}

/* If a secondary reload is needed, return its class.  If both an intermediate
   register and a scratch register are needed, we return the class of the
   intermediate register.  */
reg_class_t
secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
			rtx x)
{
  enum insn_code icode;
  secondary_reload_info sri;

  sri.icode = CODE_FOR_nothing;
  sri.prev_sri = NULL;
  rclass
    = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
  icode = (enum insn_code) sri.icode;

  /* If there are no secondary reloads at all, we return NO_REGS.
     If an intermediate register is needed, we return its class.  */
  if (icode == CODE_FOR_nothing || rclass != NO_REGS)
    return rclass;

  /* No intermediate register is needed, but we have a special reload
     pattern, which we assume for now needs a scratch register.  */
  return scratch_reload_class (icode);
}

/* ICODE is the insn_code of a reload pattern.  Check that it has exactly
   three operands, verify that operand 2 is an output operand, and return
   its register class.
   ??? We'd like to be able to handle any pattern with at least 2 operands,
   for zero or more scratch registers, but that needs more infrastructure.  */
enum reg_class
scratch_reload_class (enum insn_code icode)
{
  const char *scratch_constraint;
  char scratch_letter;
  enum reg_class rclass;

  gcc_assert (insn_data[(int) icode].n_operands == 3);
  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
  gcc_assert (*scratch_constraint == '=');
  scratch_constraint++;
  if (*scratch_constraint == '&')
    scratch_constraint++;
  scratch_letter = *scratch_constraint;
  if (scratch_letter == 'r')
    return GENERAL_REGS;
  rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
				      scratch_constraint);
  gcc_assert (rclass != NO_REGS);
  return rclass;
}

#ifdef SECONDARY_MEMORY_NEEDED

/* Return a memory location that will be used to copy X in mode MODE.
   If we haven't already made a location for this mode in this insn,
   call find_reloads_address on the location being returned.  */

rtx
get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
		   int opnum, enum reload_type type)
{
  rtx loc;
  int mem_valid;

  /* By default, if MODE is narrower than a word, widen it to a word.
     This is required because most machines that require these memory
     locations do not support short loads and stores from all registers
     (e.g., FP registers).  */

#ifdef SECONDARY_MEMORY_NEEDED_MODE
  mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
#else
  if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
    mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
#endif

  /* If we already have made a MEM for this operand in MODE, return it.  */
  if (secondary_memlocs_elim[(int) mode][opnum] != 0)
    return secondary_memlocs_elim[(int) mode][opnum];

  /* If this is the first time we've tried to get a MEM for this mode,
     allocate a new one.  `something_changed' in reload will get set
     by noticing that the frame size has changed.  */

  if (secondary_memlocs[(int) mode] == 0)
    {
#ifdef SECONDARY_MEMORY_NEEDED_RTX
      secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
#else
      secondary_memlocs[(int) mode]
	= assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
#endif
    }

  /* Get a version of the address doing any eliminations needed.  If that
     didn't give us a new MEM, make a new one if it isn't valid.  */

  loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
  mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
						  MEM_ADDR_SPACE (loc));

  if (! mem_valid && loc == secondary_memlocs[(int) mode])
    loc = copy_rtx (loc);

  /* The only time the call below will do anything is if the stack
     offset is too large.  In that case IND_LEVELS doesn't matter, so we
     can just pass a zero.  Adjust the type to be the address of the
     corresponding object.  If the address was valid, save the eliminated
     address.  If it wasn't valid, we need to make a reload each time, so
     don't save it.  */

  if (! mem_valid)
    {
      type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
	      : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
	      : RELOAD_OTHER);

      find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
			    opnum, type, 0, 0);
    }

  secondary_memlocs_elim[(int) mode][opnum] = loc;
  if (secondary_memlocs_elim_used <= (int) mode)
    secondary_memlocs_elim_used = (int) mode + 1;
  return loc;
}

/* Clear any secondary memory locations we've made.  */

void
clear_secondary_mem (void)
{
  memset (secondary_memlocs, 0, sizeof secondary_memlocs);
}
#endif /* SECONDARY_MEMORY_NEEDED */


/* Find the largest class which has at least one register valid in
   mode INNER, and which for every such register, that register number
   plus N is also valid in OUTER (if in range) and is cheap to move
   into REGNO.  Such a class must exist.  */

static enum reg_class
find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
		  enum machine_mode inner ATTRIBUTE_UNUSED, int n,
		  unsigned int dest_regno ATTRIBUTE_UNUSED)
{
  int best_cost = -1;
  int rclass;
  int regno;
  enum reg_class best_class = NO_REGS;
  enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
  unsigned int best_size = 0;
  int cost;

  for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
    {
      int bad = 0;
      int good = 0;
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
	if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
	  {
	    if (HARD_REGNO_MODE_OK (regno, inner))
	      {
		good = 1;
		if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
		    || ! HARD_REGNO_MODE_OK (regno + n, outer))
		  bad = 1;
	      }
	  }

      if (bad || !good)
	continue;
      cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);

      if ((reg_class_size[rclass] > best_size
	   && (best_cost < 0 || best_cost >= cost))
	  || best_cost > cost)
	{
	  best_class = (enum reg_class) rclass;
	  best_size = reg_class_size[rclass];
	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
					  dest_class);
	}
    }

  gcc_assert (best_size != 0);

  return best_class;
}

/* We are trying to reload a subreg of something that is not a register.
   Find the largest class which contains only registers valid in
   mode MODE.  OUTER is the mode of the subreg, DEST_CLASS the class in
   which we would eventually like to obtain the object.  */

static enum reg_class
find_valid_class_1 (enum machine_mode outer ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED,
		    enum reg_class dest_class ATTRIBUTE_UNUSED)
{
  int best_cost = -1;
  int rclass;
  int regno;
  enum reg_class best_class = NO_REGS;
  unsigned int best_size = 0;
  int cost;

  for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
    {
      int bad = 0;
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
	{
	  if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
	      && !HARD_REGNO_MODE_OK (regno, mode))
	    bad = 1;
	}

      if (bad)
	continue;

      cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);

      if ((reg_class_size[rclass] > best_size
	   && (best_cost < 0 || best_cost >= cost))
	  || best_cost > cost)
	{
	  best_class = (enum reg_class) rclass;
	  best_size = reg_class_size[rclass];
	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
					  dest_class);
	}
    }

  gcc_assert (best_size != 0);

#ifdef LIMIT_RELOAD_CLASS
  best_class = LIMIT_RELOAD_CLASS (mode, best_class);
#endif
  return best_class;
}

/* Return the number of a previously made reload that can be combined with
   a new one, or n_reloads if none of the existing reloads can be used.
   OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
   push_reload, they determine the kind of the new reload that we try to
   combine.  P_IN points to the corresponding value of IN, which can be
   modified by this function.
   DONT_SHARE is nonzero if we can't share any input-only reload for IN.  */

static int
find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
		      enum reload_type type, int opnum, int dont_share)
{
  rtx in = *p_in;
  int i;
  /* We can't merge two reloads if the output of either one is
     earlyclobbered.  */

  if (earlyclobber_operand_p (out))
    return n_reloads;

  /* We can use an existing reload if the class is right
     and at least one of IN and OUT is a match
     and the other is at worst neutral.
     (A zero compared against anything is neutral.)

     For targets with small register classes, don't use existing reloads
     unless they are for the same thing since that can cause us to need
     more reload registers than we otherwise would.  */

  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
	     && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
	    || (out != 0 && MATCHES (rld[i].out, out)
		&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
      return i;

  /* Reloading a plain reg for input can match a reload to postincrement
     that reg, since the postincrement's value is the right value.
     Likewise, it can match a preincrement reload, since we regard
     the preincrementation as happening before any ref in this insn
     to that register.  */
  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our
	   class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	&& out == 0 && rld[i].out == 0 && rld[i].in != 0
	&& ((REG_P (in)
	     && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
	     && MATCHES (XEXP (rld[i].in, 0), in))
	    || (REG_P (rld[i].in)
		&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
		&& MATCHES (XEXP (in, 0), rld[i].in)))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed,
			     opnum, rld[i].opnum))
      {
	/* Make sure reload_in ultimately has the increment,
	   not the plain register.  */
	if (REG_P (in))
	  *p_in = rld[i].in;
	return i;
      }
  return n_reloads;
}

/* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
   expression.  MODE is the mode that X will be used in.  OUTPUT is true if
   the function is invoked for the output part of an enclosing reload.  */

static bool
reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, bool output)
{
  rtx inner;
  int regno;

  /* Only SUBREGs are problematical.  */
  if (GET_CODE (x) != SUBREG)
    return false;

  inner = SUBREG_REG (x);

  /* If INNER is a constant or PLUS, then INNER will need reloading.  */
  if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
    return true;

  /* If INNER is not a register, then INNER will not need reloading.  */
  if (!REG_P (inner))
    return false;

  regno = REGNO (inner);

  /* If INNER is not a hard register, then INNER will not need reloading
     unless it's a mode dependent memory reference.  */
  if (regno >= FIRST_PSEUDO_REGISTER)
    return !output
	   && reg_equiv_mem (regno) != 0
	   && mode_dependent_address_p (XEXP (reg_equiv_mem (regno), 0),
					MEM_ADDR_SPACE (reg_equiv_mem (regno)));

  /* If INNER is not ok for MODE, then INNER will need reloading.  */
  if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
    return true;

  /* If this is for an output, and the outer part is a word or smaller,
     INNER is larger than a word and the number of registers in INNER is
     not the same as the number of words in INNER, then INNER will need
     reloading (with an in-out reload).  */
  return (output
	  && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
	  && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
	  && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
	      != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
}

/* Return nonzero if IN can be reloaded into REGNO with mode MODE without
   requiring an extra reload register.  The caller has already found that
   IN contains some reference to REGNO, so check that we can produce the
   new value in a single step.  E.g. if we have
   (set (reg r13) (plus (reg r13) (const_int 1))), and there is an
   instruction that adds one to a register, this should succeed.
   However, if we have something like
   (set (reg r13) (plus (reg r13) (const_int 999))), and the constant 999
   needs to be loaded into a register first, we need a separate reload
   register.
   Such PLUS reloads are generated by find_reloads_address_part.
   The out-of-range PLUS expressions are usually introduced in the instruction
   patterns by register elimination and substituting pseudos without a home
   by their function-invariant equivalences.  */
static int
can_reload_into (rtx in, int regno, enum machine_mode mode)
{
  rtx dst, test_insn;
  int r = 0;
  struct recog_data save_recog_data;

  /* For matching constraints, we often get notional input reloads where
     we want to use the original register as the reload register.  I.e.
     technically this is a non-optional input-output reload, but IN is
     already a valid register, and has been chosen as the reload register.
     Speed this up, since it trivially works.  */
  if (REG_P (in))
    return 1;

  /* To test MEMs properly, we'd have to take into account all the reloads
     that are already scheduled, which can become quite complicated.
     And since we've already handled address reloads for this MEM, it
     should always succeed anyway.  */
  if (MEM_P (in))
    return 1;

  /* If we can make a simple SET insn that does the job, everything should
     be fine.  */
  dst = gen_rtx_REG (mode, regno);
  test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
  save_recog_data = recog_data;
  if (recog_memoized (test_insn) >= 0)
    {
      extract_insn (test_insn);
      r = constrain_operands (1);
    }
  recog_data = save_recog_data;
  return r;
}

/* Record one reload that needs to be performed.
   IN is an rtx saying where the data are to be found before this instruction.
   OUT says where they must be stored after the instruction.
   (IN is zero for data not read, and OUT is zero for data not written.)
   INLOC and OUTLOC point to the places in the instructions where
   IN and OUT were found.
   If IN and OUT are both nonzero, it means the same register must be used
   to reload both IN and OUT.

   RCLASS is a register class required for the reloaded data.
   INMODE is the machine mode that the instruction requires
   for the reg that replaces IN and OUTMODE is likewise for OUT.

   If IN is zero, then OUT's location and mode should be passed as
   INLOC and INMODE.

   STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.

   OPTIONAL nonzero means this reload does not need to be performed:
   it can be discarded if that is more convenient.

   OPNUM and TYPE say what the purpose of this reload is.

   The return value is the reload-number for this reload.

   If both IN and OUT are nonzero, in some rare cases we might
   want to make two separate reloads.  (Actually we never do this now.)
   Therefore, the reload-number for OUT is stored in
   output_reloadnum when we return; the return value applies to IN.
   Usually (presently always), when IN and OUT are nonzero,
   the two reload-numbers are equal, but the caller should be careful to
   distinguish them.  */

int
push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
	     enum reg_class rclass, enum machine_mode inmode,
	     enum machine_mode outmode, int strict_low, int optional,
	     int opnum, enum reload_type type)
{
  int i;
  int dont_share = 0;
  int dont_remove_subreg = 0;
#ifdef LIMIT_RELOAD_CLASS
  rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
#endif
  int secondary_in_reload = -1, secondary_out_reload = -1;
  enum insn_code secondary_in_icode = CODE_FOR_nothing;
  enum insn_code secondary_out_icode = CODE_FOR_nothing;
  enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
  subreg_in_class = NO_REGS;

  /* INMODE and/or OUTMODE could be VOIDmode if no mode
     has been specified for the operand.  In that case,
     use the operand's mode as the mode to reload.  */
  if (inmode == VOIDmode && in != 0)
    inmode = GET_MODE (in);
  if (outmode == VOIDmode && out != 0)
    outmode = GET_MODE (out);

  /* If find_reloads and friends have so far failed to replace a pseudo
     with its reg_equiv_constant, something went wrong beforehand.
     Note that it can't simply be done here if we missed it earlier
     since the constant might need to be pushed into the literal pool
     and the resulting memref would probably need further
     reloading.  */
  if (in != 0 && REG_P (in))
    {
      int regno = REGNO (in);

      gcc_assert (regno < FIRST_PSEUDO_REGISTER
		  || reg_renumber[regno] >= 0
		  || reg_equiv_constant (regno) == NULL_RTX);
    }

  /* reg_equiv_constant only contains constants which are obviously
     not appropriate as destination.  So if we would need to replace
     the destination pseudo with a constant we are in real
     trouble.  */
  if (out != 0 && REG_P (out))
    {
      int regno = REGNO (out);

      gcc_assert (regno < FIRST_PSEUDO_REGISTER
		  || reg_renumber[regno] >= 0
		  || reg_equiv_constant (regno) == NULL_RTX);
    }

  /* If we have a read-write operand with an address side-effect,
     change either IN or OUT so the side-effect happens only once.  */
  if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
    switch (GET_CODE (XEXP (in, 0)))
      {
      case POST_INC: case POST_DEC: case POST_MODIFY:
	in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
	break;

      case PRE_INC: case PRE_DEC: case PRE_MODIFY:
	out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
	break;

      default:
	break;
      }

  /* If we are reloading a (SUBREG constant ...), really reload just the
     inside expression in its own mode.  Similarly for (SUBREG (PLUS ...)).
     If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
     a pseudo and hence will become a MEM) with M1 wider than M2 and the
     register is a pseudo, also reload the inside expression.
     For machines that extend byte loads, do this for any SUBREG of a pseudo
     where both M1 and M2 are a word or smaller, M1 is wider than M2, and
     M2 is an integral mode that gets extended when loaded.
     Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where either M1 is not valid for R or M2 is wider than a word but we
     only need one register to store an M2-sized quantity in R.
     (However, if OUT is nonzero, we need to reload the reg *and*
     the subreg, so do nothing here, and let following statement handle it.)

     Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
     we can't handle it here because CONST_INT does not indicate a mode.

     Similarly, we must reload the inside expression if we have a
     STRICT_LOW_PART (presumably, in == out in this case).

     Also reload the inner expression if it does not require a secondary
     reload but the SUBREG does.

     Finally, reload the inner expression if it is a register that is in
     the class whose registers cannot be referenced in a different size
     and M1 is not the same size as M2.  If subreg_lowpart_p is false, we
     cannot reload just the inside since we might end up with the wrong
     register class.  But if it is inside a STRICT_LOW_PART, we have
     no choice, so we hope we do get the right register class there.  */

  if (in != 0 && GET_CODE (in) == SUBREG
      && (subreg_lowpart_p (in) || strict_low)
#ifdef CANNOT_CHANGE_MODE_CLASS
      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
#endif
      && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
      && (CONSTANT_P (SUBREG_REG (in))
	  || GET_CODE (SUBREG_REG (in)) == PLUS
	  || strict_low
	  || (((REG_P (SUBREG_REG (in))
		&& REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
	       || MEM_P (SUBREG_REG (in)))
	      && ((GET_MODE_PRECISION (inmode)
		   > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
#ifdef LOAD_EXTEND_OP
		  || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
		      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
			  <= UNITS_PER_WORD)
		      && (GET_MODE_PRECISION (inmode)
			  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
		      && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
		      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
#endif
#ifdef WORD_REGISTER_OPERATIONS
		  || ((GET_MODE_PRECISION (inmode)
		       < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
		      && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
			   / UNITS_PER_WORD)))
#endif
		  ))
	  || (REG_P (SUBREG_REG (in))
	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
	      /* The case where out is nonzero
		 is handled differently in the following statement.  */
	      && (out == 0 || subreg_lowpart_p (in))
	      && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
		   && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
		       > UNITS_PER_WORD)
		   && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
			/ UNITS_PER_WORD)
		       != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
						[GET_MODE (SUBREG_REG (in))]))
		  || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
	  || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
	      && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
					  SUBREG_REG (in))
		  == NO_REGS))
#ifdef CANNOT_CHANGE_MODE_CLASS
	  || (REG_P (SUBREG_REG (in))
	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
	      && REG_CANNOT_CHANGE_MODE_P
	      (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
#endif
	  ))
    {
#ifdef LIMIT_RELOAD_CLASS
      in_subreg_loc = inloc;
#endif
      inloc = &SUBREG_REG (in);
      in = *inloc;
#if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
      if (MEM_P (in))
	/* This is supposed to happen only for paradoxical subregs made by
	   combine.c.  (SUBREG (MEM)) isn't supposed to occur other ways.  */
	gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
#endif
      inmode = GET_MODE (in);
    }

  /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where M1 is not valid for R if it was not handled by the code above.

     Similar issue for (SUBREG constant ...) if it was not handled by the
     code above.  This can happen if SUBREG_BYTE != 0.

     However, we must reload the inner reg *as well as* the subreg in
     that case.  */

  if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
    {
      if (REG_P (SUBREG_REG (in)) && HARD_REGISTER_P (SUBREG_REG (in)))
	subreg_in_class
	  = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
			      subreg_regno_offset (REGNO (SUBREG_REG (in)),
						   GET_MODE (SUBREG_REG (in)),
						   SUBREG_BYTE (in),
						   GET_MODE (in)),
			      REGNO (SUBREG_REG (in)));
      else if (REG_P (SUBREG_REG (in))
	       || GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
	subreg_in_class = find_valid_class_1 (inmode,
					      GET_MODE (SUBREG_REG (in)),
					      rclass);

      /* This relies on the fact that emit_reload_insns outputs the
	 instructions for input reloads of type RELOAD_OTHER in the same
	 order as the reloads.  Thus if the outer reload is also of type
	 RELOAD_OTHER, we are guaranteed that this inner reload will be
	 output before the outer reload.  */
      push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
		   subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
      dont_remove_subreg = 1;
    }

  /* Similarly for paradoxical and problematical SUBREGs on the output.
     Note that there is no reason we need worry about the previous value
     of SUBREG_REG (out); even if wider than out, storing in a subreg is
     entitled to clobber it all (except in the case of a word mode subreg
     or of a STRICT_LOW_PART, in that latter case the constraint should
     label it input-output.)  */
  if (out != 0 && GET_CODE (out) == SUBREG
      && (subreg_lowpart_p (out) || strict_low)
#ifdef CANNOT_CHANGE_MODE_CLASS
      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
#endif
      && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
      && (CONSTANT_P (SUBREG_REG (out))
	  || strict_low
	  || (((REG_P (SUBREG_REG (out))
		&& REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
	       || MEM_P (SUBREG_REG (out)))
	      && ((GET_MODE_PRECISION (outmode)
		   > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
#ifdef WORD_REGISTER_OPERATIONS
		  || ((GET_MODE_PRECISION (outmode)
		       < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
		      && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
			   / UNITS_PER_WORD)))
#endif
		  ))
	  || (REG_P (SUBREG_REG (out))
	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
	      /* The case of a word mode subreg
		 is handled differently in the following statement.  */
	      && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
			> UNITS_PER_WORD))
	      && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
	  || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
	      && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
					  SUBREG_REG (out))
		  == NO_REGS))
#ifdef CANNOT_CHANGE_MODE_CLASS
	  || (REG_P (SUBREG_REG (out))
	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
	      && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
					   GET_MODE (SUBREG_REG (out)),
					   outmode))
#endif
	  ))
    {
#ifdef LIMIT_RELOAD_CLASS
      out_subreg_loc = outloc;
#endif
      outloc = &SUBREG_REG (out);
      out = *outloc;
#if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
      gcc_assert (!MEM_P (out)
		  || GET_MODE_SIZE (GET_MODE (out))
		     <= GET_MODE_SIZE (outmode));
#endif
      outmode = GET_MODE (out);
    }

  /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where either M1 is not valid for R or M2 is wider than a word but we
     only need one register to store an M2-sized quantity in R.

     However, we must reload the inner reg *as well as* the subreg in
     that case and the inner reg is an in-out reload.  */

  if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
    {
      enum reg_class in_out_class
	= find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
			    subreg_regno_offset (REGNO (SUBREG_REG (out)),
						 GET_MODE (SUBREG_REG (out)),
						 SUBREG_BYTE (out),
						 GET_MODE (out)),
			    REGNO (SUBREG_REG (out)));

      /* This relies on the fact that emit_reload_insns outputs the
	 instructions for output reloads of type RELOAD_OTHER in reverse
	 order of the reloads.  Thus if the outer reload is also of type
	 RELOAD_OTHER, we are guaranteed that this inner reload will be
	 output after the outer reload.  */
      push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
		   &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
		   0, 0, opnum, RELOAD_OTHER);
      dont_remove_subreg = 1;
    }

  /* If IN appears in OUT, we can't share any input-only reload for IN.  */
  if (in != 0 && out != 0 && MEM_P (out)
      && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
      && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
    dont_share = 1;

  /* If IN is a SUBREG of a hard register, make a new REG.  This
     simplifies some of the cases below.  */

  if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
      && ! dont_remove_subreg)
    in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));

  /* Similarly for OUT.  */
  if (out != 0 && GET_CODE (out) == SUBREG
      && REG_P (SUBREG_REG (out))
      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
      && ! dont_remove_subreg)
    out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));

  /* Narrow down the class of register wanted if that is
     desirable on this machine for efficiency.  */
  {
    reg_class_t preferred_class = rclass;

    if (in != 0)
      preferred_class = targetm.preferred_reload_class (in, rclass);

    /* Output reloads may need analogous treatment, different in detail.  */
    if (out != 0)
      preferred_class
	= targetm.preferred_output_reload_class (out, preferred_class);

    /* Discard what the target said if we cannot do it.  */
    if (preferred_class != NO_REGS
	|| (optional && type == RELOAD_FOR_OUTPUT))
      rclass = (enum reg_class) preferred_class;
  }

  /* Make sure we use a class that can handle the actual pseudo
     inside any subreg.  For example, on the 386, QImode regs
     can appear within SImode subregs.  Although GENERAL_REGS
     can handle SImode, QImode needs a smaller class.  */
#ifdef LIMIT_RELOAD_CLASS
  if (in_subreg_loc)
    rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
  else if (in != 0 && GET_CODE (in) == SUBREG)
    rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);

  if (out_subreg_loc)
    rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
  if (out != 0 && GET_CODE (out) == SUBREG)
    rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
#endif

  /* Verify that this class is at least possible for the mode that
     is specified.  */
  if (this_insn_is_asm)
    {
      enum machine_mode mode;
      if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
	mode = inmode;
      else
	mode = outmode;
      if (mode == VOIDmode)
	{
	  error_for_asm (this_insn, "cannot reload integer constant "
			 "operand in %<asm%>");
	  mode = word_mode;
	  if (in != 0)
	    inmode = word_mode;
	  if (out != 0)
	    outmode = word_mode;
	}
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (HARD_REGNO_MODE_OK (i, mode)
	    && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
	  break;
      if (i == FIRST_PSEUDO_REGISTER)
	{
	  error_for_asm (this_insn, "impossible register constraint "
			 "in %<asm%>");
	  /* Avoid further trouble with this insn.  */
	  PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
	  /* We used to continue here setting class to ALL_REGS, but it triggers
	     sanity check on i386 for:
	     void foo(long double d)
	     {
	       asm("" :: "a" (d));
	     }
	     Returning zero here ought to be safe as we take care in
	     find_reloads to not process the reloads when instruction was
	     replaced by USE.  */

	  return 0;
	}
    }

  /* Optional output reloads are always OK even if we have no register class,
     since the function of these reloads is only to have spill_reg_store etc.
     set, so that the storing insn can be deleted later.  */
  gcc_assert (rclass != NO_REGS
	      || (optional != 0 && type == RELOAD_FOR_OUTPUT));

  i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);

  if (i == n_reloads)
    {
      /* See if we need a secondary reload register to move between CLASS
	 and IN or CLASS and OUT.  Get the icode and push any required reloads
	 needed for each of them if so.  */

      if (in != 0)
	secondary_in_reload
	  = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
				   &secondary_in_icode, NULL);
      if (out != 0 && GET_CODE (out) != SCRATCH)
	secondary_out_reload
	  = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
				   type, &secondary_out_icode, NULL);

      /* We found no existing reload suitable for re-use.
	 So add an additional reload.  */

#ifdef SECONDARY_MEMORY_NEEDED
      if (subreg_in_class == NO_REGS
	  && in != 0
	  && (REG_P (in)
	      || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
	  && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
	subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
      /* If a memory location is needed for the copy, make one.  */
      if (subreg_in_class != NO_REGS
	  && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
	get_secondary_mem (in, inmode, opnum, type);
#endif

      i = n_reloads;
      rld[i].in = in;
      rld[i].out = out;
      rld[i].rclass = rclass;
      rld[i].inmode = inmode;
      rld[i].outmode = outmode;
      rld[i].reg_rtx = 0;
      rld[i].optional = optional;
      rld[i].inc = 0;
      rld[i].nocombine = 0;
      rld[i].in_reg = inloc ? *inloc : 0;
      rld[i].out_reg = outloc ? *outloc : 0;
      rld[i].opnum = opnum;
      rld[i].when_needed = type;
      rld[i].secondary_in_reload = secondary_in_reload;
      rld[i].secondary_out_reload = secondary_out_reload;
      rld[i].secondary_in_icode = secondary_in_icode;
      rld[i].secondary_out_icode = secondary_out_icode;
      rld[i].secondary_p = 0;

      n_reloads++;

#ifdef SECONDARY_MEMORY_NEEDED
      if (out != 0
	  && (REG_P (out)
	      || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
	  && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
	  && SECONDARY_MEMORY_NEEDED (rclass,
				      REGNO_REG_CLASS (reg_or_subregno (out)),
				      outmode))
	get_secondary_mem (out, outmode, opnum, type);
#endif
    }
  else
    {
      /* We are reusing an existing reload,
	 but we may have additional information for it.
	 For example, we may now have both IN and OUT
	 while the old one may have just one of them.  */

      /* The modes can be different.  If they are, we want to reload in
	 the larger mode, so that the value is valid for both modes.  */
      if (inmode != VOIDmode
	  && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
	rld[i].inmode = inmode;
      if (outmode != VOIDmode
	  && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
	rld[i].outmode = outmode;
      if (in != 0)
	{
	  rtx in_reg = inloc ? *inloc : 0;
	  /* If we merge reloads for two distinct rtl expressions that
	     are identical in content, there might be duplicate address
	     reloads.  Remove the extra set now, so that if we later find
	     that we can inherit this reload, we can get rid of the
	     address reloads altogether.

	     Do not do this if both reloads are optional since the result
	     would be an optional reload which could potentially leave
	     unresolved address replacements.

	     It is not sufficient to call transfer_replacements since
	     choose_reload_regs will remove the replacements for address
	     reloads of inherited reloads which results in the same
	     problem.  */
	  if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
	      && ! (rld[i].optional && optional))
	    {
	      /* We must keep the address reload with the lower operand
		 number alive.  */
	      if (opnum > rld[i].opnum)
		{
		  remove_address_replacements (in);
		  in = rld[i].in;
		  in_reg = rld[i].in_reg;
		}
	      else
		remove_address_replacements (rld[i].in);
	    }
	  /* When emitting reloads we don't necessarily look at the in-
	     and outmode, but also directly at the operands (in and out).
	     So we can't simply overwrite them with whatever we have found
	     for this (to-be-merged) reload, we have to "merge" that too.
	     Reusing another reload already verified that we deal with the
	     same operands, just possibly in different modes.  So we
	     overwrite the operands only when the new mode is larger.
	     See also PR33613.  */
	  if (!rld[i].in
	      || GET_MODE_SIZE (GET_MODE (in))
		 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
	    rld[i].in = in;
	  if (!rld[i].in_reg
	      || (in_reg
		  && GET_MODE_SIZE (GET_MODE (in_reg))
		     > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
	    rld[i].in_reg = in_reg;
	}
      if (out != 0)
	{
	  if (!rld[i].out
	      || (out
		  && GET_MODE_SIZE (GET_MODE (out))
		     > GET_MODE_SIZE (GET_MODE (rld[i].out))))
	    rld[i].out = out;
	  if (outloc
	      && (!rld[i].out_reg
		  || GET_MODE_SIZE (GET_MODE (*outloc))
		     > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
	    rld[i].out_reg = *outloc;
	}
      if (reg_class_subset_p (rclass, rld[i].rclass))
	rld[i].rclass = rclass;
      rld[i].optional &= optional;
      if (MERGE_TO_OTHER (type, rld[i].when_needed,
			  opnum, rld[i].opnum))
	rld[i].when_needed = RELOAD_OTHER;
      rld[i].opnum = MIN (rld[i].opnum, opnum);
    }

  /* If the ostensible rtx being reloaded differs from the rtx found
     in the location to substitute, this reload is not safe to combine
     because we cannot reliably tell whether it appears in the insn.  */

  if (in != 0 && in != *inloc)
    rld[i].nocombine = 1;

#if 0
  /* This was replaced by changes in find_reloads_address_1 and the new
     function inc_for_reload, which go with a new meaning of reload_inc.  */

  /* If this is an IN/OUT reload in an insn that sets the CC,
     it must be for an autoincrement.  It doesn't work to store
     the incremented value after the insn because that would clobber the CC.
     So we must do the increment of the value reloaded from,
     increment it, store it back, then decrement again.  */
  if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
    {
      out = 0;
      rld[i].out = 0;
      rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
      /* If we did not find a nonzero amount-to-increment-by,
	 that contradicts the belief that IN is being incremented
	 in an address in this insn.  */
*/ 1550 gcc_assert (rld[i].inc != 0); 1551 } 1552 #endif 1553 1554 /* If we will replace IN and OUT with the reload-reg, 1555 record where they are located so that substitution need 1556 not do a tree walk. */ 1557 1558 if (replace_reloads) 1559 { 1560 if (inloc != 0) 1561 { 1562 struct replacement *r = &replacements[n_replacements++]; 1563 r->what = i; 1564 r->where = inloc; 1565 r->mode = inmode; 1566 } 1567 if (outloc != 0 && outloc != inloc) 1568 { 1569 struct replacement *r = &replacements[n_replacements++]; 1570 r->what = i; 1571 r->where = outloc; 1572 r->mode = outmode; 1573 } 1574 } 1575 1576 /* If this reload is just being introduced and it has both 1577 an incoming quantity and an outgoing quantity that are 1578 supposed to be made to match, see if either one of the two 1579 can serve as the place to reload into. 1580 1581 If one of them is acceptable, set rld[i].reg_rtx 1582 to that one. */ 1583 1584 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0) 1585 { 1586 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc, 1587 inmode, outmode, 1588 rld[i].rclass, i, 1589 earlyclobber_operand_p (out)); 1590 1591 /* If the outgoing register already contains the same value 1592 as the incoming one, we can dispense with loading it. 1593 The easiest way to tell the caller that is to give a phony 1594 value for the incoming operand (same as outgoing one). */ 1595 if (rld[i].reg_rtx == out 1596 && (REG_P (in) || CONSTANT_P (in)) 1597 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out), 1598 static_reload_reg_p, i, inmode)) 1599 rld[i].in = out; 1600 } 1601 1602 /* If this is an input reload and the operand contains a register that 1603 dies in this insn and is used nowhere else, see if it is the right class 1604 to be used for this reload. Use it if so. (This occurs most commonly 1605 in the case of paradoxical SUBREGs and in-out reloads). We cannot do 1606 this if it is also an output reload that mentions the register unless 1607 the output is a SUBREG that clobbers an entire register. 1608 1609 Note that the operand might be one of the spill regs, if it is a 1610 pseudo reg and we are in a block where spilling has not taken place. 1611 But if there is no spilling in this block, that is OK. 1612 An explicitly used hard reg cannot be a spill reg. */ 1613 1614 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known) 1615 { 1616 rtx note; 1617 int regno; 1618 enum machine_mode rel_mode = inmode; 1619 1620 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode)) 1621 rel_mode = outmode; 1622 1623 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1)) 1624 if (REG_NOTE_KIND (note) == REG_DEAD 1625 && REG_P (XEXP (note, 0)) 1626 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER 1627 && reg_mentioned_p (XEXP (note, 0), in) 1628 /* Check that a former pseudo is valid; see find_dummy_reload. */ 1629 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER 1630 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR), 1631 ORIGINAL_REGNO (XEXP (note, 0))) 1632 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)) 1633 && ! refers_to_regno_for_reload_p (regno, 1634 end_hard_regno (rel_mode, 1635 regno), 1636 PATTERN (this_insn), inloc) 1637 /* If this is also an output reload, IN cannot be used as 1638 the reload register if it is set in this insn unless IN 1639 is also OUT. */ 1640 && (out == 0 || in == out 1641 || ! hard_reg_set_here_p (regno, 1642 end_hard_regno (rel_mode, regno), 1643 PATTERN (this_insn))) 1644 /* ??? 
Why is this code so different from the previous? 1645 Is there any simple coherent way to describe the two together? 1646 What's going on here. */ 1647 && (in != out 1648 || (GET_CODE (in) == SUBREG 1649 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1)) 1650 / UNITS_PER_WORD) 1651 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) 1652 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))) 1653 /* Make sure the operand fits in the reg that dies. */ 1654 && (GET_MODE_SIZE (rel_mode) 1655 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))) 1656 && HARD_REGNO_MODE_OK (regno, inmode) 1657 && HARD_REGNO_MODE_OK (regno, outmode)) 1658 { 1659 unsigned int offs; 1660 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode], 1661 hard_regno_nregs[regno][outmode]); 1662 1663 for (offs = 0; offs < nregs; offs++) 1664 if (fixed_regs[regno + offs] 1665 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], 1666 regno + offs)) 1667 break; 1668 1669 if (offs == nregs 1670 && (! (refers_to_regno_for_reload_p 1671 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0)) 1672 || can_reload_into (in, regno, inmode))) 1673 { 1674 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno); 1675 break; 1676 } 1677 } 1678 } 1679 1680 if (out) 1681 output_reloadnum = i; 1682 1683 return i; 1684 } 1685 1686 /* Record an additional place we must replace a value 1687 for which we have already recorded a reload. 1688 RELOADNUM is the value returned by push_reload 1689 when the reload was recorded. 1690 This is used in insn patterns that use match_dup. */ 1691 1692 static void 1693 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode) 1694 { 1695 if (replace_reloads) 1696 { 1697 struct replacement *r = &replacements[n_replacements++]; 1698 r->what = reloadnum; 1699 r->where = loc; 1700 r->mode = mode; 1701 } 1702 } 1703 1704 /* Duplicate any replacement we have recorded to apply at 1705 location ORIG_LOC to also be performed at DUP_LOC. 1706 This is used in insn patterns that use match_dup. */ 1707 1708 static void 1709 dup_replacements (rtx *dup_loc, rtx *orig_loc) 1710 { 1711 int i, n = n_replacements; 1712 1713 for (i = 0; i < n; i++) 1714 { 1715 struct replacement *r = &replacements[i]; 1716 if (r->where == orig_loc) 1717 push_replacement (dup_loc, r->what, r->mode); 1718 } 1719 } 1720 1721 /* Transfer all replacements that used to be in reload FROM to be in 1722 reload TO. */ 1723 1724 void 1725 transfer_replacements (int to, int from) 1726 { 1727 int i; 1728 1729 for (i = 0; i < n_replacements; i++) 1730 if (replacements[i].what == from) 1731 replacements[i].what = to; 1732 } 1733 1734 /* IN_RTX is the value loaded by a reload that we now decided to inherit, 1735 or a subpart of it. If we have any replacements registered for IN_RTX, 1736 cancel the reloads that were supposed to load them. 1737 Return nonzero if we canceled any reloads. */ 1738 int 1739 remove_address_replacements (rtx in_rtx) 1740 { 1741 int i, j; 1742 char reload_flags[MAX_RELOADS]; 1743 int something_changed = 0; 1744 1745 memset (reload_flags, 0, sizeof reload_flags); 1746 for (i = 0, j = 0; i < n_replacements; i++) 1747 { 1748 if (loc_mentioned_in_p (replacements[i].where, in_rtx)) 1749 reload_flags[replacements[i].what] |= 1; 1750 else 1751 { 1752 replacements[j++] = replacements[i]; 1753 reload_flags[replacements[i].what] |= 2; 1754 } 1755 } 1756 /* Note that the following store must be done before the recursive calls. 
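     The recursion happens in the loop below, where remove_address_replacements
     is called again on rld[i].in; that call scans replacements[0 .. n_replacements - 1],
     so it must see the already-compacted table rather than the stale count.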
  */
  n_replacements = j;

  for (i = n_reloads - 1; i >= 0; i--)
    {
      if (reload_flags[i] == 1)
        {
          deallocate_reload_reg (i);
          remove_address_replacements (rld[i].in);
          rld[i].in = 0;
          something_changed = 1;
        }
    }
  return something_changed;
}

/* If there is only one output reload, and it is not for an earlyclobber
   operand, try to combine it with a (logically unrelated) input reload
   to reduce the number of reload registers needed.

   This is safe if the input reload does not appear in
   the value being output-reloaded, because this implies
   it is not needed any more once the original insn completes.

   If that doesn't work, see if we can use any of the registers that
   die in this insn as a reload register.  We can if it is of the right
   class and does not appear in the value being output-reloaded.  */

static void
combine_reloads (void)
{
  int i, regno;
  int output_reload = -1;
  int secondary_out = -1;
  rtx note;

  /* Find the output reload; return unless there is exactly one
     and that one is mandatory.  */

  for (i = 0; i < n_reloads; i++)
    if (rld[i].out != 0)
      {
        if (output_reload >= 0)
          return;
        output_reload = i;
      }

  if (output_reload < 0 || rld[output_reload].optional)
    return;

  /* An input-output reload isn't combinable.  */

  if (rld[output_reload].in != 0)
    return;

  /* If this reload is for an earlyclobber operand, we can't do anything.  */
  if (earlyclobber_operand_p (rld[output_reload].out))
    return;

  /* If there is a reload for part of the address of this operand, we would
     need to change it to RELOAD_FOR_OTHER_ADDRESS.  But that would extend
     its life to the point where doing this combine would not lower the
     number of spill registers needed.  */
  for (i = 0; i < n_reloads; i++)
    if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
         || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
        && rld[i].opnum == rld[output_reload].opnum)
      return;

  /* Check each input reload; can we combine it?  */

  for (i = 0; i < n_reloads; i++)
    if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
        /* Life span of this reload must not extend past main insn.  */
        && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
        && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
        && rld[i].when_needed != RELOAD_OTHER
        && (ira_reg_class_max_nregs [(int) rld[i].rclass][(int) rld[i].inmode]
            == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
                                       [(int) rld[output_reload].outmode])
        && rld[i].inc == 0
        && rld[i].reg_rtx == 0
#ifdef SECONDARY_MEMORY_NEEDED
        /* Don't combine two reloads with different secondary
           memory locations.  */
        && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
            || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
            || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
                            secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
#endif
        && (targetm.small_register_classes_for_mode_p (VOIDmode)
            ?
              (rld[i].rclass == rld[output_reload].rclass)
            : (reg_class_subset_p (rld[i].rclass,
                                   rld[output_reload].rclass)
               || reg_class_subset_p (rld[output_reload].rclass,
                                      rld[i].rclass)))
        && (MATCHES (rld[i].in, rld[output_reload].out)
            /* Args reversed because the first arg seems to be
               the one that we imagine being modified
               while the second is the one that might be affected.  */
            || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
                                                      rld[i].in)
                /* However, if the input is a register that appears inside
                   the output, then we also can't share.
                   Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
                   If the same reload reg is used for both reg 69 and the
                   result to be stored in memory, then that result
                   will clobber the address of the memory ref.  */
                && ! (REG_P (rld[i].in)
                      && reg_overlap_mentioned_for_reload_p (rld[i].in,
                                                             rld[output_reload].out))))
        && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
                                         rld[i].when_needed != RELOAD_FOR_INPUT)
        && (reg_class_size[(int) rld[i].rclass]
            || targetm.small_register_classes_for_mode_p (VOIDmode))
        /* We will allow making things slightly worse by combining an
           input and an output, but no worse than that.  */
        && (rld[i].when_needed == RELOAD_FOR_INPUT
            || rld[i].when_needed == RELOAD_FOR_OUTPUT))
      {
        int j;

        /* We have found a reload to combine with!  */
        rld[i].out = rld[output_reload].out;
        rld[i].out_reg = rld[output_reload].out_reg;
        rld[i].outmode = rld[output_reload].outmode;
        /* Mark the old output reload as inoperative.  */
        rld[output_reload].out = 0;
        /* The combined reload is needed for the entire insn.  */
        rld[i].when_needed = RELOAD_OTHER;
        /* If the output reload had a secondary reload, copy it.  */
        if (rld[output_reload].secondary_out_reload != -1)
          {
            rld[i].secondary_out_reload
              = rld[output_reload].secondary_out_reload;
            rld[i].secondary_out_icode
              = rld[output_reload].secondary_out_icode;
          }

#ifdef SECONDARY_MEMORY_NEEDED
        /* Copy any secondary MEM.  */
        if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
          secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
            = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
#endif
        /* If required, minimize the register class.  */
        if (reg_class_subset_p (rld[output_reload].rclass,
                                rld[i].rclass))
          rld[i].rclass = rld[output_reload].rclass;

        /* Transfer all replacements from the old reload to the combined.  */
        for (j = 0; j < n_replacements; j++)
          if (replacements[j].what == output_reload)
            replacements[j].what = i;

        return;
      }

  /* If this insn has only one operand that is modified or written (assumed
     to be the first), it must be the one corresponding to this reload.  It
     is safe to use anything that dies in this insn for that output provided
     that it does not occur in the output (we already know it isn't an
     earlyclobber).  If this is an asm insn, give up.
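     For illustration (hypothetical register and pseudo numbers): in an insn
     such as (set (reg:SI 120) (plus:SI (reg:SI 2) (const_int 4))), where
     pseudo 120 got no hard register and therefore needs an output reload,
     a REG_DEAD note for hard register 2 says that register is free once the
     insn has executed; if it belongs to the required class, fits the output
     mode and does not appear in the output, the scan below records it in
     rld[output_reload].reg_rtx, so no separate reload register has to be
     allocated for the output.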
*/ 1919 1920 if (INSN_CODE (this_insn) == -1) 1921 return; 1922 1923 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++) 1924 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '=' 1925 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+') 1926 return; 1927 1928 /* See if some hard register that dies in this insn and is not used in 1929 the output is the right class. Only works if the register we pick 1930 up can fully hold our output reload. */ 1931 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1)) 1932 if (REG_NOTE_KIND (note) == REG_DEAD 1933 && REG_P (XEXP (note, 0)) 1934 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0), 1935 rld[output_reload].out) 1936 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER 1937 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode) 1938 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass], 1939 regno) 1940 && (hard_regno_nregs[regno][rld[output_reload].outmode] 1941 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))]) 1942 /* Ensure that a secondary or tertiary reload for this output 1943 won't want this register. */ 1944 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1 1945 || (!(TEST_HARD_REG_BIT 1946 (reg_class_contents[(int) rld[secondary_out].rclass], regno)) 1947 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1 1948 || !(TEST_HARD_REG_BIT 1949 (reg_class_contents[(int) rld[secondary_out].rclass], 1950 regno))))) 1951 && !fixed_regs[regno] 1952 /* Check that a former pseudo is valid; see find_dummy_reload. */ 1953 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER 1954 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR), 1955 ORIGINAL_REGNO (XEXP (note, 0))) 1956 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))) 1957 { 1958 rld[output_reload].reg_rtx 1959 = gen_rtx_REG (rld[output_reload].outmode, regno); 1960 return; 1961 } 1962 } 1963 1964 /* Try to find a reload register for an in-out reload (expressions IN and OUT). 1965 See if one of IN and OUT is a register that may be used; 1966 this is desirable since a spill-register won't be needed. 1967 If so, return the register rtx that proves acceptable. 1968 1969 INLOC and OUTLOC are locations where IN and OUT appear in the insn. 1970 RCLASS is the register class required for the reload. 1971 1972 If FOR_REAL is >= 0, it is the number of the reload, 1973 and in some cases when it can be discovered that OUT doesn't need 1974 to be computed, clear out rld[FOR_REAL].out. 1975 1976 If FOR_REAL is -1, this should not be done, because this call 1977 is just to see if a register can be found, not to find and install it. 1978 1979 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This 1980 puts an additional constraint on being able to use IN for OUT since 1981 IN must not appear elsewhere in the insn (it is assumed that IN itself 1982 is safe from the earlyclobber). */ 1983 1984 static rtx 1985 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc, 1986 enum machine_mode inmode, enum machine_mode outmode, 1987 reg_class_t rclass, int for_real, int earlyclobber) 1988 { 1989 rtx in = real_in; 1990 rtx out = real_out; 1991 int in_offset = 0; 1992 int out_offset = 0; 1993 rtx value = 0; 1994 1995 /* If operands exceed a word, we can't use either of them 1996 unless they have the same size. 
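     For example, assuming a 32-bit target where UNITS_PER_WORD is 4:
     inmode SImode with outmode DImode is rejected (the sizes differ and one
     of them exceeds a word), inmode QImode with outmode HImode is allowed to
     go on (the sizes differ but both fit in a word), and inmode DImode with
     outmode DImode is also allowed (equal sizes).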
  */
  if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
      && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
          || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
    return 0;

  /* Note that {in,out}_offset are needed only when 'in' or 'out'
     respectively refers to a hard register.  */

  /* Find the inside of any subregs.  */
  while (GET_CODE (out) == SUBREG)
    {
      if (REG_P (SUBREG_REG (out))
          && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
        out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
                                           GET_MODE (SUBREG_REG (out)),
                                           SUBREG_BYTE (out),
                                           GET_MODE (out));
      out = SUBREG_REG (out);
    }
  while (GET_CODE (in) == SUBREG)
    {
      if (REG_P (SUBREG_REG (in))
          && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
        in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
                                          GET_MODE (SUBREG_REG (in)),
                                          SUBREG_BYTE (in),
                                          GET_MODE (in));
      in = SUBREG_REG (in);
    }

  /* Narrow down the reg class, the same way push_reload will;
     otherwise we might find a dummy now, but push_reload won't.  */
  {
    reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
    if (preferred_class != NO_REGS)
      rclass = (enum reg_class) preferred_class;
  }

  /* See if OUT will do.  */
  if (REG_P (out)
      && REGNO (out) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (out) + out_offset;
      unsigned int nwords = hard_regno_nregs[regno][outmode];
      rtx saved_rtx;

      /* When we consider whether the insn uses OUT,
         ignore references within IN.  They don't prevent us
         from copying IN into OUT, because those refs would
         move into the insn that reloads IN.

         However, we only ignore IN in its role as this reload.
         If the insn uses IN elsewhere and it contains OUT,
         that counts.  We can't be sure it's the "same" operand
         so it might not go through this reload.

         We also need to avoid using OUT if it, or part of it, is a
         fixed register.  Modifying such registers, even transiently,
         may have undefined effects on the machine, such as modifying
         the stack pointer.  */
      saved_rtx = *inloc;
      *inloc = const0_rtx;

      if (regno < FIRST_PSEUDO_REGISTER
          && HARD_REGNO_MODE_OK (regno, outmode)
          && ! refers_to_regno_for_reload_p (regno, regno + nwords,
                                             PATTERN (this_insn), outloc))
        {
          unsigned int i;

          for (i = 0; i < nwords; i++)
            if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
                                     regno + i)
                || fixed_regs[regno + i])
              break;

          if (i == nwords)
            {
              if (REG_P (real_out))
                value = real_out;
              else
                value = gen_rtx_REG (outmode, regno);
            }
        }

      *inloc = saved_rtx;
    }

  /* Consider using IN if OUT was not acceptable
     or if OUT dies in this insn (like the quotient in a divmod insn).
     We can't use IN unless it dies in this insn,
     which means we must know accurately which hard regs are live.
     Also, the result can't go in IN if IN is used within OUT,
     or if OUT is an earlyclobber and IN appears elsewhere in the insn.
*/ 2091 if (hard_regs_live_known 2092 && REG_P (in) 2093 && REGNO (in) < FIRST_PSEUDO_REGISTER 2094 && (value == 0 2095 || find_reg_note (this_insn, REG_UNUSED, real_out)) 2096 && find_reg_note (this_insn, REG_DEAD, real_in) 2097 && !fixed_regs[REGNO (in)] 2098 && HARD_REGNO_MODE_OK (REGNO (in), 2099 /* The only case where out and real_out might 2100 have different modes is where real_out 2101 is a subreg, and in that case, out 2102 has a real mode. */ 2103 (GET_MODE (out) != VOIDmode 2104 ? GET_MODE (out) : outmode)) 2105 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER 2106 /* However only do this if we can be sure that this input 2107 operand doesn't correspond with an uninitialized pseudo. 2108 global can assign some hardreg to it that is the same as 2109 the one assigned to a different, also live pseudo (as it 2110 can ignore the conflict). We must never introduce writes 2111 to such hardregs, as they would clobber the other live 2112 pseudo. See PR 20973. */ 2113 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR), 2114 ORIGINAL_REGNO (in)) 2115 /* Similarly, only do this if we can be sure that the death 2116 note is still valid. global can assign some hardreg to 2117 the pseudo referenced in the note and simultaneously a 2118 subword of this hardreg to a different, also live pseudo, 2119 because only another subword of the hardreg is actually 2120 used in the insn. This cannot happen if the pseudo has 2121 been assigned exactly one hardreg. See PR 33732. */ 2122 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1))) 2123 { 2124 unsigned int regno = REGNO (in) + in_offset; 2125 unsigned int nwords = hard_regno_nregs[regno][inmode]; 2126 2127 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0) 2128 && ! hard_reg_set_here_p (regno, regno + nwords, 2129 PATTERN (this_insn)) 2130 && (! earlyclobber 2131 || ! refers_to_regno_for_reload_p (regno, regno + nwords, 2132 PATTERN (this_insn), inloc))) 2133 { 2134 unsigned int i; 2135 2136 for (i = 0; i < nwords; i++) 2137 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], 2138 regno + i)) 2139 break; 2140 2141 if (i == nwords) 2142 { 2143 /* If we were going to use OUT as the reload reg 2144 and changed our mind, it means OUT is a dummy that 2145 dies here. So don't bother copying value to it. */ 2146 if (for_real >= 0 && value == real_out) 2147 rld[for_real].out = 0; 2148 if (REG_P (real_in)) 2149 value = real_in; 2150 else 2151 value = gen_rtx_REG (inmode, regno); 2152 } 2153 } 2154 } 2155 2156 return value; 2157 } 2158 2159 /* This page contains subroutines used mainly for determining 2160 whether the IN or an OUT of a reload can serve as the 2161 reload register. */ 2162 2163 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */ 2164 2165 int 2166 earlyclobber_operand_p (rtx x) 2167 { 2168 int i; 2169 2170 for (i = 0; i < n_earlyclobbers; i++) 2171 if (reload_earlyclobbers[i] == x) 2172 return 1; 2173 2174 return 0; 2175 } 2176 2177 /* Return 1 if expression X alters a hard reg in the range 2178 from BEG_REGNO (inclusive) to END_REGNO (exclusive), 2179 either explicitly or in the guise of a pseudo-reg allocated to REGNO. 2180 X should be the body of an instruction. 
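   As a usage sketch (mirroring the calls made elsewhere in this file),
   a caller that wants to know whether the insn overwrites a candidate
   reload register does roughly:

     if (hard_reg_set_here_p (regno, end_hard_regno (mode, regno),
                              PATTERN (this_insn)))
       ...reject the candidate, since the insn body clobbers it...

   where regno and mode describe the hard register being considered.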
*/ 2181 2182 static int 2183 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x) 2184 { 2185 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER) 2186 { 2187 rtx op0 = SET_DEST (x); 2188 2189 while (GET_CODE (op0) == SUBREG) 2190 op0 = SUBREG_REG (op0); 2191 if (REG_P (op0)) 2192 { 2193 unsigned int r = REGNO (op0); 2194 2195 /* See if this reg overlaps range under consideration. */ 2196 if (r < end_regno 2197 && end_hard_regno (GET_MODE (op0), r) > beg_regno) 2198 return 1; 2199 } 2200 } 2201 else if (GET_CODE (x) == PARALLEL) 2202 { 2203 int i = XVECLEN (x, 0) - 1; 2204 2205 for (; i >= 0; i--) 2206 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i))) 2207 return 1; 2208 } 2209 2210 return 0; 2211 } 2212 2213 /* Return 1 if ADDR is a valid memory address for mode MODE 2214 in address space AS, and check that each pseudo reg has the 2215 proper kind of hard reg. */ 2216 2217 int 2218 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED, 2219 rtx addr, addr_space_t as) 2220 { 2221 #ifdef GO_IF_LEGITIMATE_ADDRESS 2222 gcc_assert (ADDR_SPACE_GENERIC_P (as)); 2223 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win); 2224 return 0; 2225 2226 win: 2227 return 1; 2228 #else 2229 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as); 2230 #endif 2231 } 2232 2233 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match 2234 if they are the same hard reg, and has special hacks for 2235 autoincrement and autodecrement. 2236 This is specifically intended for find_reloads to use 2237 in determining whether two operands match. 2238 X is the operand whose number is the lower of the two. 2239 2240 The value is 2 if Y contains a pre-increment that matches 2241 a non-incrementing address in X. */ 2242 2243 /* ??? To be completely correct, we should arrange to pass 2244 for X the output operand and for Y the input operand. 2245 For now, we assume that the output operand has the lower number 2246 because that is natural in (SET output (... input ...)). */ 2247 2248 int 2249 operands_match_p (rtx x, rtx y) 2250 { 2251 int i; 2252 RTX_CODE code = GET_CODE (x); 2253 const char *fmt; 2254 int success_2; 2255 2256 if (x == y) 2257 return 1; 2258 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x)))) 2259 && (REG_P (y) || (GET_CODE (y) == SUBREG 2260 && REG_P (SUBREG_REG (y))))) 2261 { 2262 int j; 2263 2264 if (code == SUBREG) 2265 { 2266 i = REGNO (SUBREG_REG (x)); 2267 if (i >= FIRST_PSEUDO_REGISTER) 2268 goto slow; 2269 i += subreg_regno_offset (REGNO (SUBREG_REG (x)), 2270 GET_MODE (SUBREG_REG (x)), 2271 SUBREG_BYTE (x), 2272 GET_MODE (x)); 2273 } 2274 else 2275 i = REGNO (x); 2276 2277 if (GET_CODE (y) == SUBREG) 2278 { 2279 j = REGNO (SUBREG_REG (y)); 2280 if (j >= FIRST_PSEUDO_REGISTER) 2281 goto slow; 2282 j += subreg_regno_offset (REGNO (SUBREG_REG (y)), 2283 GET_MODE (SUBREG_REG (y)), 2284 SUBREG_BYTE (y), 2285 GET_MODE (y)); 2286 } 2287 else 2288 j = REGNO (y); 2289 2290 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a 2291 multiple hard register group of scalar integer registers, so that 2292 for example (reg:DI 0) and (reg:SI 1) will be considered the same 2293 register. 
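     As a worked example, assuming 32-bit words: (reg:DI 0) occupies hard
     registers 0 and 1, and hard_regno_nregs[0][DImode] is 2, so I is advanced
     by 1 and becomes 1; (reg:SI 1) already yields J == 1, so the two compare
     equal, which is the desired answer on a REG_WORDS_BIG_ENDIAN target where
     register 1 holds the low-order word of the DImode pair.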
*/ 2294 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD 2295 && SCALAR_INT_MODE_P (GET_MODE (x)) 2296 && i < FIRST_PSEUDO_REGISTER) 2297 i += hard_regno_nregs[i][GET_MODE (x)] - 1; 2298 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD 2299 && SCALAR_INT_MODE_P (GET_MODE (y)) 2300 && j < FIRST_PSEUDO_REGISTER) 2301 j += hard_regno_nregs[j][GET_MODE (y)] - 1; 2302 2303 return i == j; 2304 } 2305 /* If two operands must match, because they are really a single 2306 operand of an assembler insn, then two postincrements are invalid 2307 because the assembler insn would increment only once. 2308 On the other hand, a postincrement matches ordinary indexing 2309 if the postincrement is the output operand. */ 2310 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY) 2311 return operands_match_p (XEXP (x, 0), y); 2312 /* Two preincrements are invalid 2313 because the assembler insn would increment only once. 2314 On the other hand, a preincrement matches ordinary indexing 2315 if the preincrement is the input operand. 2316 In this case, return 2, since some callers need to do special 2317 things when this happens. */ 2318 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC 2319 || GET_CODE (y) == PRE_MODIFY) 2320 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0; 2321 2322 slow: 2323 2324 /* Now we have disposed of all the cases in which different rtx codes 2325 can match. */ 2326 if (code != GET_CODE (y)) 2327 return 0; 2328 2329 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */ 2330 if (GET_MODE (x) != GET_MODE (y)) 2331 return 0; 2332 2333 /* MEMs referring to different address space are not equivalent. */ 2334 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y)) 2335 return 0; 2336 2337 switch (code) 2338 { 2339 CASE_CONST_UNIQUE: 2340 return 0; 2341 2342 case LABEL_REF: 2343 return XEXP (x, 0) == XEXP (y, 0); 2344 case SYMBOL_REF: 2345 return XSTR (x, 0) == XSTR (y, 0); 2346 2347 default: 2348 break; 2349 } 2350 2351 /* Compare the elements. If any pair of corresponding elements 2352 fail to match, return 0 for the whole things. */ 2353 2354 success_2 = 0; 2355 fmt = GET_RTX_FORMAT (code); 2356 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 2357 { 2358 int val, j; 2359 switch (fmt[i]) 2360 { 2361 case 'w': 2362 if (XWINT (x, i) != XWINT (y, i)) 2363 return 0; 2364 break; 2365 2366 case 'i': 2367 if (XINT (x, i) != XINT (y, i)) 2368 return 0; 2369 break; 2370 2371 case 'e': 2372 val = operands_match_p (XEXP (x, i), XEXP (y, i)); 2373 if (val == 0) 2374 return 0; 2375 /* If any subexpression returns 2, 2376 we should return 2 if we are successful. */ 2377 if (val == 2) 2378 success_2 = 1; 2379 break; 2380 2381 case '0': 2382 break; 2383 2384 case 'E': 2385 if (XVECLEN (x, i) != XVECLEN (y, i)) 2386 return 0; 2387 for (j = XVECLEN (x, i) - 1; j >= 0; --j) 2388 { 2389 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j)); 2390 if (val == 0) 2391 return 0; 2392 if (val == 2) 2393 success_2 = 1; 2394 } 2395 break; 2396 2397 /* It is believed that rtx's at this level will never 2398 contain anything but integers and other rtx's, 2399 except for within LABEL_REFs and SYMBOL_REFs. */ 2400 default: 2401 gcc_unreachable (); 2402 } 2403 } 2404 return 1 + success_2; 2405 } 2406 2407 /* Describe the range of registers or memory referenced by X. 2408 If X is a register, set REG_FLAG and put the first register 2409 number into START and the last plus one into END. 
2410 If X is a memory reference, put a base address into BASE 2411 and a range of integer offsets into START and END. 2412 If X is pushing on the stack, we can assume it causes no trouble, 2413 so we set the SAFE field. */ 2414 2415 static struct decomposition 2416 decompose (rtx x) 2417 { 2418 struct decomposition val; 2419 int all_const = 0; 2420 2421 memset (&val, 0, sizeof (val)); 2422 2423 switch (GET_CODE (x)) 2424 { 2425 case MEM: 2426 { 2427 rtx base = NULL_RTX, offset = 0; 2428 rtx addr = XEXP (x, 0); 2429 2430 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC 2431 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC) 2432 { 2433 val.base = XEXP (addr, 0); 2434 val.start = -GET_MODE_SIZE (GET_MODE (x)); 2435 val.end = GET_MODE_SIZE (GET_MODE (x)); 2436 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM; 2437 return val; 2438 } 2439 2440 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY) 2441 { 2442 if (GET_CODE (XEXP (addr, 1)) == PLUS 2443 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0) 2444 && CONSTANT_P (XEXP (XEXP (addr, 1), 1))) 2445 { 2446 val.base = XEXP (addr, 0); 2447 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1)); 2448 val.end = INTVAL (XEXP (XEXP (addr, 1), 1)); 2449 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM; 2450 return val; 2451 } 2452 } 2453 2454 if (GET_CODE (addr) == CONST) 2455 { 2456 addr = XEXP (addr, 0); 2457 all_const = 1; 2458 } 2459 if (GET_CODE (addr) == PLUS) 2460 { 2461 if (CONSTANT_P (XEXP (addr, 0))) 2462 { 2463 base = XEXP (addr, 1); 2464 offset = XEXP (addr, 0); 2465 } 2466 else if (CONSTANT_P (XEXP (addr, 1))) 2467 { 2468 base = XEXP (addr, 0); 2469 offset = XEXP (addr, 1); 2470 } 2471 } 2472 2473 if (offset == 0) 2474 { 2475 base = addr; 2476 offset = const0_rtx; 2477 } 2478 if (GET_CODE (offset) == CONST) 2479 offset = XEXP (offset, 0); 2480 if (GET_CODE (offset) == PLUS) 2481 { 2482 if (CONST_INT_P (XEXP (offset, 0))) 2483 { 2484 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1)); 2485 offset = XEXP (offset, 0); 2486 } 2487 else if (CONST_INT_P (XEXP (offset, 1))) 2488 { 2489 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0)); 2490 offset = XEXP (offset, 1); 2491 } 2492 else 2493 { 2494 base = gen_rtx_PLUS (GET_MODE (base), base, offset); 2495 offset = const0_rtx; 2496 } 2497 } 2498 else if (!CONST_INT_P (offset)) 2499 { 2500 base = gen_rtx_PLUS (GET_MODE (base), base, offset); 2501 offset = const0_rtx; 2502 } 2503 2504 if (all_const && GET_CODE (base) == PLUS) 2505 base = gen_rtx_CONST (GET_MODE (base), base); 2506 2507 gcc_assert (CONST_INT_P (offset)); 2508 2509 val.start = INTVAL (offset); 2510 val.end = val.start + GET_MODE_SIZE (GET_MODE (x)); 2511 val.base = base; 2512 } 2513 break; 2514 2515 case REG: 2516 val.reg_flag = 1; 2517 val.start = true_regnum (x); 2518 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER) 2519 { 2520 /* A pseudo with no hard reg. */ 2521 val.start = REGNO (x); 2522 val.end = val.start + 1; 2523 } 2524 else 2525 /* A hard reg. */ 2526 val.end = end_hard_regno (GET_MODE (x), val.start); 2527 break; 2528 2529 case SUBREG: 2530 if (!REG_P (SUBREG_REG (x))) 2531 /* This could be more precise, but it's good enough. */ 2532 return decompose (SUBREG_REG (x)); 2533 val.reg_flag = 1; 2534 val.start = true_regnum (x); 2535 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER) 2536 return decompose (SUBREG_REG (x)); 2537 else 2538 /* A hard reg. 
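        For instance, on a 32-bit little-endian target, (subreg:SI (reg:DI 2) 0)
        gives val.start == 2 and subreg_nregs == 1, so the range covers hard
        register 2 only, whereas plain (reg:DI 2) would cover registers 2 and 3.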
*/ 2539 val.end = val.start + subreg_nregs (x); 2540 break; 2541 2542 case SCRATCH: 2543 /* This hasn't been assigned yet, so it can't conflict yet. */ 2544 val.safe = 1; 2545 break; 2546 2547 default: 2548 gcc_assert (CONSTANT_P (x)); 2549 val.safe = 1; 2550 break; 2551 } 2552 return val; 2553 } 2554 2555 /* Return 1 if altering Y will not modify the value of X. 2556 Y is also described by YDATA, which should be decompose (Y). */ 2557 2558 static int 2559 immune_p (rtx x, rtx y, struct decomposition ydata) 2560 { 2561 struct decomposition xdata; 2562 2563 if (ydata.reg_flag) 2564 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0); 2565 if (ydata.safe) 2566 return 1; 2567 2568 gcc_assert (MEM_P (y)); 2569 /* If Y is memory and X is not, Y can't affect X. */ 2570 if (!MEM_P (x)) 2571 return 1; 2572 2573 xdata = decompose (x); 2574 2575 if (! rtx_equal_p (xdata.base, ydata.base)) 2576 { 2577 /* If bases are distinct symbolic constants, there is no overlap. */ 2578 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base)) 2579 return 1; 2580 /* Constants and stack slots never overlap. */ 2581 if (CONSTANT_P (xdata.base) 2582 && (ydata.base == frame_pointer_rtx 2583 || ydata.base == hard_frame_pointer_rtx 2584 || ydata.base == stack_pointer_rtx)) 2585 return 1; 2586 if (CONSTANT_P (ydata.base) 2587 && (xdata.base == frame_pointer_rtx 2588 || xdata.base == hard_frame_pointer_rtx 2589 || xdata.base == stack_pointer_rtx)) 2590 return 1; 2591 /* If either base is variable, we don't know anything. */ 2592 return 0; 2593 } 2594 2595 return (xdata.start >= ydata.end || ydata.start >= xdata.end); 2596 } 2597 2598 /* Similar, but calls decompose. */ 2599 2600 int 2601 safe_from_earlyclobber (rtx op, rtx clobber) 2602 { 2603 struct decomposition early_data; 2604 2605 early_data = decompose (clobber); 2606 return immune_p (op, clobber, early_data); 2607 } 2608 2609 /* Main entry point of this file: search the body of INSN 2610 for values that need reloading and record them with push_reload. 2611 REPLACE nonzero means record also where the values occur 2612 so that subst_reloads can be used. 2613 2614 IND_LEVELS says how many levels of indirection are supported by this 2615 machine; a value of zero means that a memory reference is not a valid 2616 memory address. 2617 2618 LIVE_KNOWN says we have valid information about which hard 2619 regs are live at each point in the program; this is true when 2620 we are called from global_alloc but false when stupid register 2621 allocation has been done. 2622 2623 RELOAD_REG_P if nonzero is a vector indexed by hard reg number 2624 which is nonnegative if the reg has been commandeered for reloading into. 2625 It is copied into STATIC_RELOAD_REG_P and referenced from there 2626 by various subroutines. 2627 2628 Return TRUE if some operands need to be changed, because of swapping 2629 commutative operands, reg_equiv_address substitution, or whatever. */ 2630 2631 int 2632 find_reloads (rtx insn, int replace, int ind_levels, int live_known, 2633 short *reload_reg_p) 2634 { 2635 int insn_code_number; 2636 int i, j; 2637 int noperands; 2638 /* These start out as the constraints for the insn 2639 and they are chewed up as we consider alternatives. */ 2640 const char *constraints[MAX_RECOG_OPERANDS]; 2641 /* These are the preferred classes for an operand, or NO_REGS if it isn't 2642 a register. 
*/ 2643 enum reg_class preferred_class[MAX_RECOG_OPERANDS]; 2644 char pref_or_nothing[MAX_RECOG_OPERANDS]; 2645 /* Nonzero for a MEM operand whose entire address needs a reload. 2646 May be -1 to indicate the entire address may or may not need a reload. */ 2647 int address_reloaded[MAX_RECOG_OPERANDS]; 2648 /* Nonzero for an address operand that needs to be completely reloaded. 2649 May be -1 to indicate the entire operand may or may not need a reload. */ 2650 int address_operand_reloaded[MAX_RECOG_OPERANDS]; 2651 /* Value of enum reload_type to use for operand. */ 2652 enum reload_type operand_type[MAX_RECOG_OPERANDS]; 2653 /* Value of enum reload_type to use within address of operand. */ 2654 enum reload_type address_type[MAX_RECOG_OPERANDS]; 2655 /* Save the usage of each operand. */ 2656 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS]; 2657 int no_input_reloads = 0, no_output_reloads = 0; 2658 int n_alternatives; 2659 reg_class_t this_alternative[MAX_RECOG_OPERANDS]; 2660 char this_alternative_match_win[MAX_RECOG_OPERANDS]; 2661 char this_alternative_win[MAX_RECOG_OPERANDS]; 2662 char this_alternative_offmemok[MAX_RECOG_OPERANDS]; 2663 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS]; 2664 int this_alternative_matches[MAX_RECOG_OPERANDS]; 2665 reg_class_t goal_alternative[MAX_RECOG_OPERANDS]; 2666 int this_alternative_number; 2667 int goal_alternative_number = 0; 2668 int operand_reloadnum[MAX_RECOG_OPERANDS]; 2669 int goal_alternative_matches[MAX_RECOG_OPERANDS]; 2670 int goal_alternative_matched[MAX_RECOG_OPERANDS]; 2671 char goal_alternative_match_win[MAX_RECOG_OPERANDS]; 2672 char goal_alternative_win[MAX_RECOG_OPERANDS]; 2673 char goal_alternative_offmemok[MAX_RECOG_OPERANDS]; 2674 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS]; 2675 int goal_alternative_swapped; 2676 int best; 2677 int commutative; 2678 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS]; 2679 rtx substed_operand[MAX_RECOG_OPERANDS]; 2680 rtx body = PATTERN (insn); 2681 rtx set = single_set (insn); 2682 int goal_earlyclobber = 0, this_earlyclobber; 2683 enum machine_mode operand_mode[MAX_RECOG_OPERANDS]; 2684 int retval = 0; 2685 2686 this_insn = insn; 2687 n_reloads = 0; 2688 n_replacements = 0; 2689 n_earlyclobbers = 0; 2690 replace_reloads = replace; 2691 hard_regs_live_known = live_known; 2692 static_reload_reg_p = reload_reg_p; 2693 2694 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads; 2695 neither are insns that SET cc0. Insns that use CC0 are not allowed 2696 to have any input reloads. */ 2697 if (JUMP_P (insn) || CALL_P (insn)) 2698 no_output_reloads = 1; 2699 2700 #ifdef HAVE_cc0 2701 if (reg_referenced_p (cc0_rtx, PATTERN (insn))) 2702 no_input_reloads = 1; 2703 if (reg_set_p (cc0_rtx, PATTERN (insn))) 2704 no_output_reloads = 1; 2705 #endif 2706 2707 #ifdef SECONDARY_MEMORY_NEEDED 2708 /* The eliminated forms of any secondary memory locations are per-insn, so 2709 clear them out here. */ 2710 2711 if (secondary_memlocs_elim_used) 2712 { 2713 memset (secondary_memlocs_elim, 0, 2714 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used); 2715 secondary_memlocs_elim_used = 0; 2716 } 2717 #endif 2718 2719 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it 2720 is cheap to move between them. If it is not, there may not be an insn 2721 to do the copy, so we may need a reload. 
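     A cost of exactly 2 is the conventional baseline that targets report for
     a single plain move instruction, so the test below treats anything more
     expensive as potentially needing further work from reload.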
*/ 2722 if (GET_CODE (body) == SET 2723 && REG_P (SET_DEST (body)) 2724 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER 2725 && REG_P (SET_SRC (body)) 2726 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER 2727 && register_move_cost (GET_MODE (SET_SRC (body)), 2728 REGNO_REG_CLASS (REGNO (SET_SRC (body))), 2729 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2) 2730 return 0; 2731 2732 extract_insn (insn); 2733 2734 noperands = reload_n_operands = recog_data.n_operands; 2735 n_alternatives = recog_data.n_alternatives; 2736 2737 /* Just return "no reloads" if insn has no operands with constraints. */ 2738 if (noperands == 0 || n_alternatives == 0) 2739 return 0; 2740 2741 insn_code_number = INSN_CODE (insn); 2742 this_insn_is_asm = insn_code_number < 0; 2743 2744 memcpy (operand_mode, recog_data.operand_mode, 2745 noperands * sizeof (enum machine_mode)); 2746 memcpy (constraints, recog_data.constraints, 2747 noperands * sizeof (const char *)); 2748 2749 commutative = -1; 2750 2751 /* If we will need to know, later, whether some pair of operands 2752 are the same, we must compare them now and save the result. 2753 Reloading the base and index registers will clobber them 2754 and afterward they will fail to match. */ 2755 2756 for (i = 0; i < noperands; i++) 2757 { 2758 const char *p; 2759 int c; 2760 char *end; 2761 2762 substed_operand[i] = recog_data.operand[i]; 2763 p = constraints[i]; 2764 2765 modified[i] = RELOAD_READ; 2766 2767 /* Scan this operand's constraint to see if it is an output operand, 2768 an in-out operand, is commutative, or should match another. */ 2769 2770 while ((c = *p)) 2771 { 2772 p += CONSTRAINT_LEN (c, p); 2773 switch (c) 2774 { 2775 case '=': 2776 modified[i] = RELOAD_WRITE; 2777 break; 2778 case '+': 2779 modified[i] = RELOAD_READ_WRITE; 2780 break; 2781 case '%': 2782 { 2783 /* The last operand should not be marked commutative. */ 2784 gcc_assert (i != noperands - 1); 2785 2786 /* We currently only support one commutative pair of 2787 operands. Some existing asm code currently uses more 2788 than one pair. Previously, that would usually work, 2789 but sometimes it would crash the compiler. We 2790 continue supporting that case as well as we can by 2791 silently ignoring all but the first pair. In the 2792 future we may handle it correctly. */ 2793 if (commutative < 0) 2794 commutative = i; 2795 else 2796 gcc_assert (this_insn_is_asm); 2797 } 2798 break; 2799 /* Use of ISDIGIT is tempting here, but it may get expensive because 2800 of locale support we don't want. */ 2801 case '0': case '1': case '2': case '3': case '4': 2802 case '5': case '6': case '7': case '8': case '9': 2803 { 2804 c = strtoul (p - 1, &end, 10); 2805 p = end; 2806 2807 operands_match[c][i] 2808 = operands_match_p (recog_data.operand[c], 2809 recog_data.operand[i]); 2810 2811 /* An operand may not match itself. */ 2812 gcc_assert (c != i); 2813 2814 /* If C can be commuted with C+1, and C might need to match I, 2815 then C+1 might also need to match I. */ 2816 if (commutative >= 0) 2817 { 2818 if (c == commutative || c == commutative + 1) 2819 { 2820 int other = c + (c == commutative ? 1 : -1); 2821 operands_match[other][i] 2822 = operands_match_p (recog_data.operand[other], 2823 recog_data.operand[i]); 2824 } 2825 if (i == commutative || i == commutative + 1) 2826 { 2827 int other = i + (i == commutative ? 1 : -1); 2828 operands_match[c][other] 2829 = operands_match_p (recog_data.operand[c], 2830 recog_data.operand[other]); 2831 } 2832 /* Note that C is supposed to be less than I. 
2833 No need to consider altering both C and I because in 2834 that case we would alter one into the other. */ 2835 } 2836 } 2837 } 2838 } 2839 } 2840 2841 /* Examine each operand that is a memory reference or memory address 2842 and reload parts of the addresses into index registers. 2843 Also here any references to pseudo regs that didn't get hard regs 2844 but are equivalent to constants get replaced in the insn itself 2845 with those constants. Nobody will ever see them again. 2846 2847 Finally, set up the preferred classes of each operand. */ 2848 2849 for (i = 0; i < noperands; i++) 2850 { 2851 RTX_CODE code = GET_CODE (recog_data.operand[i]); 2852 2853 address_reloaded[i] = 0; 2854 address_operand_reloaded[i] = 0; 2855 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT 2856 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT 2857 : RELOAD_OTHER); 2858 address_type[i] 2859 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS 2860 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS 2861 : RELOAD_OTHER); 2862 2863 if (*constraints[i] == 0) 2864 /* Ignore things like match_operator operands. */ 2865 ; 2866 else if (constraints[i][0] == 'p' 2867 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i])) 2868 { 2869 address_operand_reloaded[i] 2870 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0, 2871 recog_data.operand[i], 2872 recog_data.operand_loc[i], 2873 i, operand_type[i], ind_levels, insn); 2874 2875 /* If we now have a simple operand where we used to have a 2876 PLUS or MULT, re-recognize and try again. */ 2877 if ((OBJECT_P (*recog_data.operand_loc[i]) 2878 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG) 2879 && (GET_CODE (recog_data.operand[i]) == MULT 2880 || GET_CODE (recog_data.operand[i]) == PLUS)) 2881 { 2882 INSN_CODE (insn) = -1; 2883 retval = find_reloads (insn, replace, ind_levels, live_known, 2884 reload_reg_p); 2885 return retval; 2886 } 2887 2888 recog_data.operand[i] = *recog_data.operand_loc[i]; 2889 substed_operand[i] = recog_data.operand[i]; 2890 2891 /* Address operands are reloaded in their existing mode, 2892 no matter what is specified in the machine description. */ 2893 operand_mode[i] = GET_MODE (recog_data.operand[i]); 2894 2895 /* If the address is a single CONST_INT pick address mode 2896 instead otherwise we will later not know in which mode 2897 the reload should be performed. */ 2898 if (operand_mode[i] == VOIDmode) 2899 operand_mode[i] = Pmode; 2900 2901 } 2902 else if (code == MEM) 2903 { 2904 address_reloaded[i] 2905 = find_reloads_address (GET_MODE (recog_data.operand[i]), 2906 recog_data.operand_loc[i], 2907 XEXP (recog_data.operand[i], 0), 2908 &XEXP (recog_data.operand[i], 0), 2909 i, address_type[i], ind_levels, insn); 2910 recog_data.operand[i] = *recog_data.operand_loc[i]; 2911 substed_operand[i] = recog_data.operand[i]; 2912 } 2913 else if (code == SUBREG) 2914 { 2915 rtx reg = SUBREG_REG (recog_data.operand[i]); 2916 rtx op 2917 = find_reloads_toplev (recog_data.operand[i], i, address_type[i], 2918 ind_levels, 2919 set != 0 2920 && &SET_DEST (set) == recog_data.operand_loc[i], 2921 insn, 2922 &address_reloaded[i]); 2923 2924 /* If we made a MEM to load (a part of) the stackslot of a pseudo 2925 that didn't get a hard register, emit a USE with a REG_EQUAL 2926 note in front so that we might inherit a previous, possibly 2927 wider reload. 
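         Roughly, for a pseudo 123 (hypothetical number) whose DImode stack
         slot is accessed here through a narrower SImode MEM, the insn emitted
         just before this one looks like

           (insn ... (use (reg:DI 123))
              (expr_list:REG_EQUAL (mem:DI (plus:SI (reg:SI sp) (const_int ...)))))

         i.e. a USE of the pseudo carrying its memory equivalence, which gives
         the reload pass a chance to notice that the full-width value may
         already be available in a register and inherit it instead of loading
         from the stack slot again.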
*/ 2928 2929 if (replace 2930 && MEM_P (op) 2931 && REG_P (reg) 2932 && (GET_MODE_SIZE (GET_MODE (reg)) 2933 >= GET_MODE_SIZE (GET_MODE (op))) 2934 && reg_equiv_constant (REGNO (reg)) == 0) 2935 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg), 2936 insn), 2937 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg))); 2938 2939 substed_operand[i] = recog_data.operand[i] = op; 2940 } 2941 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY) 2942 /* We can get a PLUS as an "operand" as a result of register 2943 elimination. See eliminate_regs and gen_reload. We handle 2944 a unary operator by reloading the operand. */ 2945 substed_operand[i] = recog_data.operand[i] 2946 = find_reloads_toplev (recog_data.operand[i], i, address_type[i], 2947 ind_levels, 0, insn, 2948 &address_reloaded[i]); 2949 else if (code == REG) 2950 { 2951 /* This is equivalent to calling find_reloads_toplev. 2952 The code is duplicated for speed. 2953 When we find a pseudo always equivalent to a constant, 2954 we replace it by the constant. We must be sure, however, 2955 that we don't try to replace it in the insn in which it 2956 is being set. */ 2957 int regno = REGNO (recog_data.operand[i]); 2958 if (reg_equiv_constant (regno) != 0 2959 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i])) 2960 { 2961 /* Record the existing mode so that the check if constants are 2962 allowed will work when operand_mode isn't specified. */ 2963 2964 if (operand_mode[i] == VOIDmode) 2965 operand_mode[i] = GET_MODE (recog_data.operand[i]); 2966 2967 substed_operand[i] = recog_data.operand[i] 2968 = reg_equiv_constant (regno); 2969 } 2970 if (reg_equiv_memory_loc (regno) != 0 2971 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)) 2972 /* We need not give a valid is_set_dest argument since the case 2973 of a constant equivalence was checked above. */ 2974 substed_operand[i] = recog_data.operand[i] 2975 = find_reloads_toplev (recog_data.operand[i], i, address_type[i], 2976 ind_levels, 0, insn, 2977 &address_reloaded[i]); 2978 } 2979 /* If the operand is still a register (we didn't replace it with an 2980 equivalent), get the preferred class to reload it into. */ 2981 code = GET_CODE (recog_data.operand[i]); 2982 preferred_class[i] 2983 = ((code == REG && REGNO (recog_data.operand[i]) 2984 >= FIRST_PSEUDO_REGISTER) 2985 ? reg_preferred_class (REGNO (recog_data.operand[i])) 2986 : NO_REGS); 2987 pref_or_nothing[i] 2988 = (code == REG 2989 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER 2990 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS); 2991 } 2992 2993 /* If this is simply a copy from operand 1 to operand 0, merge the 2994 preferred classes for the operands. */ 2995 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set) 2996 && recog_data.operand[1] == SET_SRC (set)) 2997 { 2998 preferred_class[0] = preferred_class[1] 2999 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]]; 3000 pref_or_nothing[0] |= pref_or_nothing[1]; 3001 pref_or_nothing[1] |= pref_or_nothing[0]; 3002 } 3003 3004 /* Now see what we need for pseudo-regs that didn't get hard regs 3005 or got the wrong kind of hard reg. For this, we must consider 3006 all the operands together against the register constraints. */ 3007 3008 best = MAX_RECOG_OPERANDS * 2 + 600; 3009 3010 goal_alternative_swapped = 0; 3011 3012 /* The constraints are made of several alternatives. 3013 Each operand's constraint looks like foo,bar,... with commas 3014 separating the alternatives. 
The first alternatives for all 3015 operands go together, the second alternatives go together, etc. 3016 3017 First loop over alternatives. */ 3018 3019 for (this_alternative_number = 0; 3020 this_alternative_number < n_alternatives; 3021 this_alternative_number++) 3022 { 3023 int swapped; 3024 3025 if (!recog_data.alternative_enabled_p[this_alternative_number]) 3026 { 3027 int i; 3028 3029 for (i = 0; i < recog_data.n_operands; i++) 3030 constraints[i] = skip_alternative (constraints[i]); 3031 3032 continue; 3033 } 3034 3035 /* If insn is commutative (it's safe to exchange a certain pair 3036 of operands) then we need to try each alternative twice, the 3037 second time matching those two operands as if we had 3038 exchanged them. To do this, really exchange them in 3039 operands. */ 3040 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++) 3041 { 3042 /* Loop over operands for one constraint alternative. */ 3043 /* LOSERS counts those that don't fit this alternative 3044 and would require loading. */ 3045 int losers = 0; 3046 /* BAD is set to 1 if it some operand can't fit this alternative 3047 even after reloading. */ 3048 int bad = 0; 3049 /* REJECT is a count of how undesirable this alternative says it is 3050 if any reloading is required. If the alternative matches exactly 3051 then REJECT is ignored, but otherwise it gets this much 3052 counted against it in addition to the reloading needed. Each 3053 ? counts three times here since we want the disparaging caused by 3054 a bad register class to only count 1/3 as much. */ 3055 int reject = 0; 3056 3057 if (swapped) 3058 { 3059 enum reg_class tclass; 3060 int t; 3061 3062 recog_data.operand[commutative] = substed_operand[commutative + 1]; 3063 recog_data.operand[commutative + 1] = substed_operand[commutative]; 3064 /* Swap the duplicates too. */ 3065 for (i = 0; i < recog_data.n_dups; i++) 3066 if (recog_data.dup_num[i] == commutative 3067 || recog_data.dup_num[i] == commutative + 1) 3068 *recog_data.dup_loc[i] 3069 = recog_data.operand[(int) recog_data.dup_num[i]]; 3070 3071 tclass = preferred_class[commutative]; 3072 preferred_class[commutative] = preferred_class[commutative + 1]; 3073 preferred_class[commutative + 1] = tclass; 3074 3075 t = pref_or_nothing[commutative]; 3076 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1]; 3077 pref_or_nothing[commutative + 1] = t; 3078 3079 t = address_reloaded[commutative]; 3080 address_reloaded[commutative] = address_reloaded[commutative + 1]; 3081 address_reloaded[commutative + 1] = t; 3082 } 3083 3084 this_earlyclobber = 0; 3085 3086 for (i = 0; i < noperands; i++) 3087 { 3088 const char *p = constraints[i]; 3089 char *end; 3090 int len; 3091 int win = 0; 3092 int did_match = 0; 3093 /* 0 => this operand can be reloaded somehow for this alternative. */ 3094 int badop = 1; 3095 /* 0 => this operand can be reloaded if the alternative allows regs. */ 3096 int winreg = 0; 3097 int c; 3098 int m; 3099 rtx operand = recog_data.operand[i]; 3100 int offset = 0; 3101 /* Nonzero means this is a MEM that must be reloaded into a reg 3102 regardless of what the constraint says. */ 3103 int force_reload = 0; 3104 int offmemok = 0; 3105 /* Nonzero if a constant forced into memory would be OK for this 3106 operand. */ 3107 int constmemok = 0; 3108 int earlyclobber = 0; 3109 3110 /* If the predicate accepts a unary operator, it means that 3111 we need to reload the operand, but do not do this for 3112 match_operator and friends. 
*/ 3113 if (UNARY_P (operand) && *p != 0) 3114 operand = XEXP (operand, 0); 3115 3116 /* If the operand is a SUBREG, extract 3117 the REG or MEM (or maybe even a constant) within. 3118 (Constants can occur as a result of reg_equiv_constant.) */ 3119 3120 while (GET_CODE (operand) == SUBREG) 3121 { 3122 /* Offset only matters when operand is a REG and 3123 it is a hard reg. This is because it is passed 3124 to reg_fits_class_p if it is a REG and all pseudos 3125 return 0 from that function. */ 3126 if (REG_P (SUBREG_REG (operand)) 3127 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER) 3128 { 3129 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)), 3130 GET_MODE (SUBREG_REG (operand)), 3131 SUBREG_BYTE (operand), 3132 GET_MODE (operand)) < 0) 3133 force_reload = 1; 3134 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)), 3135 GET_MODE (SUBREG_REG (operand)), 3136 SUBREG_BYTE (operand), 3137 GET_MODE (operand)); 3138 } 3139 operand = SUBREG_REG (operand); 3140 /* Force reload if this is a constant or PLUS or if there may 3141 be a problem accessing OPERAND in the outer mode. */ 3142 if (CONSTANT_P (operand) 3143 || GET_CODE (operand) == PLUS 3144 /* We must force a reload of paradoxical SUBREGs 3145 of a MEM because the alignment of the inner value 3146 may not be enough to do the outer reference. On 3147 big-endian machines, it may also reference outside 3148 the object. 3149 3150 On machines that extend byte operations and we have a 3151 SUBREG where both the inner and outer modes are no wider 3152 than a word and the inner mode is narrower, is integral, 3153 and gets extended when loaded from memory, combine.c has 3154 made assumptions about the behavior of the machine in such 3155 register access. If the data is, in fact, in memory we 3156 must always load using the size assumed to be in the 3157 register and let the insn do the different-sized 3158 accesses. 3159 3160 This is doubly true if WORD_REGISTER_OPERATIONS. In 3161 this case eliminate_regs has left non-paradoxical 3162 subregs for push_reload to see. Make sure it does 3163 by forcing the reload. 3164 3165 ??? When is it right at this stage to have a subreg 3166 of a mem that is _not_ to be handled specially? IMO 3167 those should have been reduced to just a mem. */ 3168 || ((MEM_P (operand) 3169 || (REG_P (operand) 3170 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)) 3171 #ifndef WORD_REGISTER_OPERATIONS 3172 && (((GET_MODE_BITSIZE (GET_MODE (operand)) 3173 < BIGGEST_ALIGNMENT) 3174 && (GET_MODE_SIZE (operand_mode[i]) 3175 > GET_MODE_SIZE (GET_MODE (operand)))) 3176 || BYTES_BIG_ENDIAN 3177 #ifdef LOAD_EXTEND_OP 3178 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD 3179 && (GET_MODE_SIZE (GET_MODE (operand)) 3180 <= UNITS_PER_WORD) 3181 && (GET_MODE_SIZE (operand_mode[i]) 3182 > GET_MODE_SIZE (GET_MODE (operand))) 3183 && INTEGRAL_MODE_P (GET_MODE (operand)) 3184 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN) 3185 #endif 3186 ) 3187 #endif 3188 ) 3189 ) 3190 force_reload = 1; 3191 } 3192 3193 this_alternative[i] = NO_REGS; 3194 this_alternative_win[i] = 0; 3195 this_alternative_match_win[i] = 0; 3196 this_alternative_offmemok[i] = 0; 3197 this_alternative_earlyclobber[i] = 0; 3198 this_alternative_matches[i] = -1; 3199 3200 /* An empty constraint or empty alternative 3201 allows anything which matched the pattern. */ 3202 if (*p == 0 || *p == ',') 3203 win = 1, badop = 0; 3204 3205 /* Scan this alternative's specs for this operand; 3206 set WIN if the operand fits any letter in this alternative. 
3207 Otherwise, clear BADOP if this operand could 3208 fit some letter after reloads, 3209 or set WINREG if this operand could fit after reloads 3210 provided the constraint allows some registers. */ 3211 3212 do 3213 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c) 3214 { 3215 case '\0': 3216 len = 0; 3217 break; 3218 case ',': 3219 c = '\0'; 3220 break; 3221 3222 case '=': case '+': case '*': 3223 break; 3224 3225 case '%': 3226 /* We only support one commutative marker, the first 3227 one. We already set commutative above. */ 3228 break; 3229 3230 case '?': 3231 reject += 6; 3232 break; 3233 3234 case '!': 3235 reject = 600; 3236 break; 3237 3238 case '#': 3239 /* Ignore rest of this alternative as far as 3240 reloading is concerned. */ 3241 do 3242 p++; 3243 while (*p && *p != ','); 3244 len = 0; 3245 break; 3246 3247 case '0': case '1': case '2': case '3': case '4': 3248 case '5': case '6': case '7': case '8': case '9': 3249 m = strtoul (p, &end, 10); 3250 p = end; 3251 len = 0; 3252 3253 this_alternative_matches[i] = m; 3254 /* We are supposed to match a previous operand. 3255 If we do, we win if that one did. 3256 If we do not, count both of the operands as losers. 3257 (This is too conservative, since most of the time 3258 only a single reload insn will be needed to make 3259 the two operands win. As a result, this alternative 3260 may be rejected when it is actually desirable.) */ 3261 if ((swapped && (m != commutative || i != commutative + 1)) 3262 /* If we are matching as if two operands were swapped, 3263 also pretend that operands_match had been computed 3264 with swapped. 3265 But if I is the second of those and C is the first, 3266 don't exchange them, because operands_match is valid 3267 only on one side of its diagonal. */ 3268 ? (operands_match 3269 [(m == commutative || m == commutative + 1) 3270 ? 2 * commutative + 1 - m : m] 3271 [(i == commutative || i == commutative + 1) 3272 ? 2 * commutative + 1 - i : i]) 3273 : operands_match[m][i]) 3274 { 3275 /* If we are matching a non-offsettable address where an 3276 offsettable address was expected, then we must reject 3277 this combination, because we can't reload it. */ 3278 if (this_alternative_offmemok[m] 3279 && MEM_P (recog_data.operand[m]) 3280 && this_alternative[m] == NO_REGS 3281 && ! this_alternative_win[m]) 3282 bad = 1; 3283 3284 did_match = this_alternative_win[m]; 3285 } 3286 else 3287 { 3288 /* Operands don't match. */ 3289 rtx value; 3290 int loc1, loc2; 3291 /* Retroactively mark the operand we had to match 3292 as a loser, if it wasn't already. */ 3293 if (this_alternative_win[m]) 3294 losers++; 3295 this_alternative_win[m] = 0; 3296 if (this_alternative[m] == NO_REGS) 3297 bad = 1; 3298 /* But count the pair only once in the total badness of 3299 this alternative, if the pair can be a dummy reload. 3300 The pointers in operand_loc are not swapped; swap 3301 them by hand if necessary. 
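(A dummy reload here means find_dummy_reload can see that one of the two operands could itself serve as the reload register for the pair, so no separate reload insn would be needed; when it returns nonzero, one of the losers counted for the pair is cancelled again just below.)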
*/ 3302 if (swapped && i == commutative) 3303 loc1 = commutative + 1; 3304 else if (swapped && i == commutative + 1) 3305 loc1 = commutative; 3306 else 3307 loc1 = i; 3308 if (swapped && m == commutative) 3309 loc2 = commutative + 1; 3310 else if (swapped && m == commutative + 1) 3311 loc2 = commutative; 3312 else 3313 loc2 = m; 3314 value 3315 = find_dummy_reload (recog_data.operand[i], 3316 recog_data.operand[m], 3317 recog_data.operand_loc[loc1], 3318 recog_data.operand_loc[loc2], 3319 operand_mode[i], operand_mode[m], 3320 this_alternative[m], -1, 3321 this_alternative_earlyclobber[m]); 3322 3323 if (value != 0) 3324 losers--; 3325 } 3326 /* This can be fixed with reloads if the operand 3327 we are supposed to match can be fixed with reloads. */ 3328 badop = 0; 3329 this_alternative[i] = this_alternative[m]; 3330 3331 /* If we have to reload this operand and some previous 3332 operand also had to match the same thing as this 3333 operand, we don't know how to do that. So reject this 3334 alternative. */ 3335 if (! did_match || force_reload) 3336 for (j = 0; j < i; j++) 3337 if (this_alternative_matches[j] 3338 == this_alternative_matches[i]) 3339 badop = 1; 3340 break; 3341 3342 case 'p': 3343 /* All necessary reloads for an address_operand 3344 were handled in find_reloads_address. */ 3345 this_alternative[i] 3346 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC, 3347 ADDRESS, SCRATCH); 3348 win = 1; 3349 badop = 0; 3350 break; 3351 3352 case TARGET_MEM_CONSTRAINT: 3353 if (force_reload) 3354 break; 3355 if (MEM_P (operand) 3356 || (REG_P (operand) 3357 && REGNO (operand) >= FIRST_PSEUDO_REGISTER 3358 && reg_renumber[REGNO (operand)] < 0)) 3359 win = 1; 3360 if (CONST_POOL_OK_P (operand_mode[i], operand)) 3361 badop = 0; 3362 constmemok = 1; 3363 break; 3364 3365 case '<': 3366 if (MEM_P (operand) 3367 && ! address_reloaded[i] 3368 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC 3369 || GET_CODE (XEXP (operand, 0)) == POST_DEC)) 3370 win = 1; 3371 break; 3372 3373 case '>': 3374 if (MEM_P (operand) 3375 && ! address_reloaded[i] 3376 && (GET_CODE (XEXP (operand, 0)) == PRE_INC 3377 || GET_CODE (XEXP (operand, 0)) == POST_INC)) 3378 win = 1; 3379 break; 3380 3381 /* Memory operand whose address is not offsettable. */ 3382 case 'V': 3383 if (force_reload) 3384 break; 3385 if (MEM_P (operand) 3386 && ! (ind_levels ? offsettable_memref_p (operand) 3387 : offsettable_nonstrict_memref_p (operand)) 3388 /* Certain mem addresses will become offsettable 3389 after they themselves are reloaded. This is important; 3390 we don't want our own handling of unoffsettables 3391 to override the handling of reg_equiv_address. */ 3392 && !(REG_P (XEXP (operand, 0)) 3393 && (ind_levels == 0 3394 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0))) 3395 win = 1; 3396 break; 3397 3398 /* Memory operand whose address is offsettable. */ 3399 case 'o': 3400 if (force_reload) 3401 break; 3402 if ((MEM_P (operand) 3403 /* If IND_LEVELS, find_reloads_address won't reload a 3404 pseudo that didn't get a hard reg, so we have to 3405 reject that case. */ 3406 && ((ind_levels ? offsettable_memref_p (operand) 3407 : offsettable_nonstrict_memref_p (operand)) 3408 /* A reloaded address is offsettable because it is now 3409 just a simple register indirect. 
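(Roughly, an address is offsettable if adding a small constant displacement to it still yields a valid address: (mem (reg)) and (mem (plus (reg) (const_int 4))) usually qualify, while an autoincrement form such as (mem (post_inc (reg))) does not.)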
*/ 3410 || address_reloaded[i] == 1)) 3411 || (REG_P (operand) 3412 && REGNO (operand) >= FIRST_PSEUDO_REGISTER 3413 && reg_renumber[REGNO (operand)] < 0 3414 /* If reg_equiv_address is nonzero, we will be 3415 loading it into a register; hence it will be 3416 offsettable, but we cannot say that reg_equiv_mem 3417 is offsettable without checking. */ 3418 && ((reg_equiv_mem (REGNO (operand)) != 0 3419 && offsettable_memref_p (reg_equiv_mem (REGNO (operand)))) 3420 || (reg_equiv_address (REGNO (operand)) != 0)))) 3421 win = 1; 3422 if (CONST_POOL_OK_P (operand_mode[i], operand) 3423 || MEM_P (operand)) 3424 badop = 0; 3425 constmemok = 1; 3426 offmemok = 1; 3427 break; 3428 3429 case '&': 3430 /* Output operand that is stored before the need for the 3431 input operands (and their index registers) is over. */ 3432 earlyclobber = 1, this_earlyclobber = 1; 3433 break; 3434 3435 case 'E': 3436 case 'F': 3437 if (CONST_DOUBLE_AS_FLOAT_P (operand) 3438 || (GET_CODE (operand) == CONST_VECTOR 3439 && (GET_MODE_CLASS (GET_MODE (operand)) 3440 == MODE_VECTOR_FLOAT))) 3441 win = 1; 3442 break; 3443 3444 case 'G': 3445 case 'H': 3446 if (CONST_DOUBLE_AS_FLOAT_P (operand) 3447 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p)) 3448 win = 1; 3449 break; 3450 3451 case 's': 3452 if (CONST_SCALAR_INT_P (operand)) 3453 break; 3454 case 'i': 3455 if (CONSTANT_P (operand) 3456 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand))) 3457 win = 1; 3458 break; 3459 3460 case 'n': 3461 if (CONST_SCALAR_INT_P (operand)) 3462 win = 1; 3463 break; 3464 3465 case 'I': 3466 case 'J': 3467 case 'K': 3468 case 'L': 3469 case 'M': 3470 case 'N': 3471 case 'O': 3472 case 'P': 3473 if (CONST_INT_P (operand) 3474 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p)) 3475 win = 1; 3476 break; 3477 3478 case 'X': 3479 force_reload = 0; 3480 win = 1; 3481 break; 3482 3483 case 'g': 3484 if (! force_reload 3485 /* A PLUS is never a valid operand, but reload can make 3486 it from a register when eliminating registers. */ 3487 && GET_CODE (operand) != PLUS 3488 /* A SCRATCH is not a valid operand. */ 3489 && GET_CODE (operand) != SCRATCH 3490 && (! CONSTANT_P (operand) 3491 || ! flag_pic 3492 || LEGITIMATE_PIC_OPERAND_P (operand)) 3493 && (GENERAL_REGS == ALL_REGS 3494 || !REG_P (operand) 3495 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER 3496 && reg_renumber[REGNO (operand)] < 0))) 3497 win = 1; 3498 /* Drop through into 'r' case. */ 3499 3500 case 'r': 3501 this_alternative[i] 3502 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS]; 3503 goto reg; 3504 3505 default: 3506 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS) 3507 { 3508 #ifdef EXTRA_CONSTRAINT_STR 3509 if (EXTRA_MEMORY_CONSTRAINT (c, p)) 3510 { 3511 if (force_reload) 3512 break; 3513 if (EXTRA_CONSTRAINT_STR (operand, c, p)) 3514 win = 1; 3515 /* If the address was already reloaded, 3516 we win as well. */ 3517 else if (MEM_P (operand) 3518 && address_reloaded[i] == 1) 3519 win = 1; 3520 /* Likewise if the address will be reloaded because 3521 reg_equiv_address is nonzero. For reg_equiv_mem 3522 we have to check. 
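(This mirrors the 'o' case above: a nonzero reg_equiv_address means the address itself will be loaded into a base register, and a memory constraint is expected to accept a plain base-register address, whereas a reg_equiv_mem expression is substituted as-is and therefore has to pass the constraint check explicitly.)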
*/ 3523 else if (REG_P (operand) 3524 && REGNO (operand) >= FIRST_PSEUDO_REGISTER 3525 && reg_renumber[REGNO (operand)] < 0 3526 && ((reg_equiv_mem (REGNO (operand)) != 0 3527 && EXTRA_CONSTRAINT_STR (reg_equiv_mem (REGNO (operand)), c, p)) 3528 || (reg_equiv_address (REGNO (operand)) != 0))) 3529 win = 1; 3530 3531 /* If we didn't already win, we can reload 3532 constants via force_const_mem, and other 3533 MEMs by reloading the address like for 'o'. */ 3534 if (CONST_POOL_OK_P (operand_mode[i], operand) 3535 || MEM_P (operand)) 3536 badop = 0; 3537 constmemok = 1; 3538 offmemok = 1; 3539 break; 3540 } 3541 if (EXTRA_ADDRESS_CONSTRAINT (c, p)) 3542 { 3543 if (EXTRA_CONSTRAINT_STR (operand, c, p)) 3544 win = 1; 3545 3546 /* If we didn't already win, we can reload 3547 the address into a base register. */ 3548 this_alternative[i] 3549 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC, 3550 ADDRESS, SCRATCH); 3551 badop = 0; 3552 break; 3553 } 3554 3555 if (EXTRA_CONSTRAINT_STR (operand, c, p)) 3556 win = 1; 3557 #endif 3558 break; 3559 } 3560 3561 this_alternative[i] 3562 = (reg_class_subunion 3563 [this_alternative[i]] 3564 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]); 3565 reg: 3566 if (GET_MODE (operand) == BLKmode) 3567 break; 3568 winreg = 1; 3569 if (REG_P (operand) 3570 && reg_fits_class_p (operand, this_alternative[i], 3571 offset, GET_MODE (recog_data.operand[i]))) 3572 win = 1; 3573 break; 3574 } 3575 while ((p += len), c); 3576 3577 if (swapped == (commutative >= 0 ? 1 : 0)) 3578 constraints[i] = p; 3579 3580 /* If this operand could be handled with a reg, 3581 and some reg is allowed, then this operand can be handled. */ 3582 if (winreg && this_alternative[i] != NO_REGS 3583 && (win || !class_only_fixed_regs[this_alternative[i]])) 3584 badop = 0; 3585 3586 /* Record which operands fit this alternative. */ 3587 this_alternative_earlyclobber[i] = earlyclobber; 3588 if (win && ! force_reload) 3589 this_alternative_win[i] = 1; 3590 else if (did_match && ! force_reload) 3591 this_alternative_match_win[i] = 1; 3592 else 3593 { 3594 int const_to_mem = 0; 3595 3596 this_alternative_offmemok[i] = offmemok; 3597 losers++; 3598 if (badop) 3599 bad = 1; 3600 /* Alternative loses if it has no regs for a reg operand. */ 3601 if (REG_P (operand) 3602 && this_alternative[i] == NO_REGS 3603 && this_alternative_matches[i] < 0) 3604 bad = 1; 3605 3606 /* If this is a constant that is reloaded into the desired 3607 class by copying it to memory first, count that as another 3608 reload. This is consistent with other code and is 3609 required to avoid choosing another alternative when 3610 the constant is moved into memory by this function on 3611 an early reload pass. Note that the test here is 3612 precisely the same as in the code below that calls 3613 force_const_mem. */ 3614 if (CONST_POOL_OK_P (operand_mode[i], operand) 3615 && ((targetm.preferred_reload_class (operand, 3616 this_alternative[i]) 3617 == NO_REGS) 3618 || no_input_reloads)) 3619 { 3620 const_to_mem = 1; 3621 if (this_alternative[i] != NO_REGS) 3622 losers++; 3623 } 3624 3625 /* Alternative loses if it requires a type of reload not 3626 permitted for this insn. We can always reload SCRATCH 3627 and objects with a REG_UNUSED note. */ 3628 if (GET_CODE (operand) != SCRATCH 3629 && modified[i] != RELOAD_READ && no_output_reloads 3630 && ! find_reg_note (insn, REG_UNUSED, operand)) 3631 bad = 1; 3632 else if (modified[i] != RELOAD_WRITE && no_input_reloads 3633 && ! 
const_to_mem) 3634 bad = 1; 3635 3636 /* If we can't reload this value at all, reject this 3637 alternative. Note that we could also lose due to 3638 LIMIT_RELOAD_CLASS, but we don't check that 3639 here. */ 3640 3641 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS) 3642 { 3643 if (targetm.preferred_reload_class (operand, 3644 this_alternative[i]) 3645 == NO_REGS) 3646 reject = 600; 3647 3648 if (operand_type[i] == RELOAD_FOR_OUTPUT 3649 && (targetm.preferred_output_reload_class (operand, 3650 this_alternative[i]) 3651 == NO_REGS)) 3652 reject = 600; 3653 } 3654 3655 /* We prefer to reload pseudos over reloading other things, 3656 since such reloads may be able to be eliminated later. 3657 If we are reloading a SCRATCH, we won't be generating any 3658 insns, just using a register, so it is also preferred. 3659 So bump REJECT in other cases. Don't do this in the 3660 case where we are forcing a constant into memory and 3661 it will then win since we don't want to have a different 3662 alternative match then. */ 3663 if (! (REG_P (operand) 3664 && REGNO (operand) >= FIRST_PSEUDO_REGISTER) 3665 && GET_CODE (operand) != SCRATCH 3666 && ! (const_to_mem && constmemok)) 3667 reject += 2; 3668 3669 /* Input reloads can be inherited more often than output 3670 reloads can be removed, so penalize output reloads. */ 3671 if (operand_type[i] != RELOAD_FOR_INPUT 3672 && GET_CODE (operand) != SCRATCH) 3673 reject++; 3674 } 3675 3676 /* If this operand is a pseudo register that didn't get 3677 a hard reg and this alternative accepts some 3678 register, see if the class that we want is a subset 3679 of the preferred class for this register. If not, 3680 but it intersects that class, use the preferred class 3681 instead. If it does not intersect the preferred 3682 class, show that usage of this alternative should be 3683 discouraged; it will be discouraged more still if the 3684 register is `preferred or nothing'. We do this 3685 because it increases the chance of reusing our spill 3686 register in a later insn and avoiding a pair of 3687 memory stores and loads. 3688 3689 Don't bother with this if this alternative will 3690 accept this operand. 3691 3692 Don't do this for a multiword operand, since it is 3693 only a small win and has the risk of requiring more 3694 spill registers, which could cause a large loss. 3695 3696 Don't do this if the preferred class has only one 3697 register because we might otherwise exhaust the 3698 class. */ 3699 3700 if (! win && ! did_match 3701 && this_alternative[i] != NO_REGS 3702 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD 3703 && reg_class_size [(int) preferred_class[i]] > 0 3704 && ! small_register_class_p (preferred_class[i])) 3705 { 3706 if (! reg_class_subset_p (this_alternative[i], 3707 preferred_class[i])) 3708 { 3709 /* Since we don't have a way of forming the intersection, 3710 we just do something special if the preferred class 3711 is a subset of the class we have; that's the most 3712 common case anyway. */ 3713 if (reg_class_subset_p (preferred_class[i], 3714 this_alternative[i])) 3715 this_alternative[i] = preferred_class[i]; 3716 else 3717 reject += (2 + 2 * pref_or_nothing[i]); 3718 } 3719 } 3720 } 3721 3722 /* Now see if any output operands that are marked "earlyclobber" 3723 in this alternative conflict with any input operands 3724 or any memory addresses. 
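(An earlyclobber output is written before the insn has finished using its inputs, so it must not share a register with any input it is not explicitly matched to; for example, an output constrained "=&r" must not end up in the same register as an unrelated "r" input of the same insn.)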
*/ 3725 3726 for (i = 0; i < noperands; i++) 3727 if (this_alternative_earlyclobber[i] 3728 && (this_alternative_win[i] || this_alternative_match_win[i])) 3729 { 3730 struct decomposition early_data; 3731 3732 early_data = decompose (recog_data.operand[i]); 3733 3734 gcc_assert (modified[i] != RELOAD_READ); 3735 3736 if (this_alternative[i] == NO_REGS) 3737 { 3738 this_alternative_earlyclobber[i] = 0; 3739 gcc_assert (this_insn_is_asm); 3740 error_for_asm (this_insn, 3741 "%<&%> constraint used with no register class"); 3742 } 3743 3744 for (j = 0; j < noperands; j++) 3745 /* Is this an input operand or a memory ref? */ 3746 if ((MEM_P (recog_data.operand[j]) 3747 || modified[j] != RELOAD_WRITE) 3748 && j != i 3749 /* Ignore things like match_operator operands. */ 3750 && !recog_data.is_operator[j] 3751 /* Don't count an input operand that is constrained to match 3752 the early clobber operand. */ 3753 && ! (this_alternative_matches[j] == i 3754 && rtx_equal_p (recog_data.operand[i], 3755 recog_data.operand[j])) 3756 /* Is it altered by storing the earlyclobber operand? */ 3757 && !immune_p (recog_data.operand[j], recog_data.operand[i], 3758 early_data)) 3759 { 3760 /* If the output is in a non-empty few-regs class, 3761 it's costly to reload it, so reload the input instead. */ 3762 if (small_register_class_p (this_alternative[i]) 3763 && (REG_P (recog_data.operand[j]) 3764 || GET_CODE (recog_data.operand[j]) == SUBREG)) 3765 { 3766 losers++; 3767 this_alternative_win[j] = 0; 3768 this_alternative_match_win[j] = 0; 3769 } 3770 else 3771 break; 3772 } 3773 /* If an earlyclobber operand conflicts with something, 3774 it must be reloaded, so request this and count the cost. */ 3775 if (j != noperands) 3776 { 3777 losers++; 3778 this_alternative_win[i] = 0; 3779 this_alternative_match_win[j] = 0; 3780 for (j = 0; j < noperands; j++) 3781 if (this_alternative_matches[j] == i 3782 && this_alternative_match_win[j]) 3783 { 3784 this_alternative_win[j] = 0; 3785 this_alternative_match_win[j] = 0; 3786 losers++; 3787 } 3788 } 3789 } 3790 3791 /* If one alternative accepts all the operands, no reload required, 3792 choose that alternative; don't consider the remaining ones. */ 3793 if (losers == 0) 3794 { 3795 /* Unswap these so that they are never swapped at `finish'. */ 3796 if (swapped) 3797 { 3798 recog_data.operand[commutative] = substed_operand[commutative]; 3799 recog_data.operand[commutative + 1] 3800 = substed_operand[commutative + 1]; 3801 } 3802 for (i = 0; i < noperands; i++) 3803 { 3804 goal_alternative_win[i] = this_alternative_win[i]; 3805 goal_alternative_match_win[i] = this_alternative_match_win[i]; 3806 goal_alternative[i] = this_alternative[i]; 3807 goal_alternative_offmemok[i] = this_alternative_offmemok[i]; 3808 goal_alternative_matches[i] = this_alternative_matches[i]; 3809 goal_alternative_earlyclobber[i] 3810 = this_alternative_earlyclobber[i]; 3811 } 3812 goal_alternative_number = this_alternative_number; 3813 goal_alternative_swapped = swapped; 3814 goal_earlyclobber = this_earlyclobber; 3815 goto finish; 3816 } 3817 3818 /* REJECT, set by the ! and ? constraint characters and when a register 3819 would be reloaded into a non-preferred class, discourages the use of 3820 this alternative for a reload goal. REJECT is incremented by six 3821 for each ? and two for each non-preferred class. 
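(With the scaling applied just below, one '?' weighs the same as one extra reload: an alternative needing one reload plus one '?' scores 1*6 + 6 = 12, the same as an alternative needing two reloads with nothing else counted against it, while a '!' sets REJECT to 600 and all but rules the alternative out once any reloading is needed.)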
*/ 3822 losers = losers * 6 + reject; 3823 3824 /* If this alternative can be made to work by reloading, 3825 and it needs less reloading than the others checked so far, 3826 record it as the chosen goal for reloading. */ 3827 if (! bad) 3828 { 3829 if (best > losers) 3830 { 3831 for (i = 0; i < noperands; i++) 3832 { 3833 goal_alternative[i] = this_alternative[i]; 3834 goal_alternative_win[i] = this_alternative_win[i]; 3835 goal_alternative_match_win[i] 3836 = this_alternative_match_win[i]; 3837 goal_alternative_offmemok[i] 3838 = this_alternative_offmemok[i]; 3839 goal_alternative_matches[i] = this_alternative_matches[i]; 3840 goal_alternative_earlyclobber[i] 3841 = this_alternative_earlyclobber[i]; 3842 } 3843 goal_alternative_swapped = swapped; 3844 best = losers; 3845 goal_alternative_number = this_alternative_number; 3846 goal_earlyclobber = this_earlyclobber; 3847 } 3848 } 3849 3850 if (swapped) 3851 { 3852 enum reg_class tclass; 3853 int t; 3854 3855 /* If the commutative operands have been swapped, swap 3856 them back in order to check the next alternative. */ 3857 recog_data.operand[commutative] = substed_operand[commutative]; 3858 recog_data.operand[commutative + 1] = substed_operand[commutative + 1]; 3859 /* Unswap the duplicates too. */ 3860 for (i = 0; i < recog_data.n_dups; i++) 3861 if (recog_data.dup_num[i] == commutative 3862 || recog_data.dup_num[i] == commutative + 1) 3863 *recog_data.dup_loc[i] 3864 = recog_data.operand[(int) recog_data.dup_num[i]]; 3865 3866 /* Unswap the operand related information as well. */ 3867 tclass = preferred_class[commutative]; 3868 preferred_class[commutative] = preferred_class[commutative + 1]; 3869 preferred_class[commutative + 1] = tclass; 3870 3871 t = pref_or_nothing[commutative]; 3872 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1]; 3873 pref_or_nothing[commutative + 1] = t; 3874 3875 t = address_reloaded[commutative]; 3876 address_reloaded[commutative] = address_reloaded[commutative + 1]; 3877 address_reloaded[commutative + 1] = t; 3878 } 3879 } 3880 } 3881 3882 /* The operands don't meet the constraints. 3883 goal_alternative describes the alternative 3884 that we could reach by reloading the fewest operands. 3885 Reload so as to fit it. */ 3886 3887 if (best == MAX_RECOG_OPERANDS * 2 + 600) 3888 { 3889 /* No alternative works with reloads?? */ 3890 if (insn_code_number >= 0) 3891 fatal_insn ("unable to generate reloads for:", insn); 3892 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>"); 3893 /* Avoid further trouble with this insn. */ 3894 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx); 3895 n_reloads = 0; 3896 return 0; 3897 } 3898 3899 /* Jump to `finish' from above if all operands are valid already. 3900 In that case, goal_alternative_win is all 1. */ 3901 finish: 3902 3903 /* Right now, for any pair of operands I and J that are required to match, 3904 with I < J, 3905 goal_alternative_matches[J] is I. 3906 Set up goal_alternative_matched as the inverse function: 3907 goal_alternative_matched[I] = J. */ 3908 3909 for (i = 0; i < noperands; i++) 3910 goal_alternative_matched[i] = -1; 3911 3912 for (i = 0; i < noperands; i++) 3913 if (! 
goal_alternative_win[i] 3914 && goal_alternative_matches[i] >= 0) 3915 goal_alternative_matched[goal_alternative_matches[i]] = i; 3916 3917 for (i = 0; i < noperands; i++) 3918 goal_alternative_win[i] |= goal_alternative_match_win[i]; 3919 3920 /* If the best alternative is with operands 1 and 2 swapped, 3921 consider them swapped before reporting the reloads. Update the 3922 operand numbers of any reloads already pushed. */ 3923 3924 if (goal_alternative_swapped) 3925 { 3926 rtx tem; 3927 3928 tem = substed_operand[commutative]; 3929 substed_operand[commutative] = substed_operand[commutative + 1]; 3930 substed_operand[commutative + 1] = tem; 3931 tem = recog_data.operand[commutative]; 3932 recog_data.operand[commutative] = recog_data.operand[commutative + 1]; 3933 recog_data.operand[commutative + 1] = tem; 3934 tem = *recog_data.operand_loc[commutative]; 3935 *recog_data.operand_loc[commutative] 3936 = *recog_data.operand_loc[commutative + 1]; 3937 *recog_data.operand_loc[commutative + 1] = tem; 3938 3939 for (i = 0; i < n_reloads; i++) 3940 { 3941 if (rld[i].opnum == commutative) 3942 rld[i].opnum = commutative + 1; 3943 else if (rld[i].opnum == commutative + 1) 3944 rld[i].opnum = commutative; 3945 } 3946 } 3947 3948 for (i = 0; i < noperands; i++) 3949 { 3950 operand_reloadnum[i] = -1; 3951 3952 /* If this is an earlyclobber operand, we need to widen the scope. 3953 The reload must remain valid from the start of the insn being 3954 reloaded until after the operand is stored into its destination. 3955 We approximate this with RELOAD_OTHER even though we know that we 3956 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads. 3957 3958 One special case that is worth checking is when we have an 3959 output that is earlyclobber but isn't used past the insn (typically 3960 a SCRATCH). In this case, we only need have the reload live 3961 through the insn itself, but not for any of our input or output 3962 reloads. 3963 But we must not accidentally narrow the scope of an existing 3964 RELOAD_OTHER reload - leave these alone. 3965 3966 In any case, anything needed to address this operand can remain 3967 however they were previously categorized. */ 3968 3969 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER) 3970 operand_type[i] 3971 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i]) 3972 ? RELOAD_FOR_INSN : RELOAD_OTHER); 3973 } 3974 3975 /* Any constants that aren't allowed and can't be reloaded 3976 into registers are here changed into memory references. */ 3977 for (i = 0; i < noperands; i++) 3978 if (! goal_alternative_win[i]) 3979 { 3980 rtx op = recog_data.operand[i]; 3981 rtx subreg = NULL_RTX; 3982 rtx plus = NULL_RTX; 3983 enum machine_mode mode = operand_mode[i]; 3984 3985 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in 3986 push_reload so we have to let them pass here. */ 3987 if (GET_CODE (op) == SUBREG) 3988 { 3989 subreg = op; 3990 op = SUBREG_REG (op); 3991 mode = GET_MODE (op); 3992 } 3993 3994 if (GET_CODE (op) == PLUS) 3995 { 3996 plus = op; 3997 op = XEXP (op, 1); 3998 } 3999 4000 if (CONST_POOL_OK_P (mode, op) 4001 && ((targetm.preferred_reload_class (op, goal_alternative[i]) 4002 == NO_REGS) 4003 || no_input_reloads)) 4004 { 4005 int this_address_reloaded; 4006 rtx tem = force_const_mem (mode, op); 4007 4008 /* If we stripped a SUBREG or a PLUS above add it back. 
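(For illustration: a constant that no alternative accepts directly is placed in the constant pool by force_const_mem above, yielding a reference of the general shape (mem (symbol_ref ...)); if the original operand was a SUBREG of the constant, or a PLUS with the constant as its second operand, the same wrapper is rebuilt around the new MEM just below so the operand keeps its original shape.)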
*/ 4009 if (plus != NULL_RTX) 4010 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem); 4011 4012 if (subreg != NULL_RTX) 4013 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg)); 4014 4015 this_address_reloaded = 0; 4016 substed_operand[i] = recog_data.operand[i] 4017 = find_reloads_toplev (tem, i, address_type[i], ind_levels, 4018 0, insn, &this_address_reloaded); 4019 4020 /* If the alternative accepts constant pool refs directly 4021 there will be no reload needed at all. */ 4022 if (plus == NULL_RTX 4023 && subreg == NULL_RTX 4024 && alternative_allows_const_pool_ref (this_address_reloaded == 0 4025 ? substed_operand[i] 4026 : NULL, 4027 recog_data.constraints[i], 4028 goal_alternative_number)) 4029 goal_alternative_win[i] = 1; 4030 } 4031 } 4032 4033 /* Record the values of the earlyclobber operands for the caller. */ 4034 if (goal_earlyclobber) 4035 for (i = 0; i < noperands; i++) 4036 if (goal_alternative_earlyclobber[i]) 4037 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i]; 4038 4039 /* Now record reloads for all the operands that need them. */ 4040 for (i = 0; i < noperands; i++) 4041 if (! goal_alternative_win[i]) 4042 { 4043 /* Operands that match previous ones have already been handled. */ 4044 if (goal_alternative_matches[i] >= 0) 4045 ; 4046 /* Handle an operand with a nonoffsettable address 4047 appearing where an offsettable address will do 4048 by reloading the address into a base register. 4049 4050 ??? We can also do this when the operand is a register and 4051 reg_equiv_mem is not offsettable, but this is a bit tricky, 4052 so we don't bother with it. It may not be worth doing. */ 4053 else if (goal_alternative_matched[i] == -1 4054 && goal_alternative_offmemok[i] 4055 && MEM_P (recog_data.operand[i])) 4056 { 4057 /* If the address to be reloaded is a VOIDmode constant, 4058 use the default address mode as mode of the reload register, 4059 as would have been done by find_reloads_address. */ 4060 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]); 4061 enum machine_mode address_mode; 4062 4063 address_mode = get_address_mode (recog_data.operand[i]); 4064 operand_reloadnum[i] 4065 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX, 4066 &XEXP (recog_data.operand[i], 0), (rtx*) 0, 4067 base_reg_class (VOIDmode, as, MEM, SCRATCH), 4068 address_mode, 4069 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT); 4070 rld[operand_reloadnum[i]].inc 4071 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i])); 4072 4073 /* If this operand is an output, we will have made any 4074 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but 4075 now we are treating part of the operand as an input, so 4076 we must change these to RELOAD_FOR_INPUT_ADDRESS. */ 4077 4078 if (modified[i] == RELOAD_WRITE) 4079 { 4080 for (j = 0; j < n_reloads; j++) 4081 { 4082 if (rld[j].opnum == i) 4083 { 4084 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS) 4085 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS; 4086 else if (rld[j].when_needed 4087 == RELOAD_FOR_OUTADDR_ADDRESS) 4088 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS; 4089 } 4090 } 4091 } 4092 } 4093 else if (goal_alternative_matched[i] == -1) 4094 { 4095 operand_reloadnum[i] 4096 = push_reload ((modified[i] != RELOAD_WRITE 4097 ? recog_data.operand[i] : 0), 4098 (modified[i] != RELOAD_READ 4099 ? recog_data.operand[i] : 0), 4100 (modified[i] != RELOAD_WRITE 4101 ? recog_data.operand_loc[i] : 0), 4102 (modified[i] != RELOAD_READ 4103 ? 
recog_data.operand_loc[i] : 0), 4104 (enum reg_class) goal_alternative[i], 4105 (modified[i] == RELOAD_WRITE 4106 ? VOIDmode : operand_mode[i]), 4107 (modified[i] == RELOAD_READ 4108 ? VOIDmode : operand_mode[i]), 4109 (insn_code_number < 0 ? 0 4110 : insn_data[insn_code_number].operand[i].strict_low), 4111 0, i, operand_type[i]); 4112 } 4113 /* In a matching pair of operands, one must be input only 4114 and the other must be output only. 4115 Pass the input operand as IN and the other as OUT. */ 4116 else if (modified[i] == RELOAD_READ 4117 && modified[goal_alternative_matched[i]] == RELOAD_WRITE) 4118 { 4119 operand_reloadnum[i] 4120 = push_reload (recog_data.operand[i], 4121 recog_data.operand[goal_alternative_matched[i]], 4122 recog_data.operand_loc[i], 4123 recog_data.operand_loc[goal_alternative_matched[i]], 4124 (enum reg_class) goal_alternative[i], 4125 operand_mode[i], 4126 operand_mode[goal_alternative_matched[i]], 4127 0, 0, i, RELOAD_OTHER); 4128 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum; 4129 } 4130 else if (modified[i] == RELOAD_WRITE 4131 && modified[goal_alternative_matched[i]] == RELOAD_READ) 4132 { 4133 operand_reloadnum[goal_alternative_matched[i]] 4134 = push_reload (recog_data.operand[goal_alternative_matched[i]], 4135 recog_data.operand[i], 4136 recog_data.operand_loc[goal_alternative_matched[i]], 4137 recog_data.operand_loc[i], 4138 (enum reg_class) goal_alternative[i], 4139 operand_mode[goal_alternative_matched[i]], 4140 operand_mode[i], 4141 0, 0, i, RELOAD_OTHER); 4142 operand_reloadnum[i] = output_reloadnum; 4143 } 4144 else 4145 { 4146 gcc_assert (insn_code_number < 0); 4147 error_for_asm (insn, "inconsistent operand constraints " 4148 "in an %<asm%>"); 4149 /* Avoid further trouble with this insn. */ 4150 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx); 4151 n_reloads = 0; 4152 return 0; 4153 } 4154 } 4155 else if (goal_alternative_matched[i] < 0 4156 && goal_alternative_matches[i] < 0 4157 && address_operand_reloaded[i] != 1 4158 && optimize) 4159 { 4160 /* For each non-matching operand that's a MEM or a pseudo-register 4161 that didn't get a hard register, make an optional reload. 4162 This may get done even if the insn needs no reloads otherwise. */ 4163 4164 rtx operand = recog_data.operand[i]; 4165 4166 while (GET_CODE (operand) == SUBREG) 4167 operand = SUBREG_REG (operand); 4168 if ((MEM_P (operand) 4169 || (REG_P (operand) 4170 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)) 4171 /* If this is only for an output, the optional reload would not 4172 actually cause us to use a register now, just note that 4173 something is stored here. */ 4174 && (goal_alternative[i] != NO_REGS 4175 || modified[i] == RELOAD_WRITE) 4176 && ! no_input_reloads 4177 /* An optional output reload might allow to delete INSN later. 4178 We mustn't make in-out reloads on insns that are not permitted 4179 output reloads. 4180 If this is an asm, we can't delete it; we must not even call 4181 push_reload for an optional output reload in this case, 4182 because we can't be sure that the constraint allows a register, 4183 and push_reload verifies the constraints for asms. */ 4184 && (modified[i] == RELOAD_READ 4185 || (! no_output_reloads && ! this_insn_is_asm))) 4186 operand_reloadnum[i] 4187 = push_reload ((modified[i] != RELOAD_WRITE 4188 ? recog_data.operand[i] : 0), 4189 (modified[i] != RELOAD_READ 4190 ? recog_data.operand[i] : 0), 4191 (modified[i] != RELOAD_WRITE 4192 ? recog_data.operand_loc[i] : 0), 4193 (modified[i] != RELOAD_READ 4194 ? 
recog_data.operand_loc[i] : 0), 4195 (enum reg_class) goal_alternative[i], 4196 (modified[i] == RELOAD_WRITE 4197 ? VOIDmode : operand_mode[i]), 4198 (modified[i] == RELOAD_READ 4199 ? VOIDmode : operand_mode[i]), 4200 (insn_code_number < 0 ? 0 4201 : insn_data[insn_code_number].operand[i].strict_low), 4202 1, i, operand_type[i]); 4203 /* If a memory reference remains (either as a MEM or a pseudo that 4204 did not get a hard register), yet we can't make an optional 4205 reload, check if this is actually a pseudo register reference; 4206 we then need to emit a USE and/or a CLOBBER so that reload 4207 inheritance will do the right thing. */ 4208 else if (replace 4209 && (MEM_P (operand) 4210 || (REG_P (operand) 4211 && REGNO (operand) >= FIRST_PSEUDO_REGISTER 4212 && reg_renumber [REGNO (operand)] < 0))) 4213 { 4214 operand = *recog_data.operand_loc[i]; 4215 4216 while (GET_CODE (operand) == SUBREG) 4217 operand = SUBREG_REG (operand); 4218 if (REG_P (operand)) 4219 { 4220 if (modified[i] != RELOAD_WRITE) 4221 /* We mark the USE with QImode so that we recognize 4222 it as one that can be safely deleted at the end 4223 of reload. */ 4224 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand), 4225 insn), QImode); 4226 if (modified[i] != RELOAD_READ) 4227 emit_insn_after (gen_clobber (operand), insn); 4228 } 4229 } 4230 } 4231 else if (goal_alternative_matches[i] >= 0 4232 && goal_alternative_win[goal_alternative_matches[i]] 4233 && modified[i] == RELOAD_READ 4234 && modified[goal_alternative_matches[i]] == RELOAD_WRITE 4235 && ! no_input_reloads && ! no_output_reloads 4236 && optimize) 4237 { 4238 /* Similarly, make an optional reload for a pair of matching 4239 objects that are in MEM or a pseudo that didn't get a hard reg. */ 4240 4241 rtx operand = recog_data.operand[i]; 4242 4243 while (GET_CODE (operand) == SUBREG) 4244 operand = SUBREG_REG (operand); 4245 if ((MEM_P (operand) 4246 || (REG_P (operand) 4247 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)) 4248 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS)) 4249 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]] 4250 = push_reload (recog_data.operand[goal_alternative_matches[i]], 4251 recog_data.operand[i], 4252 recog_data.operand_loc[goal_alternative_matches[i]], 4253 recog_data.operand_loc[i], 4254 (enum reg_class) goal_alternative[goal_alternative_matches[i]], 4255 operand_mode[goal_alternative_matches[i]], 4256 operand_mode[i], 4257 0, 1, goal_alternative_matches[i], RELOAD_OTHER); 4258 } 4259 4260 /* Perform whatever substitutions on the operands we are supposed 4261 to make due to commutativity or replacement of registers 4262 with equivalent constants or memory slots. */ 4263 4264 for (i = 0; i < noperands; i++) 4265 { 4266 /* We only do this on the last pass through reload, because it is 4267 possible for some data (like reg_equiv_address) to be changed during 4268 later passes. Moreover, we lose the opportunity to get a useful 4269 reload_{in,out}_reg when we do these replacements. */ 4270 4271 if (replace) 4272 { 4273 rtx substitution = substed_operand[i]; 4274 4275 *recog_data.operand_loc[i] = substitution; 4276 4277 /* If we're replacing an operand with a LABEL_REF, we need to 4278 make sure that there's a REG_LABEL_OPERAND note attached to 4279 this instruction. */ 4280 if (GET_CODE (substitution) == LABEL_REF 4281 && !find_reg_note (insn, REG_LABEL_OPERAND, 4282 XEXP (substitution, 0)) 4283 /* For a JUMP_P, if it was a branch target it must have 4284 already been recorded as such. 
*/ 4285 && (!JUMP_P (insn) 4286 || !label_is_jump_target_p (XEXP (substitution, 0), 4287 insn))) 4288 { 4289 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0)); 4290 if (LABEL_P (XEXP (substitution, 0))) 4291 ++LABEL_NUSES (XEXP (substitution, 0)); 4292 } 4293 4294 } 4295 else 4296 retval |= (substed_operand[i] != *recog_data.operand_loc[i]); 4297 } 4298 4299 /* If this insn pattern contains any MATCH_DUP's, make sure that 4300 they will be substituted if the operands they match are substituted. 4301 Also apply now any substitutions we already made on the operands. 4302 4303 Don't do this if we aren't making replacements because we might be 4304 propagating things allocated by frame pointer elimination into places 4305 it doesn't expect. */ 4306 4307 if (insn_code_number >= 0 && replace) 4308 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--) 4309 { 4310 int opno = recog_data.dup_num[i]; 4311 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno]; 4312 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]); 4313 } 4314 4315 #if 0 4316 /* This loses because reloading of prior insns can invalidate the equivalence 4317 (or at least find_equiv_reg isn't smart enough to find it any more), 4318 causing this insn to need more reload regs than it needed before. 4319 It may be too late to make the reload regs available. 4320 Now this optimization is done safely in choose_reload_regs. */ 4321 4322 /* For each reload of a reg into some other class of reg, 4323 search for an existing equivalent reg (same value now) in the right class. 4324 We can use it as long as we don't need to change its contents. */ 4325 for (i = 0; i < n_reloads; i++) 4326 if (rld[i].reg_rtx == 0 4327 && rld[i].in != 0 4328 && REG_P (rld[i].in) 4329 && rld[i].out == 0) 4330 { 4331 rld[i].reg_rtx 4332 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1, 4333 static_reload_reg_p, 0, rld[i].inmode); 4334 /* Prevent generation of insn to load the value 4335 because the one we found already has the value. */ 4336 if (rld[i].reg_rtx) 4337 rld[i].in = rld[i].reg_rtx; 4338 } 4339 #endif 4340 4341 /* If we detected an error and replaced the asm instruction with a USE, forget about the 4342 reloads. */ 4343 if (GET_CODE (PATTERN (insn)) == USE 4344 && CONST_INT_P (XEXP (PATTERN (insn), 0))) 4345 n_reloads = 0; 4346 4347 /* Perhaps an output reload can be combined with another 4348 to reduce needs by one. */ 4349 if (!goal_earlyclobber) 4350 combine_reloads (); 4351 4352 /* If we have a pair of reloads for parts of an address, they are reloading 4353 the same object, the operands themselves were not reloaded, and they 4354 are for two operands that are supposed to match, merge the reloads and 4355 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS.
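(For example, two operands tied by a matching constraint can both be MEMs with the same address and can each have picked up a reload for that address; keeping both would tie up two reload registers for one address computation, so the later reload is folded into the earlier one and its recorded replacements are redirected to it.)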
*/ 4356 4357 for (i = 0; i < n_reloads; i++) 4358 { 4359 int k; 4360 4361 for (j = i + 1; j < n_reloads; j++) 4362 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS 4363 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS 4364 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS 4365 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS) 4366 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS 4367 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS 4368 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS 4369 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS) 4370 && rtx_equal_p (rld[i].in, rld[j].in) 4371 && (operand_reloadnum[rld[i].opnum] < 0 4372 || rld[operand_reloadnum[rld[i].opnum]].optional) 4373 && (operand_reloadnum[rld[j].opnum] < 0 4374 || rld[operand_reloadnum[rld[j].opnum]].optional) 4375 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum 4376 || (goal_alternative_matches[rld[j].opnum] 4377 == rld[i].opnum))) 4378 { 4379 for (k = 0; k < n_replacements; k++) 4380 if (replacements[k].what == j) 4381 replacements[k].what = i; 4382 4383 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS 4384 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS) 4385 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR; 4386 else 4387 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS; 4388 rld[j].in = 0; 4389 } 4390 } 4391 4392 /* Scan all the reloads and update their type. 4393 If a reload is for the address of an operand and we didn't reload 4394 that operand, change the type. Similarly, change the operand number 4395 of a reload when two operands match. If a reload is optional, treat it 4396 as though the operand isn't reloaded. 4397 4398 ??? This latter case is somewhat odd because if we do the optional 4399 reload, it means the object is hanging around. Thus we need only 4400 do the address reload if the optional reload was NOT done. 4401 4402 Change secondary reloads to be the address type of their operand, not 4403 the normal type. 4404 4405 If an operand's reload is now RELOAD_OTHER, change any 4406 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to 4407 RELOAD_FOR_OTHER_ADDRESS. */ 4408 4409 for (i = 0; i < n_reloads; i++) 4410 { 4411 if (rld[i].secondary_p 4412 && rld[i].when_needed == operand_type[rld[i].opnum]) 4413 rld[i].when_needed = address_type[rld[i].opnum]; 4414 4415 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS 4416 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS 4417 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS 4418 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS) 4419 && (operand_reloadnum[rld[i].opnum] < 0 4420 || rld[operand_reloadnum[rld[i].opnum]].optional)) 4421 { 4422 /* If we have a secondary reload to go along with this reload, 4423 change its type to RELOAD_FOR_OPADDR_ADDR. */ 4424 4425 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS 4426 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS) 4427 && rld[i].secondary_in_reload != -1) 4428 { 4429 int secondary_in_reload = rld[i].secondary_in_reload; 4430 4431 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR; 4432 4433 /* If there's a tertiary reload we have to change it also. 
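(A secondary reload is the extra intermediate register some targets require when copying between particular register classes, and that intermediate copy can in turn need yet another register, the tertiary reload; they are all retyped together here so that their lifetimes stay consistent with the reload they serve.)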
*/ 4434 if (secondary_in_reload > 0 4435 && rld[secondary_in_reload].secondary_in_reload != -1) 4436 rld[rld[secondary_in_reload].secondary_in_reload].when_needed 4437 = RELOAD_FOR_OPADDR_ADDR; 4438 } 4439 4440 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS 4441 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS) 4442 && rld[i].secondary_out_reload != -1) 4443 { 4444 int secondary_out_reload = rld[i].secondary_out_reload; 4445 4446 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR; 4447 4448 /* If there's a tertiary reload we have to change it also. */ 4449 if (secondary_out_reload 4450 && rld[secondary_out_reload].secondary_out_reload != -1) 4451 rld[rld[secondary_out_reload].secondary_out_reload].when_needed 4452 = RELOAD_FOR_OPADDR_ADDR; 4453 } 4454 4455 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS 4456 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS) 4457 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR; 4458 else 4459 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS; 4460 } 4461 4462 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS 4463 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS) 4464 && operand_reloadnum[rld[i].opnum] >= 0 4465 && (rld[operand_reloadnum[rld[i].opnum]].when_needed 4466 == RELOAD_OTHER)) 4467 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS; 4468 4469 if (goal_alternative_matches[rld[i].opnum] >= 0) 4470 rld[i].opnum = goal_alternative_matches[rld[i].opnum]; 4471 } 4472 4473 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads. 4474 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR 4475 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads. 4476 4477 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never 4478 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a 4479 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads. 4480 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload, 4481 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all 4482 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it. 4483 This is complicated by the fact that a single operand can have more 4484 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix 4485 choose_reload_regs without affecting code quality, and cases that 4486 actually fail are extremely rare, so it turns out to be better to fix 4487 the problem here by not generating cases that choose_reload_regs will 4488 fail for. */ 4489 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS / 4490 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for 4491 a single operand. 4492 We can reduce the register pressure by exploiting that a 4493 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads 4494 does not conflict with any of them, if it is only used for the first of 4495 the RELOAD_FOR_X_ADDRESS reloads. */ 4496 { 4497 int first_op_addr_num = -2; 4498 int first_inpaddr_num[MAX_RECOG_OPERANDS]; 4499 int first_outpaddr_num[MAX_RECOG_OPERANDS]; 4500 int need_change = 0; 4501 /* We use first_op_addr_num and the contents of the above arrays 4502 first as flags: -2 means no instance encountered, -1 means exactly 4503 one instance encountered. 4504 If more than one instance has been encountered, we store the reload 4505 number of the first reload of the kind in question; reload numbers 4506 are known to be non-negative.
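(Worked through: a counter starts at -2, the first instance of its kind bumps it to -1, and only a second instance trips the >= 0 test below, at which point the counter is overwritten with that reload's index; since the scan runs from the last reload down to the first, two RELOAD_FOR_OPERAND_ADDRESS reloads numbered, say, 3 and 5 leave first_op_addr_num == 3 with need_change set.)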
*/ 4507 for (i = 0; i < noperands; i++) 4508 first_inpaddr_num[i] = first_outpaddr_num[i] = -2; 4509 for (i = n_reloads - 1; i >= 0; i--) 4510 { 4511 switch (rld[i].when_needed) 4512 { 4513 case RELOAD_FOR_OPERAND_ADDRESS: 4514 if (++first_op_addr_num >= 0) 4515 { 4516 first_op_addr_num = i; 4517 need_change = 1; 4518 } 4519 break; 4520 case RELOAD_FOR_INPUT_ADDRESS: 4521 if (++first_inpaddr_num[rld[i].opnum] >= 0) 4522 { 4523 first_inpaddr_num[rld[i].opnum] = i; 4524 need_change = 1; 4525 } 4526 break; 4527 case RELOAD_FOR_OUTPUT_ADDRESS: 4528 if (++first_outpaddr_num[rld[i].opnum] >= 0) 4529 { 4530 first_outpaddr_num[rld[i].opnum] = i; 4531 need_change = 1; 4532 } 4533 break; 4534 default: 4535 break; 4536 } 4537 } 4538 4539 if (need_change) 4540 { 4541 for (i = 0; i < n_reloads; i++) 4542 { 4543 int first_num; 4544 enum reload_type type; 4545 4546 switch (rld[i].when_needed) 4547 { 4548 case RELOAD_FOR_OPADDR_ADDR: 4549 first_num = first_op_addr_num; 4550 type = RELOAD_FOR_OPERAND_ADDRESS; 4551 break; 4552 case RELOAD_FOR_INPADDR_ADDRESS: 4553 first_num = first_inpaddr_num[rld[i].opnum]; 4554 type = RELOAD_FOR_INPUT_ADDRESS; 4555 break; 4556 case RELOAD_FOR_OUTADDR_ADDRESS: 4557 first_num = first_outpaddr_num[rld[i].opnum]; 4558 type = RELOAD_FOR_OUTPUT_ADDRESS; 4559 break; 4560 default: 4561 continue; 4562 } 4563 if (first_num < 0) 4564 continue; 4565 else if (i > first_num) 4566 rld[i].when_needed = type; 4567 else 4568 { 4569 /* Check if the only TYPE reload that uses reload I is 4570 reload FIRST_NUM. */ 4571 for (j = n_reloads - 1; j > first_num; j--) 4572 { 4573 if (rld[j].when_needed == type 4574 && (rld[i].secondary_p 4575 ? rld[j].secondary_in_reload == i 4576 : reg_mentioned_p (rld[i].in, rld[j].in))) 4577 { 4578 rld[i].when_needed = type; 4579 break; 4580 } 4581 } 4582 } 4583 } 4584 } 4585 } 4586 4587 /* See if we have any reloads that are now allowed to be merged 4588 because we've changed when the reload is needed to 4589 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only 4590 check for the most common cases. */ 4591 4592 for (i = 0; i < n_reloads; i++) 4593 if (rld[i].in != 0 && rld[i].out == 0 4594 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS 4595 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR 4596 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS)) 4597 for (j = 0; j < n_reloads; j++) 4598 if (i != j && rld[j].in != 0 && rld[j].out == 0 4599 && rld[j].when_needed == rld[i].when_needed 4600 && MATCHES (rld[i].in, rld[j].in) 4601 && rld[i].rclass == rld[j].rclass 4602 && !rld[i].nocombine && !rld[j].nocombine 4603 && rld[i].reg_rtx == rld[j].reg_rtx) 4604 { 4605 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum); 4606 transfer_replacements (i, j); 4607 rld[j].in = 0; 4608 } 4609 4610 #ifdef HAVE_cc0 4611 /* If we made any reloads for addresses, see if they violate a 4612 "no input reloads" requirement for this insn. But loads that we 4613 do after the insn (such as for output addresses) are fine. */ 4614 if (no_input_reloads) 4615 for (i = 0; i < n_reloads; i++) 4616 gcc_assert (rld[i].in == 0 4617 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS 4618 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS); 4619 #endif 4620 4621 /* Compute reload_mode and reload_nregs. */ 4622 for (i = 0; i < n_reloads; i++) 4623 { 4624 rld[i].mode 4625 = (rld[i].inmode == VOIDmode 4626 || (GET_MODE_SIZE (rld[i].outmode) 4627 > GET_MODE_SIZE (rld[i].inmode))) 4628 ? 
rld[i].outmode : rld[i].inmode; 4629 4630 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode]; 4631 } 4632 4633 /* Special case a simple move with an input reload and a 4634 destination of a hard reg, if the hard reg is ok, use it. */ 4635 for (i = 0; i < n_reloads; i++) 4636 if (rld[i].when_needed == RELOAD_FOR_INPUT 4637 && GET_CODE (PATTERN (insn)) == SET 4638 && REG_P (SET_DEST (PATTERN (insn))) 4639 && (SET_SRC (PATTERN (insn)) == rld[i].in 4640 || SET_SRC (PATTERN (insn)) == rld[i].in_reg) 4641 && !elimination_target_reg_p (SET_DEST (PATTERN (insn)))) 4642 { 4643 rtx dest = SET_DEST (PATTERN (insn)); 4644 unsigned int regno = REGNO (dest); 4645 4646 if (regno < FIRST_PSEUDO_REGISTER 4647 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno) 4648 && HARD_REGNO_MODE_OK (regno, rld[i].mode)) 4649 { 4650 int nr = hard_regno_nregs[regno][rld[i].mode]; 4651 int ok = 1, nri; 4652 4653 for (nri = 1; nri < nr; nri ++) 4654 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri)) 4655 ok = 0; 4656 4657 if (ok) 4658 rld[i].reg_rtx = dest; 4659 } 4660 } 4661 4662 return retval; 4663 } 4664 4665 /* Return true if alternative number ALTNUM in constraint-string 4666 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference. 4667 MEM gives the reference if it didn't need any reloads, otherwise it 4668 is null. */ 4669 4670 static bool 4671 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED, 4672 const char *constraint, int altnum) 4673 { 4674 int c; 4675 4676 /* Skip alternatives before the one requested. */ 4677 while (altnum > 0) 4678 { 4679 while (*constraint++ != ',') 4680 ; 4681 altnum--; 4682 } 4683 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'. 4684 If one of them is present, this alternative accepts the result of 4685 passing a constant-pool reference through find_reloads_toplev. 4686 4687 The same is true of extra memory constraints if the address 4688 was reloaded into a register. However, the target may elect 4689 to disallow the original constant address, forcing it to be 4690 reloaded into a register instead. */ 4691 for (; (c = *constraint) && c != ',' && c != '#'; 4692 constraint += CONSTRAINT_LEN (c, constraint)) 4693 { 4694 if (c == TARGET_MEM_CONSTRAINT || c == 'o') 4695 return true; 4696 #ifdef EXTRA_CONSTRAINT_STR 4697 if (EXTRA_MEMORY_CONSTRAINT (c, constraint) 4698 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint))) 4699 return true; 4700 #endif 4701 } 4702 return false; 4703 } 4704 4705 /* Scan X for memory references and scan the addresses for reloading. 4706 Also checks for references to "constant" regs that we want to eliminate 4707 and replaces them with the values they stand for. 4708 We may alter X destructively if it contains a reference to such. 4709 If X is just a constant reg, we return the equivalent value 4710 instead of X. 4711 4712 IND_LEVELS says how many levels of indirect addressing this machine 4713 supports. 4714 4715 OPNUM and TYPE identify the purpose of the reload. 4716 4717 IS_SET_DEST is true if X is the destination of a SET, which is not 4718 appropriate to be replaced by a constant. 4719 4720 INSN, if nonzero, is the insn in which we do the reload. It is used 4721 to determine if we may generate output reloads, and where to put USEs 4722 for pseudos that we have to replace with stack slots. 4723 4724 ADDRESS_RELOADED. If nonzero, is a pointer to where we put the 4725 result of find_reloads_address. 
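The value stored there follows the convention documented at find_reloads_address below: 1 if the address was reloaded or replaced as a whole, 0 if its top level was left alone, and -1 if it may or may not have been reloaded or replaced.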
*/ 4726 4727 static rtx 4728 find_reloads_toplev (rtx x, int opnum, enum reload_type type, 4729 int ind_levels, int is_set_dest, rtx insn, 4730 int *address_reloaded) 4731 { 4732 RTX_CODE code = GET_CODE (x); 4733 4734 const char *fmt = GET_RTX_FORMAT (code); 4735 int i; 4736 int copied; 4737 4738 if (code == REG) 4739 { 4740 /* This code is duplicated for speed in find_reloads. */ 4741 int regno = REGNO (x); 4742 if (reg_equiv_constant (regno) != 0 && !is_set_dest) 4743 x = reg_equiv_constant (regno); 4744 #if 0 4745 /* This creates (subreg (mem...)) which would cause an unnecessary 4746 reload of the mem. */ 4747 else if (reg_equiv_mem (regno) != 0) 4748 x = reg_equiv_mem (regno); 4749 #endif 4750 else if (reg_equiv_memory_loc (regno) 4751 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)) 4752 { 4753 rtx mem = make_memloc (x, regno); 4754 if (reg_equiv_address (regno) 4755 || ! rtx_equal_p (mem, reg_equiv_mem (regno))) 4756 { 4757 /* If this is not a toplevel operand, find_reloads doesn't see 4758 this substitution. We have to emit a USE of the pseudo so 4759 that delete_output_reload can see it. */ 4760 if (replace_reloads && recog_data.operand[opnum] != x) 4761 /* We mark the USE with QImode so that we recognize it 4762 as one that can be safely deleted at the end of 4763 reload. */ 4764 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn), 4765 QImode); 4766 x = mem; 4767 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0), 4768 opnum, type, ind_levels, insn); 4769 if (!rtx_equal_p (x, mem)) 4770 push_reg_equiv_alt_mem (regno, x); 4771 if (address_reloaded) 4772 *address_reloaded = i; 4773 } 4774 } 4775 return x; 4776 } 4777 if (code == MEM) 4778 { 4779 rtx tem = x; 4780 4781 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0), 4782 opnum, type, ind_levels, insn); 4783 if (address_reloaded) 4784 *address_reloaded = i; 4785 4786 return tem; 4787 } 4788 4789 if (code == SUBREG && REG_P (SUBREG_REG (x))) 4790 { 4791 /* Check for SUBREG containing a REG that's equivalent to a 4792 constant. If the constant has a known value, truncate it 4793 right now. Similarly if we are extracting a single-word of a 4794 multi-word constant. If the constant is symbolic, allow it 4795 to be substituted normally. push_reload will strip the 4796 subreg later. The constant must not be VOIDmode, because we 4797 will lose the mode of the register (this should never happen 4798 because one of the cases above should handle it). */ 4799 4800 int regno = REGNO (SUBREG_REG (x)); 4801 rtx tem; 4802 4803 if (regno >= FIRST_PSEUDO_REGISTER 4804 && reg_renumber[regno] < 0 4805 && reg_equiv_constant (regno) != 0) 4806 { 4807 tem = 4808 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno), 4809 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x)); 4810 gcc_assert (tem); 4811 if (CONSTANT_P (tem) 4812 && !targetm.legitimate_constant_p (GET_MODE (x), tem)) 4813 { 4814 tem = force_const_mem (GET_MODE (x), tem); 4815 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0), 4816 &XEXP (tem, 0), opnum, type, 4817 ind_levels, insn); 4818 if (address_reloaded) 4819 *address_reloaded = i; 4820 } 4821 return tem; 4822 } 4823 4824 /* If the subreg contains a reg that will be converted to a mem, 4825 attempt to convert the whole subreg to a (narrower or wider) 4826 memory reference instead. If this succeeds, we're done -- 4827 otherwise fall through to check whether the inner reg still 4828 needs address reloads anyway. 
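(For example, (subreg:SI (reg:DI P) 0), where the pseudo P lives in a stack slot, can often be rewritten as an SImode MEM at the appropriate offset within that slot, avoiding a reload of the whole DImode value just to use one word of it.)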
*/ 4829 4830 if (regno >= FIRST_PSEUDO_REGISTER 4831 && reg_equiv_memory_loc (regno) != 0) 4832 { 4833 tem = find_reloads_subreg_address (x, opnum, type, ind_levels, 4834 insn, address_reloaded); 4835 if (tem) 4836 return tem; 4837 } 4838 } 4839 4840 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 4841 { 4842 if (fmt[i] == 'e') 4843 { 4844 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type, 4845 ind_levels, is_set_dest, insn, 4846 address_reloaded); 4847 /* If we have replaced a reg with it's equivalent memory loc - 4848 that can still be handled here e.g. if it's in a paradoxical 4849 subreg - we must make the change in a copy, rather than using 4850 a destructive change. This way, find_reloads can still elect 4851 not to do the change. */ 4852 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied) 4853 { 4854 x = shallow_copy_rtx (x); 4855 copied = 1; 4856 } 4857 XEXP (x, i) = new_part; 4858 } 4859 } 4860 return x; 4861 } 4862 4863 /* Return a mem ref for the memory equivalent of reg REGNO. 4864 This mem ref is not shared with anything. */ 4865 4866 static rtx 4867 make_memloc (rtx ad, int regno) 4868 { 4869 /* We must rerun eliminate_regs, in case the elimination 4870 offsets have changed. */ 4871 rtx tem 4872 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX), 4873 0); 4874 4875 /* If TEM might contain a pseudo, we must copy it to avoid 4876 modifying it when we do the substitution for the reload. */ 4877 if (rtx_varies_p (tem, 0)) 4878 tem = copy_rtx (tem); 4879 4880 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem); 4881 tem = adjust_address_nv (tem, GET_MODE (ad), 0); 4882 4883 /* Copy the result if it's still the same as the equivalence, to avoid 4884 modifying it when we do the substitution for the reload. */ 4885 if (tem == reg_equiv_memory_loc (regno)) 4886 tem = copy_rtx (tem); 4887 return tem; 4888 } 4889 4890 /* Returns true if AD could be turned into a valid memory reference 4891 to mode MODE in address space AS by reloading the part pointed to 4892 by PART into a register. */ 4893 4894 static int 4895 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad, 4896 addr_space_t as, rtx *part) 4897 { 4898 int retv; 4899 rtx tem = *part; 4900 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ()); 4901 4902 *part = reg; 4903 retv = memory_address_addr_space_p (mode, ad, as); 4904 *part = tem; 4905 4906 return retv; 4907 } 4908 4909 /* Record all reloads needed for handling memory address AD 4910 which appears in *LOC in a memory reference to mode MODE 4911 which itself is found in location *MEMREFLOC. 4912 Note that we take shortcuts assuming that no multi-reg machine mode 4913 occurs as part of an address. 4914 4915 OPNUM and TYPE specify the purpose of this reload. 4916 4917 IND_LEVELS says how many levels of indirect addressing this machine 4918 supports. 4919 4920 INSN, if nonzero, is the insn in which we do the reload. It is used 4921 to determine if we may generate output reloads, and where to put USEs 4922 for pseudos that we have to replace with stack slots. 4923 4924 Value is one if this address is reloaded or replaced as a whole; it is 4925 zero if the top level of this address was not reloaded or replaced, and 4926 it is -1 if it may or may not have been reloaded or replaced. 4927 4928 Note that there is no verification that the address will be valid after 4929 this routine does its work. Instead, we rely on the fact that the address 4930 was valid when reload started. 
So we need only undo things that reload 4931 could have broken. These are wrong register types, pseudos not allocated 4932 to a hard register, and frame pointer elimination. */ 4933 4934 static int 4935 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad, 4936 rtx *loc, int opnum, enum reload_type type, 4937 int ind_levels, rtx insn) 4938 { 4939 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc) 4940 : ADDR_SPACE_GENERIC; 4941 int regno; 4942 int removed_and = 0; 4943 int op_index; 4944 rtx tem; 4945 4946 /* If the address is a register, see if it is a legitimate address and 4947 reload if not. We first handle the cases where we need not reload 4948 or where we must reload in a non-standard way. */ 4949 4950 if (REG_P (ad)) 4951 { 4952 regno = REGNO (ad); 4953 4954 if (reg_equiv_constant (regno) != 0) 4955 { 4956 find_reloads_address_part (reg_equiv_constant (regno), loc, 4957 base_reg_class (mode, as, MEM, SCRATCH), 4958 GET_MODE (ad), opnum, type, ind_levels); 4959 return 1; 4960 } 4961 4962 tem = reg_equiv_memory_loc (regno); 4963 if (tem != 0) 4964 { 4965 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset) 4966 { 4967 tem = make_memloc (ad, regno); 4968 if (! strict_memory_address_addr_space_p (GET_MODE (tem), 4969 XEXP (tem, 0), 4970 MEM_ADDR_SPACE (tem))) 4971 { 4972 rtx orig = tem; 4973 4974 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0), 4975 &XEXP (tem, 0), opnum, 4976 ADDR_TYPE (type), ind_levels, insn); 4977 if (!rtx_equal_p (tem, orig)) 4978 push_reg_equiv_alt_mem (regno, tem); 4979 } 4980 /* We can avoid a reload if the register's equivalent memory 4981 expression is valid as an indirect memory address. 4982 But not all addresses are valid in a mem used as an indirect 4983 address: only reg or reg+constant. */ 4984 4985 if (ind_levels > 0 4986 && strict_memory_address_addr_space_p (mode, tem, as) 4987 && (REG_P (XEXP (tem, 0)) 4988 || (GET_CODE (XEXP (tem, 0)) == PLUS 4989 && REG_P (XEXP (XEXP (tem, 0), 0)) 4990 && CONSTANT_P (XEXP (XEXP (tem, 0), 1))))) 4991 { 4992 /* TEM is not the same as what we'll be replacing the 4993 pseudo with after reload, put a USE in front of INSN 4994 in the final reload pass. */ 4995 if (replace_reloads 4996 && num_not_at_initial_offset 4997 && ! rtx_equal_p (tem, reg_equiv_mem (regno))) 4998 { 4999 *loc = tem; 5000 /* We mark the USE with QImode so that we 5001 recognize it as one that can be safely 5002 deleted at the end of reload. */ 5003 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), 5004 insn), QImode); 5005 5006 /* This doesn't really count as replacing the address 5007 as a whole, since it is still a memory access. */ 5008 } 5009 return 0; 5010 } 5011 ad = tem; 5012 } 5013 } 5014 5015 /* The only remaining case where we can avoid a reload is if this is a 5016 hard register that is valid as a base register and which is not the 5017 subject of a CLOBBER in this insn. */ 5018 5019 else if (regno < FIRST_PSEUDO_REGISTER 5020 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH) 5021 && ! regno_clobbered_p (regno, this_insn, mode, 0)) 5022 return 0; 5023 5024 /* If we do not have one of the cases above, we must do the reload. */ 5025 push_reload (ad, NULL_RTX, loc, (rtx*) 0, 5026 base_reg_class (mode, as, MEM, SCRATCH), 5027 GET_MODE (ad), VOIDmode, 0, 0, opnum, type); 5028 return 1; 5029 } 5030 5031 if (strict_memory_address_addr_space_p (mode, ad, as)) 5032 { 5033 /* The address appears valid, so reloads are not needed. 5034 But the address may contain an eliminable register. 
5035 This can happen because a machine with indirect addressing 5036 may consider a pseudo register by itself a valid address even when 5037 it has failed to get a hard reg. 5038 So do a tree-walk to find and eliminate all such regs. */ 5039 5040 /* But first quickly dispose of a common case. */ 5041 if (GET_CODE (ad) == PLUS 5042 && CONST_INT_P (XEXP (ad, 1)) 5043 && REG_P (XEXP (ad, 0)) 5044 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0) 5045 return 0; 5046 5047 subst_reg_equivs_changed = 0; 5048 *loc = subst_reg_equivs (ad, insn); 5049 5050 if (! subst_reg_equivs_changed) 5051 return 0; 5052 5053 /* Check result for validity after substitution. */ 5054 if (strict_memory_address_addr_space_p (mode, ad, as)) 5055 return 0; 5056 } 5057 5058 #ifdef LEGITIMIZE_RELOAD_ADDRESS 5059 do 5060 { 5061 if (memrefloc && ADDR_SPACE_GENERIC_P (as)) 5062 { 5063 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type, 5064 ind_levels, win); 5065 } 5066 break; 5067 win: 5068 *memrefloc = copy_rtx (*memrefloc); 5069 XEXP (*memrefloc, 0) = ad; 5070 move_replacements (&ad, &XEXP (*memrefloc, 0)); 5071 return -1; 5072 } 5073 while (0); 5074 #endif 5075 5076 /* The address is not valid. We have to figure out why. First see if 5077 we have an outer AND and remove it if so. Then analyze what's inside. */ 5078 5079 if (GET_CODE (ad) == AND) 5080 { 5081 removed_and = 1; 5082 loc = &XEXP (ad, 0); 5083 ad = *loc; 5084 } 5085 5086 /* One possibility for why the address is invalid is that it is itself 5087 a MEM. This can happen when the frame pointer is being eliminated, a 5088 pseudo is not allocated to a hard register, and the offset between the 5089 frame and stack pointers is not its initial value. In that case the 5090 pseudo will have been replaced by a MEM referring to the 5091 stack pointer. */ 5092 if (MEM_P (ad)) 5093 { 5094 /* First ensure that the address in this MEM is valid. Then, unless 5095 indirect addresses are valid, reload the MEM into a register. */ 5096 tem = ad; 5097 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0), 5098 opnum, ADDR_TYPE (type), 5099 ind_levels == 0 ? 0 : ind_levels - 1, insn); 5100 5101 /* If tem was changed, then we must create a new memory reference to 5102 hold it and store it back into memrefloc. */ 5103 if (tem != ad && memrefloc) 5104 { 5105 *memrefloc = copy_rtx (*memrefloc); 5106 copy_replacements (tem, XEXP (*memrefloc, 0)); 5107 loc = &XEXP (*memrefloc, 0); 5108 if (removed_and) 5109 loc = &XEXP (*loc, 0); 5110 } 5111 5112 /* Check similar cases as for indirect addresses as above except 5113 that we can allow pseudos and a MEM since they should have been 5114 taken care of above. */ 5115 5116 if (ind_levels == 0 5117 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok) 5118 || MEM_P (XEXP (tem, 0)) 5119 || ! (REG_P (XEXP (tem, 0)) 5120 || (GET_CODE (XEXP (tem, 0)) == PLUS 5121 && REG_P (XEXP (XEXP (tem, 0), 0)) 5122 && CONST_INT_P (XEXP (XEXP (tem, 0), 1))))) 5123 { 5124 /* Must use TEM here, not AD, since it is the one that will 5125 have any subexpressions reloaded, if needed. */ 5126 push_reload (tem, NULL_RTX, loc, (rtx*) 0, 5127 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem), 5128 VOIDmode, 0, 5129 0, opnum, type); 5130 return ! removed_and; 5131 } 5132 else 5133 return 0; 5134 } 5135 5136 /* If we have address of a stack slot but it's not valid because the 5137 displacement is too large, compute the sum in a register. 
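For instance, after frame pointer elimination such an address may look like (plus (reg sp) (const_int 4100)); if that displacement is out of range for the machine, either the displacement or the whole sum has to be put into a reload register, as done below.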
5138 Handle all base registers here, not just fp/ap/sp, because on some 5139 targets (namely SH) we can also get too large displacements from 5140 big-endian corrections. */ 5141 else if (GET_CODE (ad) == PLUS 5142 && REG_P (XEXP (ad, 0)) 5143 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER 5144 && CONST_INT_P (XEXP (ad, 1)) 5145 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS, 5146 CONST_INT) 5147 /* Similarly, if we were to reload the base register and the 5148 mem+offset address is still invalid, then we want to reload 5149 the whole address, not just the base register. */ 5150 || ! maybe_memory_address_addr_space_p 5151 (mode, ad, as, &(XEXP (ad, 0))))) 5152 5153 { 5154 /* Unshare the MEM rtx so we can safely alter it. */ 5155 if (memrefloc) 5156 { 5157 *memrefloc = copy_rtx (*memrefloc); 5158 loc = &XEXP (*memrefloc, 0); 5159 if (removed_and) 5160 loc = &XEXP (*loc, 0); 5161 } 5162 5163 if (double_reg_address_ok 5164 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, 5165 PLUS, CONST_INT)) 5166 { 5167 /* Unshare the sum as well. */ 5168 *loc = ad = copy_rtx (ad); 5169 5170 /* Reload the displacement into an index reg. 5171 We assume the frame pointer or arg pointer is a base reg. */ 5172 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1), 5173 INDEX_REG_CLASS, GET_MODE (ad), opnum, 5174 type, ind_levels); 5175 return 0; 5176 } 5177 else 5178 { 5179 /* If the sum of two regs is not necessarily valid, 5180 reload the sum into a base reg. 5181 That will at least work. */ 5182 find_reloads_address_part (ad, loc, 5183 base_reg_class (mode, as, MEM, SCRATCH), 5184 GET_MODE (ad), opnum, type, ind_levels); 5185 } 5186 return ! removed_and; 5187 } 5188 5189 /* If we have an indexed stack slot, there are three possible reasons why 5190 it might be invalid: The index might need to be reloaded, the address 5191 might have been made by frame pointer elimination and hence have a 5192 constant out of range, or both reasons might apply. 5193 5194 We can easily check for an index needing reload, but even if that is the 5195 case, we might also have an invalid constant. To avoid making the 5196 conservative assumption and requiring two reloads, we see if this address 5197 is valid when not interpreted strictly. If it is, the only problem is 5198 that the index needs a reload and find_reloads_address_1 will take care 5199 of it. 5200 5201 Handle all base registers here, not just fp/ap/sp, because on some 5202 targets (namely SPARC) we can also get invalid addresses from preventive 5203 subreg big-endian corrections made by find_reloads_toplev. We 5204 can also get expressions involving LO_SUM (rather than PLUS) from 5205 find_reloads_subreg_address. 5206 5207 If we decide to do something, it must be that `double_reg_address_ok' 5208 is true. We generate a reload of the base register + constant and 5209 rework the sum so that the reload register will be added to the index. 5210 This is safe because we know the address isn't shared. 5211 5212 We check for the base register as both the first and second operand of 5213 the innermost PLUS and/or LO_SUM. 
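As a concrete illustration, taking the base to be the first operand, (plus (plus (reg base) (reg index)) (const_int C)) is reworked into (plus (reg R) (reg index)), where the reload register R is loaded with the base register plus C.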
*/ 5214 5215 for (op_index = 0; op_index < 2; ++op_index) 5216 { 5217 rtx operand, addend; 5218 enum rtx_code inner_code; 5219 5220 if (GET_CODE (ad) != PLUS) 5221 continue; 5222 5223 inner_code = GET_CODE (XEXP (ad, 0)); 5224 if (!(GET_CODE (ad) == PLUS 5225 && CONST_INT_P (XEXP (ad, 1)) 5226 && (inner_code == PLUS || inner_code == LO_SUM))) 5227 continue; 5228 5229 operand = XEXP (XEXP (ad, 0), op_index); 5230 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER) 5231 continue; 5232 5233 addend = XEXP (XEXP (ad, 0), 1 - op_index); 5234 5235 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code, 5236 GET_CODE (addend)) 5237 || operand == frame_pointer_rtx 5238 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER 5239 || operand == hard_frame_pointer_rtx 5240 #endif 5241 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM 5242 || operand == arg_pointer_rtx 5243 #endif 5244 || operand == stack_pointer_rtx) 5245 && ! maybe_memory_address_addr_space_p 5246 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index))) 5247 { 5248 rtx offset_reg; 5249 enum reg_class cls; 5250 5251 offset_reg = plus_constant (GET_MODE (ad), operand, 5252 INTVAL (XEXP (ad, 1))); 5253 5254 /* Form the adjusted address. */ 5255 if (GET_CODE (XEXP (ad, 0)) == PLUS) 5256 ad = gen_rtx_PLUS (GET_MODE (ad), 5257 op_index == 0 ? offset_reg : addend, 5258 op_index == 0 ? addend : offset_reg); 5259 else 5260 ad = gen_rtx_LO_SUM (GET_MODE (ad), 5261 op_index == 0 ? offset_reg : addend, 5262 op_index == 0 ? addend : offset_reg); 5263 *loc = ad; 5264 5265 cls = base_reg_class (mode, as, MEM, GET_CODE (addend)); 5266 find_reloads_address_part (XEXP (ad, op_index), 5267 &XEXP (ad, op_index), cls, 5268 GET_MODE (ad), opnum, type, ind_levels); 5269 find_reloads_address_1 (mode, as, 5270 XEXP (ad, 1 - op_index), 1, GET_CODE (ad), 5271 GET_CODE (XEXP (ad, op_index)), 5272 &XEXP (ad, 1 - op_index), opnum, 5273 type, 0, insn); 5274 5275 return 0; 5276 } 5277 } 5278 5279 /* See if address becomes valid when an eliminable register 5280 in a sum is replaced. */ 5281 5282 tem = ad; 5283 if (GET_CODE (ad) == PLUS) 5284 tem = subst_indexed_address (ad); 5285 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as)) 5286 { 5287 /* Ok, we win that way. Replace any additional eliminable 5288 registers. */ 5289 5290 subst_reg_equivs_changed = 0; 5291 tem = subst_reg_equivs (tem, insn); 5292 5293 /* Make sure that didn't make the address invalid again. */ 5294 5295 if (! subst_reg_equivs_changed 5296 || strict_memory_address_addr_space_p (mode, tem, as)) 5297 { 5298 *loc = tem; 5299 return 0; 5300 } 5301 } 5302 5303 /* If constants aren't valid addresses, reload the constant address 5304 into a register. */ 5305 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as)) 5306 { 5307 enum machine_mode address_mode = GET_MODE (ad); 5308 if (address_mode == VOIDmode) 5309 address_mode = targetm.addr_space.address_mode (as); 5310 5311 /* If AD is an address in the constant pool, the MEM rtx may be shared. 5312 Unshare it so we can safely alter it. */ 5313 if (memrefloc && GET_CODE (ad) == SYMBOL_REF 5314 && CONSTANT_POOL_ADDRESS_P (ad)) 5315 { 5316 *memrefloc = copy_rtx (*memrefloc); 5317 loc = &XEXP (*memrefloc, 0); 5318 if (removed_and) 5319 loc = &XEXP (*loc, 0); 5320 } 5321 5322 find_reloads_address_part (ad, loc, 5323 base_reg_class (mode, as, MEM, SCRATCH), 5324 address_mode, opnum, type, ind_levels); 5325 return ! 
removed_and; 5326 } 5327 5328 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc, 5329 opnum, type, ind_levels, insn); 5330 } 5331 5332 /* Find all pseudo regs appearing in AD 5333 that are eliminable in favor of equivalent values 5334 and do not have hard regs; replace them by their equivalents. 5335 INSN, if nonzero, is the insn in which we do the reload. We put USEs in 5336 front of it for pseudos that we have to replace with stack slots. */ 5337 5338 static rtx 5339 subst_reg_equivs (rtx ad, rtx insn) 5340 { 5341 RTX_CODE code = GET_CODE (ad); 5342 int i; 5343 const char *fmt; 5344 5345 switch (code) 5346 { 5347 case HIGH: 5348 case CONST: 5349 CASE_CONST_ANY: 5350 case SYMBOL_REF: 5351 case LABEL_REF: 5352 case PC: 5353 case CC0: 5354 return ad; 5355 5356 case REG: 5357 { 5358 int regno = REGNO (ad); 5359 5360 if (reg_equiv_constant (regno) != 0) 5361 { 5362 subst_reg_equivs_changed = 1; 5363 return reg_equiv_constant (regno); 5364 } 5365 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset) 5366 { 5367 rtx mem = make_memloc (ad, regno); 5368 if (! rtx_equal_p (mem, reg_equiv_mem (regno))) 5369 { 5370 subst_reg_equivs_changed = 1; 5371 /* We mark the USE with QImode so that we recognize it 5372 as one that can be safely deleted at the end of 5373 reload. */ 5374 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn), 5375 QImode); 5376 return mem; 5377 } 5378 } 5379 } 5380 return ad; 5381 5382 case PLUS: 5383 /* Quickly dispose of a common case. */ 5384 if (XEXP (ad, 0) == frame_pointer_rtx 5385 && CONST_INT_P (XEXP (ad, 1))) 5386 return ad; 5387 break; 5388 5389 default: 5390 break; 5391 } 5392 5393 fmt = GET_RTX_FORMAT (code); 5394 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 5395 if (fmt[i] == 'e') 5396 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn); 5397 return ad; 5398 } 5399 5400 /* Compute the sum of X and Y, making canonicalizations assumed in an 5401 address, namely: sum constant integers, surround the sum of two 5402 constants with a CONST, put the constant as the second operand, and 5403 group the constant on the outermost sum. 5404 5405 This routine assumes both inputs are already in canonical form. */ 5406 5407 rtx 5408 form_sum (enum machine_mode mode, rtx x, rtx y) 5409 { 5410 rtx tem; 5411 5412 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode); 5413 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode); 5414 5415 if (CONST_INT_P (x)) 5416 return plus_constant (mode, y, INTVAL (x)); 5417 else if (CONST_INT_P (y)) 5418 return plus_constant (mode, x, INTVAL (y)); 5419 else if (CONSTANT_P (x)) 5420 tem = x, x = y, y = tem; 5421 5422 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1))) 5423 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y)); 5424 5425 /* Note that if the operands of Y are specified in the opposite 5426 order in the recursive calls below, infinite recursion will occur. */ 5427 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1))) 5428 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1)); 5429 5430 /* If both constant, encapsulate sum. Otherwise, just form sum. A 5431 constant will have been placed second. 
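For example, form_sum (SImode, (plus (reg) (const_int 4)), (const_int 8)) has already been folded to (plus (reg) (const_int 12)) by the CONST_INT cases above; the CONST wrapper below is only needed when both remaining operands are symbolic constants.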
*/ 5432 if (CONSTANT_P (x) && CONSTANT_P (y)) 5433 { 5434 if (GET_CODE (x) == CONST) 5435 x = XEXP (x, 0); 5436 if (GET_CODE (y) == CONST) 5437 y = XEXP (y, 0); 5438 5439 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y)); 5440 } 5441 5442 return gen_rtx_PLUS (mode, x, y); 5443 } 5444 5445 /* If ADDR is a sum containing a pseudo register that should be 5446 replaced with a constant (from reg_equiv_constant), 5447 return the result of doing so, and also apply the associative 5448 law so that the result is more likely to be a valid address. 5449 (But it is not guaranteed to be one.) 5450 5451 Note that at most one register is replaced, even if more are 5452 replaceable. Also, we try to put the result into a canonical form 5453 so it is more likely to be a valid address. 5454 5455 In all other cases, return ADDR. */ 5456 5457 static rtx 5458 subst_indexed_address (rtx addr) 5459 { 5460 rtx op0 = 0, op1 = 0, op2 = 0; 5461 rtx tem; 5462 int regno; 5463 5464 if (GET_CODE (addr) == PLUS) 5465 { 5466 /* Try to find a register to replace. */ 5467 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0; 5468 if (REG_P (op0) 5469 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER 5470 && reg_renumber[regno] < 0 5471 && reg_equiv_constant (regno) != 0) 5472 op0 = reg_equiv_constant (regno); 5473 else if (REG_P (op1) 5474 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER 5475 && reg_renumber[regno] < 0 5476 && reg_equiv_constant (regno) != 0) 5477 op1 = reg_equiv_constant (regno); 5478 else if (GET_CODE (op0) == PLUS 5479 && (tem = subst_indexed_address (op0)) != op0) 5480 op0 = tem; 5481 else if (GET_CODE (op1) == PLUS 5482 && (tem = subst_indexed_address (op1)) != op1) 5483 op1 = tem; 5484 else 5485 return addr; 5486 5487 /* Pick out up to three things to add. */ 5488 if (GET_CODE (op1) == PLUS) 5489 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0); 5490 else if (GET_CODE (op0) == PLUS) 5491 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0); 5492 5493 /* Compute the sum. */ 5494 if (op2 != 0) 5495 op1 = form_sum (GET_MODE (addr), op1, op2); 5496 if (op1 != 0) 5497 op0 = form_sum (GET_MODE (addr), op0, op1); 5498 5499 return op0; 5500 } 5501 return addr; 5502 } 5503 5504 /* Update the REG_INC notes for an insn. It updates all REG_INC 5505 notes for the instruction which refer to REGNO the to refer 5506 to the reload number. 5507 5508 INSN is the insn for which any REG_INC notes need updating. 5509 5510 REGNO is the register number which has been reloaded. 5511 5512 RELOADNUM is the reload number. */ 5513 5514 static void 5515 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED, 5516 int reloadnum ATTRIBUTE_UNUSED) 5517 { 5518 #ifdef AUTO_INC_DEC 5519 rtx link; 5520 5521 for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) 5522 if (REG_NOTE_KIND (link) == REG_INC 5523 && (int) REGNO (XEXP (link, 0)) == regno) 5524 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode); 5525 #endif 5526 } 5527 5528 /* Record the pseudo registers we must reload into hard registers in a 5529 subexpression of a would-be memory address, X referring to a value 5530 in mode MODE. (This function is not called if the address we find 5531 is strictly valid.) 5532 5533 CONTEXT = 1 means we are considering regs as index regs, 5534 = 0 means we are considering them as base regs. 5535 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS, 5536 or an autoinc code. 5537 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE 5538 is the code of the index part of the address. 
Otherwise, pass SCRATCH 5539 for this argument. 5540 OPNUM and TYPE specify the purpose of any reloads made. 5541 5542 IND_LEVELS says how many levels of indirect addressing are 5543 supported at this point in the address. 5544 5545 INSN, if nonzero, is the insn in which we do the reload. It is used 5546 to determine if we may generate output reloads. 5547 5548 We return nonzero if X, as a whole, is reloaded or replaced. */ 5549 5550 /* Note that we take shortcuts assuming that no multi-reg machine mode 5551 occurs as part of an address. 5552 Also, this is not fully machine-customizable; it works for machines 5553 such as VAXen and 68000's and 32000's, but other possible machines 5554 could have addressing modes that this does not handle right. 5555 If you add push_reload calls here, you need to make sure gen_reload 5556 handles those cases gracefully. */ 5557 5558 static int 5559 find_reloads_address_1 (enum machine_mode mode, addr_space_t as, 5560 rtx x, int context, 5561 enum rtx_code outer_code, enum rtx_code index_code, 5562 rtx *loc, int opnum, enum reload_type type, 5563 int ind_levels, rtx insn) 5564 { 5565 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \ 5566 ((CONTEXT) == 0 \ 5567 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \ 5568 : REGNO_OK_FOR_INDEX_P (REGNO)) 5569 5570 enum reg_class context_reg_class; 5571 RTX_CODE code = GET_CODE (x); 5572 5573 if (context == 1) 5574 context_reg_class = INDEX_REG_CLASS; 5575 else 5576 context_reg_class = base_reg_class (mode, as, outer_code, index_code); 5577 5578 switch (code) 5579 { 5580 case PLUS: 5581 { 5582 rtx orig_op0 = XEXP (x, 0); 5583 rtx orig_op1 = XEXP (x, 1); 5584 RTX_CODE code0 = GET_CODE (orig_op0); 5585 RTX_CODE code1 = GET_CODE (orig_op1); 5586 rtx op0 = orig_op0; 5587 rtx op1 = orig_op1; 5588 5589 if (GET_CODE (op0) == SUBREG) 5590 { 5591 op0 = SUBREG_REG (op0); 5592 code0 = GET_CODE (op0); 5593 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER) 5594 op0 = gen_rtx_REG (word_mode, 5595 (REGNO (op0) + 5596 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)), 5597 GET_MODE (SUBREG_REG (orig_op0)), 5598 SUBREG_BYTE (orig_op0), 5599 GET_MODE (orig_op0)))); 5600 } 5601 5602 if (GET_CODE (op1) == SUBREG) 5603 { 5604 op1 = SUBREG_REG (op1); 5605 code1 = GET_CODE (op1); 5606 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER) 5607 /* ??? Why is this given op1's mode and above for 5608 ??? op0 SUBREGs we use word_mode? */ 5609 op1 = gen_rtx_REG (GET_MODE (op1), 5610 (REGNO (op1) + 5611 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)), 5612 GET_MODE (SUBREG_REG (orig_op1)), 5613 SUBREG_BYTE (orig_op1), 5614 GET_MODE (orig_op1)))); 5615 } 5616 /* Plus in the index register may be created only as a result of 5617 register rematerialization for expression like &localvar*4. Reload it. 5618 It may be possible to combine the displacement on the outer level, 5619 but it is probably not worthwhile to do so. 
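(A concrete case might be an index operand that rematerialization has turned into (plus (reg fp) (const_int C)); the whole PLUS is then reloaded into an index register below.)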
*/ 5620 if (context == 1) 5621 { 5622 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0), 5623 opnum, ADDR_TYPE (type), ind_levels, insn); 5624 push_reload (*loc, NULL_RTX, loc, (rtx*) 0, 5625 context_reg_class, 5626 GET_MODE (x), VOIDmode, 0, 0, opnum, type); 5627 return 1; 5628 } 5629 5630 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE 5631 || code0 == ZERO_EXTEND || code1 == MEM) 5632 { 5633 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH, 5634 &XEXP (x, 0), opnum, type, ind_levels, 5635 insn); 5636 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0, 5637 &XEXP (x, 1), opnum, type, ind_levels, 5638 insn); 5639 } 5640 5641 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE 5642 || code1 == ZERO_EXTEND || code0 == MEM) 5643 { 5644 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1, 5645 &XEXP (x, 0), opnum, type, ind_levels, 5646 insn); 5647 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH, 5648 &XEXP (x, 1), opnum, type, ind_levels, 5649 insn); 5650 } 5651 5652 else if (code0 == CONST_INT || code0 == CONST 5653 || code0 == SYMBOL_REF || code0 == LABEL_REF) 5654 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0, 5655 &XEXP (x, 1), opnum, type, ind_levels, 5656 insn); 5657 5658 else if (code1 == CONST_INT || code1 == CONST 5659 || code1 == SYMBOL_REF || code1 == LABEL_REF) 5660 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1, 5661 &XEXP (x, 0), opnum, type, ind_levels, 5662 insn); 5663 5664 else if (code0 == REG && code1 == REG) 5665 { 5666 if (REGNO_OK_FOR_INDEX_P (REGNO (op1)) 5667 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG)) 5668 return 0; 5669 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)) 5670 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG)) 5671 return 0; 5672 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG)) 5673 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH, 5674 &XEXP (x, 1), opnum, type, ind_levels, 5675 insn); 5676 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1))) 5677 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG, 5678 &XEXP (x, 0), opnum, type, ind_levels, 5679 insn); 5680 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG)) 5681 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH, 5682 &XEXP (x, 0), opnum, type, ind_levels, 5683 insn); 5684 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))) 5685 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG, 5686 &XEXP (x, 1), opnum, type, ind_levels, 5687 insn); 5688 else 5689 { 5690 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG, 5691 &XEXP (x, 0), opnum, type, ind_levels, 5692 insn); 5693 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH, 5694 &XEXP (x, 1), opnum, type, ind_levels, 5695 insn); 5696 } 5697 } 5698 5699 else if (code0 == REG) 5700 { 5701 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH, 5702 &XEXP (x, 0), opnum, type, ind_levels, 5703 insn); 5704 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG, 5705 &XEXP (x, 1), opnum, type, ind_levels, 5706 insn); 5707 } 5708 5709 else if (code1 == REG) 5710 { 5711 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH, 5712 &XEXP (x, 1), opnum, type, ind_levels, 5713 insn); 5714 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG, 5715 &XEXP (x, 0), opnum, type, ind_levels, 5716 insn); 5717 } 5718 } 5719 5720 return 0; 5721 5722 case POST_MODIFY: 5723 case PRE_MODIFY: 5724 { 5725 rtx op0 = XEXP (x, 0); 5726 rtx op1 = XEXP (x, 1); 5727 enum rtx_code 
index_code; 5728 int regno; 5729 int reloadnum; 5730 5731 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS) 5732 return 0; 5733 5734 /* Currently, we only support {PRE,POST}_MODIFY constructs 5735 where a base register is {inc,dec}remented by the contents 5736 of another register or by a constant value. Thus, these 5737 operands must match. */ 5738 gcc_assert (op0 == XEXP (op1, 0)); 5739 5740 /* Require index register (or constant). Let's just handle the 5741 register case in the meantime... If the target allows 5742 auto-modify by a constant then we could try replacing a pseudo 5743 register with its equivalent constant where applicable. 5744 5745 We also handle the case where the register was eliminated 5746 resulting in a PLUS subexpression. 5747 5748 If we later decide to reload the whole PRE_MODIFY or 5749 POST_MODIFY, inc_for_reload might clobber the reload register 5750 before reading the index. The index register might therefore 5751 need to live longer than a TYPE reload normally would, so be 5752 conservative and class it as RELOAD_OTHER. */ 5753 if ((REG_P (XEXP (op1, 1)) 5754 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1)))) 5755 || GET_CODE (XEXP (op1, 1)) == PLUS) 5756 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH, 5757 &XEXP (op1, 1), opnum, RELOAD_OTHER, 5758 ind_levels, insn); 5759 5760 gcc_assert (REG_P (XEXP (op1, 0))); 5761 5762 regno = REGNO (XEXP (op1, 0)); 5763 index_code = GET_CODE (XEXP (op1, 1)); 5764 5765 /* A register that is incremented cannot be constant! */ 5766 gcc_assert (regno < FIRST_PSEUDO_REGISTER 5767 || reg_equiv_constant (regno) == 0); 5768 5769 /* Handle a register that is equivalent to a memory location 5770 which cannot be addressed directly. */ 5771 if (reg_equiv_memory_loc (regno) != 0 5772 && (reg_equiv_address (regno) != 0 5773 || num_not_at_initial_offset)) 5774 { 5775 rtx tem = make_memloc (XEXP (x, 0), regno); 5776 5777 if (reg_equiv_address (regno) 5778 || ! rtx_equal_p (tem, reg_equiv_mem (regno))) 5779 { 5780 rtx orig = tem; 5781 5782 /* First reload the memory location's address. 5783 We can't use ADDR_TYPE (type) here, because we need to 5784 write back the value after reading it, hence we actually 5785 need two registers. */ 5786 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0), 5787 &XEXP (tem, 0), opnum, 5788 RELOAD_OTHER, 5789 ind_levels, insn); 5790 5791 if (!rtx_equal_p (tem, orig)) 5792 push_reg_equiv_alt_mem (regno, tem); 5793 5794 /* Then reload the memory location into a base 5795 register. */ 5796 reloadnum = push_reload (tem, tem, &XEXP (x, 0), 5797 &XEXP (op1, 0), 5798 base_reg_class (mode, as, 5799 code, index_code), 5800 GET_MODE (x), GET_MODE (x), 0, 5801 0, opnum, RELOAD_OTHER); 5802 5803 update_auto_inc_notes (this_insn, regno, reloadnum); 5804 return 0; 5805 } 5806 } 5807 5808 if (reg_renumber[regno] >= 0) 5809 regno = reg_renumber[regno]; 5810 5811 /* We require a base register here... 
*/ 5812 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code)) 5813 { 5814 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0), 5815 &XEXP (op1, 0), &XEXP (x, 0), 5816 base_reg_class (mode, as, 5817 code, index_code), 5818 GET_MODE (x), GET_MODE (x), 0, 0, 5819 opnum, RELOAD_OTHER); 5820 5821 update_auto_inc_notes (this_insn, regno, reloadnum); 5822 return 0; 5823 } 5824 } 5825 return 0; 5826 5827 case POST_INC: 5828 case POST_DEC: 5829 case PRE_INC: 5830 case PRE_DEC: 5831 if (REG_P (XEXP (x, 0))) 5832 { 5833 int regno = REGNO (XEXP (x, 0)); 5834 int value = 0; 5835 rtx x_orig = x; 5836 5837 /* A register that is incremented cannot be constant! */ 5838 gcc_assert (regno < FIRST_PSEUDO_REGISTER 5839 || reg_equiv_constant (regno) == 0); 5840 5841 /* Handle a register that is equivalent to a memory location 5842 which cannot be addressed directly. */ 5843 if (reg_equiv_memory_loc (regno) != 0 5844 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)) 5845 { 5846 rtx tem = make_memloc (XEXP (x, 0), regno); 5847 if (reg_equiv_address (regno) 5848 || ! rtx_equal_p (tem, reg_equiv_mem (regno))) 5849 { 5850 rtx orig = tem; 5851 5852 /* First reload the memory location's address. 5853 We can't use ADDR_TYPE (type) here, because we need to 5854 write back the value after reading it, hence we actually 5855 need two registers. */ 5856 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0), 5857 &XEXP (tem, 0), opnum, type, 5858 ind_levels, insn); 5859 if (!rtx_equal_p (tem, orig)) 5860 push_reg_equiv_alt_mem (regno, tem); 5861 /* Put this inside a new increment-expression. */ 5862 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem); 5863 /* Proceed to reload that, as if it contained a register. */ 5864 } 5865 } 5866 5867 /* If we have a hard register that is ok in this incdec context, 5868 don't make a reload. If the register isn't nice enough for 5869 autoincdec, we can reload it. But, if an autoincrement of a 5870 register that we here verified as playing nice, still outside 5871 isn't "valid", it must be that no autoincrement is "valid". 5872 If that is true and something made an autoincrement anyway, 5873 this must be a special context where one is allowed. 5874 (For example, a "push" instruction.) 5875 We can't improve this address, so leave it alone. */ 5876 5877 /* Otherwise, reload the autoincrement into a suitable hard reg 5878 and record how much to increment by. */ 5879 5880 if (reg_renumber[regno] >= 0) 5881 regno = reg_renumber[regno]; 5882 if (regno >= FIRST_PSEUDO_REGISTER 5883 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code, 5884 index_code)) 5885 { 5886 int reloadnum; 5887 5888 /* If we can output the register afterwards, do so, this 5889 saves the extra update. 5890 We can do so if we have an INSN - i.e. no JUMP_INSN nor 5891 CALL_INSN - and it does not set CC0. 5892 But don't do this if we cannot directly address the 5893 memory location, since this will make it harder to 5894 reuse address reloads, and increases register pressure. 5895 Also don't do this if we can probably update x directly. */ 5896 rtx equiv = (MEM_P (XEXP (x, 0)) 5897 ? XEXP (x, 0) 5898 : reg_equiv_mem (regno)); 5899 enum insn_code icode = optab_handler (add_optab, GET_MODE (x)); 5900 if (insn && NONJUMP_INSN_P (insn) && equiv 5901 && memory_operand (equiv, GET_MODE (equiv)) 5902 #ifdef HAVE_cc0 5903 && ! sets_cc0_p (PATTERN (insn)) 5904 #endif 5905 && ! 
(icode != CODE_FOR_nothing 5906 && insn_operand_matches (icode, 0, equiv) 5907 && insn_operand_matches (icode, 1, equiv))) 5908 { 5909 /* We use the original pseudo for loc, so that 5910 emit_reload_insns() knows which pseudo this 5911 reload refers to and updates the pseudo rtx, not 5912 its equivalent memory location, as well as the 5913 corresponding entry in reg_last_reload_reg. */ 5914 loc = &XEXP (x_orig, 0); 5915 x = XEXP (x, 0); 5916 reloadnum 5917 = push_reload (x, x, loc, loc, 5918 context_reg_class, 5919 GET_MODE (x), GET_MODE (x), 0, 0, 5920 opnum, RELOAD_OTHER); 5921 } 5922 else 5923 { 5924 reloadnum 5925 = push_reload (x, x, loc, (rtx*) 0, 5926 context_reg_class, 5927 GET_MODE (x), GET_MODE (x), 0, 0, 5928 opnum, type); 5929 rld[reloadnum].inc 5930 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0)); 5931 5932 value = 1; 5933 } 5934 5935 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)), 5936 reloadnum); 5937 } 5938 return value; 5939 } 5940 return 0; 5941 5942 case TRUNCATE: 5943 case SIGN_EXTEND: 5944 case ZERO_EXTEND: 5945 /* Look for parts to reload in the inner expression and reload them 5946 too, in addition to this operation. Reloading all inner parts in 5947 addition to this one shouldn't be necessary, but at this point, 5948 we don't know if we can possibly omit any part that *can* be 5949 reloaded. Targets that are better off reloading just either part 5950 (or perhaps even a different part of an outer expression), should 5951 define LEGITIMIZE_RELOAD_ADDRESS. */ 5952 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0), 5953 context, code, SCRATCH, &XEXP (x, 0), opnum, 5954 type, ind_levels, insn); 5955 push_reload (x, NULL_RTX, loc, (rtx*) 0, 5956 context_reg_class, 5957 GET_MODE (x), VOIDmode, 0, 0, opnum, type); 5958 return 1; 5959 5960 case MEM: 5961 /* This is probably the result of a substitution, by eliminate_regs, of 5962 an equivalent address for a pseudo that was not allocated to a hard 5963 register. Verify that the specified address is valid and reload it 5964 into a register. 5965 5966 Since we know we are going to reload this item, don't decrement for 5967 the indirection level. 5968 5969 Note that this is actually conservative: it would be slightly more 5970 efficient to use the value of SPILL_INDIRECT_LEVELS from 5971 reload1.c here. */ 5972 5973 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0), 5974 opnum, ADDR_TYPE (type), ind_levels, insn); 5975 push_reload (*loc, NULL_RTX, loc, (rtx*) 0, 5976 context_reg_class, 5977 GET_MODE (x), VOIDmode, 0, 0, opnum, type); 5978 return 1; 5979 5980 case REG: 5981 { 5982 int regno = REGNO (x); 5983 5984 if (reg_equiv_constant (regno) != 0) 5985 { 5986 find_reloads_address_part (reg_equiv_constant (regno), loc, 5987 context_reg_class, 5988 GET_MODE (x), opnum, type, ind_levels); 5989 return 1; 5990 } 5991 5992 #if 0 /* This might screw code in reload1.c to delete prior output-reload 5993 that feeds this insn. */ 5994 if (reg_equiv_mem (regno) != 0) 5995 { 5996 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0, 5997 context_reg_class, 5998 GET_MODE (x), VOIDmode, 0, 0, opnum, type); 5999 return 1; 6000 } 6001 #endif 6002 6003 if (reg_equiv_memory_loc (regno) 6004 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)) 6005 { 6006 rtx tem = make_memloc (x, regno); 6007 if (reg_equiv_address (regno) != 0 6008 || ! 
rtx_equal_p (tem, reg_equiv_mem (regno))) 6009 { 6010 x = tem; 6011 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), 6012 &XEXP (x, 0), opnum, ADDR_TYPE (type), 6013 ind_levels, insn); 6014 if (!rtx_equal_p (x, tem)) 6015 push_reg_equiv_alt_mem (regno, x); 6016 } 6017 } 6018 6019 if (reg_renumber[regno] >= 0) 6020 regno = reg_renumber[regno]; 6021 6022 if (regno >= FIRST_PSEUDO_REGISTER 6023 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code, 6024 index_code)) 6025 { 6026 push_reload (x, NULL_RTX, loc, (rtx*) 0, 6027 context_reg_class, 6028 GET_MODE (x), VOIDmode, 0, 0, opnum, type); 6029 return 1; 6030 } 6031 6032 /* If a register appearing in an address is the subject of a CLOBBER 6033 in this insn, reload it into some other register to be safe. 6034 The CLOBBER is supposed to make the register unavailable 6035 from before this insn to after it. */ 6036 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0)) 6037 { 6038 push_reload (x, NULL_RTX, loc, (rtx*) 0, 6039 context_reg_class, 6040 GET_MODE (x), VOIDmode, 0, 0, opnum, type); 6041 return 1; 6042 } 6043 } 6044 return 0; 6045 6046 case SUBREG: 6047 if (REG_P (SUBREG_REG (x))) 6048 { 6049 /* If this is a SUBREG of a hard register and the resulting register 6050 is of the wrong class, reload the whole SUBREG. This avoids 6051 needless copies if SUBREG_REG is multi-word. */ 6052 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER) 6053 { 6054 int regno ATTRIBUTE_UNUSED = subreg_regno (x); 6055 6056 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code, 6057 index_code)) 6058 { 6059 push_reload (x, NULL_RTX, loc, (rtx*) 0, 6060 context_reg_class, 6061 GET_MODE (x), VOIDmode, 0, 0, opnum, type); 6062 return 1; 6063 } 6064 } 6065 /* If this is a SUBREG of a pseudo-register, and the pseudo-register 6066 is larger than the class size, then reload the whole SUBREG. */ 6067 else 6068 { 6069 enum reg_class rclass = context_reg_class; 6070 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))] 6071 > reg_class_size[(int) rclass]) 6072 { 6073 /* If the inner register will be replaced by a memory 6074 reference, we can do this only if we can replace the 6075 whole subreg by a (narrower) memory reference. If 6076 this is not possible, fall through and reload just 6077 the inner register (including address reloads). */ 6078 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0) 6079 { 6080 rtx tem = find_reloads_subreg_address (x, opnum, 6081 ADDR_TYPE (type), 6082 ind_levels, insn, 6083 NULL); 6084 if (tem) 6085 { 6086 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass, 6087 GET_MODE (tem), VOIDmode, 0, 0, 6088 opnum, type); 6089 return 1; 6090 } 6091 } 6092 else 6093 { 6094 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass, 6095 GET_MODE (x), VOIDmode, 0, 0, opnum, type); 6096 return 1; 6097 } 6098 } 6099 } 6100 } 6101 break; 6102 6103 default: 6104 break; 6105 } 6106 6107 { 6108 const char *fmt = GET_RTX_FORMAT (code); 6109 int i; 6110 6111 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 6112 { 6113 if (fmt[i] == 'e') 6114 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once 6115 we get here. */ 6116 find_reloads_address_1 (mode, as, XEXP (x, i), context, 6117 code, SCRATCH, &XEXP (x, i), 6118 opnum, type, ind_levels, insn); 6119 } 6120 } 6121 6122 #undef REG_OK_FOR_CONTEXT 6123 return 0; 6124 } 6125 6126 /* X, which is found at *LOC, is a part of an address that needs to be 6127 reloaded into a register of class RCLASS. 
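Typically X is either a pseudo's constant equivalence, the constant displacement of an address (as in the call above that passes XEXP (ad, 1)), or a complete sum such as (plus (reg) (const_int C)).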
If X is a constant, or if 6128 X is a PLUS that contains a constant, check that the constant is a 6129 legitimate operand and that we are supposed to be able to load 6130 it into the register. 6131 6132 If not, force the constant into memory and reload the MEM instead. 6133 6134 MODE is the mode to use, in case X is an integer constant. 6135 6136 OPNUM and TYPE describe the purpose of any reloads made. 6137 6138 IND_LEVELS says how many levels of indirect addressing this machine 6139 supports. */ 6140 6141 static void 6142 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass, 6143 enum machine_mode mode, int opnum, 6144 enum reload_type type, int ind_levels) 6145 { 6146 if (CONSTANT_P (x) 6147 && (!targetm.legitimate_constant_p (mode, x) 6148 || targetm.preferred_reload_class (x, rclass) == NO_REGS)) 6149 { 6150 x = force_const_mem (mode, x); 6151 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0), 6152 opnum, type, ind_levels, 0); 6153 } 6154 6155 else if (GET_CODE (x) == PLUS 6156 && CONSTANT_P (XEXP (x, 1)) 6157 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1)) 6158 || targetm.preferred_reload_class (XEXP (x, 1), rclass) 6159 == NO_REGS)) 6160 { 6161 rtx tem; 6162 6163 tem = force_const_mem (GET_MODE (x), XEXP (x, 1)); 6164 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem); 6165 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0), 6166 opnum, type, ind_levels, 0); 6167 } 6168 6169 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass, 6170 mode, VOIDmode, 0, 0, opnum, type); 6171 } 6172 6173 /* X, a subreg of a pseudo, is a part of an address that needs to be 6174 reloaded, and the pseudo is equivalent to a memory location. 6175 6176 Attempt to replace the whole subreg by a (possibly narrower or wider) 6177 memory reference. If this is possible, return this new memory 6178 reference, and push all required address reloads. Otherwise, 6179 return NULL. 6180 6181 OPNUM and TYPE identify the purpose of the reload. 6182 6183 IND_LEVELS says how many levels of indirect addressing are 6184 supported at this point in the address. 6185 6186 INSN, if nonzero, is the insn in which we do the reload. It is used 6187 to determine where to put USEs for pseudos that we have to replace with 6188 stack slots. */ 6189 6190 static rtx 6191 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type, 6192 int ind_levels, rtx insn, int *address_reloaded) 6193 { 6194 enum machine_mode outer_mode = GET_MODE (x); 6195 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x)); 6196 int regno = REGNO (SUBREG_REG (x)); 6197 int reloaded = 0; 6198 rtx tem, orig; 6199 int offset; 6200 6201 gcc_assert (reg_equiv_memory_loc (regno) != 0); 6202 6203 /* We cannot replace the subreg with a modified memory reference if: 6204 6205 - we have a paradoxical subreg that implicitly acts as a zero or 6206 sign extension operation due to LOAD_EXTEND_OP; 6207 6208 - we have a subreg that is implicitly supposed to act on the full 6209 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs); 6210 6211 - the address of the equivalent memory location is mode-dependent; or 6212 6213 - we have a paradoxical subreg and the resulting memory is not 6214 sufficiently aligned to allow access in the wider mode. 6215 6216 In addition, we choose not to perform the replacement for *any* 6217 paradoxical subreg, even if it were possible in principle. This 6218 is to avoid generating wider memory references than necessary.
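For instance, a paradoxical (subreg:DI (reg:SI N) 0) would need a (mem:DI ...) wider than N's stack slot, so we return NULL and let the caller fall back to handling the inner register instead.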
6219 6220 This corresponds to how previous versions of reload used to handle 6221 paradoxical subregs where no address reload was required. */ 6222 6223 if (paradoxical_subreg_p (x)) 6224 return NULL; 6225 6226 #ifdef WORD_REGISTER_OPERATIONS 6227 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode) 6228 && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD 6229 == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD)) 6230 return NULL; 6231 #endif 6232 6233 /* Since we don't attempt to handle paradoxical subregs, we can just 6234 call into simplify_subreg, which will handle all remaining checks 6235 for us. */ 6236 orig = make_memloc (SUBREG_REG (x), regno); 6237 offset = SUBREG_BYTE (x); 6238 tem = simplify_subreg (outer_mode, orig, inner_mode, offset); 6239 if (!tem || !MEM_P (tem)) 6240 return NULL; 6241 6242 /* Now push all required address reloads, if any. */ 6243 reloaded = find_reloads_address (GET_MODE (tem), &tem, 6244 XEXP (tem, 0), &XEXP (tem, 0), 6245 opnum, type, ind_levels, insn); 6246 /* ??? Do we need to handle nonzero offsets somehow? */ 6247 if (!offset && !rtx_equal_p (tem, orig)) 6248 push_reg_equiv_alt_mem (regno, tem); 6249 6250 /* For some processors an address may be valid in the original mode but 6251 not in a smaller mode. For example, ARM accepts a scaled index register 6252 in SImode but not in HImode. Note that this is only a problem if the 6253 address in reg_equiv_mem is already invalid in the new mode; other 6254 cases would be fixed by find_reloads_address as usual. 6255 6256 ??? We attempt to handle such cases here by doing an additional reload 6257 of the full address after the usual processing by find_reloads_address. 6258 Note that this may not work in the general case, but it seems to cover 6259 the cases where this situation currently occurs. A more general fix 6260 might be to reload the *value* instead of the address, but this would 6261 not be expected by the callers of this routine as-is. 6262 6263 If find_reloads_address already completely replaced the address, there 6264 is nothing further to do. */ 6265 if (reloaded == 0 6266 && reg_equiv_mem (regno) != 0 6267 && !strict_memory_address_addr_space_p 6268 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0), 6269 MEM_ADDR_SPACE (reg_equiv_mem (regno)))) 6270 { 6271 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0, 6272 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem), 6273 MEM, SCRATCH), 6274 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type); 6275 reloaded = 1; 6276 } 6277 6278 /* If this is not a toplevel operand, find_reloads doesn't see this 6279 substitution. We have to emit a USE of the pseudo so that 6280 delete_output_reload can see it. */ 6281 if (replace_reloads && recog_data.operand[opnum] != x) 6282 /* We mark the USE with QImode so that we recognize it as one that 6283 can be safely deleted at the end of reload. */ 6284 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn), 6285 QImode); 6286 6287 if (address_reloaded) 6288 *address_reloaded = reloaded; 6289 6290 return tem; 6291 } 6292 6293 /* Substitute into the current INSN the registers into which we have reloaded 6294 the things that need reloading. The array `replacements' 6295 contains the locations of all pointers that must be changed 6296 and says what to replace them with. 6297 6298 All replacements are made in place; INSN itself is modified.
*/ 6299 6300 void 6301 subst_reloads (rtx insn) 6302 { 6303 int i; 6304 6305 for (i = 0; i < n_replacements; i++) 6306 { 6307 struct replacement *r = &replacements[i]; 6308 rtx reloadreg = rld[r->what].reg_rtx; 6309 if (reloadreg) 6310 { 6311 #ifdef DEBUG_RELOAD 6312 /* This checking takes a very long time on some platforms 6313 causing the gcc.c-torture/compile/limits-fnargs.c test 6314 to time out during testing. See PR 31850. 6315 6316 Internal consistency test. Check that we don't modify 6317 anything in the equivalence arrays. Whenever something from 6318 those arrays needs to be reloaded, it must be unshared before 6319 being substituted into; the equivalence must not be modified. 6320 Otherwise, if the equivalence is used after that, it will 6321 have been modified, and the thing substituted (probably a 6322 register) is likely overwritten and not a usable equivalence. */ 6323 int check_regno; 6324 6325 for (check_regno = 0; check_regno < max_regno; check_regno++) 6326 { 6327 #define CHECK_MODF(ARRAY) \ 6328 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \ 6329 || !loc_mentioned_in_p (r->where, \ 6330 (*reg_equivs)[check_regno].ARRAY)) 6331 6332 CHECK_MODF (constant); 6333 CHECK_MODF (memory_loc); 6334 CHECK_MODF (address); 6335 CHECK_MODF (mem); 6336 #undef CHECK_MODF 6337 } 6338 #endif /* DEBUG_RELOAD */ 6339 6340 /* If we're replacing a LABEL_REF with a register, there must 6341 already be an indication (to e.g. flow) which label this 6342 register refers to. */ 6343 gcc_assert (GET_CODE (*r->where) != LABEL_REF 6344 || !JUMP_P (insn) 6345 || find_reg_note (insn, 6346 REG_LABEL_OPERAND, 6347 XEXP (*r->where, 0)) 6348 || label_is_jump_target_p (XEXP (*r->where, 0), insn)); 6349 6350 /* Encapsulate RELOADREG so its machine mode matches what 6351 used to be there. Note that gen_lowpart_common will 6352 do the wrong thing if RELOADREG is multi-word. RELOADREG 6353 will always be a REG here. */ 6354 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode) 6355 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode); 6356 6357 *r->where = reloadreg; 6358 } 6359 /* If reload got no reg and isn't optional, something's wrong. */ 6360 else 6361 gcc_assert (rld[r->what].optional); 6362 } 6363 } 6364 6365 /* Make a copy of any replacements being done into X and move those 6366 copies to locations in Y, a copy of X. */ 6367 6368 void 6369 copy_replacements (rtx x, rtx y) 6370 { 6371 copy_replacements_1 (&x, &y, n_replacements); 6372 } 6373 6374 static void 6375 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements) 6376 { 6377 int i, j; 6378 rtx x, y; 6379 struct replacement *r; 6380 enum rtx_code code; 6381 const char *fmt; 6382 6383 for (j = 0; j < orig_replacements; j++) 6384 if (replacements[j].where == px) 6385 { 6386 r = &replacements[n_replacements++]; 6387 r->where = py; 6388 r->what = replacements[j].what; 6389 r->mode = replacements[j].mode; 6390 } 6391 6392 x = *px; 6393 y = *py; 6394 code = GET_CODE (x); 6395 fmt = GET_RTX_FORMAT (code); 6396 6397 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 6398 { 6399 if (fmt[i] == 'e') 6400 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements); 6401 else if (fmt[i] == 'E') 6402 for (j = XVECLEN (x, i); --j >= 0; ) 6403 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j), 6404 orig_replacements); 6405 } 6406 } 6407 6408 /* Change any replacements being done to *X to be done to *Y. 
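This is used, for example, after find_reloads_address copies a MEM with copy_rtx in the LEGITIMIZE_RELOAD_ADDRESS path above, so that pending replacements follow the address to its new location.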
*/ 6409 6410 void 6411 move_replacements (rtx *x, rtx *y) 6412 { 6413 int i; 6414 6415 for (i = 0; i < n_replacements; i++) 6416 if (replacements[i].where == x) 6417 replacements[i].where = y; 6418 } 6419 6420 /* If LOC was scheduled to be replaced by something, return the replacement. 6421 Otherwise, return *LOC. */ 6422 6423 rtx 6424 find_replacement (rtx *loc) 6425 { 6426 struct replacement *r; 6427 6428 for (r = &replacements[0]; r < &replacements[n_replacements]; r++) 6429 { 6430 rtx reloadreg = rld[r->what].reg_rtx; 6431 6432 if (reloadreg && r->where == loc) 6433 { 6434 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode) 6435 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode); 6436 6437 return reloadreg; 6438 } 6439 else if (reloadreg && GET_CODE (*loc) == SUBREG 6440 && r->where == &SUBREG_REG (*loc)) 6441 { 6442 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode) 6443 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode); 6444 6445 return simplify_gen_subreg (GET_MODE (*loc), reloadreg, 6446 GET_MODE (SUBREG_REG (*loc)), 6447 SUBREG_BYTE (*loc)); 6448 } 6449 } 6450 6451 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for 6452 what's inside and make a new rtl if so. */ 6453 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS 6454 || GET_CODE (*loc) == MULT) 6455 { 6456 rtx x = find_replacement (&XEXP (*loc, 0)); 6457 rtx y = find_replacement (&XEXP (*loc, 1)); 6458 6459 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1)) 6460 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y); 6461 } 6462 6463 return *loc; 6464 } 6465 6466 /* Return nonzero if register in range [REGNO, ENDREGNO) 6467 appears either explicitly or implicitly in X 6468 other than being stored into (except for earlyclobber operands). 6469 6470 References contained within the substructure at LOC do not count. 6471 LOC may be zero, meaning don't ignore anything. 6472 6473 This is similar to refers_to_regno_p in rtlanal.c except that we 6474 look at equivalences for pseudos that didn't get hard registers. */ 6475 6476 static int 6477 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno, 6478 rtx x, rtx *loc) 6479 { 6480 int i; 6481 unsigned int r; 6482 RTX_CODE code; 6483 const char *fmt; 6484 6485 if (x == 0) 6486 return 0; 6487 6488 repeat: 6489 code = GET_CODE (x); 6490 6491 switch (code) 6492 { 6493 case REG: 6494 r = REGNO (x); 6495 6496 /* If this is a pseudo, a hard register must not have been allocated. 6497 X must therefore either be a constant or be in memory. */ 6498 if (r >= FIRST_PSEUDO_REGISTER) 6499 { 6500 if (reg_equiv_memory_loc (r)) 6501 return refers_to_regno_for_reload_p (regno, endregno, 6502 reg_equiv_memory_loc (r), 6503 (rtx*) 0); 6504 6505 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r)); 6506 return 0; 6507 } 6508 6509 return (endregno > r 6510 && regno < r + (r < FIRST_PSEUDO_REGISTER 6511 ? hard_regno_nregs[r][GET_MODE (x)] 6512 : 1)); 6513 6514 case SUBREG: 6515 /* If this is a SUBREG of a hard reg, we can see exactly which 6516 registers are being modified. Otherwise, handle normally. */ 6517 if (REG_P (SUBREG_REG (x)) 6518 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER) 6519 { 6520 unsigned int inner_regno = subreg_regno (x); 6521 unsigned int inner_endregno 6522 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER 6523 ? 
subreg_nregs (x) : 1); 6524 6525 return endregno > inner_regno && regno < inner_endregno; 6526 } 6527 break; 6528 6529 case CLOBBER: 6530 case SET: 6531 if (&SET_DEST (x) != loc 6532 /* Note setting a SUBREG counts as referring to the REG it is in for 6533 a pseudo but not for hard registers since we can 6534 treat each word individually. */ 6535 && ((GET_CODE (SET_DEST (x)) == SUBREG 6536 && loc != &SUBREG_REG (SET_DEST (x)) 6537 && REG_P (SUBREG_REG (SET_DEST (x))) 6538 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER 6539 && refers_to_regno_for_reload_p (regno, endregno, 6540 SUBREG_REG (SET_DEST (x)), 6541 loc)) 6542 /* If the output is an earlyclobber operand, this is 6543 a conflict. */ 6544 || ((!REG_P (SET_DEST (x)) 6545 || earlyclobber_operand_p (SET_DEST (x))) 6546 && refers_to_regno_for_reload_p (regno, endregno, 6547 SET_DEST (x), loc)))) 6548 return 1; 6549 6550 if (code == CLOBBER || loc == &SET_SRC (x)) 6551 return 0; 6552 x = SET_SRC (x); 6553 goto repeat; 6554 6555 default: 6556 break; 6557 } 6558 6559 /* X does not match, so try its subexpressions. */ 6560 6561 fmt = GET_RTX_FORMAT (code); 6562 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) 6563 { 6564 if (fmt[i] == 'e' && loc != &XEXP (x, i)) 6565 { 6566 if (i == 0) 6567 { 6568 x = XEXP (x, 0); 6569 goto repeat; 6570 } 6571 else 6572 if (refers_to_regno_for_reload_p (regno, endregno, 6573 XEXP (x, i), loc)) 6574 return 1; 6575 } 6576 else if (fmt[i] == 'E') 6577 { 6578 int j; 6579 for (j = XVECLEN (x, i) - 1; j >= 0; j--) 6580 if (loc != &XVECEXP (x, i, j) 6581 && refers_to_regno_for_reload_p (regno, endregno, 6582 XVECEXP (x, i, j), loc)) 6583 return 1; 6584 } 6585 } 6586 return 0; 6587 } 6588 6589 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG, 6590 we check if any register number in X conflicts with the relevant register 6591 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN 6592 contains a MEM (we don't bother checking for memory addresses that can't 6593 conflict because we expect this to be a rare case. 6594 6595 This function is similar to reg_overlap_mentioned_p in rtlanal.c except 6596 that we look at equivalences for pseudos that didn't get hard registers. */ 6597 6598 int 6599 reg_overlap_mentioned_for_reload_p (rtx x, rtx in) 6600 { 6601 int regno, endregno; 6602 6603 /* Overly conservative. */ 6604 if (GET_CODE (x) == STRICT_LOW_PART 6605 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC) 6606 x = XEXP (x, 0); 6607 6608 /* If either argument is a constant, then modifying X can not affect IN. */ 6609 if (CONSTANT_P (x) || CONSTANT_P (in)) 6610 return 0; 6611 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x))) 6612 return refers_to_mem_for_reload_p (in); 6613 else if (GET_CODE (x) == SUBREG) 6614 { 6615 regno = REGNO (SUBREG_REG (x)); 6616 if (regno < FIRST_PSEUDO_REGISTER) 6617 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)), 6618 GET_MODE (SUBREG_REG (x)), 6619 SUBREG_BYTE (x), 6620 GET_MODE (x)); 6621 endregno = regno + (regno < FIRST_PSEUDO_REGISTER 6622 ? subreg_nregs (x) : 1); 6623 6624 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0); 6625 } 6626 else if (REG_P (x)) 6627 { 6628 regno = REGNO (x); 6629 6630 /* If this is a pseudo, it must not have been assigned a hard register. 6631 Therefore, it must either be in memory or be a constant. 

/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).

   This function is similar to reg_overlap_mentioned_p in rtlanal.c except
   that we look at equivalences for pseudos that didn't get hard registers.  */

int
reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
{
  int regno, endregno;

  /* Overly conservative.  */
  if (GET_CODE (x) == STRICT_LOW_PART
      || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
    x = XEXP (x, 0);

  /* If either argument is a constant, then modifying X can not affect IN.  */
  if (CONSTANT_P (x) || CONSTANT_P (in))
    return 0;
  else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
    return refers_to_mem_for_reload_p (in);
  else if (GET_CODE (x) == SUBREG)
    {
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
                                      GET_MODE (SUBREG_REG (x)),
                                      SUBREG_BYTE (x),
                                      GET_MODE (x));
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? subreg_nregs (x) : 1);

      return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
    }
  else if (REG_P (x))
    {
      regno = REGNO (x);

      /* If this is a pseudo, it must not have been assigned a hard register.
         Therefore, it must either be in memory or be a constant.  */

      if (regno >= FIRST_PSEUDO_REGISTER)
        {
          if (reg_equiv_memory_loc (regno))
            return refers_to_mem_for_reload_p (in);
          gcc_assert (reg_equiv_constant (regno));
          return 0;
        }

      endregno = END_HARD_REGNO (x);

      return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
    }
  else if (MEM_P (x))
    return refers_to_mem_for_reload_p (in);
  else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
           || GET_CODE (x) == CC0)
    return reg_mentioned_p (x, in);
  else
    {
      gcc_assert (GET_CODE (x) == PLUS);

      /* We actually want to know if X is mentioned somewhere inside IN.
         We must not say that (plus (sp) (const_int 124)) is in
         (plus (sp) (const_int 64)), since that can lead to incorrect reload
         allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
         into a RELOAD_OTHER on behalf of another RELOAD_OTHER.  */
      while (MEM_P (in))
        in = XEXP (in, 0);
      if (REG_P (in))
        return 0;
      else if (GET_CODE (in) == PLUS)
        return (rtx_equal_p (x, in)
                || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
                || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
      else
        return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
                || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
    }

  gcc_unreachable ();
}

/* Return nonzero if anything in X contains a MEM.  Look also for pseudo
   registers.  */

static int
refers_to_mem_for_reload_p (rtx x)
{
  const char *fmt;
  int i;

  if (MEM_P (x))
    return 1;

  if (REG_P (x))
    return (REGNO (x) >= FIRST_PSEUDO_REGISTER
            && reg_equiv_memory_loc (REGNO (x)));

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    if (fmt[i] == 'e'
        && (MEM_P (XEXP (x, i))
            || refers_to_mem_for_reload_p (XEXP (x, i))))
      return 1;

  return 0;
}
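
/* Editorial note: a disabled usage sketch for
   reg_overlap_mentioned_for_reload_p.  A caller that is about to clobber
   DEST can ask whether that clobber might change any of a set of recorded
   values, with pseudos that lack hard registers tracked through their
   memory or constant equivalences.  The helper name and the VALUES array
   are hypothetical bookkeeping.  */
#if 0
static void
sketch_flush_overlapping_equivs (rtx dest, rtx *values, int n_values)
{
  int i;

  /* Discard every recorded value that a write to DEST may change.  */
  for (i = 0; i < n_values; i++)
    if (values[i] != 0
        && reg_overlap_mentioned_for_reload_p (dest, values[i]))
      values[i] = 0;
}
#endif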

/* Check the insns before INSN to see if there is a suitable register
   containing the same value as GOAL.
   If OTHER is -1, look for a register in class RCLASS.
   Otherwise, just see if register number OTHER shares GOAL's value.

   Return an rtx for the register found, or zero if none is found.

   If RELOAD_REG_P is (short *)1,
   we reject any hard reg that appears in reload_reg_rtx
   because such a hard reg is also needed coming into this insn.

   If RELOAD_REG_P is any other nonzero value,
   it is a vector indexed by hard reg number
   and we reject any hard reg whose element in the vector is nonnegative
   as well as any that appears in reload_reg_rtx.

   If GOAL is zero, then GOALREG is a register number; we look
   for an equivalent for that register.

   MODE is the machine mode of the value we want an equivalence for.
   If GOAL is nonzero and not VOIDmode, then it must have mode MODE.

   This function is used by jump.c as well as in the reload pass.

   If GOAL is the sum of the stack pointer and a constant, we treat it
   as if it were a constant except that sp is required to be unchanging.  */

rtx
find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
                short *reload_reg_p, int goalreg, enum machine_mode mode)
{
  rtx p = insn;
  rtx goaltry, valtry, value, where;
  rtx pat;
  int regno = -1;
  int valueno;
  int goal_mem = 0;
  int goal_const = 0;
  int goal_mem_addr_varies = 0;
  int need_stable_sp = 0;
  int nregs;
  int valuenregs;
  int num = 0;

  if (goal == 0)
    regno = goalreg;
  else if (REG_P (goal))
    regno = REGNO (goal);
  else if (MEM_P (goal))
    {
      enum rtx_code code = GET_CODE (XEXP (goal, 0));
      if (MEM_VOLATILE_P (goal))
        return 0;
      if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
        return 0;
      /* An address with side effects must be reexecuted.  */
      switch (code)
        {
        case POST_INC:
        case PRE_INC:
        case POST_DEC:
        case PRE_DEC:
        case POST_MODIFY:
        case PRE_MODIFY:
          return 0;
        default:
          break;
        }
      goal_mem = 1;
    }
  else if (CONSTANT_P (goal))
    goal_const = 1;
  else if (GET_CODE (goal) == PLUS
           && XEXP (goal, 0) == stack_pointer_rtx
           && CONSTANT_P (XEXP (goal, 1)))
    goal_const = need_stable_sp = 1;
  else if (GET_CODE (goal) == PLUS
           && XEXP (goal, 0) == frame_pointer_rtx
           && CONSTANT_P (XEXP (goal, 1)))
    goal_const = 1;
  else
    return 0;

  num = 0;
  /* Scan insns back from INSN, looking for one that copies
     a value into or out of GOAL.
     Stop and give up if we reach a label.  */

  while (1)
    {
      p = PREV_INSN (p);
      if (p && DEBUG_INSN_P (p))
        continue;
      num++;
      if (p == 0 || LABEL_P (p)
          || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
        return 0;

      /* Don't reuse register contents from before a setjmp-type
         function call; on the second return (from the longjmp) it
         might have been clobbered by a later reuse.  It doesn't
         seem worthwhile to check whether it is actually reused even
         when that information is readily available; just don't
         reuse it across the setjmp call.  */
      if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
        return 0;

      if (NONJUMP_INSN_P (p)
          /* If we don't want spill regs ...  */
          && (! (reload_reg_p != 0
                 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
              /* ... then ignore insns introduced by reload; they aren't
                 useful and can cause results in reload_as_needed to be
                 different from what they were when calculating the need for
                 spills.  If we notice an input-reload insn here, we will
                 reject it below, but it might hide a usable equivalent.
                 That makes bad code.  It may even fail: perhaps no reg was
                 spilled for this insn because it was assumed we would find
                 that equivalent.  */
              || INSN_UID (p) < reload_first_uid))
        {
          rtx tem;
          pat = single_set (p);

          /* First check for something that sets some reg equal to GOAL.  */
          if (pat != 0
              && ((regno >= 0
                   && true_regnum (SET_SRC (pat)) == regno
                   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
                  ||
                  (regno >= 0
                   && true_regnum (SET_DEST (pat)) == regno
                   && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
                  ||
                  (goal_const && rtx_equal_p (SET_SRC (pat), goal)
                   /* When looking for stack pointer + const,
                      make sure we don't use a stack adjust.  */
                   && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
                   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
                  || (goal_mem
                      && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
                      && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
                  || (goal_mem
                      && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
                      && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
                  /* If we are looking for a constant,
                     and something equivalent to that constant was copied
                     into a reg, we can use that reg.  */
                  || (goal_const && REG_NOTES (p) != 0
                      && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
                      && ((rtx_equal_p (XEXP (tem, 0), goal)
                           && (valueno
                               = true_regnum (valtry = SET_DEST (pat))) >= 0)
                          || (REG_P (SET_DEST (pat))
                              && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
                              && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
                              && CONST_INT_P (goal)
                              && 0 != (goaltry
                                       = operand_subword (XEXP (tem, 0), 0, 0,
                                                          VOIDmode))
                              && rtx_equal_p (goal, goaltry)
                              && (valtry
                                  = operand_subword (SET_DEST (pat), 0, 0,
                                                     VOIDmode))
                              && (valueno = true_regnum (valtry)) >= 0)))
                  || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
                                                          NULL_RTX))
                      && REG_P (SET_DEST (pat))
                      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
                      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
                      && CONST_INT_P (goal)
                      && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
                                                          VOIDmode))
                      && rtx_equal_p (goal, goaltry)
                      && (valtry
                          = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
                      && (valueno = true_regnum (valtry)) >= 0)))
            {
              if (other >= 0)
                {
                  if (valueno != other)
                    continue;
                }
              else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
                continue;
              else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
                                           mode, valueno))
                continue;
              value = valtry;
              where = p;
              break;
            }
        }
    }

  /* We found a previous insn copying GOAL into a suitable other reg VALUE
     (or copying VALUE into GOAL, if GOAL is also a register).
     Now verify that VALUE is really valid.  */

  /* VALUENO is the register number of VALUE; a hard register.  */

  /* Don't try to re-use something that is killed in this insn.  We want
     to be able to trust REG_UNUSED notes.  */
  if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
    return 0;

  /* If we propose to get the value from the stack pointer or if GOAL is
     a MEM based on the stack pointer, we need a stable SP.  */
  if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
      || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
                                                          goal)))
    need_stable_sp = 1;

  /* Reject VALUE if the copy-insn moved the wrong sort of datum.  */
  if (GET_MODE (value) != mode)
    return 0;

  /* Reject VALUE if it was loaded from GOAL
     and is also a register that appears in the address of GOAL.  */

  if (goal_mem && value == SET_DEST (single_set (where))
      && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
                                       goal, (rtx*) 0))
    return 0;

  /* Reject registers that overlap GOAL.  */

  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
    nregs = hard_regno_nregs[regno][mode];
  else
    nregs = 1;
  valuenregs = hard_regno_nregs[valueno][mode];
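
  /* Editorial note: the test below is the usual intersection check for the
     half-open hard register ranges [regno, regno + nregs) and
     [valueno, valueno + valuenregs): they overlap exactly when each range
     starts before the other one ends.  */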
  if (!goal_mem && !goal_const
      && regno + nregs > valueno && regno < valueno + valuenregs)
    return 0;

  /* Reject VALUE if it is one of the regs reserved for reloads.
     Reload1 knows how to reuse them anyway, and it would get
     confused if we allocated one without its knowledge.
     (Now that insns introduced by reload are ignored above,
     this case shouldn't happen, but I'm not positive.)  */

  if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
    {
      int i;
      for (i = 0; i < valuenregs; ++i)
        if (reload_reg_p[valueno + i] >= 0)
          return 0;
    }

  /* Reject VALUE if it is a register being used for an input reload
     even if it is not one of those reserved.  */

  if (reload_reg_p != 0)
    {
      int i;
      for (i = 0; i < n_reloads; i++)
        if (rld[i].reg_rtx != 0 && rld[i].in)
          {
            int regno1 = REGNO (rld[i].reg_rtx);
            int nregs1 = hard_regno_nregs[regno1]
                                         [GET_MODE (rld[i].reg_rtx)];
            if (regno1 < valueno + valuenregs
                && regno1 + nregs1 > valueno)
              return 0;
          }
    }

  if (goal_mem)
    /* We must treat the frame pointer as varying here,
       since it can vary, as in a nonlocal goto generated by expand_goto.  */
    goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));

  /* Now verify that the values of GOAL and VALUE remain unaltered
     until INSN is reached.  */

  p = insn;
  while (1)
    {
      p = PREV_INSN (p);
      if (p == where)
        return value;

      /* Don't trust the conversion past a function call
         if either of the two is in a call-clobbered register, or memory.  */
      if (CALL_P (p))
        {
          int i;

          if (goal_mem || need_stable_sp)
            return 0;

          if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
            for (i = 0; i < nregs; ++i)
              if (call_used_regs[regno + i]
                  || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
                return 0;

          if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
            for (i = 0; i < valuenregs; ++i)
              if (call_used_regs[valueno + i]
                  || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
                return 0;
        }

      if (INSN_P (p))
        {
          pat = PATTERN (p);

          /* Watch out for unspec_volatile, and volatile asms.  */
          if (volatile_insn_p (pat))
            return 0;

          /* If this insn P stores in either GOAL or VALUE, return 0.
             If GOAL is a memory ref and this insn writes memory, return 0.
             If GOAL is a memory ref and its address is not constant,
             and this insn P changes a register used in GOAL, return 0.  */

          if (GET_CODE (pat) == COND_EXEC)
            pat = COND_EXEC_CODE (pat);
          if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
            {
              rtx dest = SET_DEST (pat);
              while (GET_CODE (dest) == SUBREG
                     || GET_CODE (dest) == ZERO_EXTRACT
                     || GET_CODE (dest) == STRICT_LOW_PART)
                dest = XEXP (dest, 0);
              if (REG_P (dest))
                {
                  int xregno = REGNO (dest);
                  int xnregs;
                  if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
                    xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
                  else
                    xnregs = 1;
                  if (xregno < regno + nregs && xregno + xnregs > regno)
                    return 0;
                  if (xregno < valueno + valuenregs
                      && xregno + xnregs > valueno)
                    return 0;
                  if (goal_mem_addr_varies
                      && reg_overlap_mentioned_for_reload_p (dest, goal))
                    return 0;
                  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
                    return 0;
                }
              else if (goal_mem && MEM_P (dest)
                       && ! push_operand (dest, GET_MODE (dest)))
                return 0;
              else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
                       && reg_equiv_memory_loc (regno) != 0)
                return 0;
              else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
                return 0;
            }
          else if (GET_CODE (pat) == PARALLEL)
            {
              int i;
              for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
                {
                  rtx v1 = XVECEXP (pat, 0, i);
                  if (GET_CODE (v1) == COND_EXEC)
                    v1 = COND_EXEC_CODE (v1);
                  if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
                    {
                      rtx dest = SET_DEST (v1);
                      while (GET_CODE (dest) == SUBREG
                             || GET_CODE (dest) == ZERO_EXTRACT
                             || GET_CODE (dest) == STRICT_LOW_PART)
                        dest = XEXP (dest, 0);
                      if (REG_P (dest))
                        {
                          int xregno = REGNO (dest);
                          int xnregs;
                          if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
                            xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
                          else
                            xnregs = 1;
                          if (xregno < regno + nregs
                              && xregno + xnregs > regno)
                            return 0;
                          if (xregno < valueno + valuenregs
                              && xregno + xnregs > valueno)
                            return 0;
                          if (goal_mem_addr_varies
                              && reg_overlap_mentioned_for_reload_p (dest,
                                                                     goal))
                            return 0;
                          if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
                            return 0;
                        }
                      else if (goal_mem && MEM_P (dest)
                               && ! push_operand (dest, GET_MODE (dest)))
                        return 0;
                      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
                               && reg_equiv_memory_loc (regno) != 0)
                        return 0;
                      else if (need_stable_sp
                               && push_operand (dest, GET_MODE (dest)))
                        return 0;
                    }
                }
            }

          if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
            {
              rtx link;

              for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
                   link = XEXP (link, 1))
                {
                  pat = XEXP (link, 0);
                  if (GET_CODE (pat) == CLOBBER)
                    {
                      rtx dest = SET_DEST (pat);

                      if (REG_P (dest))
                        {
                          int xregno = REGNO (dest);
                          int xnregs
                            = hard_regno_nregs[xregno][GET_MODE (dest)];

                          if (xregno < regno + nregs
                              && xregno + xnregs > regno)
                            return 0;
                          else if (xregno < valueno + valuenregs
                                   && xregno + xnregs > valueno)
                            return 0;
                          else if (goal_mem_addr_varies
                                   && reg_overlap_mentioned_for_reload_p (dest,
                                                                          goal))
                            return 0;
                        }

                      else if (goal_mem && MEM_P (dest)
                               && ! push_operand (dest, GET_MODE (dest)))
                        return 0;
                      else if (need_stable_sp
                               && push_operand (dest, GET_MODE (dest)))
                        return 0;
                    }
                }
            }

#ifdef AUTO_INC_DEC
          /* If this insn auto-increments or auto-decrements
             either regno or valueno, return 0 now.
             If GOAL is a memory ref and its address is not constant,
             and this insn P increments a register used in GOAL, return 0.  */
          {
            rtx link;

            for (link = REG_NOTES (p); link; link = XEXP (link, 1))
              if (REG_NOTE_KIND (link) == REG_INC
                  && REG_P (XEXP (link, 0)))
                {
                  int incno = REGNO (XEXP (link, 0));
                  if (incno < regno + nregs && incno >= regno)
                    return 0;
                  if (incno < valueno + valuenregs && incno >= valueno)
                    return 0;
                  if (goal_mem_addr_varies
                      && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
                                                             goal))
                    return 0;
                }
          }
#endif
        }
    }
}
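
/* Editorial note: a disabled sketch of a typical find_equiv_reg query.
   It asks whether some register of class RCLASS already holds the value
   of IN just before INSN, so an input reload might be satisfied without
   emitting a new load.  sketch_find_existing_copy and its parameters are
   hypothetical.  */
#if 0
static rtx
sketch_find_existing_copy (rtx in, rtx insn, enum reg_class rclass,
                           enum machine_mode mode)
{
  /* OTHER is -1 because any register of RCLASS is acceptable; GOALREG is
     ignored because GOAL is nonzero; passing (short *) 1 for RELOAD_REG_P
     rejects hard regs that already appear in reload_reg_rtx.  */
  return find_equiv_reg (in, insn, rclass, -1, (short *) 1, -1, mode);
}
#endif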

/* Find a place where INCED appears in an increment or decrement operator
   within X, and return the amount INCED is incremented or decremented by.
   The value is always positive.  */

static int
find_inc_amount (rtx x, rtx inced)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i;

  if (code == MEM)
    {
      rtx addr = XEXP (x, 0);
      if ((GET_CODE (addr) == PRE_DEC
           || GET_CODE (addr) == POST_DEC
           || GET_CODE (addr) == PRE_INC
           || GET_CODE (addr) == POST_INC)
          && XEXP (addr, 0) == inced)
        return GET_MODE_SIZE (GET_MODE (x));
      else if ((GET_CODE (addr) == PRE_MODIFY
                || GET_CODE (addr) == POST_MODIFY)
               && GET_CODE (XEXP (addr, 1)) == PLUS
               && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
               && XEXP (addr, 0) == inced
               && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
        {
          i = INTVAL (XEXP (XEXP (addr, 1), 1));
          return i < 0 ? -i : i;
        }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          int tem = find_inc_amount (XEXP (x, i), inced);
          if (tem != 0)
            return tem;
        }
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              int tem = find_inc_amount (XVECEXP (x, i, j), inced);
              if (tem != 0)
                return tem;
            }
        }
    }

  return 0;
}
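
/* Editorial note: a disabled sketch showing the usual pairing of a REG_INC
   note with find_inc_amount: the note names the register, and the insn
   pattern is searched for the matching side-effect address to learn by how
   much that register changes.  The helper name is hypothetical.  */
#if 0
static int
sketch_inc_amount_for_note (rtx insn, rtx link)
{
  /* LINK is assumed to be a REG_INC note of INSN; XEXP (link, 0) is the
     auto-incremented (or auto-decremented) register.  */
  return find_inc_amount (PATTERN (insn), XEXP (link, 0));
}
#endif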

/* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
   REG_INC note in insn INSN.  REGNO must refer to a hard register.  */

#ifdef AUTO_INC_DEC
static int
reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
                           rtx insn)
{
  rtx link;

  gcc_assert (insn);

  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      {
        unsigned int test = (int) REGNO (XEXP (link, 0));
        if (test >= regno && test < endregno)
          return 1;
      }
  return 0;
}
#else

#define reg_inc_found_and_valid_p(regno,endregno,insn) 0

#endif

/* Return 1 if register REGNO is the subject of a clobber in insn INSN.
   If SETS is 1, also consider SETs.  If SETS is 2, enable checking
   REG_INC.  REGNO must refer to a hard register.  */

int
regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
                   int sets)
{
  unsigned int nregs, endregno;

  /* regno must be a hard register.  */
  gcc_assert (regno < FIRST_PSEUDO_REGISTER);

  nregs = hard_regno_nregs[regno][mode];
  endregno = regno + nregs;

  if ((GET_CODE (PATTERN (insn)) == CLOBBER
       || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
      && REG_P (XEXP (PATTERN (insn), 0)))
    {
      unsigned int test = REGNO (XEXP (PATTERN (insn), 0));

      return test >= regno && test < endregno;
    }

  if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
    return 1;

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      int i = XVECLEN (PATTERN (insn), 0) - 1;

      for (; i >= 0; i--)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);
          if ((GET_CODE (elt) == CLOBBER
               || (sets == 1 && GET_CODE (elt) == SET))
              && REG_P (XEXP (elt, 0)))
            {
              unsigned int test = REGNO (XEXP (elt, 0));

              if (test >= regno && test < endregno)
                return 1;
            }
          if (sets == 2
              && reg_inc_found_and_valid_p (regno, endregno, elt))
            return 1;
        }
    }

  return 0;
}

/* Find the low part, with mode MODE, of a hard regno RELOADREG.  */
rtx
reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
{
  int regno;

  if (GET_MODE (reloadreg) == mode)
    return reloadreg;

  regno = REGNO (reloadreg);

  if (REG_WORDS_BIG_ENDIAN)
    regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
             - (int) hard_regno_nregs[regno][mode];

  return gen_rtx_REG (mode, regno);
}
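
/* Editorial note: a disabled sketch of a regno_clobbered_p query.  It asks
   whether the hard register of REG (as widened by MODE) is clobbered by
   INSN.  Per the comment and code above, SETS == 1 also treats SETs as
   clobbers, while SETS == 2 instead additionally checks REG_INC notes.
   The helper name is hypothetical.  */
#if 0
static int
sketch_operand_reg_clobbered_p (rtx reg, enum machine_mode mode, rtx insn)
{
  /* SETS == 2: look at CLOBBERs plus auto-increment REG_INC notes.  */
  return regno_clobbered_p (REGNO (reg), insn, mode, 2);
}
#endif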

static const char *const reload_when_needed_name[] =
{
  "RELOAD_FOR_INPUT",
  "RELOAD_FOR_OUTPUT",
  "RELOAD_FOR_INSN",
  "RELOAD_FOR_INPUT_ADDRESS",
  "RELOAD_FOR_INPADDR_ADDRESS",
  "RELOAD_FOR_OUTPUT_ADDRESS",
  "RELOAD_FOR_OUTADDR_ADDRESS",
  "RELOAD_FOR_OPERAND_ADDRESS",
  "RELOAD_FOR_OPADDR_ADDR",
  "RELOAD_OTHER",
  "RELOAD_FOR_OTHER_ADDRESS"
};

/* These functions are used to print the variables set by 'find_reloads'.  */

DEBUG_FUNCTION void
debug_reload_to_stream (FILE *f)
{
  int r;
  const char *prefix;

  if (! f)
    f = stderr;
  for (r = 0; r < n_reloads; r++)
    {
      fprintf (f, "Reload %d: ", r);

      if (rld[r].in != 0)
        {
          fprintf (f, "reload_in (%s) = ",
                   GET_MODE_NAME (rld[r].inmode));
          print_inline_rtx (f, rld[r].in, 24);
          fprintf (f, "\n\t");
        }

      if (rld[r].out != 0)
        {
          fprintf (f, "reload_out (%s) = ",
                   GET_MODE_NAME (rld[r].outmode));
          print_inline_rtx (f, rld[r].out, 24);
          fprintf (f, "\n\t");
        }

      fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);

      fprintf (f, "%s (opnum = %d)",
               reload_when_needed_name[(int) rld[r].when_needed],
               rld[r].opnum);

      if (rld[r].optional)
        fprintf (f, ", optional");

      if (rld[r].nongroup)
        fprintf (f, ", nongroup");

      if (rld[r].inc != 0)
        fprintf (f, ", inc by %d", rld[r].inc);

      if (rld[r].nocombine)
        fprintf (f, ", can't combine");

      if (rld[r].secondary_p)
        fprintf (f, ", secondary_reload_p");

      if (rld[r].in_reg != 0)
        {
          fprintf (f, "\n\treload_in_reg: ");
          print_inline_rtx (f, rld[r].in_reg, 24);
        }

      if (rld[r].out_reg != 0)
        {
          fprintf (f, "\n\treload_out_reg: ");
          print_inline_rtx (f, rld[r].out_reg, 24);
        }

      if (rld[r].reg_rtx != 0)
        {
          fprintf (f, "\n\treload_reg_rtx: ");
          print_inline_rtx (f, rld[r].reg_rtx, 24);
        }

      prefix = "\n\t";
      if (rld[r].secondary_in_reload != -1)
        {
          fprintf (f, "%ssecondary_in_reload = %d",
                   prefix, rld[r].secondary_in_reload);
          prefix = ", ";
        }

      if (rld[r].secondary_out_reload != -1)
        fprintf (f, "%ssecondary_out_reload = %d\n",
                 prefix, rld[r].secondary_out_reload);

      prefix = "\n\t";
      if (rld[r].secondary_in_icode != CODE_FOR_nothing)
        {
          fprintf (f, "%ssecondary_in_icode = %s", prefix,
                   insn_data[rld[r].secondary_in_icode].name);
          prefix = ", ";
        }

      if (rld[r].secondary_out_icode != CODE_FOR_nothing)
        fprintf (f, "%ssecondary_out_icode = %s", prefix,
                 insn_data[rld[r].secondary_out_icode].name);

      fprintf (f, "\n");
    }
}

DEBUG_FUNCTION void
debug_reload (void)
{
  debug_reload_to_stream (stderr);
}
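
/* Editorial note: a disabled sketch of how these debug helpers are
   typically driven.  debug_reload is convenient from a debugger (for
   example "call debug_reload ()" after find_reloads has run), while
   debug_reload_to_stream lets the same report go to a dump file.  The
   wrapper name is hypothetical.  */
#if 0
static void
sketch_dump_reloads (FILE *dump)
{
  if (dump)
    debug_reload_to_stream (dump);  /* Write the report to DUMP ...  */
  else
    debug_reload ();                /* ... or fall back to stderr.  */
}
#endif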