/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "insn-attr.h"
#include "addresses.h"
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "reload.h"
#include "tree-pass.h"

#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);

struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
                                      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}


/* Return true if labels in asm operands BODY are LABEL_REFs.  */
*/ 114 115 static bool 116 asm_labels_ok (rtx body) 117 { 118 rtx asmop; 119 int i; 120 121 asmop = extract_asm_operands (body); 122 if (asmop == NULL_RTX) 123 return true; 124 125 for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++) 126 if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF) 127 return false; 128 129 return true; 130 } 131 132 /* Check that X is an insn-body for an `asm' with operands 133 and that the operands mentioned in it are legitimate. */ 134 135 int 136 check_asm_operands (rtx x) 137 { 138 int noperands; 139 rtx *operands; 140 const char **constraints; 141 int i; 142 143 if (!asm_labels_ok (x)) 144 return 0; 145 146 /* Post-reload, be more strict with things. */ 147 if (reload_completed) 148 { 149 /* ??? Doh! We've not got the wrapping insn. Cook one up. */ 150 rtx_insn *insn = make_insn_raw (x); 151 extract_insn (insn); 152 constrain_operands (1, get_enabled_alternatives (insn)); 153 return which_alternative >= 0; 154 } 155 156 noperands = asm_noperands (x); 157 if (noperands < 0) 158 return 0; 159 if (noperands == 0) 160 return 1; 161 162 operands = XALLOCAVEC (rtx, noperands); 163 constraints = XALLOCAVEC (const char *, noperands); 164 165 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL); 166 167 for (i = 0; i < noperands; i++) 168 { 169 const char *c = constraints[i]; 170 if (c[0] == '%') 171 c++; 172 if (! asm_operand_ok (operands[i], c, constraints)) 173 return 0; 174 } 175 176 return 1; 177 } 178 179 /* Static data for the next two routines. */ 180 181 struct change_t 182 { 183 rtx object; 184 int old_code; 185 bool unshare; 186 rtx *loc; 187 rtx old; 188 }; 189 190 static change_t *changes; 191 static int changes_allocated; 192 193 static int num_changes = 0; 194 195 /* Validate a proposed change to OBJECT. LOC is the location in the rtl 196 at which NEW_RTX will be placed. If OBJECT is zero, no validation is done, 197 the change is simply made. 198 199 Two types of objects are supported: If OBJECT is a MEM, memory_address_p 200 will be called with the address and mode as parameters. If OBJECT is 201 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with 202 the change in place. 203 204 IN_GROUP is nonzero if this is part of a group of changes that must be 205 performed as a group. In that case, the changes will be stored. The 206 function `apply_change_group' will validate and apply the changes. 207 208 If IN_GROUP is zero, this is a single change. Try to recognize the insn 209 or validate the memory reference with the change applied. If the result 210 is not valid for the machine, suppress the change and return zero. 211 Otherwise, perform the change and return 1. */ 212 213 static bool 214 validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare) 215 { 216 rtx old = *loc; 217 218 if (old == new_rtx || rtx_equal_p (old, new_rtx)) 219 return 1; 220 221 gcc_assert (in_group != 0 || num_changes == 0); 222 223 *loc = new_rtx; 224 225 /* Save the information describing this change. */ 226 if (num_changes >= changes_allocated) 227 { 228 if (changes_allocated == 0) 229 /* This value allows for repeated substitutions inside complex 230 indexed addresses, or changes in up to 5 insns. 
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 that omits the UNSHARE argument,
   defaulting it to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 that omits the UNSHARE argument,
   defaulting it to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}


/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx_insn *insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}


/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, clobbers which have to be added in order to
   match the instructions will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed
                      && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;


  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (/* changes[i].old might be zero, e.g. when putting a
                  REG_FRAME_RELATED_EXPR into a previously empty list.  */
               changes[i].old
               && REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (as_a <rtx_insn *> (last_object));
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
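
/* Illustrative sketch (not part of GCC): a typical user of the change-group
   machinery above queues several tentative edits with IN_GROUP nonzero and
   then commits or rolls them back atomically.  INSN, NEW_SRC and NEW_DEST
   are hypothetical caller-supplied values, and PATTERN (INSN) is assumed to
   be a SET:

     validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
     validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);
     if (!apply_change_group ())
       {
         // Both changes have already been undone via cancel_changes (0);
         // INSN is exactly as it was before the calls above.
       }

   Each validate_change call records the old contents, so a failed group
   leaves the rtl untouched.  */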

/* Reduce conditional compilation elsewhere.  */
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
                          machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
        new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
                                            op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
                                             XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
                                                 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          machine_mode wanted_mode = VOIDmode;
          machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
            {
              wanted_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
            {
              wanted_mode = insn_data[targetm.code_for_extv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourselves recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X)),
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx_insn *insn;               /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
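
/* Illustrative sketch (not part of GCC): replacing one rtx with another
   throughout an insn and validating the result in a single step.  FROM_REG,
   TO_REG and INSN are hypothetical caller-supplied values:

     if (validate_replace_rtx (from_reg, to_reg, insn))
       {
         // Either FROM_REG never occurred (nothing was changed), or every
         // occurrence was rewritten and INSN still re-recognizes; in the
         // latter case the change group has been confirmed.
       }
     else
       {
         // The rewritten insn failed to re-recognize; all substitutions
         // were rolled back and INSN is unchanged.
       }

   validate_replace_rtx_group is the variant to use when the replacement
   should stay queued in a larger change group; as noted above it also
   substitutes inside REG_EQUAL and REG_EQUIV notes.  */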

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx_insn *insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx_insn *insn)
{
  rtx_insn *next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
          /* LRA can generate some invalid SUBREGS just for matched
             operand reload presentation.  LRA needs to treat them as
             valid.  */
          && ! LRA_SUBREG_P (op))
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
         generate move insn with invalid addresses which is made valid
         and efficiently calculated by LRA through further numerous
         transformations.  */
      if (lra_in_progress
          || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
        return 0;
    }
  else if (!REG_P (op))
    return 0;
  return general_operand (op, mode);
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress
                  || (REGNO (op) < FIRST_PSEUDO_REGISTER
                      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */

int
const_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */
int
const_scalar_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      int prec = GET_MODE_PRECISION (mode);
      int bitsize = GET_MODE_BITSIZE (mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
        return 0;

      if (prec == bitsize)
        return 1;
      else
        {
          /* Multiword partial int.  */
          HOST_WIDE_INT x
            = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
          return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
        }
    }
  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of mode MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
          && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of mode MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
#endif
/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
          || INTVAL (XEXP (XEXP (op, 1), 1))
             != ((STACK_GROWS_DOWNWARD ? -1 : 1) * (int) rounded_size))
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
   return 0.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int i, n_sets = 0;

  if (asm_op == NULL)
    {
      if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
          && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
        {
          /* body is [(asm_input ...) (clobber (reg ...))...].  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
          return 0;
        }
      return -1;
    }

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Write the location info into LOC.
   Return the assembler-template.
   If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
   return the basic assembly string.

   If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;                /* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        else if (GET_CODE (asmop) == ASM_INPUT)
          {
            if (loc)
              *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
            return XSTR (asmop, 0);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}

/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
                         unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;
  while (*p)
    switch (*p)
      {
      case '%':
        p += 1;
        /* A letter followed by a digit indicates an operand number.  */
        if (ISALPHA (p[0]) && ISDIGIT (p[1]))
          p += 1;
        if (ISDIGIT (*p))
          {
            char *endptr;
            unsigned long opnum = strtoul (p, &endptr, 10);
            if (endptr != p && opnum < noperands)
              used[opnum] = true;
            p = endptr;
          }
        else
          p += 1;
        break;

      default:
        p++;
        break;
      }
}
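
/* Illustrative sketch (not part of GCC): distinguishing a barrier-like asm
   from one that really references its operands, following the two example
   templates in the comment above.  The array size is hypothetical:

     bool used[1];

     get_referenced_operands ("", used, 1);
     // used[0] is false: the empty template never mentions %0.

     get_referenced_operands ("sw\t$0, %0", used, 1);
     // used[0] is true: %0 is referenced by the template.  */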

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
  bool incdec_ok = false;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      enum constraint_num cn;
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If the caller provided a constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

          /* The rest of the compiler assumes that reloading the address
             of a MEM into a register will make it fit an 'o' constraint.
             That is, if it sees a MEM operand for an 'o' constraint,
             it assumes that (mem (base-reg)) will fit.

             That assumption fails on targets that don't have offsettable
             addresses at all.  We therefore need to treat 'o' asm
             constraints as a special case and only accept operands that
             are already offsettable, thus proving that at least one
             offsettable address exists.  */
        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        case '<':
        case '>':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */
          incdec_ok = true;
          /* FALLTHRU */
        default:
          cn = lookup_constraint (constraint);
          switch (get_constraint_type (cn))
            {
            case CT_REGISTER:
              if (!result
                  && reg_class_for_constraint (cn) != NO_REGS
                  && GET_MODE (op) != BLKmode
                  && register_operand (op, VOIDmode))
                result = 1;
              break;

            case CT_CONST_INT:
              if (!result
                  && CONST_INT_P (op)
                  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
                result = 1;
              break;

            case CT_MEMORY:
            case CT_SPECIAL_MEMORY:
              /* Every memory operand can be reloaded to fit.  */
              result = result || memory_operand (op, VOIDmode);
              break;

            case CT_ADDRESS:
              /* Every address operand can be reloaded to fit.  */
              result = result || address_operand (op, VOIDmode);
              break;

            case CT_FIXED_FORM:
              result = result || constraint_satisfied_p (op, cn);
              break;
            }
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  /* For operands without < or > constraints reject side-effects.  */
  if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }

  return result;
}

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
                                  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (machine_mode, rtx, addr_space_t) =
strict_memory_address_addr_space_p 1955 : memory_address_addr_space_p); 1956 unsigned int mode_sz = GET_MODE_SIZE (mode); 1957 1958 if (CONSTANT_ADDRESS_P (y)) 1959 return 1; 1960 1961 /* Adjusting an offsettable address involves changing to a narrower mode. 1962 Make sure that's OK. */ 1963 1964 if (mode_dependent_address_p (y, as)) 1965 return 0; 1966 1967 machine_mode address_mode = GET_MODE (y); 1968 if (address_mode == VOIDmode) 1969 address_mode = targetm.addr_space.address_mode (as); 1970 #ifdef POINTERS_EXTEND_UNSIGNED 1971 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as); 1972 #endif 1973 1974 /* ??? How much offset does an offsettable BLKmode reference need? 1975 Clearly that depends on the situation in which it's being used. 1976 However, the current situation in which we test 0xffffffff is 1977 less than ideal. Caveat user. */ 1978 if (mode_sz == 0) 1979 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT; 1980 1981 /* If the expression contains a constant term, 1982 see if it remains valid when max possible offset is added. */ 1983 1984 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1))) 1985 { 1986 int good; 1987 1988 y1 = *y2; 1989 *y2 = plus_constant (address_mode, *y2, mode_sz - 1); 1990 /* Use QImode because an odd displacement may be automatically invalid 1991 for any wider mode. But it should be valid for a single byte. */ 1992 good = (*addressp) (QImode, y, as); 1993 1994 /* In any case, restore old contents of memory. */ 1995 *y2 = y1; 1996 return good; 1997 } 1998 1999 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC) 2000 return 0; 2001 2002 /* The offset added here is chosen as the maximum offset that 2003 any instruction could need to add when operating on something 2004 of the specified mode. We assume that if Y and Y+c are 2005 valid addresses then so is Y+d for all 0<d<c. adjust_address will 2006 go inside a LO_SUM here, so we do so as well. */ 2007 if (GET_CODE (y) == LO_SUM 2008 && mode != BLKmode 2009 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT) 2010 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0), 2011 plus_constant (address_mode, XEXP (y, 1), 2012 mode_sz - 1)); 2013 #ifdef POINTERS_EXTEND_UNSIGNED 2014 /* Likewise for a ZERO_EXTEND from pointer_mode. */ 2015 else if (POINTERS_EXTEND_UNSIGNED > 0 2016 && GET_CODE (y) == ZERO_EXTEND 2017 && GET_MODE (XEXP (y, 0)) == pointer_mode) 2018 z = gen_rtx_ZERO_EXTEND (address_mode, 2019 plus_constant (pointer_mode, XEXP (y, 0), 2020 mode_sz - 1)); 2021 #endif 2022 else 2023 z = plus_constant (address_mode, y, mode_sz - 1); 2024 2025 /* Use QImode because an odd displacement may be automatically invalid 2026 for any wider mode. But it should be valid for a single byte. */ 2027 return (*addressp) (QImode, z, as); 2028 } 2029 2030 /* Return 1 if ADDR is an address-expression whose effect depends 2031 on the mode of the memory reference it is used in. 2032 2033 ADDRSPACE is the address space associated with the address. 2034 2035 Autoincrement addressing is a typical example of mode-dependence 2036 because the amount of the increment depends on the mode. */ 2037 2038 bool 2039 mode_dependent_address_p (rtx addr, addr_space_t addrspace) 2040 { 2041 /* Auto-increment addressing with anything other than post_modify 2042 or pre_modify always introduces a mode dependency. Catch such 2043 cases now instead of deferring to the target. 
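     For example, (mem:SI (post_inc (reg:SI R))) steps R by 4 bytes
     while (mem:QI (post_inc (reg:SI R))) steps it by only 1, so the
     same address expression means different things in different
     modes.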
*/ 2044 if (GET_CODE (addr) == PRE_INC 2045 || GET_CODE (addr) == POST_INC 2046 || GET_CODE (addr) == PRE_DEC 2047 || GET_CODE (addr) == POST_DEC) 2048 return true; 2049 2050 return targetm.mode_dependent_address_p (addr, addrspace); 2051 } 2052 2053 /* Return true if boolean attribute ATTR is supported. */ 2054 2055 static bool 2056 have_bool_attr (bool_attr attr) 2057 { 2058 switch (attr) 2059 { 2060 case BA_ENABLED: 2061 return HAVE_ATTR_enabled; 2062 case BA_PREFERRED_FOR_SIZE: 2063 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size; 2064 case BA_PREFERRED_FOR_SPEED: 2065 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed; 2066 } 2067 gcc_unreachable (); 2068 } 2069 2070 /* Return the value of ATTR for instruction INSN. */ 2071 2072 static bool 2073 get_bool_attr (rtx_insn *insn, bool_attr attr) 2074 { 2075 switch (attr) 2076 { 2077 case BA_ENABLED: 2078 return get_attr_enabled (insn); 2079 case BA_PREFERRED_FOR_SIZE: 2080 return get_attr_enabled (insn) && get_attr_preferred_for_size (insn); 2081 case BA_PREFERRED_FOR_SPEED: 2082 return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn); 2083 } 2084 gcc_unreachable (); 2085 } 2086 2087 /* Like get_bool_attr_mask, but don't use the cache. */ 2088 2089 static alternative_mask 2090 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr) 2091 { 2092 /* Temporarily install enough information for get_attr_<foo> to assume 2093 that the insn operands are already cached. As above, the attribute 2094 mustn't depend on the values of operands, so we don't provide their 2095 real values here. */ 2096 rtx_insn *old_insn = recog_data.insn; 2097 int old_alternative = which_alternative; 2098 2099 recog_data.insn = insn; 2100 alternative_mask mask = ALL_ALTERNATIVES; 2101 int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives; 2102 for (int i = 0; i < n_alternatives; i++) 2103 { 2104 which_alternative = i; 2105 if (!get_bool_attr (insn, attr)) 2106 mask &= ~ALTERNATIVE_BIT (i); 2107 } 2108 2109 recog_data.insn = old_insn; 2110 which_alternative = old_alternative; 2111 return mask; 2112 } 2113 2114 /* Return the mask of operand alternatives that are allowed for INSN 2115 by boolean attribute ATTR. This mask depends only on INSN and on 2116 the current target; it does not depend on things like the values of 2117 operands. */ 2118 2119 static alternative_mask 2120 get_bool_attr_mask (rtx_insn *insn, bool_attr attr) 2121 { 2122 /* Quick exit for asms and for targets that don't use these attributes. */ 2123 int code = INSN_CODE (insn); 2124 if (code < 0 || !have_bool_attr (attr)) 2125 return ALL_ALTERNATIVES; 2126 2127 /* Calling get_attr_<foo> can be expensive, so cache the mask 2128 for speed. */ 2129 if (!this_target_recog->x_bool_attr_masks[code][attr]) 2130 this_target_recog->x_bool_attr_masks[code][attr] 2131 = get_bool_attr_mask_uncached (insn, attr); 2132 return this_target_recog->x_bool_attr_masks[code][attr]; 2133 } 2134 2135 /* Return the set of alternatives of INSN that are allowed by the current 2136 target. */ 2137 2138 alternative_mask 2139 get_enabled_alternatives (rtx_insn *insn) 2140 { 2141 return get_bool_attr_mask (insn, BA_ENABLED); 2142 } 2143 2144 /* Return the set of alternatives of INSN that are allowed by the current 2145 target and are preferred for the current size/speed optimization 2146 choice. 
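   A caller might combine this with constrain_operands roughly as
   follows (a sketch only; real callers such as the register
   allocators add more context):

     extract_insn (insn);
     if (!constrain_operands (reload_completed,
                              get_preferred_alternatives (insn)))
       ... no preferred alternative matches; fall back to
           get_enabled_alternatives (insn) ...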
*/ 2147 2148 alternative_mask 2149 get_preferred_alternatives (rtx_insn *insn) 2150 { 2151 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn))) 2152 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED); 2153 else 2154 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE); 2155 } 2156 2157 /* Return the set of alternatives of INSN that are allowed by the current 2158 target and are preferred for the size/speed optimization choice 2159 associated with BB. Passing a separate BB is useful if INSN has not 2160 been emitted yet or if we are considering moving it to a different 2161 block. */ 2162 2163 alternative_mask 2164 get_preferred_alternatives (rtx_insn *insn, basic_block bb) 2165 { 2166 if (optimize_bb_for_speed_p (bb)) 2167 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED); 2168 else 2169 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE); 2170 } 2171 2172 /* Assert that the cached boolean attributes for INSN are still accurate. 2173 The backend is required to define these attributes in a way that only 2174 depends on the current target (rather than operands, compiler phase, 2175 etc.). */ 2176 2177 bool 2178 check_bool_attrs (rtx_insn *insn) 2179 { 2180 int code = INSN_CODE (insn); 2181 if (code >= 0) 2182 for (int i = 0; i <= BA_LAST; ++i) 2183 { 2184 enum bool_attr attr = (enum bool_attr) i; 2185 if (this_target_recog->x_bool_attr_masks[code][attr]) 2186 gcc_assert (this_target_recog->x_bool_attr_masks[code][attr] 2187 == get_bool_attr_mask_uncached (insn, attr)); 2188 } 2189 return true; 2190 } 2191 2192 /* Like extract_insn, but save insn extracted and don't extract again, when 2193 called again for the same insn expecting that recog_data still contain the 2194 valid information. This is used primary by gen_attr infrastructure that 2195 often does extract insn again and again. */ 2196 void 2197 extract_insn_cached (rtx_insn *insn) 2198 { 2199 if (recog_data.insn == insn && INSN_CODE (insn) >= 0) 2200 return; 2201 extract_insn (insn); 2202 recog_data.insn = insn; 2203 } 2204 2205 /* Do uncached extract_insn, constrain_operands and complain about failures. 2206 This should be used when extracting a pre-existing constrained instruction 2207 if the caller wants to know which alternative was chosen. */ 2208 void 2209 extract_constrain_insn (rtx_insn *insn) 2210 { 2211 extract_insn (insn); 2212 if (!constrain_operands (reload_completed, get_enabled_alternatives (insn))) 2213 fatal_insn_not_found (insn); 2214 } 2215 2216 /* Do cached extract_insn, constrain_operands and complain about failures. 2217 Used by insn_attrtab. */ 2218 void 2219 extract_constrain_insn_cached (rtx_insn *insn) 2220 { 2221 extract_insn_cached (insn); 2222 if (which_alternative == -1 2223 && !constrain_operands (reload_completed, 2224 get_enabled_alternatives (insn))) 2225 fatal_insn_not_found (insn); 2226 } 2227 2228 /* Do cached constrain_operands on INSN and complain about failures. */ 2229 int 2230 constrain_operands_cached (rtx_insn *insn, int strict) 2231 { 2232 if (which_alternative == -1) 2233 return constrain_operands (strict, get_enabled_alternatives (insn)); 2234 else 2235 return 1; 2236 } 2237 2238 /* Analyze INSN and fill in recog_data. 
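   On return, recog_data.n_operands, recog_data.operand[],
   recog_data.operand_loc[], recog_data.constraints[],
   recog_data.operand_mode[] and recog_data.operand_type[] describe
   the insn.  A caller typically walks them like this (an illustrative
   sketch; note_operand_use stands for whatever per-operand work the
   caller needs):

     extract_insn (insn);
     for (int i = 0; i < recog_data.n_operands; i++)
       note_operand_use (recog_data.operand[i],
                         recog_data.operand_mode[i],
                         recog_data.operand_type[i]);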
*/ 2239 2240 void 2241 extract_insn (rtx_insn *insn) 2242 { 2243 int i; 2244 int icode; 2245 int noperands; 2246 rtx body = PATTERN (insn); 2247 2248 recog_data.n_operands = 0; 2249 recog_data.n_alternatives = 0; 2250 recog_data.n_dups = 0; 2251 recog_data.is_asm = false; 2252 2253 switch (GET_CODE (body)) 2254 { 2255 case USE: 2256 case CLOBBER: 2257 case ASM_INPUT: 2258 case ADDR_VEC: 2259 case ADDR_DIFF_VEC: 2260 case VAR_LOCATION: 2261 return; 2262 2263 case SET: 2264 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS) 2265 goto asm_insn; 2266 else 2267 goto normal_insn; 2268 case PARALLEL: 2269 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET 2270 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS) 2271 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS 2272 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT) 2273 goto asm_insn; 2274 else 2275 goto normal_insn; 2276 case ASM_OPERANDS: 2277 asm_insn: 2278 recog_data.n_operands = noperands = asm_noperands (body); 2279 if (noperands >= 0) 2280 { 2281 /* This insn is an `asm' with operands. */ 2282 2283 /* expand_asm_operands makes sure there aren't too many operands. */ 2284 gcc_assert (noperands <= MAX_RECOG_OPERANDS); 2285 2286 /* Now get the operand values and constraints out of the insn. */ 2287 decode_asm_operands (body, recog_data.operand, 2288 recog_data.operand_loc, 2289 recog_data.constraints, 2290 recog_data.operand_mode, NULL); 2291 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator); 2292 if (noperands > 0) 2293 { 2294 const char *p = recog_data.constraints[0]; 2295 recog_data.n_alternatives = 1; 2296 while (*p) 2297 recog_data.n_alternatives += (*p++ == ','); 2298 } 2299 recog_data.is_asm = true; 2300 break; 2301 } 2302 fatal_insn_not_found (insn); 2303 2304 default: 2305 normal_insn: 2306 /* Ordinary insn: recognize it, get the operands via insn_extract 2307 and get the constraints. */ 2308 2309 icode = recog_memoized (insn); 2310 if (icode < 0) 2311 fatal_insn_not_found (insn); 2312 2313 recog_data.n_operands = noperands = insn_data[icode].n_operands; 2314 recog_data.n_alternatives = insn_data[icode].n_alternatives; 2315 recog_data.n_dups = insn_data[icode].n_dups; 2316 2317 insn_extract (insn); 2318 2319 for (i = 0; i < noperands; i++) 2320 { 2321 recog_data.constraints[i] = insn_data[icode].operand[i].constraint; 2322 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator; 2323 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode; 2324 /* VOIDmode match_operands gets mode from their real operand. */ 2325 if (recog_data.operand_mode[i] == VOIDmode) 2326 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]); 2327 } 2328 } 2329 for (i = 0; i < noperands; i++) 2330 recog_data.operand_type[i] 2331 = (recog_data.constraints[i][0] == '=' ? OP_OUT 2332 : recog_data.constraints[i][0] == '+' ? OP_INOUT 2333 : OP_IN); 2334 2335 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES); 2336 2337 recog_data.insn = NULL; 2338 which_alternative = -1; 2339 } 2340 2341 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS operands, 2342 N_ALTERNATIVES alternatives and constraint strings CONSTRAINTS. 2343 OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries and CONSTRAINTS 2344 has N_OPERANDS entries. 
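   The entry for operand OP in alternative A is therefore
   OP_ALT_BASE[A * N_OPERANDS + OP].  For example, a caller inspecting
   operand 2 of alternative 1 in a three-operand insn would look at
   (an illustrative sketch):

     const operand_alternative *entry = &op_alt_base[1 * 3 + 2];
     if (entry->anything_ok || entry->cl != NO_REGS)
       ...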
*/ 2345 2346 void 2347 preprocess_constraints (int n_operands, int n_alternatives, 2348 const char **constraints, 2349 operand_alternative *op_alt_base) 2350 { 2351 for (int i = 0; i < n_operands; i++) 2352 { 2353 int j; 2354 struct operand_alternative *op_alt; 2355 const char *p = constraints[i]; 2356 2357 op_alt = op_alt_base; 2358 2359 for (j = 0; j < n_alternatives; j++, op_alt += n_operands) 2360 { 2361 op_alt[i].cl = NO_REGS; 2362 op_alt[i].constraint = p; 2363 op_alt[i].matches = -1; 2364 op_alt[i].matched = -1; 2365 2366 if (*p == '\0' || *p == ',') 2367 { 2368 op_alt[i].anything_ok = 1; 2369 continue; 2370 } 2371 2372 for (;;) 2373 { 2374 char c = *p; 2375 if (c == '#') 2376 do 2377 c = *++p; 2378 while (c != ',' && c != '\0'); 2379 if (c == ',' || c == '\0') 2380 { 2381 p++; 2382 break; 2383 } 2384 2385 switch (c) 2386 { 2387 case '?': 2388 op_alt[i].reject += 6; 2389 break; 2390 case '!': 2391 op_alt[i].reject += 600; 2392 break; 2393 case '&': 2394 op_alt[i].earlyclobber = 1; 2395 break; 2396 2397 case '0': case '1': case '2': case '3': case '4': 2398 case '5': case '6': case '7': case '8': case '9': 2399 { 2400 char *end; 2401 op_alt[i].matches = strtoul (p, &end, 10); 2402 op_alt[op_alt[i].matches].matched = i; 2403 p = end; 2404 } 2405 continue; 2406 2407 case 'X': 2408 op_alt[i].anything_ok = 1; 2409 break; 2410 2411 case 'g': 2412 op_alt[i].cl = 2413 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS]; 2414 break; 2415 2416 default: 2417 enum constraint_num cn = lookup_constraint (p); 2418 enum reg_class cl; 2419 switch (get_constraint_type (cn)) 2420 { 2421 case CT_REGISTER: 2422 cl = reg_class_for_constraint (cn); 2423 if (cl != NO_REGS) 2424 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl]; 2425 break; 2426 2427 case CT_CONST_INT: 2428 break; 2429 2430 case CT_MEMORY: 2431 case CT_SPECIAL_MEMORY: 2432 op_alt[i].memory_ok = 1; 2433 break; 2434 2435 case CT_ADDRESS: 2436 op_alt[i].is_address = 1; 2437 op_alt[i].cl 2438 = (reg_class_subunion 2439 [(int) op_alt[i].cl] 2440 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC, 2441 ADDRESS, SCRATCH)]); 2442 break; 2443 2444 case CT_FIXED_FORM: 2445 break; 2446 } 2447 break; 2448 } 2449 p += CONSTRAINT_LEN (c, p); 2450 } 2451 } 2452 } 2453 } 2454 2455 /* Return an array of operand_alternative instructions for 2456 instruction ICODE. */ 2457 2458 const operand_alternative * 2459 preprocess_insn_constraints (unsigned int icode) 2460 { 2461 gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1)); 2462 if (this_target_recog->x_op_alt[icode]) 2463 return this_target_recog->x_op_alt[icode]; 2464 2465 int n_operands = insn_data[icode].n_operands; 2466 if (n_operands == 0) 2467 return 0; 2468 /* Always provide at least one alternative so that which_op_alt () 2469 works correctly. If the instruction has 0 alternatives (i.e. all 2470 constraint strings are empty) then each operand in this alternative 2471 will have anything_ok set. 
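     A consumer therefore never has to special-case that situation
     (an illustrative sketch; OPNO is whatever operand the caller is
     interested in):

       const operand_alternative *op_alt = preprocess_insn_constraints (icode);
       if (op_alt && op_alt[opno].anything_ok)
         ... operand OPNO accepts anything in this alternative ...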
*/ 2472 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1); 2473 int n_entries = n_operands * n_alternatives; 2474 2475 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries); 2476 const char **constraints = XALLOCAVEC (const char *, n_operands); 2477 2478 for (int i = 0; i < n_operands; ++i) 2479 constraints[i] = insn_data[icode].operand[i].constraint; 2480 preprocess_constraints (n_operands, n_alternatives, constraints, op_alt); 2481 2482 this_target_recog->x_op_alt[icode] = op_alt; 2483 return op_alt; 2484 } 2485 2486 /* After calling extract_insn, you can use this function to extract some 2487 information from the constraint strings into a more usable form. 2488 The collected data is stored in recog_op_alt. */ 2489 2490 void 2491 preprocess_constraints (rtx_insn *insn) 2492 { 2493 int icode = INSN_CODE (insn); 2494 if (icode >= 0) 2495 recog_op_alt = preprocess_insn_constraints (icode); 2496 else 2497 { 2498 int n_operands = recog_data.n_operands; 2499 int n_alternatives = recog_data.n_alternatives; 2500 int n_entries = n_operands * n_alternatives; 2501 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative)); 2502 preprocess_constraints (n_operands, n_alternatives, 2503 recog_data.constraints, asm_op_alt); 2504 recog_op_alt = asm_op_alt; 2505 } 2506 } 2507 2508 /* Check the operands of an insn against the insn's operand constraints 2509 and return 1 if they match any of the alternatives in ALTERNATIVES. 2510 2511 The information about the insn's operands, constraints, operand modes 2512 etc. is obtained from the global variables set up by extract_insn. 2513 2514 WHICH_ALTERNATIVE is set to a number which indicates which 2515 alternative of constraints was matched: 0 for the first alternative, 2516 1 for the next, etc. 2517 2518 In addition, when two operands are required to match 2519 and it happens that the output operand is (reg) while the 2520 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec), 2521 make the output operand look like the input. 2522 This is because the output operand is the one the template will print. 2523 2524 This is used in final, just before printing the assembler code and by 2525 the routines that determine an insn's attribute. 2526 2527 If STRICT is a positive nonzero value, it means that we have been 2528 called after reload has been completed. In that case, we must 2529 do all checks strictly. If it is zero, it means that we have been called 2530 before reload has completed. In that case, we first try to see if we can 2531 find an alternative that matches strictly. If not, we try again, this 2532 time assuming that reload will fix up the insn. This provides a "best 2533 guess" for the alternative and is used to compute attributes of insns prior 2534 to reload. A negative value of STRICT is used for this internal call. 
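   Summarising the three cases (illustrative, see the body below):

     constrain_operands (1, alts)    after reload; hard registers must
                                     really belong to the required
                                     classes and addresses must be
                                     strictly valid.
     constrain_operands (0, alts)    before reload; first tries the
                                     strict test, then retries loosely.
     constrain_operands (-1, alts)   the internal loose retry; assumes
                                     reload will fix whatever does not
                                     match yet.

   A typical post-reload caller is extract_constrain_insn above:
   extract the insn, call constrain_operands with reload_completed and
   get_enabled_alternatives, and treat failure as an internal error.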
*/ 2535 2536 struct funny_match 2537 { 2538 int this_op, other; 2539 }; 2540 2541 int 2542 constrain_operands (int strict, alternative_mask alternatives) 2543 { 2544 const char *constraints[MAX_RECOG_OPERANDS]; 2545 int matching_operands[MAX_RECOG_OPERANDS]; 2546 int earlyclobber[MAX_RECOG_OPERANDS]; 2547 int c; 2548 2549 struct funny_match funny_match[MAX_RECOG_OPERANDS]; 2550 int funny_match_index; 2551 2552 which_alternative = 0; 2553 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0) 2554 return 1; 2555 2556 for (c = 0; c < recog_data.n_operands; c++) 2557 { 2558 constraints[c] = recog_data.constraints[c]; 2559 matching_operands[c] = -1; 2560 } 2561 2562 do 2563 { 2564 int seen_earlyclobber_at = -1; 2565 int opno; 2566 int lose = 0; 2567 funny_match_index = 0; 2568 2569 if (!TEST_BIT (alternatives, which_alternative)) 2570 { 2571 int i; 2572 2573 for (i = 0; i < recog_data.n_operands; i++) 2574 constraints[i] = skip_alternative (constraints[i]); 2575 2576 which_alternative++; 2577 continue; 2578 } 2579 2580 for (opno = 0; opno < recog_data.n_operands; opno++) 2581 { 2582 rtx op = recog_data.operand[opno]; 2583 machine_mode mode = GET_MODE (op); 2584 const char *p = constraints[opno]; 2585 int offset = 0; 2586 int win = 0; 2587 int val; 2588 int len; 2589 2590 earlyclobber[opno] = 0; 2591 2592 /* A unary operator may be accepted by the predicate, but it 2593 is irrelevant for matching constraints. */ 2594 if (UNARY_P (op)) 2595 op = XEXP (op, 0); 2596 2597 if (GET_CODE (op) == SUBREG) 2598 { 2599 if (REG_P (SUBREG_REG (op)) 2600 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER) 2601 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)), 2602 GET_MODE (SUBREG_REG (op)), 2603 SUBREG_BYTE (op), 2604 GET_MODE (op)); 2605 op = SUBREG_REG (op); 2606 } 2607 2608 /* An empty constraint or empty alternative 2609 allows anything which matched the pattern. */ 2610 if (*p == 0 || *p == ',') 2611 win = 1; 2612 2613 do 2614 switch (c = *p, len = CONSTRAINT_LEN (c, p), c) 2615 { 2616 case '\0': 2617 len = 0; 2618 break; 2619 case ',': 2620 c = '\0'; 2621 break; 2622 2623 case '#': 2624 /* Ignore rest of this alternative as far as 2625 constraint checking is concerned. */ 2626 do 2627 p++; 2628 while (*p && *p != ','); 2629 len = 0; 2630 break; 2631 2632 case '&': 2633 earlyclobber[opno] = 1; 2634 if (seen_earlyclobber_at < 0) 2635 seen_earlyclobber_at = opno; 2636 break; 2637 2638 case '0': case '1': case '2': case '3': case '4': 2639 case '5': case '6': case '7': case '8': case '9': 2640 { 2641 /* This operand must be the same as a previous one. 2642 This kind of constraint is used for instructions such 2643 as add when they take only two operands. 2644 2645 Note that the lower-numbered operand is passed first. 2646 2647 If we are not testing strictly, assume that this 2648 constraint will be satisfied. */ 2649 2650 char *end; 2651 int match; 2652 2653 match = strtoul (p, &end, 10); 2654 p = end; 2655 2656 if (strict < 0) 2657 val = 1; 2658 else 2659 { 2660 rtx op1 = recog_data.operand[match]; 2661 rtx op2 = recog_data.operand[opno]; 2662 2663 /* A unary operator may be accepted by the predicate, 2664 but it is irrelevant for matching constraints. 
*/ 2665 if (UNARY_P (op1)) 2666 op1 = XEXP (op1, 0); 2667 if (UNARY_P (op2)) 2668 op2 = XEXP (op2, 0); 2669 2670 val = operands_match_p (op1, op2); 2671 } 2672 2673 matching_operands[opno] = match; 2674 matching_operands[match] = opno; 2675 2676 if (val != 0) 2677 win = 1; 2678 2679 /* If output is *x and input is *--x, arrange later 2680 to change the output to *--x as well, since the 2681 output op is the one that will be printed. */ 2682 if (val == 2 && strict > 0) 2683 { 2684 funny_match[funny_match_index].this_op = opno; 2685 funny_match[funny_match_index++].other = match; 2686 } 2687 } 2688 len = 0; 2689 break; 2690 2691 case 'p': 2692 /* p is used for address_operands. When we are called by 2693 gen_reload, no one will have checked that the address is 2694 strictly valid, i.e., that all pseudos requiring hard regs 2695 have gotten them. */ 2696 if (strict <= 0 2697 || (strict_memory_address_p (recog_data.operand_mode[opno], 2698 op))) 2699 win = 1; 2700 break; 2701 2702 /* No need to check general_operand again; 2703 it was done in insn-recog.c. Well, except that reload 2704 doesn't check the validity of its replacements, but 2705 that should only matter when there's a bug. */ 2706 case 'g': 2707 /* Anything goes unless it is a REG and really has a hard reg 2708 but the hard reg is not in the class GENERAL_REGS. */ 2709 if (REG_P (op)) 2710 { 2711 if (strict < 0 2712 || GENERAL_REGS == ALL_REGS 2713 || (reload_in_progress 2714 && REGNO (op) >= FIRST_PSEUDO_REGISTER) 2715 || reg_fits_class_p (op, GENERAL_REGS, offset, mode)) 2716 win = 1; 2717 } 2718 else if (strict < 0 || general_operand (op, mode)) 2719 win = 1; 2720 break; 2721 2722 default: 2723 { 2724 enum constraint_num cn = lookup_constraint (p); 2725 enum reg_class cl = reg_class_for_constraint (cn); 2726 if (cl != NO_REGS) 2727 { 2728 if (strict < 0 2729 || (strict == 0 2730 && REG_P (op) 2731 && REGNO (op) >= FIRST_PSEUDO_REGISTER) 2732 || (strict == 0 && GET_CODE (op) == SCRATCH) 2733 || (REG_P (op) 2734 && reg_fits_class_p (op, cl, offset, mode))) 2735 win = 1; 2736 } 2737 2738 else if (constraint_satisfied_p (op, cn)) 2739 win = 1; 2740 2741 else if (insn_extra_memory_constraint (cn) 2742 /* Every memory operand can be reloaded to fit. */ 2743 && ((strict < 0 && MEM_P (op)) 2744 /* Before reload, accept what reload can turn 2745 into a mem. */ 2746 || (strict < 0 && CONSTANT_P (op)) 2747 /* Before reload, accept a pseudo, 2748 since LRA can turn it into a mem. */ 2749 || (strict < 0 && targetm.lra_p () && REG_P (op) 2750 && REGNO (op) >= FIRST_PSEUDO_REGISTER) 2751 /* During reload, accept a pseudo */ 2752 || (reload_in_progress && REG_P (op) 2753 && REGNO (op) >= FIRST_PSEUDO_REGISTER))) 2754 win = 1; 2755 else if (insn_extra_address_constraint (cn) 2756 /* Every address operand can be reloaded to fit. */ 2757 && strict < 0) 2758 win = 1; 2759 /* Cater to architectures like IA-64 that define extra memory 2760 constraints without using define_memory_constraint. */ 2761 else if (reload_in_progress 2762 && REG_P (op) 2763 && REGNO (op) >= FIRST_PSEUDO_REGISTER 2764 && reg_renumber[REGNO (op)] < 0 2765 && reg_equiv_mem (REGNO (op)) != 0 2766 && constraint_satisfied_p 2767 (reg_equiv_mem (REGNO (op)), cn)) 2768 win = 1; 2769 break; 2770 } 2771 } 2772 while (p += len, c); 2773 2774 constraints[opno] = p; 2775 /* If this operand did not win somehow, 2776 this alternative loses. */ 2777 if (! win) 2778 lose = 1; 2779 } 2780 /* This alternative won; the operands are ok. 
2781 Change whichever operands this alternative says to change. */ 2782 if (! lose) 2783 { 2784 int opno, eopno; 2785 2786 /* See if any earlyclobber operand conflicts with some other 2787 operand. */ 2788 2789 if (strict > 0 && seen_earlyclobber_at >= 0) 2790 for (eopno = seen_earlyclobber_at; 2791 eopno < recog_data.n_operands; 2792 eopno++) 2793 /* Ignore earlyclobber operands now in memory, 2794 because we would often report failure when we have 2795 two memory operands, one of which was formerly a REG. */ 2796 if (earlyclobber[eopno] 2797 && REG_P (recog_data.operand[eopno])) 2798 for (opno = 0; opno < recog_data.n_operands; opno++) 2799 if ((MEM_P (recog_data.operand[opno]) 2800 || recog_data.operand_type[opno] != OP_OUT) 2801 && opno != eopno 2802 /* Ignore things like match_operator operands. */ 2803 && *recog_data.constraints[opno] != 0 2804 && ! (matching_operands[opno] == eopno 2805 && operands_match_p (recog_data.operand[opno], 2806 recog_data.operand[eopno])) 2807 && ! safe_from_earlyclobber (recog_data.operand[opno], 2808 recog_data.operand[eopno])) 2809 lose = 1; 2810 2811 if (! lose) 2812 { 2813 while (--funny_match_index >= 0) 2814 { 2815 recog_data.operand[funny_match[funny_match_index].other] 2816 = recog_data.operand[funny_match[funny_match_index].this_op]; 2817 } 2818 2819 /* For operands without < or > constraints reject side-effects. */ 2820 if (AUTO_INC_DEC && recog_data.is_asm) 2821 { 2822 for (opno = 0; opno < recog_data.n_operands; opno++) 2823 if (MEM_P (recog_data.operand[opno])) 2824 switch (GET_CODE (XEXP (recog_data.operand[opno], 0))) 2825 { 2826 case PRE_INC: 2827 case POST_INC: 2828 case PRE_DEC: 2829 case POST_DEC: 2830 case PRE_MODIFY: 2831 case POST_MODIFY: 2832 if (strchr (recog_data.constraints[opno], '<') == NULL 2833 && strchr (recog_data.constraints[opno], '>') 2834 == NULL) 2835 return 0; 2836 break; 2837 default: 2838 break; 2839 } 2840 } 2841 2842 return 1; 2843 } 2844 } 2845 2846 which_alternative++; 2847 } 2848 while (which_alternative < recog_data.n_alternatives); 2849 2850 which_alternative = -1; 2851 /* If we are about to reject this, but we are not to test strictly, 2852 try a very loose test. Only return failure if it fails also. */ 2853 if (strict == 0) 2854 return constrain_operands (-1, alternatives); 2855 else 2856 return 0; 2857 } 2858 2859 /* Return true iff OPERAND (assumed to be a REG rtx) 2860 is a hard reg in class CLASS when its regno is offset by OFFSET 2861 and changed to mode MODE. 2862 If REG occupies multiple hard regs, all of them must be in CLASS. */ 2863 2864 bool 2865 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset, 2866 machine_mode mode) 2867 { 2868 unsigned int regno = REGNO (operand); 2869 2870 if (cl == NO_REGS) 2871 return false; 2872 2873 /* Regno must not be a pseudo register. Offset may be negative. */ 2874 return (HARD_REGISTER_NUM_P (regno) 2875 && HARD_REGISTER_NUM_P (regno + offset) 2876 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode, 2877 regno + offset)); 2878 } 2879 2880 /* Split single instruction. Helper function for split_all_insns and 2881 split_all_insns_noflow. Return last insn in the sequence if successful, 2882 or NULL if unsuccessful. */ 2883 2884 static rtx_insn * 2885 split_insn (rtx_insn *insn) 2886 { 2887 /* Split insns here to get max fine-grain parallelism. 
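     For example, after reload a target define_split might turn a
     double-word move between register pairs

       (set (reg:DI 2) (reg:DI 4))

     into two word-sized moves

       (set (reg:SI 2) (reg:SI 4))
       (set (reg:SI 3) (reg:SI 5))

     (an illustrative sketch; the real splitters are target-defined),
     exposing both halves to the scheduler independently.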
*/ 2888 rtx_insn *first = PREV_INSN (insn); 2889 rtx_insn *last = try_split (PATTERN (insn), insn, 1); 2890 rtx insn_set, last_set, note; 2891 2892 if (last == insn) 2893 return NULL; 2894 2895 /* If the original instruction was a single set that was known to be 2896 equivalent to a constant, see if we can say the same about the last 2897 instruction in the split sequence. The two instructions must set 2898 the same destination. */ 2899 insn_set = single_set (insn); 2900 if (insn_set) 2901 { 2902 last_set = single_set (last); 2903 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set))) 2904 { 2905 note = find_reg_equal_equiv_note (insn); 2906 if (note && CONSTANT_P (XEXP (note, 0))) 2907 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0)); 2908 else if (CONSTANT_P (SET_SRC (insn_set))) 2909 set_unique_reg_note (last, REG_EQUAL, 2910 copy_rtx (SET_SRC (insn_set))); 2911 } 2912 } 2913 2914 /* try_split returns the NOTE that INSN became. */ 2915 SET_INSN_DELETED (insn); 2916 2917 /* ??? Coddle to md files that generate subregs in post-reload 2918 splitters instead of computing the proper hard register. */ 2919 if (reload_completed && first != last) 2920 { 2921 first = NEXT_INSN (first); 2922 for (;;) 2923 { 2924 if (INSN_P (first)) 2925 cleanup_subreg_operands (first); 2926 if (first == last) 2927 break; 2928 first = NEXT_INSN (first); 2929 } 2930 } 2931 2932 return last; 2933 } 2934 2935 /* Split all insns in the function. If UPD_LIFE, update life info after. */ 2936 2937 void 2938 split_all_insns (void) 2939 { 2940 bool changed; 2941 basic_block bb; 2942 2943 auto_sbitmap blocks (last_basic_block_for_fn (cfun)); 2944 bitmap_clear (blocks); 2945 changed = false; 2946 2947 FOR_EACH_BB_REVERSE_FN (bb, cfun) 2948 { 2949 rtx_insn *insn, *next; 2950 bool finish = false; 2951 2952 rtl_profile_for_bb (bb); 2953 for (insn = BB_HEAD (bb); !finish ; insn = next) 2954 { 2955 /* Can't use `next_real_insn' because that might go across 2956 CODE_LABELS and short-out basic blocks. */ 2957 next = NEXT_INSN (insn); 2958 finish = (insn == BB_END (bb)); 2959 if (INSN_P (insn)) 2960 { 2961 rtx set = single_set (insn); 2962 2963 /* Don't split no-op move insns. These should silently 2964 disappear later in final. Splitting such insns would 2965 break the code that handles LIBCALL blocks. */ 2966 if (set && set_noop_p (set)) 2967 { 2968 /* Nops get in the way while scheduling, so delete them 2969 now if register allocation has already been done. It 2970 is too risky to try to do this before register 2971 allocation, and there are unlikely to be very many 2972 nops then anyways. */ 2973 if (reload_completed) 2974 delete_insn_and_edges (insn); 2975 } 2976 else 2977 { 2978 if (split_insn (insn)) 2979 { 2980 bitmap_set_bit (blocks, bb->index); 2981 changed = true; 2982 } 2983 } 2984 } 2985 } 2986 } 2987 2988 default_rtl_profile (); 2989 if (changed) 2990 find_many_sub_basic_blocks (blocks); 2991 2992 checking_verify_flow_info (); 2993 } 2994 2995 /* Same as split_all_insns, but do not expect CFG to be available. 2996 Used by machine dependent reorg passes. */ 2997 2998 unsigned int 2999 split_all_insns_noflow (void) 3000 { 3001 rtx_insn *next, *insn; 3002 3003 for (insn = get_insns (); insn; insn = next) 3004 { 3005 next = NEXT_INSN (insn); 3006 if (INSN_P (insn)) 3007 { 3008 /* Don't split no-op move insns. These should silently 3009 disappear later in final. Splitting such insns would 3010 break the code that handles LIBCALL blocks. 
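     (A no-op move here means something like
     (set (reg:SI 3) (reg:SI 3)), which is what set_noop_p detects.)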
*/ 3011 rtx set = single_set (insn); 3012 if (set && set_noop_p (set)) 3013 { 3014 /* Nops get in the way while scheduling, so delete them 3015 now if register allocation has already been done. It 3016 is too risky to try to do this before register 3017 allocation, and there are unlikely to be very many 3018 nops then anyways. 3019 3020 ??? Should we use delete_insn when the CFG isn't valid? */ 3021 if (reload_completed) 3022 delete_insn_and_edges (insn); 3023 } 3024 else 3025 split_insn (insn); 3026 } 3027 } 3028 return 0; 3029 } 3030 3031 struct peep2_insn_data 3032 { 3033 rtx_insn *insn; 3034 regset live_before; 3035 }; 3036 3037 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1]; 3038 static int peep2_current; 3039 3040 static bool peep2_do_rebuild_jump_labels; 3041 static bool peep2_do_cleanup_cfg; 3042 3043 /* The number of instructions available to match a peep2. */ 3044 int peep2_current_count; 3045 3046 /* A marker indicating the last insn of the block. The live_before regset 3047 for this element is correct, indicating DF_LIVE_OUT for the block. */ 3048 #define PEEP2_EOB invalid_insn_rtx 3049 3050 /* Wrap N to fit into the peep2_insn_data buffer. */ 3051 3052 static int 3053 peep2_buf_position (int n) 3054 { 3055 if (n >= MAX_INSNS_PER_PEEP2 + 1) 3056 n -= MAX_INSNS_PER_PEEP2 + 1; 3057 return n; 3058 } 3059 3060 /* Return the Nth non-note insn after `current', or return NULL_RTX if it 3061 does not exist. Used by the recognizer to find the next insn to match 3062 in a multi-insn pattern. */ 3063 3064 rtx_insn * 3065 peep2_next_insn (int n) 3066 { 3067 gcc_assert (n <= peep2_current_count); 3068 3069 n = peep2_buf_position (peep2_current + n); 3070 3071 return peep2_insn_data[n].insn; 3072 } 3073 3074 /* Return true if REGNO is dead before the Nth non-note insn 3075 after `current'. */ 3076 3077 int 3078 peep2_regno_dead_p (int ofs, int regno) 3079 { 3080 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1); 3081 3082 ofs = peep2_buf_position (peep2_current + ofs); 3083 3084 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX); 3085 3086 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno); 3087 } 3088 3089 /* Similarly for a REG. */ 3090 3091 int 3092 peep2_reg_dead_p (int ofs, rtx reg) 3093 { 3094 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1); 3095 3096 ofs = peep2_buf_position (peep2_current + ofs); 3097 3098 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX); 3099 3100 unsigned int end_regno = END_REGNO (reg); 3101 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno) 3102 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno)) 3103 return 0; 3104 return 1; 3105 } 3106 3107 /* Regno offset to be used in the register search. */ 3108 static int search_ofs; 3109 3110 /* Try to find a hard register of mode MODE, matching the register class in 3111 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and 3112 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX, 3113 in which case the only condition is that the register must be available 3114 before CURRENT_INSN. 3115 Registers that already have bits set in REG_SET will not be considered. 3116 3117 If an appropriate register is available, it will be returned and the 3118 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is 3119 returned. 
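   For instance (an illustrative sketch; FROM and TO index the
   peephole window and REGS_LIVE would be a HARD_REG_SET the caller
   maintains), target code expanding a peephole might ask for an
   SImode scratch that stays free from the first matched insn up to
   the second:

     rtx scratch = peep2_find_free_register (0, 1, "r", SImode,
                                             &regs_live);
     if (scratch == NULL_RTX)
       FAIL;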
*/ 3120 3121 rtx 3122 peep2_find_free_register (int from, int to, const char *class_str, 3123 machine_mode mode, HARD_REG_SET *reg_set) 3124 { 3125 enum reg_class cl; 3126 HARD_REG_SET live; 3127 df_ref def; 3128 int i; 3129 3130 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1); 3131 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1); 3132 3133 from = peep2_buf_position (peep2_current + from); 3134 to = peep2_buf_position (peep2_current + to); 3135 3136 gcc_assert (peep2_insn_data[from].insn != NULL_RTX); 3137 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before); 3138 3139 while (from != to) 3140 { 3141 gcc_assert (peep2_insn_data[from].insn != NULL_RTX); 3142 3143 /* Don't use registers set or clobbered by the insn. */ 3144 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn) 3145 SET_HARD_REG_BIT (live, DF_REF_REGNO (def)); 3146 3147 from = peep2_buf_position (from + 1); 3148 } 3149 3150 cl = reg_class_for_constraint (lookup_constraint (class_str)); 3151 3152 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 3153 { 3154 int raw_regno, regno, success, j; 3155 3156 /* Distribute the free registers as much as possible. */ 3157 raw_regno = search_ofs + i; 3158 if (raw_regno >= FIRST_PSEUDO_REGISTER) 3159 raw_regno -= FIRST_PSEUDO_REGISTER; 3160 #ifdef REG_ALLOC_ORDER 3161 regno = reg_alloc_order[raw_regno]; 3162 #else 3163 regno = raw_regno; 3164 #endif 3165 3166 /* Can it support the mode we need? */ 3167 if (! HARD_REGNO_MODE_OK (regno, mode)) 3168 continue; 3169 3170 success = 1; 3171 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++) 3172 { 3173 /* Don't allocate fixed registers. */ 3174 if (fixed_regs[regno + j]) 3175 { 3176 success = 0; 3177 break; 3178 } 3179 /* Don't allocate global registers. */ 3180 if (global_regs[regno + j]) 3181 { 3182 success = 0; 3183 break; 3184 } 3185 /* Make sure the register is of the right class. */ 3186 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j)) 3187 { 3188 success = 0; 3189 break; 3190 } 3191 /* And that we don't create an extra save/restore. */ 3192 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j)) 3193 { 3194 success = 0; 3195 break; 3196 } 3197 3198 if (! targetm.hard_regno_scratch_ok (regno + j)) 3199 { 3200 success = 0; 3201 break; 3202 } 3203 3204 /* And we don't clobber traceback for noreturn functions. */ 3205 if ((regno + j == FRAME_POINTER_REGNUM 3206 || regno + j == HARD_FRAME_POINTER_REGNUM) 3207 && (! reload_completed || frame_pointer_needed)) 3208 { 3209 success = 0; 3210 break; 3211 } 3212 3213 if (TEST_HARD_REG_BIT (*reg_set, regno + j) 3214 || TEST_HARD_REG_BIT (live, regno + j)) 3215 { 3216 success = 0; 3217 break; 3218 } 3219 } 3220 3221 if (success) 3222 { 3223 add_to_hard_reg_set (reg_set, mode, regno); 3224 3225 /* Start the next search with the next register. */ 3226 if (++raw_regno >= FIRST_PSEUDO_REGISTER) 3227 raw_regno = 0; 3228 search_ofs = raw_regno; 3229 3230 return gen_rtx_REG (mode, regno); 3231 } 3232 } 3233 3234 search_ofs = 0; 3235 return NULL_RTX; 3236 } 3237 3238 /* Forget all currently tracked instructions, only remember current 3239 LIVE regset. */ 3240 3241 static void 3242 peep2_reinit_state (regset live) 3243 { 3244 int i; 3245 3246 /* Indicate that all slots except the last holds invalid data. */ 3247 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i) 3248 peep2_insn_data[i].insn = NULL; 3249 peep2_current_count = 0; 3250 3251 /* Indicate that the last slot contains live_after data. 
*/ 3252 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB; 3253 peep2_current = MAX_INSNS_PER_PEEP2; 3254 3255 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live); 3256 } 3257 3258 /* While scanning basic block BB, we found a match of length MATCH_LEN, 3259 starting at INSN. Perform the replacement, removing the old insns and 3260 replacing them with ATTEMPT. Returns the last insn emitted, or NULL 3261 if the replacement is rejected. */ 3262 3263 static rtx_insn * 3264 peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt) 3265 { 3266 int i; 3267 rtx_insn *last, *before_try, *x; 3268 rtx eh_note, as_note; 3269 rtx_insn *old_insn; 3270 rtx_insn *new_insn; 3271 bool was_call = false; 3272 3273 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to 3274 match more than one insn, or to be split into more than one insn. */ 3275 old_insn = peep2_insn_data[peep2_current].insn; 3276 if (RTX_FRAME_RELATED_P (old_insn)) 3277 { 3278 bool any_note = false; 3279 rtx note; 3280 3281 if (match_len != 0) 3282 return NULL; 3283 3284 /* Look for one "active" insn. I.e. ignore any "clobber" insns that 3285 may be in the stream for the purpose of register allocation. */ 3286 if (active_insn_p (attempt)) 3287 new_insn = attempt; 3288 else 3289 new_insn = next_active_insn (attempt); 3290 if (next_active_insn (new_insn)) 3291 return NULL; 3292 3293 /* We have a 1-1 replacement. Copy over any frame-related info. */ 3294 RTX_FRAME_RELATED_P (new_insn) = 1; 3295 3296 /* Allow the backend to fill in a note during the split. */ 3297 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1)) 3298 switch (REG_NOTE_KIND (note)) 3299 { 3300 case REG_FRAME_RELATED_EXPR: 3301 case REG_CFA_DEF_CFA: 3302 case REG_CFA_ADJUST_CFA: 3303 case REG_CFA_OFFSET: 3304 case REG_CFA_REGISTER: 3305 case REG_CFA_EXPRESSION: 3306 case REG_CFA_RESTORE: 3307 case REG_CFA_SET_VDRAP: 3308 any_note = true; 3309 break; 3310 default: 3311 break; 3312 } 3313 3314 /* If the backend didn't supply a note, copy one over. */ 3315 if (!any_note) 3316 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1)) 3317 switch (REG_NOTE_KIND (note)) 3318 { 3319 case REG_FRAME_RELATED_EXPR: 3320 case REG_CFA_DEF_CFA: 3321 case REG_CFA_ADJUST_CFA: 3322 case REG_CFA_OFFSET: 3323 case REG_CFA_REGISTER: 3324 case REG_CFA_EXPRESSION: 3325 case REG_CFA_RESTORE: 3326 case REG_CFA_SET_VDRAP: 3327 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0)); 3328 any_note = true; 3329 break; 3330 default: 3331 break; 3332 } 3333 3334 /* If there still isn't a note, make sure the unwind info sees the 3335 same expression as before the split. */ 3336 if (!any_note) 3337 { 3338 rtx old_set, new_set; 3339 3340 /* The old insn had better have been simple, or annotated. */ 3341 old_set = single_set (old_insn); 3342 gcc_assert (old_set != NULL); 3343 3344 new_set = single_set (new_insn); 3345 if (!new_set || !rtx_equal_p (new_set, old_set)) 3346 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set); 3347 } 3348 3349 /* Copy prologue/epilogue status. This is required in order to keep 3350 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */ 3351 maybe_copy_prologue_epilogue_insn (old_insn, new_insn); 3352 } 3353 3354 /* If we are splitting a CALL_INSN, look for the CALL_INSN 3355 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other 3356 cfg-related call notes. 
*/ 3357 for (i = 0; i <= match_len; ++i) 3358 { 3359 int j; 3360 rtx note; 3361 3362 j = peep2_buf_position (peep2_current + i); 3363 old_insn = peep2_insn_data[j].insn; 3364 if (!CALL_P (old_insn)) 3365 continue; 3366 was_call = true; 3367 3368 new_insn = attempt; 3369 while (new_insn != NULL_RTX) 3370 { 3371 if (CALL_P (new_insn)) 3372 break; 3373 new_insn = NEXT_INSN (new_insn); 3374 } 3375 3376 gcc_assert (new_insn != NULL_RTX); 3377 3378 CALL_INSN_FUNCTION_USAGE (new_insn) 3379 = CALL_INSN_FUNCTION_USAGE (old_insn); 3380 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn); 3381 3382 for (note = REG_NOTES (old_insn); 3383 note; 3384 note = XEXP (note, 1)) 3385 switch (REG_NOTE_KIND (note)) 3386 { 3387 case REG_NORETURN: 3388 case REG_SETJMP: 3389 case REG_TM: 3390 add_reg_note (new_insn, REG_NOTE_KIND (note), 3391 XEXP (note, 0)); 3392 break; 3393 default: 3394 /* Discard all other reg notes. */ 3395 break; 3396 } 3397 3398 /* Croak if there is another call in the sequence. */ 3399 while (++i <= match_len) 3400 { 3401 j = peep2_buf_position (peep2_current + i); 3402 old_insn = peep2_insn_data[j].insn; 3403 gcc_assert (!CALL_P (old_insn)); 3404 } 3405 break; 3406 } 3407 3408 /* If we matched any instruction that had a REG_ARGS_SIZE, then 3409 move those notes over to the new sequence. */ 3410 as_note = NULL; 3411 for (i = match_len; i >= 0; --i) 3412 { 3413 int j = peep2_buf_position (peep2_current + i); 3414 old_insn = peep2_insn_data[j].insn; 3415 3416 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL); 3417 if (as_note) 3418 break; 3419 } 3420 3421 i = peep2_buf_position (peep2_current + match_len); 3422 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX); 3423 3424 /* Replace the old sequence with the new. */ 3425 rtx_insn *peepinsn = peep2_insn_data[i].insn; 3426 last = emit_insn_after_setloc (attempt, 3427 peep2_insn_data[i].insn, 3428 INSN_LOCATION (peepinsn)); 3429 before_try = PREV_INSN (insn); 3430 delete_insn_chain (insn, peep2_insn_data[i].insn, false); 3431 3432 /* Re-insert the EH_REGION notes. */ 3433 if (eh_note || (was_call && nonlocal_goto_handler_labels)) 3434 { 3435 edge eh_edge; 3436 edge_iterator ei; 3437 3438 FOR_EACH_EDGE (eh_edge, ei, bb->succs) 3439 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL)) 3440 break; 3441 3442 if (eh_note) 3443 copy_reg_eh_region_note_backward (eh_note, last, before_try); 3444 3445 if (eh_edge) 3446 for (x = last; x != before_try; x = PREV_INSN (x)) 3447 if (x != BB_END (bb) 3448 && (can_throw_internal (x) 3449 || can_nonlocal_goto (x))) 3450 { 3451 edge nfte, nehe; 3452 int flags; 3453 3454 nfte = split_block (bb, x); 3455 flags = (eh_edge->flags 3456 & (EDGE_EH | EDGE_ABNORMAL)); 3457 if (CALL_P (x)) 3458 flags |= EDGE_ABNORMAL_CALL; 3459 nehe = make_edge (nfte->src, eh_edge->dest, 3460 flags); 3461 3462 nehe->probability = eh_edge->probability; 3463 nfte->probability 3464 = REG_BR_PROB_BASE - nehe->probability; 3465 3466 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest); 3467 bb = nfte->src; 3468 eh_edge = nehe; 3469 } 3470 3471 /* Converting possibly trapping insn to non-trapping is 3472 possible. Zap dummy outgoing edges. */ 3473 peep2_do_cleanup_cfg |= purge_dead_edges (bb); 3474 } 3475 3476 /* Re-insert the ARGS_SIZE notes. */ 3477 if (as_note) 3478 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0))); 3479 3480 /* If we generated a jump instruction, it won't have 3481 JUMP_LABEL set. Recompute after we're done. 
*/ 3482 for (x = last; x != before_try; x = PREV_INSN (x)) 3483 if (JUMP_P (x)) 3484 { 3485 peep2_do_rebuild_jump_labels = true; 3486 break; 3487 } 3488 3489 return last; 3490 } 3491 3492 /* After performing a replacement in basic block BB, fix up the life 3493 information in our buffer. LAST is the last of the insns that we 3494 emitted as a replacement. PREV is the insn before the start of 3495 the replacement. MATCH_LEN is the number of instructions that were 3496 matched, and which now need to be replaced in the buffer. */ 3497 3498 static void 3499 peep2_update_life (basic_block bb, int match_len, rtx_insn *last, 3500 rtx_insn *prev) 3501 { 3502 int i = peep2_buf_position (peep2_current + match_len + 1); 3503 rtx_insn *x; 3504 regset_head live; 3505 3506 INIT_REG_SET (&live); 3507 COPY_REG_SET (&live, peep2_insn_data[i].live_before); 3508 3509 gcc_assert (peep2_current_count >= match_len + 1); 3510 peep2_current_count -= match_len + 1; 3511 3512 x = last; 3513 do 3514 { 3515 if (INSN_P (x)) 3516 { 3517 df_insn_rescan (x); 3518 if (peep2_current_count < MAX_INSNS_PER_PEEP2) 3519 { 3520 peep2_current_count++; 3521 if (--i < 0) 3522 i = MAX_INSNS_PER_PEEP2; 3523 peep2_insn_data[i].insn = x; 3524 df_simulate_one_insn_backwards (bb, x, &live); 3525 COPY_REG_SET (peep2_insn_data[i].live_before, &live); 3526 } 3527 } 3528 x = PREV_INSN (x); 3529 } 3530 while (x != prev); 3531 CLEAR_REG_SET (&live); 3532 3533 peep2_current = i; 3534 } 3535 3536 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible. 3537 Return true if we added it, false otherwise. The caller will try to match 3538 peepholes against the buffer if we return false; otherwise it will try to 3539 add more instructions to the buffer. */ 3540 3541 static bool 3542 peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live) 3543 { 3544 int pos; 3545 3546 /* Once we have filled the maximum number of insns the buffer can hold, 3547 allow the caller to match the insns against peepholes. We wait until 3548 the buffer is full in case the target has similar peepholes of different 3549 length; we always want to match the longest if possible. */ 3550 if (peep2_current_count == MAX_INSNS_PER_PEEP2) 3551 return false; 3552 3553 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with 3554 any other pattern, lest it change the semantics of the frame info. */ 3555 if (RTX_FRAME_RELATED_P (insn)) 3556 { 3557 /* Let the buffer drain first. */ 3558 if (peep2_current_count > 0) 3559 return false; 3560 /* Now the insn will be the only thing in the buffer. */ 3561 } 3562 3563 pos = peep2_buf_position (peep2_current + peep2_current_count); 3564 peep2_insn_data[pos].insn = insn; 3565 COPY_REG_SET (peep2_insn_data[pos].live_before, live); 3566 peep2_current_count++; 3567 3568 df_simulate_one_insn_forwards (bb, insn, live); 3569 return true; 3570 } 3571 3572 /* Perform the peephole2 optimization pass. */ 3573 3574 static void 3575 peephole2_optimize (void) 3576 { 3577 rtx_insn *insn; 3578 bitmap live; 3579 int i; 3580 basic_block bb; 3581 3582 peep2_do_cleanup_cfg = false; 3583 peep2_do_rebuild_jump_labels = false; 3584 3585 df_set_flags (DF_LR_RUN_DCE); 3586 df_note_add_problem (); 3587 df_analyze (); 3588 3589 /* Initialize the regsets we're going to use. 
*/ 3590 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i) 3591 peep2_insn_data[i].live_before = BITMAP_ALLOC (®_obstack); 3592 search_ofs = 0; 3593 live = BITMAP_ALLOC (®_obstack); 3594 3595 FOR_EACH_BB_REVERSE_FN (bb, cfun) 3596 { 3597 bool past_end = false; 3598 int pos; 3599 3600 rtl_profile_for_bb (bb); 3601 3602 /* Start up propagation. */ 3603 bitmap_copy (live, DF_LR_IN (bb)); 3604 df_simulate_initialize_forwards (bb, live); 3605 peep2_reinit_state (live); 3606 3607 insn = BB_HEAD (bb); 3608 for (;;) 3609 { 3610 rtx_insn *attempt, *head; 3611 int match_len; 3612 3613 if (!past_end && !NONDEBUG_INSN_P (insn)) 3614 { 3615 next_insn: 3616 insn = NEXT_INSN (insn); 3617 if (insn == NEXT_INSN (BB_END (bb))) 3618 past_end = true; 3619 continue; 3620 } 3621 if (!past_end && peep2_fill_buffer (bb, insn, live)) 3622 goto next_insn; 3623 3624 /* If we did not fill an empty buffer, it signals the end of the 3625 block. */ 3626 if (peep2_current_count == 0) 3627 break; 3628 3629 /* The buffer filled to the current maximum, so try to match. */ 3630 3631 pos = peep2_buf_position (peep2_current + peep2_current_count); 3632 peep2_insn_data[pos].insn = PEEP2_EOB; 3633 COPY_REG_SET (peep2_insn_data[pos].live_before, live); 3634 3635 /* Match the peephole. */ 3636 head = peep2_insn_data[peep2_current].insn; 3637 attempt = peephole2_insns (PATTERN (head), head, &match_len); 3638 if (attempt != NULL) 3639 { 3640 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt); 3641 if (last) 3642 { 3643 peep2_update_life (bb, match_len, last, PREV_INSN (attempt)); 3644 continue; 3645 } 3646 } 3647 3648 /* No match: advance the buffer by one insn. */ 3649 peep2_current = peep2_buf_position (peep2_current + 1); 3650 peep2_current_count--; 3651 } 3652 } 3653 3654 default_rtl_profile (); 3655 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i) 3656 BITMAP_FREE (peep2_insn_data[i].live_before); 3657 BITMAP_FREE (live); 3658 if (peep2_do_rebuild_jump_labels) 3659 rebuild_jump_labels (get_insns ()); 3660 if (peep2_do_cleanup_cfg) 3661 cleanup_cfg (CLEANUP_CFG_CHANGED); 3662 } 3663 3664 /* Common predicates for use with define_bypass. */ 3665 3666 /* True if the dependency between OUT_INSN and IN_INSN is on the store 3667 data not the address operand(s) of the store. IN_INSN and OUT_INSN 3668 must be either a single_set or a PARALLEL with SETs inside. 
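   For example, a pipeline description might use it as the guard of a
   define_bypass whose consumer is the store (an illustrative md
   fragment, not from this file):

     (define_bypass 1 "my_alu_op" "my_store" "store_data_bypass_p")

   so the shorter latency applies only when the store uses the ALU
   result as the data being stored and not as part of its address.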
*/ 3669 3670 int 3671 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn) 3672 { 3673 rtx out_set, in_set; 3674 rtx out_pat, in_pat; 3675 rtx out_exp, in_exp; 3676 int i, j; 3677 3678 in_set = single_set (in_insn); 3679 if (in_set) 3680 { 3681 if (!MEM_P (SET_DEST (in_set))) 3682 return false; 3683 3684 out_set = single_set (out_insn); 3685 if (out_set) 3686 { 3687 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set))) 3688 return false; 3689 } 3690 else 3691 { 3692 out_pat = PATTERN (out_insn); 3693 3694 if (GET_CODE (out_pat) != PARALLEL) 3695 return false; 3696 3697 for (i = 0; i < XVECLEN (out_pat, 0); i++) 3698 { 3699 out_exp = XVECEXP (out_pat, 0, i); 3700 3701 if (GET_CODE (out_exp) == CLOBBER) 3702 continue; 3703 3704 gcc_assert (GET_CODE (out_exp) == SET); 3705 3706 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set))) 3707 return false; 3708 } 3709 } 3710 } 3711 else 3712 { 3713 in_pat = PATTERN (in_insn); 3714 gcc_assert (GET_CODE (in_pat) == PARALLEL); 3715 3716 for (i = 0; i < XVECLEN (in_pat, 0); i++) 3717 { 3718 in_exp = XVECEXP (in_pat, 0, i); 3719 3720 if (GET_CODE (in_exp) == CLOBBER) 3721 continue; 3722 3723 gcc_assert (GET_CODE (in_exp) == SET); 3724 3725 if (!MEM_P (SET_DEST (in_exp))) 3726 return false; 3727 3728 out_set = single_set (out_insn); 3729 if (out_set) 3730 { 3731 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp))) 3732 return false; 3733 } 3734 else 3735 { 3736 out_pat = PATTERN (out_insn); 3737 gcc_assert (GET_CODE (out_pat) == PARALLEL); 3738 3739 for (j = 0; j < XVECLEN (out_pat, 0); j++) 3740 { 3741 out_exp = XVECEXP (out_pat, 0, j); 3742 3743 if (GET_CODE (out_exp) == CLOBBER) 3744 continue; 3745 3746 gcc_assert (GET_CODE (out_exp) == SET); 3747 3748 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp))) 3749 return false; 3750 } 3751 } 3752 } 3753 } 3754 3755 return true; 3756 } 3757 3758 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE 3759 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single 3760 or multiple set; IN_INSN should be single_set for truth, but for convenience 3761 of insn categorization may be any JUMP or CALL insn. */ 3762 3763 int 3764 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn) 3765 { 3766 rtx out_set, in_set; 3767 3768 in_set = single_set (in_insn); 3769 if (! 
in_set) 3770 { 3771 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn)); 3772 return false; 3773 } 3774 3775 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE) 3776 return false; 3777 in_set = SET_SRC (in_set); 3778 3779 out_set = single_set (out_insn); 3780 if (out_set) 3781 { 3782 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1)) 3783 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2))) 3784 return false; 3785 } 3786 else 3787 { 3788 rtx out_pat; 3789 int i; 3790 3791 out_pat = PATTERN (out_insn); 3792 gcc_assert (GET_CODE (out_pat) == PARALLEL); 3793 3794 for (i = 0; i < XVECLEN (out_pat, 0); i++) 3795 { 3796 rtx exp = XVECEXP (out_pat, 0, i); 3797 3798 if (GET_CODE (exp) == CLOBBER) 3799 continue; 3800 3801 gcc_assert (GET_CODE (exp) == SET); 3802 3803 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1)) 3804 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2))) 3805 return false; 3806 } 3807 } 3808 3809 return true; 3810 } 3811 3812 static unsigned int 3813 rest_of_handle_peephole2 (void) 3814 { 3815 if (HAVE_peephole2) 3816 peephole2_optimize (); 3817 3818 return 0; 3819 } 3820 3821 namespace { 3822 3823 const pass_data pass_data_peephole2 = 3824 { 3825 RTL_PASS, /* type */ 3826 "peephole2", /* name */ 3827 OPTGROUP_NONE, /* optinfo_flags */ 3828 TV_PEEPHOLE2, /* tv_id */ 3829 0, /* properties_required */ 3830 0, /* properties_provided */ 3831 0, /* properties_destroyed */ 3832 0, /* todo_flags_start */ 3833 TODO_df_finish, /* todo_flags_finish */ 3834 }; 3835 3836 class pass_peephole2 : public rtl_opt_pass 3837 { 3838 public: 3839 pass_peephole2 (gcc::context *ctxt) 3840 : rtl_opt_pass (pass_data_peephole2, ctxt) 3841 {} 3842 3843 /* opt_pass methods: */ 3844 /* The epiphany backend creates a second instance of this pass, so we need 3845 a clone method. */ 3846 opt_pass * clone () { return new pass_peephole2 (m_ctxt); } 3847 virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); } 3848 virtual unsigned int execute (function *) 3849 { 3850 return rest_of_handle_peephole2 (); 3851 } 3852 3853 }; // class pass_peephole2 3854 3855 } // anon namespace 3856 3857 rtl_opt_pass * 3858 make_pass_peephole2 (gcc::context *ctxt) 3859 { 3860 return new pass_peephole2 (ctxt); 3861 } 3862 3863 namespace { 3864 3865 const pass_data pass_data_split_all_insns = 3866 { 3867 RTL_PASS, /* type */ 3868 "split1", /* name */ 3869 OPTGROUP_NONE, /* optinfo_flags */ 3870 TV_NONE, /* tv_id */ 3871 0, /* properties_required */ 3872 0, /* properties_provided */ 3873 0, /* properties_destroyed */ 3874 0, /* todo_flags_start */ 3875 0, /* todo_flags_finish */ 3876 }; 3877 3878 class pass_split_all_insns : public rtl_opt_pass 3879 { 3880 public: 3881 pass_split_all_insns (gcc::context *ctxt) 3882 : rtl_opt_pass (pass_data_split_all_insns, ctxt) 3883 {} 3884 3885 /* opt_pass methods: */ 3886 /* The epiphany backend creates a second instance of this pass, so 3887 we need a clone method. 
*/ 3888 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); } 3889 virtual unsigned int execute (function *) 3890 { 3891 split_all_insns (); 3892 return 0; 3893 } 3894 3895 }; // class pass_split_all_insns 3896 3897 } // anon namespace 3898 3899 rtl_opt_pass * 3900 make_pass_split_all_insns (gcc::context *ctxt) 3901 { 3902 return new pass_split_all_insns (ctxt); 3903 } 3904 3905 namespace { 3906 3907 const pass_data pass_data_split_after_reload = 3908 { 3909 RTL_PASS, /* type */ 3910 "split2", /* name */ 3911 OPTGROUP_NONE, /* optinfo_flags */ 3912 TV_NONE, /* tv_id */ 3913 0, /* properties_required */ 3914 0, /* properties_provided */ 3915 0, /* properties_destroyed */ 3916 0, /* todo_flags_start */ 3917 0, /* todo_flags_finish */ 3918 }; 3919 3920 class pass_split_after_reload : public rtl_opt_pass 3921 { 3922 public: 3923 pass_split_after_reload (gcc::context *ctxt) 3924 : rtl_opt_pass (pass_data_split_after_reload, ctxt) 3925 {} 3926 3927 /* opt_pass methods: */ 3928 virtual bool gate (function *) 3929 { 3930 /* If optimizing, then go ahead and split insns now. */ 3931 if (optimize > 0) 3932 return true; 3933 3934 #ifdef STACK_REGS 3935 return true; 3936 #else 3937 return false; 3938 #endif 3939 } 3940 3941 virtual unsigned int execute (function *) 3942 { 3943 split_all_insns (); 3944 return 0; 3945 } 3946 3947 }; // class pass_split_after_reload 3948 3949 } // anon namespace 3950 3951 rtl_opt_pass * 3952 make_pass_split_after_reload (gcc::context *ctxt) 3953 { 3954 return new pass_split_after_reload (ctxt); 3955 } 3956 3957 namespace { 3958 3959 const pass_data pass_data_split_before_regstack = 3960 { 3961 RTL_PASS, /* type */ 3962 "split3", /* name */ 3963 OPTGROUP_NONE, /* optinfo_flags */ 3964 TV_NONE, /* tv_id */ 3965 0, /* properties_required */ 3966 0, /* properties_provided */ 3967 0, /* properties_destroyed */ 3968 0, /* todo_flags_start */ 3969 0, /* todo_flags_finish */ 3970 }; 3971 3972 class pass_split_before_regstack : public rtl_opt_pass 3973 { 3974 public: 3975 pass_split_before_regstack (gcc::context *ctxt) 3976 : rtl_opt_pass (pass_data_split_before_regstack, ctxt) 3977 {} 3978 3979 /* opt_pass methods: */ 3980 virtual bool gate (function *); 3981 virtual unsigned int execute (function *) 3982 { 3983 split_all_insns (); 3984 return 0; 3985 } 3986 3987 }; // class pass_split_before_regstack 3988 3989 bool 3990 pass_split_before_regstack::gate (function *) 3991 { 3992 #if HAVE_ATTR_length && defined (STACK_REGS) 3993 /* If flow2 creates new instructions which need splitting 3994 and scheduling after reload is not done, they might not be 3995 split until final which doesn't allow splitting 3996 if HAVE_ATTR_length. 
*/ 3997 # ifdef INSN_SCHEDULING 3998 return (optimize && !flag_schedule_insns_after_reload); 3999 # else 4000 return (optimize); 4001 # endif 4002 #else 4003 return 0; 4004 #endif 4005 } 4006 4007 } // anon namespace 4008 4009 rtl_opt_pass * 4010 make_pass_split_before_regstack (gcc::context *ctxt) 4011 { 4012 return new pass_split_before_regstack (ctxt); 4013 } 4014 4015 static unsigned int 4016 rest_of_handle_split_before_sched2 (void) 4017 { 4018 #ifdef INSN_SCHEDULING 4019 split_all_insns (); 4020 #endif 4021 return 0; 4022 } 4023 4024 namespace { 4025 4026 const pass_data pass_data_split_before_sched2 = 4027 { 4028 RTL_PASS, /* type */ 4029 "split4", /* name */ 4030 OPTGROUP_NONE, /* optinfo_flags */ 4031 TV_NONE, /* tv_id */ 4032 0, /* properties_required */ 4033 0, /* properties_provided */ 4034 0, /* properties_destroyed */ 4035 0, /* todo_flags_start */ 4036 0, /* todo_flags_finish */ 4037 }; 4038 4039 class pass_split_before_sched2 : public rtl_opt_pass 4040 { 4041 public: 4042 pass_split_before_sched2 (gcc::context *ctxt) 4043 : rtl_opt_pass (pass_data_split_before_sched2, ctxt) 4044 {} 4045 4046 /* opt_pass methods: */ 4047 virtual bool gate (function *) 4048 { 4049 #ifdef INSN_SCHEDULING 4050 return optimize > 0 && flag_schedule_insns_after_reload; 4051 #else 4052 return false; 4053 #endif 4054 } 4055 4056 virtual unsigned int execute (function *) 4057 { 4058 return rest_of_handle_split_before_sched2 (); 4059 } 4060 4061 }; // class pass_split_before_sched2 4062 4063 } // anon namespace 4064 4065 rtl_opt_pass * 4066 make_pass_split_before_sched2 (gcc::context *ctxt) 4067 { 4068 return new pass_split_before_sched2 (ctxt); 4069 } 4070 4071 namespace { 4072 4073 const pass_data pass_data_split_for_shorten_branches = 4074 { 4075 RTL_PASS, /* type */ 4076 "split5", /* name */ 4077 OPTGROUP_NONE, /* optinfo_flags */ 4078 TV_NONE, /* tv_id */ 4079 0, /* properties_required */ 4080 0, /* properties_provided */ 4081 0, /* properties_destroyed */ 4082 0, /* todo_flags_start */ 4083 0, /* todo_flags_finish */ 4084 }; 4085 4086 class pass_split_for_shorten_branches : public rtl_opt_pass 4087 { 4088 public: 4089 pass_split_for_shorten_branches (gcc::context *ctxt) 4090 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt) 4091 {} 4092 4093 /* opt_pass methods: */ 4094 virtual bool gate (function *) 4095 { 4096 /* The placement of the splitting that we do for shorten_branches 4097 depends on whether regstack is used by the target or not. */ 4098 #if HAVE_ATTR_length && !defined (STACK_REGS) 4099 return true; 4100 #else 4101 return false; 4102 #endif 4103 } 4104 4105 virtual unsigned int execute (function *) 4106 { 4107 return split_all_insns_noflow (); 4108 } 4109 4110 }; // class pass_split_for_shorten_branches 4111 4112 } // anon namespace 4113 4114 rtl_opt_pass * 4115 make_pass_split_for_shorten_branches (gcc::context *ctxt) 4116 { 4117 return new pass_split_for_shorten_branches (ctxt); 4118 } 4119 4120 /* (Re)initialize the target information after a change in target. */ 4121 4122 void 4123 recog_init () 4124 { 4125 /* The information is zero-initialized, so we don't need to do anything 4126 first time round. 
*/ 4127 if (!this_target_recog->x_initialized) 4128 { 4129 this_target_recog->x_initialized = true; 4130 return; 4131 } 4132 memset (this_target_recog->x_bool_attr_masks, 0, 4133 sizeof (this_target_recog->x_bool_attr_masks)); 4134 for (unsigned int i = 0; i < NUM_INSN_CODES; ++i) 4135 if (this_target_recog->x_op_alt[i]) 4136 { 4137 free (this_target_recog->x_op_alt[i]); 4138 this_target_recog->x_op_alt[i] = 0; 4139 } 4140 } 4141
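
/* Illustrative sketch, not part of GCC itself: store_data_bypass_p and
   if_test_bypass_p above are intended to be named as the guard function
   of a define_bypass clause in a target's machine description, e.g.

     (define_bypass 1 "producer_insns" "store_insns" "store_data_bypass_p")

   A back end that needs additional conditions typically wraps the generic
   predicate in its own guard.  The helper below only shows the usual shape
   of such a wrapper; its name and the REG_P restriction are assumptions
   made for illustration, not something any target actually defines.  */

static bool ATTRIBUTE_UNUSED
example_store_data_bypass_guard_p (rtx_insn *out_insn, rtx_insn *in_insn)
{
  /* Only consider producers that are a single set of a register; real
     guards often add mode or register-class checks here as well.  */
  rtx out_set = single_set (out_insn);
  if (!out_set || !REG_P (SET_DEST (out_set)))
    return false;

  /* Accept the bypass only when the consumer is a store whose address
     does not use the producer's result, i.e. the dependency is through
     the stored data alone.  */
  return store_data_bypass_p (out_insn, in_insn);
}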