1 /* Subroutines used for code generation on the Renesas M32R cpu. 2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 3 2005, 2007, 2008, 2009 Free Software Foundation, Inc. 4 5 This file is part of GCC. 6 7 GCC is free software; you can redistribute it and/or modify it 8 under the terms of the GNU General Public License as published 9 by the Free Software Foundation; either version 3, or (at your 10 option) any later version. 11 12 GCC is distributed in the hope that it will be useful, but WITHOUT 13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY 14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public 15 License for more details. 16 17 You should have received a copy of the GNU General Public License 18 along with GCC; see the file COPYING3. If not see 19 <http://www.gnu.org/licenses/>. */ 20 21 #include "config.h" 22 #include "system.h" 23 #include "coretypes.h" 24 #include "tm.h" 25 #include "tree.h" 26 #include "rtl.h" 27 #include "regs.h" 28 #include "hard-reg-set.h" 29 #include "real.h" 30 #include "insn-config.h" 31 #include "conditions.h" 32 #include "output.h" 33 #include "insn-attr.h" 34 #include "flags.h" 35 #include "expr.h" 36 #include "function.h" 37 #include "recog.h" 38 #include "toplev.h" 39 #include "ggc.h" 40 #include "integrate.h" 41 #include "df.h" 42 #include "tm_p.h" 43 #include "target.h" 44 #include "target-def.h" 45 #include "tm-constrs.h" 46 47 /* Array of valid operand punctuation characters. */ 48 char m32r_punct_chars[256]; 49 50 /* Selected code model. */ 51 enum m32r_model m32r_model = M32R_MODEL_DEFAULT; 52 53 /* Selected SDA support. */ 54 enum m32r_sdata m32r_sdata = M32R_SDATA_DEFAULT; 55 56 /* Machine-specific symbol_ref flags. */ 57 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT 58 #define SYMBOL_REF_MODEL(X) \ 59 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3)) 60 61 /* For string literals, etc. */ 62 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.') 63 64 /* Forward declaration. */ 65 static bool m32r_handle_option (size_t, const char *, int); 66 static void init_reg_tables (void); 67 static void block_move_call (rtx, rtx, rtx); 68 static int m32r_is_insn (rtx); 69 static rtx m32r_legitimize_address (rtx, rtx, enum machine_mode); 70 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *); 71 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT); 72 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT); 73 74 static void m32r_file_start (void); 75 76 static int m32r_adjust_priority (rtx, int); 77 static int m32r_issue_rate (void); 78 79 static void m32r_encode_section_info (tree, rtx, int); 80 static bool m32r_in_small_data_p (const_tree); 81 static bool m32r_return_in_memory (const_tree, const_tree); 82 static void m32r_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode, 83 tree, int *, int); 84 static void init_idents (void); 85 static bool m32r_rtx_costs (rtx, int, int, int *, bool speed); 86 static bool m32r_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode, 87 const_tree, bool); 88 static int m32r_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode, 89 tree, bool); 90 static bool m32r_can_eliminate (const int, const int); 91 static void m32r_trampoline_init (rtx, tree, rtx); 92 93 /* M32R specific attributes. 
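   As an illustrative note on usage (user source code, not part of this file),
   the attributes registered in the table below are written as:

     void handler (void) __attribute__ ((interrupt));
     extern int big_table[256] __attribute__ ((model ("large")));

   "interrupt" takes no arguments; "model" takes exactly one string argument,
   which is validated by m32r_handle_model_attribute.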
*/ 94 95 static const struct attribute_spec m32r_attribute_table[] = 96 { 97 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */ 98 { "interrupt", 0, 0, true, false, false, NULL }, 99 { "model", 1, 1, true, false, false, m32r_handle_model_attribute }, 100 { NULL, 0, 0, false, false, false, NULL } 101 }; 102 103 /* Initialize the GCC target structure. */ 104 #undef TARGET_ATTRIBUTE_TABLE 105 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table 106 107 #undef TARGET_LEGITIMIZE_ADDRESS 108 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address 109 110 #undef TARGET_ASM_ALIGNED_HI_OP 111 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t" 112 #undef TARGET_ASM_ALIGNED_SI_OP 113 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t" 114 115 #undef TARGET_ASM_FUNCTION_PROLOGUE 116 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue 117 #undef TARGET_ASM_FUNCTION_EPILOGUE 118 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue 119 120 #undef TARGET_ASM_FILE_START 121 #define TARGET_ASM_FILE_START m32r_file_start 122 123 #undef TARGET_SCHED_ADJUST_PRIORITY 124 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority 125 #undef TARGET_SCHED_ISSUE_RATE 126 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate 127 128 #undef TARGET_DEFAULT_TARGET_FLAGS 129 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_CPU_DEFAULT 130 #undef TARGET_HANDLE_OPTION 131 #define TARGET_HANDLE_OPTION m32r_handle_option 132 133 #undef TARGET_ENCODE_SECTION_INFO 134 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info 135 #undef TARGET_IN_SMALL_DATA_P 136 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p 137 138 #undef TARGET_RTX_COSTS 139 #define TARGET_RTX_COSTS m32r_rtx_costs 140 #undef TARGET_ADDRESS_COST 141 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0 142 143 #undef TARGET_PROMOTE_PROTOTYPES 144 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true 145 #undef TARGET_RETURN_IN_MEMORY 146 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory 147 #undef TARGET_SETUP_INCOMING_VARARGS 148 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs 149 #undef TARGET_MUST_PASS_IN_STACK 150 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size 151 #undef TARGET_PASS_BY_REFERENCE 152 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference 153 #undef TARGET_ARG_PARTIAL_BYTES 154 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes 155 156 #undef TARGET_CAN_ELIMINATE 157 #define TARGET_CAN_ELIMINATE m32r_can_eliminate 158 159 #undef TARGET_TRAMPOLINE_INIT 160 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init 161 162 struct gcc_target targetm = TARGET_INITIALIZER; 163 164 /* Implement TARGET_HANDLE_OPTION. 
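   The switch below recognizes the M32R-specific options -m32r, -mmodel=,
   -msdata=, -mno-flush-func, -mflush-trap= and -mno-flush-trap.  As a
   hypothetical example (the driver name is only illustrative):

     m32r-elf-gcc -O2 -mmodel=medium -msdata=use -mflush-trap=12 foo.c

   sets m32r_model to M32R_MODEL_MEDIUM and m32r_sdata to M32R_SDATA_USE,
   and accepts the flush trap number because it is <= 15.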
*/

static bool
m32r_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_m32r:
      target_flags &= ~(MASK_M32R2 | MASK_M32RX);
      return true;

    case OPT_mmodel_:
      if (strcmp (arg, "small") == 0)
        m32r_model = M32R_MODEL_SMALL;
      else if (strcmp (arg, "medium") == 0)
        m32r_model = M32R_MODEL_MEDIUM;
      else if (strcmp (arg, "large") == 0)
        m32r_model = M32R_MODEL_LARGE;
      else
        return false;
      return true;

    case OPT_msdata_:
      if (strcmp (arg, "none") == 0)
        m32r_sdata = M32R_SDATA_NONE;
      else if (strcmp (arg, "sdata") == 0)
        m32r_sdata = M32R_SDATA_SDATA;
      else if (strcmp (arg, "use") == 0)
        m32r_sdata = M32R_SDATA_USE;
      else
        return false;
      return true;

    case OPT_mno_flush_func:
      m32r_cache_flush_func = NULL;
      return true;

    case OPT_mflush_trap_:
      return value <= 15;

    case OPT_mno_flush_trap:
      m32r_cache_flush_trap = -1;
      return true;

    default:
      return true;
    }
}

/* Called by OVERRIDE_OPTIONS to initialize various things.  */

void
m32r_init (void)
{
  init_reg_tables ();

  /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P.  */
  memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
  m32r_punct_chars['#'] = 1;
  m32r_punct_chars['@'] = 1; /* ??? no longer used  */

  /* Provide default value if not specified.  */
  if (!g_switch_set)
    g_switch_value = SDATA_DEFAULT_SIZE;
}

/* Vectors to keep interesting information about registers where it can easily
   be found.  We used to use the actual mode value as the bit number, but there
   are (or may be) more than 32 modes now.  Instead we use two tables: one
   indexed by hard register number, and one indexed by mode.  */

/* The purpose of m32r_mode_class is to shrink the range of modes so that
   they all fit (as bit numbers) in a 32-bit word (again).  Each real mode is
   mapped into one m32r_mode_class mode.  */

enum m32r_mode_class
{
  C_MODE,
  S_MODE, D_MODE, T_MODE, O_MODE,
  SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
};

/* Modes for condition codes.  */
#define C_MODES (1 << (int) C_MODE)

/* Modes for single-word and smaller quantities.  */
#define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))

/* Modes for double-word and smaller quantities.  */
#define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))

/* Modes for quad-word and smaller quantities.  */
#define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))

/* Modes for accumulators.  */
#define A_MODES (1 << (int) A_MODE)

/* Value is 1 if the register/mode pair is acceptable on the M32R.
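   As a sketch of how the two tables combine (the real macro lives in m32r.h;
   this is the idea rather than a verbatim copy):

     HARD_REGNO_MODE_OK (REGNO, MODE)
       is roughly (m32r_hard_regno_mode_ok[REGNO] & m32r_mode_class[MODE]) != 0

   so, for instance, the accumulator entries (A_MODES) only accept modes whose
   m32r_mode_class bit falls within A_MODES.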
*/ 262 263 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] = 264 { 265 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, 266 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES, 267 S_MODES, C_MODES, A_MODES, A_MODES 268 }; 269 270 unsigned int m32r_mode_class [NUM_MACHINE_MODES]; 271 272 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER]; 273 274 static void 275 init_reg_tables (void) 276 { 277 int i; 278 279 for (i = 0; i < NUM_MACHINE_MODES; i++) 280 { 281 switch (GET_MODE_CLASS (i)) 282 { 283 case MODE_INT: 284 case MODE_PARTIAL_INT: 285 case MODE_COMPLEX_INT: 286 if (GET_MODE_SIZE (i) <= 4) 287 m32r_mode_class[i] = 1 << (int) S_MODE; 288 else if (GET_MODE_SIZE (i) == 8) 289 m32r_mode_class[i] = 1 << (int) D_MODE; 290 else if (GET_MODE_SIZE (i) == 16) 291 m32r_mode_class[i] = 1 << (int) T_MODE; 292 else if (GET_MODE_SIZE (i) == 32) 293 m32r_mode_class[i] = 1 << (int) O_MODE; 294 else 295 m32r_mode_class[i] = 0; 296 break; 297 case MODE_FLOAT: 298 case MODE_COMPLEX_FLOAT: 299 if (GET_MODE_SIZE (i) <= 4) 300 m32r_mode_class[i] = 1 << (int) SF_MODE; 301 else if (GET_MODE_SIZE (i) == 8) 302 m32r_mode_class[i] = 1 << (int) DF_MODE; 303 else if (GET_MODE_SIZE (i) == 16) 304 m32r_mode_class[i] = 1 << (int) TF_MODE; 305 else if (GET_MODE_SIZE (i) == 32) 306 m32r_mode_class[i] = 1 << (int) OF_MODE; 307 else 308 m32r_mode_class[i] = 0; 309 break; 310 case MODE_CC: 311 m32r_mode_class[i] = 1 << (int) C_MODE; 312 break; 313 default: 314 m32r_mode_class[i] = 0; 315 break; 316 } 317 } 318 319 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 320 { 321 if (GPR_P (i)) 322 m32r_regno_reg_class[i] = GENERAL_REGS; 323 else if (i == ARG_POINTER_REGNUM) 324 m32r_regno_reg_class[i] = GENERAL_REGS; 325 else 326 m32r_regno_reg_class[i] = NO_REGS; 327 } 328 } 329 330 /* M32R specific attribute support. 331 332 interrupt - for interrupt functions 333 334 model - select code model used to access object 335 336 small: addresses use 24 bits, use bl to make calls 337 medium: addresses use 32 bits, use bl to make calls 338 large: addresses use 32 bits, use seth/add3/jl to make calls 339 340 Grep for MODEL in m32r.h for more info. */ 341 342 static tree small_ident1; 343 static tree small_ident2; 344 static tree medium_ident1; 345 static tree medium_ident2; 346 static tree large_ident1; 347 static tree large_ident2; 348 349 static void 350 init_idents (void) 351 { 352 if (small_ident1 == 0) 353 { 354 small_ident1 = get_identifier ("small"); 355 small_ident2 = get_identifier ("__small__"); 356 medium_ident1 = get_identifier ("medium"); 357 medium_ident2 = get_identifier ("__medium__"); 358 large_ident1 = get_identifier ("large"); 359 large_ident2 = get_identifier ("__large__"); 360 } 361 } 362 363 /* Handle an "model" attribute; arguments as in 364 struct attribute_spec.handler. 
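   For illustration (user source, not part of this file), the spellings
   accepted by init_idents and this handler are:

     int x __attribute__ ((model ("small")));        accepted
     int y __attribute__ ((model ("__large__")));    accepted
     int z __attribute__ ((model ("huge")));         rejected with a
                                                     -Wattributes warning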
*/ 365 static tree 366 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name, 367 tree args, int flags ATTRIBUTE_UNUSED, 368 bool *no_add_attrs) 369 { 370 tree arg; 371 372 init_idents (); 373 arg = TREE_VALUE (args); 374 375 if (arg != small_ident1 376 && arg != small_ident2 377 && arg != medium_ident1 378 && arg != medium_ident2 379 && arg != large_ident1 380 && arg != large_ident2) 381 { 382 warning (OPT_Wattributes, "invalid argument of %qs attribute", 383 IDENTIFIER_POINTER (name)); 384 *no_add_attrs = true; 385 } 386 387 return NULL_TREE; 388 } 389 390 /* Encode section information of DECL, which is either a VAR_DECL, 391 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???. 392 393 For the M32R we want to record: 394 395 - whether the object lives in .sdata/.sbss. 396 - what code model should be used to access the object 397 */ 398 399 static void 400 m32r_encode_section_info (tree decl, rtx rtl, int first) 401 { 402 int extra_flags = 0; 403 tree model_attr; 404 enum m32r_model model; 405 406 default_encode_section_info (decl, rtl, first); 407 408 if (!DECL_P (decl)) 409 return; 410 411 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl)); 412 if (model_attr) 413 { 414 tree id; 415 416 init_idents (); 417 418 id = TREE_VALUE (TREE_VALUE (model_attr)); 419 420 if (id == small_ident1 || id == small_ident2) 421 model = M32R_MODEL_SMALL; 422 else if (id == medium_ident1 || id == medium_ident2) 423 model = M32R_MODEL_MEDIUM; 424 else if (id == large_ident1 || id == large_ident2) 425 model = M32R_MODEL_LARGE; 426 else 427 gcc_unreachable (); /* shouldn't happen */ 428 } 429 else 430 { 431 if (TARGET_MODEL_SMALL) 432 model = M32R_MODEL_SMALL; 433 else if (TARGET_MODEL_MEDIUM) 434 model = M32R_MODEL_MEDIUM; 435 else if (TARGET_MODEL_LARGE) 436 model = M32R_MODEL_LARGE; 437 else 438 gcc_unreachable (); /* shouldn't happen */ 439 } 440 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT; 441 442 if (extra_flags) 443 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags; 444 } 445 446 /* Only mark the object as being small data area addressable if 447 it hasn't been explicitly marked with a code model. 448 449 The user can explicitly put an object in the small data area with the 450 section attribute. If the object is in sdata/sbss and marked with a 451 code model do both [put the object in .sdata and mark it as being 452 addressed with a specific code model - don't mark it as being addressed 453 with an SDA reloc though]. This is ok and might be useful at times. If 454 the object doesn't fit the linker will give an error. */ 455 456 static bool 457 m32r_in_small_data_p (const_tree decl) 458 { 459 const_tree section; 460 461 if (TREE_CODE (decl) != VAR_DECL) 462 return false; 463 464 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl))) 465 return false; 466 467 section = DECL_SECTION_NAME (decl); 468 if (section) 469 { 470 const char *const name = TREE_STRING_POINTER (section); 471 if (strcmp (name, ".sdata") == 0 || strcmp (name, ".sbss") == 0) 472 return true; 473 } 474 else 475 { 476 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE) 477 { 478 int size = int_size_in_bytes (TREE_TYPE (decl)); 479 480 if (size > 0 && (unsigned HOST_WIDE_INT) size <= g_switch_value) 481 return true; 482 } 483 } 484 485 return false; 486 } 487 488 /* Do anything needed before RTL is emitted for each function. */ 489 490 void 491 m32r_init_expanders (void) 492 { 493 /* ??? At one point there was code here. The function is left in 494 to make it easy to experiment. 
*/ 495 } 496 497 int 498 call_operand (rtx op, enum machine_mode mode) 499 { 500 if (!MEM_P (op)) 501 return 0; 502 op = XEXP (op, 0); 503 return call_address_operand (op, mode); 504 } 505 506 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */ 507 508 int 509 small_data_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED) 510 { 511 if (! TARGET_SDATA_USE) 512 return 0; 513 514 if (GET_CODE (op) == SYMBOL_REF) 515 return SYMBOL_REF_SMALL_P (op); 516 517 if (GET_CODE (op) == CONST 518 && GET_CODE (XEXP (op, 0)) == PLUS 519 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF 520 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1))) 521 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0)); 522 523 return 0; 524 } 525 526 /* Return 1 if OP is a symbol that can use 24-bit addressing. */ 527 528 int 529 addr24_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED) 530 { 531 rtx sym; 532 533 if (flag_pic) 534 return 0; 535 536 if (GET_CODE (op) == LABEL_REF) 537 return TARGET_ADDR24; 538 539 if (GET_CODE (op) == SYMBOL_REF) 540 sym = op; 541 else if (GET_CODE (op) == CONST 542 && GET_CODE (XEXP (op, 0)) == PLUS 543 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF 544 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1))) 545 sym = XEXP (XEXP (op, 0), 0); 546 else 547 return 0; 548 549 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL) 550 return 1; 551 552 if (TARGET_ADDR24 553 && (CONSTANT_POOL_ADDRESS_P (sym) 554 || LIT_NAME_P (XSTR (sym, 0)))) 555 return 1; 556 557 return 0; 558 } 559 560 /* Return 1 if OP is a symbol that needs 32-bit addressing. */ 561 562 int 563 addr32_operand (rtx op, enum machine_mode mode) 564 { 565 rtx sym; 566 567 if (GET_CODE (op) == LABEL_REF) 568 return TARGET_ADDR32; 569 570 if (GET_CODE (op) == SYMBOL_REF) 571 sym = op; 572 else if (GET_CODE (op) == CONST 573 && GET_CODE (XEXP (op, 0)) == PLUS 574 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF 575 && CONST_INT_P (XEXP (XEXP (op, 0), 1)) 576 && ! flag_pic) 577 sym = XEXP (XEXP (op, 0), 0); 578 else 579 return 0; 580 581 return (! addr24_operand (sym, mode) 582 && ! small_data_operand (sym, mode)); 583 } 584 585 /* Return 1 if OP is a function that can be called with the `bl' insn. */ 586 587 int 588 call26_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED) 589 { 590 if (flag_pic) 591 return 1; 592 593 if (GET_CODE (op) == SYMBOL_REF) 594 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE; 595 596 return TARGET_CALL26; 597 } 598 599 /* Return 1 if OP is a DImode const we want to handle inline. 600 This must match the code in the movdi pattern. 601 It is used by the 'G' CONST_DOUBLE_OK_FOR_LETTER. */ 602 603 int 604 easy_di_const (rtx op) 605 { 606 rtx high_rtx, low_rtx; 607 HOST_WIDE_INT high, low; 608 609 split_double (op, &high_rtx, &low_rtx); 610 high = INTVAL (high_rtx); 611 low = INTVAL (low_rtx); 612 /* Pick constants loadable with 2 16-bit `ldi' insns. */ 613 if (high >= -128 && high <= 127 614 && low >= -128 && low <= 127) 615 return 1; 616 return 0; 617 } 618 619 /* Return 1 if OP is a DFmode const we want to handle inline. 620 This must match the code in the movdf pattern. 621 It is used by the 'H' CONST_DOUBLE_OK_FOR_LETTER. */ 622 623 int 624 easy_df_const (rtx op) 625 { 626 REAL_VALUE_TYPE r; 627 long l[2]; 628 629 REAL_VALUE_FROM_CONST_DOUBLE (r, op); 630 REAL_VALUE_TO_TARGET_DOUBLE (r, l); 631 if (l[0] == 0 && l[1] == 0) 632 return 1; 633 if ((l[0] & 0xffff) == 0 && l[1] == 0) 634 return 1; 635 return 0; 636 } 637 638 /* Return 1 if OP is (mem (reg ...)). 
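   For example, a memory operand whose address is a plain register (an @Rn
   access) matches, while a register-plus-displacement address does not.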
639 This is used in insn length calcs. */ 640 641 int 642 memreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED) 643 { 644 return MEM_P (op) && REG_P (XEXP (op, 0)); 645 } 646 647 /* Return nonzero if TYPE must be passed by indirect reference. */ 648 649 static bool 650 m32r_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED, 651 enum machine_mode mode, const_tree type, 652 bool named ATTRIBUTE_UNUSED) 653 { 654 int size; 655 656 if (type) 657 size = int_size_in_bytes (type); 658 else 659 size = GET_MODE_SIZE (mode); 660 661 return (size < 0 || size > 8); 662 } 663 664 /* Comparisons. */ 665 666 /* X and Y are two things to compare using CODE. Emit the compare insn and 667 return the rtx for compare [arg0 of the if_then_else]. 668 If need_compare is true then the comparison insn must be generated, rather 669 than being subsumed into the following branch instruction. */ 670 671 rtx 672 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare) 673 { 674 enum rtx_code compare_code; 675 enum rtx_code branch_code; 676 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM); 677 int must_swap = 0; 678 679 switch (code) 680 { 681 case EQ: compare_code = EQ; branch_code = NE; break; 682 case NE: compare_code = EQ; branch_code = EQ; break; 683 case LT: compare_code = LT; branch_code = NE; break; 684 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break; 685 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break; 686 case GE: compare_code = LT; branch_code = EQ; break; 687 case LTU: compare_code = LTU; branch_code = NE; break; 688 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break; 689 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break; 690 case GEU: compare_code = LTU; branch_code = EQ; break; 691 692 default: 693 gcc_unreachable (); 694 } 695 696 if (need_compare) 697 { 698 switch (compare_code) 699 { 700 case EQ: 701 if (satisfies_constraint_P (y) /* Reg equal to small const. */ 702 && y != const0_rtx) 703 { 704 rtx tmp = gen_reg_rtx (SImode); 705 706 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y)))); 707 x = tmp; 708 y = const0_rtx; 709 } 710 else if (CONSTANT_P (y)) /* Reg equal to const. */ 711 { 712 rtx tmp = force_reg (GET_MODE (x), y); 713 y = tmp; 714 } 715 716 if (register_operand (y, SImode) /* Reg equal to reg. */ 717 || y == const0_rtx) /* Reg equal to zero. */ 718 { 719 emit_insn (gen_cmp_eqsi_insn (x, y)); 720 721 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx); 722 } 723 break; 724 725 case LT: 726 if (register_operand (y, SImode) 727 || satisfies_constraint_P (y)) 728 { 729 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. 
*/ 730 731 switch (code) 732 { 733 case LT: 734 emit_insn (gen_cmp_ltsi_insn (x, y)); 735 code = EQ; 736 break; 737 case LE: 738 if (y == const0_rtx) 739 tmp = const1_rtx; 740 else 741 emit_insn (gen_addsi3 (tmp, y, constm1_rtx)); 742 emit_insn (gen_cmp_ltsi_insn (x, tmp)); 743 code = EQ; 744 break; 745 case GT: 746 if (CONST_INT_P (y)) 747 tmp = gen_rtx_PLUS (SImode, y, const1_rtx); 748 else 749 emit_insn (gen_addsi3 (tmp, y, constm1_rtx)); 750 emit_insn (gen_cmp_ltsi_insn (x, tmp)); 751 code = NE; 752 break; 753 case GE: 754 emit_insn (gen_cmp_ltsi_insn (x, y)); 755 code = NE; 756 break; 757 default: 758 gcc_unreachable (); 759 } 760 761 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx); 762 } 763 break; 764 765 case LTU: 766 if (register_operand (y, SImode) 767 || satisfies_constraint_P (y)) 768 { 769 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */ 770 771 switch (code) 772 { 773 case LTU: 774 emit_insn (gen_cmp_ltusi_insn (x, y)); 775 code = EQ; 776 break; 777 case LEU: 778 if (y == const0_rtx) 779 tmp = const1_rtx; 780 else 781 emit_insn (gen_addsi3 (tmp, y, constm1_rtx)); 782 emit_insn (gen_cmp_ltusi_insn (x, tmp)); 783 code = EQ; 784 break; 785 case GTU: 786 if (CONST_INT_P (y)) 787 tmp = gen_rtx_PLUS (SImode, y, const1_rtx); 788 else 789 emit_insn (gen_addsi3 (tmp, y, constm1_rtx)); 790 emit_insn (gen_cmp_ltusi_insn (x, tmp)); 791 code = NE; 792 break; 793 case GEU: 794 emit_insn (gen_cmp_ltusi_insn (x, y)); 795 code = NE; 796 break; 797 default: 798 gcc_unreachable (); 799 } 800 801 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx); 802 } 803 break; 804 805 default: 806 gcc_unreachable (); 807 } 808 } 809 else 810 { 811 /* Reg/reg equal comparison. */ 812 if (compare_code == EQ 813 && register_operand (y, SImode)) 814 return gen_rtx_fmt_ee (code, CCmode, x, y); 815 816 /* Reg/zero signed comparison. */ 817 if ((compare_code == EQ || compare_code == LT) 818 && y == const0_rtx) 819 return gen_rtx_fmt_ee (code, CCmode, x, y); 820 821 /* Reg/smallconst equal comparison. */ 822 if (compare_code == EQ 823 && satisfies_constraint_P (y)) 824 { 825 rtx tmp = gen_reg_rtx (SImode); 826 827 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y)))); 828 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx); 829 } 830 831 /* Reg/const equal comparison. */ 832 if (compare_code == EQ 833 && CONSTANT_P (y)) 834 { 835 rtx tmp = force_reg (GET_MODE (x), y); 836 837 return gen_rtx_fmt_ee (code, CCmode, x, tmp); 838 } 839 } 840 841 if (CONSTANT_P (y)) 842 { 843 if (must_swap) 844 y = force_reg (GET_MODE (x), y); 845 else 846 { 847 int ok_const = reg_or_int16_operand (y, GET_MODE (y)); 848 849 if (! ok_const) 850 y = force_reg (GET_MODE (x), y); 851 } 852 } 853 854 switch (compare_code) 855 { 856 case EQ : 857 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y)); 858 break; 859 case LT : 860 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y)); 861 break; 862 case LTU : 863 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? 
x : y)); 864 break; 865 866 default: 867 gcc_unreachable (); 868 } 869 870 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode)); 871 } 872 873 bool 874 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2) 875 { 876 enum machine_mode mode = GET_MODE (op0); 877 878 gcc_assert (mode == SImode); 879 switch (code) 880 { 881 case EQ: 882 if (!register_operand (op1, mode)) 883 op1 = force_reg (mode, op1); 884 885 if (TARGET_M32RX || TARGET_M32R2) 886 { 887 if (!reg_or_zero_operand (op2, mode)) 888 op2 = force_reg (mode, op2); 889 890 emit_insn (gen_seq_insn_m32rx (op0, op1, op2)); 891 return true; 892 } 893 if (CONST_INT_P (op2) && INTVAL (op2) == 0) 894 { 895 emit_insn (gen_seq_zero_insn (op0, op1)); 896 return true; 897 } 898 899 if (!reg_or_eq_int16_operand (op2, mode)) 900 op2 = force_reg (mode, op2); 901 902 emit_insn (gen_seq_insn (op0, op1, op2)); 903 return true; 904 905 case NE: 906 if (!CONST_INT_P (op2) 907 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2))) 908 { 909 rtx reg; 910 911 if (reload_completed || reload_in_progress) 912 return false; 913 914 reg = gen_reg_rtx (SImode); 915 emit_insn (gen_xorsi3 (reg, op1, op2)); 916 op1 = reg; 917 918 if (!register_operand (op1, mode)) 919 op1 = force_reg (mode, op1); 920 921 emit_insn (gen_sne_zero_insn (op0, op1)); 922 return true; 923 } 924 return false; 925 926 case LT: 927 case GT: 928 if (code == GT) 929 { 930 rtx tmp = op2; 931 op2 = op1; 932 op1 = tmp; 933 code = LT; 934 } 935 936 if (!register_operand (op1, mode)) 937 op1 = force_reg (mode, op1); 938 939 if (!reg_or_int16_operand (op2, mode)) 940 op2 = force_reg (mode, op2); 941 942 emit_insn (gen_slt_insn (op0, op1, op2)); 943 return true; 944 945 case LTU: 946 case GTU: 947 if (code == GTU) 948 { 949 rtx tmp = op2; 950 op2 = op1; 951 op1 = tmp; 952 code = LTU; 953 } 954 955 if (!register_operand (op1, mode)) 956 op1 = force_reg (mode, op1); 957 958 if (!reg_or_int16_operand (op2, mode)) 959 op2 = force_reg (mode, op2); 960 961 emit_insn (gen_sltu_insn (op0, op1, op2)); 962 return true; 963 964 case GE: 965 case GEU: 966 if (!register_operand (op1, mode)) 967 op1 = force_reg (mode, op1); 968 969 if (!reg_or_int16_operand (op2, mode)) 970 op2 = force_reg (mode, op2); 971 972 if (code == GE) 973 emit_insn (gen_sge_insn (op0, op1, op2)); 974 else 975 emit_insn (gen_sgeu_insn (op0, op1, op2)); 976 return true; 977 978 case LE: 979 case LEU: 980 if (!register_operand (op1, mode)) 981 op1 = force_reg (mode, op1); 982 983 if (CONST_INT_P (op2)) 984 { 985 HOST_WIDE_INT value = INTVAL (op2); 986 if (value >= 2147483647) 987 { 988 emit_move_insn (op0, const1_rtx); 989 return true; 990 } 991 992 op2 = GEN_INT (value + 1); 993 if (value < -32768 || value >= 32767) 994 op2 = force_reg (mode, op2); 995 996 if (code == LEU) 997 emit_insn (gen_sltu_insn (op0, op1, op2)); 998 else 999 emit_insn (gen_slt_insn (op0, op1, op2)); 1000 return true; 1001 } 1002 1003 if (!register_operand (op2, mode)) 1004 op2 = force_reg (mode, op2); 1005 1006 if (code == LEU) 1007 emit_insn (gen_sleu_insn (op0, op1, op2)); 1008 else 1009 emit_insn (gen_sle_insn (op0, op1, op2)); 1010 return true; 1011 1012 default: 1013 gcc_unreachable (); 1014 } 1015 } 1016 1017 1018 /* Split a 2 word move (DI or DF) into component parts. 
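   As a rough sketch of the expansion (register numbers purely illustrative),
   a DImode register copy such as

     (set (reg:DI 1) (reg:DI 0))

   becomes two word-sized sets,

     (set (reg:SI 2) (reg:SI 1))
     (set (reg:SI 1) (reg:SI 0))

   emitted high word first here because the low word of the destination (r1)
   overlaps the high word of the source; with no overlap the low word is
   copied first.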
*/

rtx
gen_split_move_double (rtx operands[])
{
  enum machine_mode mode = GET_MODE (operands[0]);
  rtx dest = operands[0];
  rtx src = operands[1];
  rtx val;

  /* We might have (SUBREG (MEM)) here, so just get rid of the
     subregs to make this code simpler.  It is safe to call
     alter_subreg any time after reload.  */
  if (GET_CODE (dest) == SUBREG)
    alter_subreg (&dest);
  if (GET_CODE (src) == SUBREG)
    alter_subreg (&src);

  start_sequence ();
  if (REG_P (dest))
    {
      int dregno = REGNO (dest);

      /* Reg = reg.  */
      if (REG_P (src))
        {
          int sregno = REGNO (src);

          int reverse = (dregno == sregno + 1);

          /* We normally copy the low-numbered register first.  However, if
             the first register of operand 0 is the same as the second
             register of operand 1, we must copy in the opposite order.  */
          emit_insn (gen_rtx_SET (VOIDmode,
                                  operand_subword (dest, reverse, TRUE, mode),
                                  operand_subword (src, reverse, TRUE, mode)));

          emit_insn (gen_rtx_SET (VOIDmode,
                                  operand_subword (dest, !reverse, TRUE, mode),
                                  operand_subword (src, !reverse, TRUE, mode)));
        }

      /* Reg = constant.  */
      else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
        {
          rtx words[2];
          split_double (src, &words[0], &words[1]);
          emit_insn (gen_rtx_SET (VOIDmode,
                                  operand_subword (dest, 0, TRUE, mode),
                                  words[0]));

          emit_insn (gen_rtx_SET (VOIDmode,
                                  operand_subword (dest, 1, TRUE, mode),
                                  words[1]));
        }

      /* Reg = mem.  */
      else if (MEM_P (src))
        {
          /* If the high-address word is used in the address, we must load it
             last.  Otherwise, load it first.  */
          int reverse
            = (refers_to_regno_p (dregno, dregno + 1, XEXP (src, 0), 0) != 0);

          /* We used to optimize loads through a single base register as

               ld r1,r3+; ld r2,r3

             if r3 were not used subsequently.  However, the REG_NOTES aren't
             propagated correctly by the reload phase, and it can cause bad
             code to be generated.  We could still try:

               ld r1,r3+; ld r2,r3; addi r3,-4

             which saves 2 bytes and doesn't force longword alignment.  */
          emit_insn (gen_rtx_SET (VOIDmode,
                                  operand_subword (dest, reverse, TRUE, mode),
                                  adjust_address (src, SImode,
                                                  reverse * UNITS_PER_WORD)));

          emit_insn (gen_rtx_SET (VOIDmode,
                                  operand_subword (dest, !reverse, TRUE, mode),
                                  adjust_address (src, SImode,
                                                  !reverse * UNITS_PER_WORD)));
        }
      else
        gcc_unreachable ();
    }

  /* Mem = reg.  */
  /* We used to optimize stores through a single base register as

       st r1,r3; st r2,+r3

     if r3 were not used subsequently.  However, the REG_NOTES aren't
     propagated correctly by the reload phase, and it can cause bad
     code to be generated.  We could still try:

       st r1,r3; st r2,+r3; addi r3,-4

     which saves 2 bytes and doesn't force longword alignment.
*/
  else if (MEM_P (dest) && REG_P (src))
    {
      emit_insn (gen_rtx_SET (VOIDmode,
                              adjust_address (dest, SImode, 0),
                              operand_subword (src, 0, TRUE, mode)));

      emit_insn (gen_rtx_SET (VOIDmode,
                              adjust_address (dest, SImode, UNITS_PER_WORD),
                              operand_subword (src, 1, TRUE, mode)));
    }

  else
    gcc_unreachable ();

  val = get_insns ();
  end_sequence ();
  return val;
}


static int
m32r_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                        tree type, bool named ATTRIBUTE_UNUSED)
{
  int words;
  unsigned int size =
    (((mode == BLKmode && type)
      ? (unsigned int) int_size_in_bytes (type)
      : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
    / UNITS_PER_WORD;

  if (*cum >= M32R_MAX_PARM_REGS)
    words = 0;
  else if (*cum + size > M32R_MAX_PARM_REGS)
    words = (*cum + size) - M32R_MAX_PARM_REGS;
  else
    words = 0;

  return words * UNITS_PER_WORD;
}

/* Worker function for TARGET_RETURN_IN_MEMORY.  */

static bool
m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  return m32r_pass_by_reference (NULL, TYPE_MODE (type), type, false);
}

/* Do any needed setup for a variadic function.  For the M32R, we must
   create a register parameter block, and then copy any anonymous arguments
   in registers to memory.

   CUM has not been updated for the last named argument which has type TYPE
   and mode MODE, and we rely on this fact.  */

static void
m32r_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                             tree type, int *pretend_size, int no_rtl)
{
  int first_anon_arg;

  if (no_rtl)
    return;

  /* All BLKmode values are passed by reference.  */
  gcc_assert (mode != BLKmode);

  first_anon_arg = (ROUND_ADVANCE_CUM (*cum, mode, type)
                    + ROUND_ADVANCE_ARG (mode, type));

  if (first_anon_arg < M32R_MAX_PARM_REGS)
    {
      /* Note that first_reg_offset < M32R_MAX_PARM_REGS.  */
      int first_reg_offset = first_anon_arg;
      /* Size in words to "pretend" allocate.  */
      int size = M32R_MAX_PARM_REGS - first_reg_offset;
      rtx regblock;

      regblock = gen_frame_mem (BLKmode,
                                plus_constant (arg_pointer_rtx,
                                               FIRST_PARM_OFFSET (0)));
      set_mem_alias_set (regblock, get_varargs_alias_set ());
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = (size * UNITS_PER_WORD);
    }
}


/* Return true if INSN is a real instruction, i.e. not a USE, CLOBBER,
   ADDR_VEC or debug-only insn.  */

static int
m32r_is_insn (rtx insn)
{
  return (NONDEBUG_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) != USE
          && GET_CODE (PATTERN (insn)) != CLOBBER
          && GET_CODE (PATTERN (insn)) != ADDR_VEC);
}

/* Increase the priority of long instructions so that the
   short instructions are scheduled ahead of the long ones.  */

static int
m32r_adjust_priority (rtx insn, int priority)
{
  if (m32r_is_insn (insn)
      && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
    priority <<= 3;

  return priority;
}


/* Indicate how many instructions can be issued at the same time.
   This is sort of a lie.  The m32r can issue only 1 long insn at
   once, but it can issue 2 short insns.  The default therefore is
   set at 2, but this can be overridden by the command line option
   -missue-rate=1.
*/

static int
m32r_issue_rate (void)
{
  return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
}

/* Cost functions.  */

static bool
m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
                bool speed ATTRIBUTE_UNUSED)
{
  switch (code)
    {
      /* Small integers are as cheap as registers.  4 byte values can be
         fetched as immediate constants - let's give that the cost of an
         extra insn.  */
    case CONST_INT:
      if (INT16_P (INTVAL (x)))
        {
          *total = 0;
          return true;
        }
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (1);
      return true;

    case CONST_DOUBLE:
      {
        rtx high, low;

        split_double (x, &high, &low);
        *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
                                + !INT16_P (INTVAL (low)));
        return true;
      }

    case MULT:
      *total = COSTS_N_INSNS (3);
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (10);
      return true;

    default:
      return false;
    }
}

/* Type of function DECL.

   The result is cached.  To reset the cache at the end of a function,
   call with DECL = NULL_TREE.  */

enum m32r_function_type
m32r_compute_function_type (tree decl)
{
  /* Cached value.  */
  static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
  /* Last function we were called for.  */
  static tree last_fn = NULL_TREE;

  /* Resetting the cached value?  */
  if (decl == NULL_TREE)
    {
      fn_type = M32R_FUNCTION_UNKNOWN;
      last_fn = NULL_TREE;
      return fn_type;
    }

  if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
    return fn_type;

  /* Compute function type.  */
  fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
             ? M32R_FUNCTION_INTERRUPT
             : M32R_FUNCTION_NORMAL);

  last_fn = decl;
  return fn_type;
}

/* Function prologue/epilogue handlers.  */

/* M32R stack frames look like:

             Before call                       After call
        +-----------------------+       +-----------------------+
        |                       |       |                       |
   high |  local variables,     |       |  local variables,     |
   mem  |  reg save area, etc.  |       |  reg save area, etc.  |
        |                       |       |                       |
        +-----------------------+       +-----------------------+
        |                       |       |                       |
        |  arguments on stack.  |       |  arguments on stack.  |
        |                       |       |                       |
  SP+0->+-----------------------+       +-----------------------+
                                        |  reg parm save area,  |
                                        |  only created for     |
                                        |  variable argument    |
                                        |  functions            |
                                        +-----------------------+
                                        |  previous frame ptr   |
                                        +-----------------------+
                                        |                       |
                                        |  register save area   |
                                        |                       |
                                        +-----------------------+
                                        |  return address       |
                                        +-----------------------+
                                        |                       |
                                        |  local variables      |
                                        |                       |
                                        +-----------------------+
                                        |                       |
                                        |  alloca allocations   |
                                        |                       |
                                        +-----------------------+
                                        |                       |
   low                                  |  arguments on stack   |
   memory                               |                       |
  SP+0->                                +-----------------------+

  Notes:
  1) The "reg parm save area" does not exist for non variable argument fns.
  2) The "reg parm save area" can be eliminated completely if we saved regs
     containing anonymous args separately but that complicates things too
     much (so it's not done).
1375 3) The return address is saved after the register save area so as to have as 1376 many insns as possible between the restoration of `lr' and the `jmp lr'. */ 1377 1378 /* Structure to be filled in by m32r_compute_frame_size with register 1379 save masks, and offsets for the current function. */ 1380 struct m32r_frame_info 1381 { 1382 unsigned int total_size; /* # bytes that the entire frame takes up. */ 1383 unsigned int extra_size; /* # bytes of extra stuff. */ 1384 unsigned int pretend_size; /* # bytes we push and pretend caller did. */ 1385 unsigned int args_size; /* # bytes that outgoing arguments take up. */ 1386 unsigned int reg_size; /* # bytes needed to store regs. */ 1387 unsigned int var_size; /* # bytes that variables take up. */ 1388 unsigned int gmask; /* Mask of saved gp registers. */ 1389 unsigned int save_fp; /* Nonzero if fp must be saved. */ 1390 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */ 1391 int initialized; /* Nonzero if frame size already calculated. */ 1392 }; 1393 1394 /* Current frame information calculated by m32r_compute_frame_size. */ 1395 static struct m32r_frame_info current_frame_info; 1396 1397 /* Zero structure to initialize current_frame_info. */ 1398 static struct m32r_frame_info zero_frame_info; 1399 1400 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM)) 1401 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM)) 1402 1403 /* Tell prologue and epilogue if register REGNO should be saved / restored. 1404 The return address and frame pointer are treated separately. 1405 Don't consider them here. */ 1406 #define MUST_SAVE_REGISTER(regno, interrupt_p) \ 1407 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \ 1408 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p))) 1409 1410 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM)) 1411 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile) 1412 1413 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */ 1414 #define LONG_INSN_SIZE 4 /* Size of long instructions. */ 1415 1416 /* Return the bytes needed to compute the frame pointer from the current 1417 stack pointer. 1418 1419 SIZE is the size needed for local variables. */ 1420 1421 unsigned int 1422 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */ 1423 { 1424 unsigned int regno; 1425 unsigned int total_size, var_size, args_size, pretend_size, extra_size; 1426 unsigned int reg_size, frame_size; 1427 unsigned int gmask; 1428 enum m32r_function_type fn_type; 1429 int interrupt_p; 1430 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table 1431 | crtl->profile); 1432 1433 var_size = M32R_STACK_ALIGN (size); 1434 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size); 1435 pretend_size = crtl->args.pretend_args_size; 1436 extra_size = FIRST_PARM_OFFSET (0); 1437 total_size = extra_size + pretend_size + args_size + var_size; 1438 reg_size = 0; 1439 gmask = 0; 1440 1441 /* See if this is an interrupt handler. Call used registers must be saved 1442 for them too. */ 1443 fn_type = m32r_compute_function_type (current_function_decl); 1444 interrupt_p = M32R_INTERRUPT_P (fn_type); 1445 1446 /* Calculate space needed for registers. 
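   As a worked sketch (register choice hypothetical): if, say, r4 and r8 must
   be saved, the loop below sets gmask to (1 << 4) | (1 << 8) and reg_size to
   8 bytes (two words); fp and lr, when needed, are accounted for separately
   just after the loop.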
*/ 1447 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++) 1448 { 1449 if (MUST_SAVE_REGISTER (regno, interrupt_p) 1450 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used)) 1451 { 1452 reg_size += UNITS_PER_WORD; 1453 gmask |= 1 << regno; 1454 } 1455 } 1456 1457 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER; 1458 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used; 1459 1460 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr) 1461 * UNITS_PER_WORD); 1462 total_size += reg_size; 1463 1464 /* ??? Not sure this is necessary, and I don't think the epilogue 1465 handler will do the right thing if this changes total_size. */ 1466 total_size = M32R_STACK_ALIGN (total_size); 1467 1468 frame_size = total_size - (pretend_size + reg_size); 1469 1470 /* Save computed information. */ 1471 current_frame_info.total_size = total_size; 1472 current_frame_info.extra_size = extra_size; 1473 current_frame_info.pretend_size = pretend_size; 1474 current_frame_info.var_size = var_size; 1475 current_frame_info.args_size = args_size; 1476 current_frame_info.reg_size = reg_size; 1477 current_frame_info.gmask = gmask; 1478 current_frame_info.initialized = reload_completed; 1479 1480 /* Ok, we're done. */ 1481 return total_size; 1482 } 1483 1484 /* Worker function for TARGET_CAN_ELIMINATE. */ 1485 1486 bool 1487 m32r_can_eliminate (const int from, const int to) 1488 { 1489 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM 1490 ? ! frame_pointer_needed 1491 : true); 1492 } 1493 1494 1495 /* The table we use to reference PIC data. */ 1496 static rtx global_offset_table; 1497 1498 static void 1499 m32r_reload_lr (rtx sp, int size) 1500 { 1501 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM); 1502 1503 if (size == 0) 1504 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp))); 1505 else if (size < 32768) 1506 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, 1507 gen_rtx_PLUS (Pmode, sp, 1508 GEN_INT (size))))); 1509 else 1510 { 1511 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1512 1513 emit_insn (gen_movsi (tmp, GEN_INT (size))); 1514 emit_insn (gen_addsi3 (tmp, tmp, sp)); 1515 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp))); 1516 } 1517 1518 emit_use (lr); 1519 } 1520 1521 void 1522 m32r_load_pic_register (void) 1523 { 1524 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_"); 1525 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table, 1526 GEN_INT (TARGET_MODEL_SMALL))); 1527 1528 /* Need to emit this whether or not we obey regdecls, 1529 since setjmp/longjmp can cause life info to screw up. */ 1530 emit_use (pic_offset_table_rtx); 1531 } 1532 1533 /* Expand the m32r prologue as a series of insns. */ 1534 1535 void 1536 m32r_expand_prologue (void) 1537 { 1538 int regno; 1539 int frame_size; 1540 unsigned int gmask; 1541 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table 1542 | crtl->profile); 1543 1544 if (! current_frame_info.initialized) 1545 m32r_compute_frame_size (get_frame_size ()); 1546 1547 gmask = current_frame_info.gmask; 1548 1549 /* These cases shouldn't happen. Catch them now. */ 1550 gcc_assert (current_frame_info.total_size || !gmask); 1551 1552 /* Allocate space for register arguments if this is a variadic function. */ 1553 if (current_frame_info.pretend_size != 0) 1554 { 1555 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives 1556 the wrong result on a 64-bit host. 
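   For instance, if the 32-bit unsigned value 8 were negated directly, it
   would wrap to 0xfffffff8 and then widen to the 64-bit value 4294967288 in
   GEN_INT, instead of the intended -8.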
*/ 1557 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size; 1558 emit_insn (gen_addsi3 (stack_pointer_rtx, 1559 stack_pointer_rtx, 1560 GEN_INT (-pretend_size))); 1561 } 1562 1563 /* Save any registers we need to and set up fp. */ 1564 if (current_frame_info.save_fp) 1565 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx)); 1566 1567 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK); 1568 1569 /* Save any needed call-saved regs (and call-used if this is an 1570 interrupt handler). */ 1571 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno) 1572 { 1573 if ((gmask & (1 << regno)) != 0) 1574 emit_insn (gen_movsi_push (stack_pointer_rtx, 1575 gen_rtx_REG (Pmode, regno))); 1576 } 1577 1578 if (current_frame_info.save_lr) 1579 emit_insn (gen_movsi_push (stack_pointer_rtx, 1580 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM))); 1581 1582 /* Allocate the stack frame. */ 1583 frame_size = (current_frame_info.total_size 1584 - (current_frame_info.pretend_size 1585 + current_frame_info.reg_size)); 1586 1587 if (frame_size == 0) 1588 ; /* Nothing to do. */ 1589 else if (frame_size <= 32768) 1590 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1591 GEN_INT (-frame_size))); 1592 else 1593 { 1594 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1595 1596 emit_insn (gen_movsi (tmp, GEN_INT (frame_size))); 1597 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp)); 1598 } 1599 1600 if (frame_pointer_needed) 1601 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx)); 1602 1603 if (crtl->profile) 1604 /* Push lr for mcount (form_pc, x). */ 1605 emit_insn (gen_movsi_push (stack_pointer_rtx, 1606 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM))); 1607 1608 if (pic_reg_used) 1609 { 1610 m32r_load_pic_register (); 1611 m32r_reload_lr (stack_pointer_rtx, 1612 (crtl->profile ? 0 : frame_size)); 1613 } 1614 1615 if (crtl->profile && !pic_reg_used) 1616 emit_insn (gen_blockage ()); 1617 } 1618 1619 1620 /* Set up the stack and frame pointer (if desired) for the function. 1621 Note, if this is changed, you need to mirror the changes in 1622 m32r_compute_frame_size which calculates the prolog size. */ 1623 1624 static void 1625 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size) 1626 { 1627 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl); 1628 1629 /* If this is an interrupt handler, mark it as such. */ 1630 if (M32R_INTERRUPT_P (fn_type)) 1631 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START); 1632 1633 if (! current_frame_info.initialized) 1634 m32r_compute_frame_size (size); 1635 1636 /* This is only for the human reader. */ 1637 fprintf (file, 1638 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n", 1639 ASM_COMMENT_START, 1640 current_frame_info.var_size, 1641 current_frame_info.reg_size / 4, 1642 current_frame_info.args_size, 1643 current_frame_info.extra_size); 1644 } 1645 1646 /* Output RTL to pop register REGNO from the stack. */ 1647 1648 static void 1649 pop (int regno) 1650 { 1651 rtx x; 1652 1653 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno), 1654 stack_pointer_rtx)); 1655 add_reg_note (x, REG_INC, stack_pointer_rtx); 1656 } 1657 1658 /* Expand the m32r epilogue as a series of insns. 
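   As an outline only (sizes invented for illustration): for a frame with
   var_size + args_size == 16 and with both lr and fp saved, the code below
   first adds 16 to sp to reach the register save area, then pops lr
   (RETURN_ADDR_REGNUM), then any saved call-saved registers, then fp
   (FRAME_POINTER_REGNUM), and finally removes the varargs pretend area if
   one exists.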
*/ 1659 1660 void 1661 m32r_expand_epilogue (void) 1662 { 1663 int regno; 1664 int noepilogue = FALSE; 1665 int total_size; 1666 1667 gcc_assert (current_frame_info.initialized); 1668 total_size = current_frame_info.total_size; 1669 1670 if (total_size == 0) 1671 { 1672 rtx insn = get_last_insn (); 1673 1674 /* If the last insn was a BARRIER, we don't have to write any code 1675 because a jump (aka return) was put there. */ 1676 if (insn && NOTE_P (insn)) 1677 insn = prev_nonnote_insn (insn); 1678 if (insn && BARRIER_P (insn)) 1679 noepilogue = TRUE; 1680 } 1681 1682 if (!noepilogue) 1683 { 1684 unsigned int var_size = current_frame_info.var_size; 1685 unsigned int args_size = current_frame_info.args_size; 1686 unsigned int gmask = current_frame_info.gmask; 1687 int can_trust_sp_p = !cfun->calls_alloca; 1688 1689 if (flag_exceptions) 1690 emit_insn (gen_blockage ()); 1691 1692 /* The first thing to do is point the sp at the bottom of the register 1693 save area. */ 1694 if (can_trust_sp_p) 1695 { 1696 unsigned int reg_offset = var_size + args_size; 1697 1698 if (reg_offset == 0) 1699 ; /* Nothing to do. */ 1700 else if (reg_offset < 32768) 1701 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1702 GEN_INT (reg_offset))); 1703 else 1704 { 1705 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1706 1707 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset))); 1708 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1709 tmp)); 1710 } 1711 } 1712 else if (frame_pointer_needed) 1713 { 1714 unsigned int reg_offset = var_size + args_size; 1715 1716 if (reg_offset == 0) 1717 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx)); 1718 else if (reg_offset < 32768) 1719 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx, 1720 GEN_INT (reg_offset))); 1721 else 1722 { 1723 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1724 1725 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset))); 1726 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx)); 1727 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1728 tmp)); 1729 } 1730 } 1731 else 1732 gcc_unreachable (); 1733 1734 if (current_frame_info.save_lr) 1735 pop (RETURN_ADDR_REGNUM); 1736 1737 /* Restore any saved registers, in reverse order of course. */ 1738 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK); 1739 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno) 1740 { 1741 if ((gmask & (1L << regno)) != 0) 1742 pop (regno); 1743 } 1744 1745 if (current_frame_info.save_fp) 1746 pop (FRAME_POINTER_REGNUM); 1747 1748 /* Remove varargs area if present. */ 1749 if (current_frame_info.pretend_size != 0) 1750 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1751 GEN_INT (current_frame_info.pretend_size))); 1752 1753 emit_insn (gen_blockage ()); 1754 } 1755 } 1756 1757 /* Do any necessary cleanup after a function to restore stack, frame, 1758 and regs. */ 1759 1760 static void 1761 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED, 1762 HOST_WIDE_INT size ATTRIBUTE_UNUSED) 1763 { 1764 /* Reset state info for each function. */ 1765 current_frame_info = zero_frame_info; 1766 m32r_compute_function_type (NULL_TREE); 1767 } 1768 1769 /* Return nonzero if this function is known to have a null or 1 instruction 1770 epilogue. */ 1771 1772 int 1773 direct_return (void) 1774 { 1775 if (!reload_completed) 1776 return FALSE; 1777 1778 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl))) 1779 return FALSE; 1780 1781 if (! 
current_frame_info.initialized) 1782 m32r_compute_frame_size (get_frame_size ()); 1783 1784 return current_frame_info.total_size == 0; 1785 } 1786 1787 1788 /* PIC. */ 1789 1790 int 1791 m32r_legitimate_pic_operand_p (rtx x) 1792 { 1793 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF) 1794 return 0; 1795 1796 if (GET_CODE (x) == CONST 1797 && GET_CODE (XEXP (x, 0)) == PLUS 1798 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF 1799 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF) 1800 && (CONST_INT_P (XEXP (XEXP (x, 0), 1)))) 1801 return 0; 1802 1803 return 1; 1804 } 1805 1806 rtx 1807 m32r_legitimize_pic_address (rtx orig, rtx reg) 1808 { 1809 #ifdef DEBUG_PIC 1810 printf("m32r_legitimize_pic_address()\n"); 1811 #endif 1812 1813 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF) 1814 { 1815 rtx pic_ref, address; 1816 rtx insn; 1817 int subregs = 0; 1818 1819 if (reg == 0) 1820 { 1821 gcc_assert (!reload_in_progress && !reload_completed); 1822 reg = gen_reg_rtx (Pmode); 1823 1824 subregs = 1; 1825 } 1826 1827 if (subregs) 1828 address = gen_reg_rtx (Pmode); 1829 else 1830 address = reg; 1831 1832 crtl->uses_pic_offset_table = 1; 1833 1834 if (GET_CODE (orig) == LABEL_REF 1835 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig))) 1836 { 1837 emit_insn (gen_gotoff_load_addr (reg, orig)); 1838 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx)); 1839 return reg; 1840 } 1841 1842 emit_insn (gen_pic_load_addr (address, orig)); 1843 1844 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx)); 1845 pic_ref = gen_const_mem (Pmode, address); 1846 insn = emit_move_insn (reg, pic_ref); 1847 #if 0 1848 /* Put a REG_EQUAL note on this insn, so that it can be optimized 1849 by loop. */ 1850 set_unique_reg_note (insn, REG_EQUAL, orig); 1851 #endif 1852 return reg; 1853 } 1854 else if (GET_CODE (orig) == CONST) 1855 { 1856 rtx base, offset; 1857 1858 if (GET_CODE (XEXP (orig, 0)) == PLUS 1859 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx) 1860 return orig; 1861 1862 if (reg == 0) 1863 { 1864 gcc_assert (!reload_in_progress && !reload_completed); 1865 reg = gen_reg_rtx (Pmode); 1866 } 1867 1868 if (GET_CODE (XEXP (orig, 0)) == PLUS) 1869 { 1870 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg); 1871 if (base == reg) 1872 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX); 1873 else 1874 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg); 1875 } 1876 else 1877 return orig; 1878 1879 if (CONST_INT_P (offset)) 1880 { 1881 if (INT16_P (INTVAL (offset))) 1882 return plus_constant (base, INTVAL (offset)); 1883 else 1884 { 1885 gcc_assert (! reload_in_progress && ! reload_completed); 1886 offset = force_reg (Pmode, offset); 1887 } 1888 } 1889 1890 return gen_rtx_PLUS (Pmode, base, offset); 1891 } 1892 1893 return orig; 1894 } 1895 1896 static rtx 1897 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED, 1898 enum machine_mode mode ATTRIBUTE_UNUSED) 1899 { 1900 if (flag_pic) 1901 return m32r_legitimize_pic_address (x, NULL_RTX); 1902 else 1903 return x; 1904 } 1905 1906 /* Nested function support. */ 1907 1908 /* Emit RTL insns to initialize the variable parts of a trampoline. 1909 FNADDR is an RTX for the address of the function's pure code. 1910 CXT is an RTX for the static chain value for the function. 
*/ 1911 1912 void 1913 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED, 1914 rtx fnaddr ATTRIBUTE_UNUSED, 1915 rtx cxt ATTRIBUTE_UNUSED) 1916 { 1917 } 1918 1919 static void 1920 m32r_file_start (void) 1921 { 1922 default_file_start (); 1923 1924 if (flag_verbose_asm) 1925 fprintf (asm_out_file, 1926 "%s M32R/D special options: -G " HOST_WIDE_INT_PRINT_UNSIGNED "\n", 1927 ASM_COMMENT_START, g_switch_value); 1928 1929 if (TARGET_LITTLE_ENDIAN) 1930 fprintf (asm_out_file, "\t.little\n"); 1931 } 1932 1933 /* Print operand X (an rtx) in assembler syntax to file FILE. 1934 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified. 1935 For `%' followed by punctuation, CODE is the punctuation and X is null. */ 1936 1937 void 1938 m32r_print_operand (FILE * file, rtx x, int code) 1939 { 1940 rtx addr; 1941 1942 switch (code) 1943 { 1944 /* The 's' and 'p' codes are used by output_block_move() to 1945 indicate post-increment 's'tores and 'p're-increment loads. */ 1946 case 's': 1947 if (REG_P (x)) 1948 fprintf (file, "@+%s", reg_names [REGNO (x)]); 1949 else 1950 output_operand_lossage ("invalid operand to %%s code"); 1951 return; 1952 1953 case 'p': 1954 if (REG_P (x)) 1955 fprintf (file, "@%s+", reg_names [REGNO (x)]); 1956 else 1957 output_operand_lossage ("invalid operand to %%p code"); 1958 return; 1959 1960 case 'R' : 1961 /* Write second word of DImode or DFmode reference, 1962 register or memory. */ 1963 if (REG_P (x)) 1964 fputs (reg_names[REGNO (x)+1], file); 1965 else if (MEM_P (x)) 1966 { 1967 fprintf (file, "@("); 1968 /* Handle possible auto-increment. Since it is pre-increment and 1969 we have already done it, we can just use an offset of four. */ 1970 /* ??? This is taken from rs6000.c I think. I don't think it is 1971 currently necessary, but keep it around. */ 1972 if (GET_CODE (XEXP (x, 0)) == PRE_INC 1973 || GET_CODE (XEXP (x, 0)) == PRE_DEC) 1974 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4)); 1975 else 1976 output_address (plus_constant (XEXP (x, 0), 4)); 1977 fputc (')', file); 1978 } 1979 else 1980 output_operand_lossage ("invalid operand to %%R code"); 1981 return; 1982 1983 case 'H' : /* High word. */ 1984 case 'L' : /* Low word. */ 1985 if (REG_P (x)) 1986 { 1987 /* L = least significant word, H = most significant word. */ 1988 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L')) 1989 fputs (reg_names[REGNO (x)], file); 1990 else 1991 fputs (reg_names[REGNO (x)+1], file); 1992 } 1993 else if (CONST_INT_P (x) 1994 || GET_CODE (x) == CONST_DOUBLE) 1995 { 1996 rtx first, second; 1997 1998 split_double (x, &first, &second); 1999 fprintf (file, HOST_WIDE_INT_PRINT_HEX, 2000 code == 'L' ? INTVAL (first) : INTVAL (second)); 2001 } 2002 else 2003 output_operand_lossage ("invalid operand to %%H/%%L code"); 2004 return; 2005 2006 case 'A' : 2007 { 2008 char str[30]; 2009 2010 if (GET_CODE (x) != CONST_DOUBLE 2011 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT) 2012 fatal_insn ("bad insn for 'A'", x); 2013 2014 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1); 2015 fprintf (file, "%s", str); 2016 return; 2017 } 2018 2019 case 'B' : /* Bottom half. */ 2020 case 'T' : /* Top half. */ 2021 /* Output the argument to a `seth' insn (sets the Top half-word). 2022 For constants output arguments to a seth/or3 pair to set Top and 2023 Bottom halves. For symbols output arguments to a seth/add3 pair to 2024 set Top and Bottom halves. 
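   As a worked example (values invented): for the constant 0x12345678, %T
   prints 0x1234 and %B prints 0x5678, matching a seth/or3 pair; for a symbol
   sym, %T prints shigh(sym) and %B prints low(sym), or sda(sym) when sym is
   a small data reference.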
The difference exists because for 2025 constants seth/or3 is more readable but for symbols we need to use 2026 the same scheme as `ld' and `st' insns (16-bit addend is signed). */ 2027 switch (GET_CODE (x)) 2028 { 2029 case CONST_INT : 2030 case CONST_DOUBLE : 2031 { 2032 rtx first, second; 2033 2034 split_double (x, &first, &second); 2035 x = WORDS_BIG_ENDIAN ? second : first; 2036 fprintf (file, HOST_WIDE_INT_PRINT_HEX, 2037 (code == 'B' 2038 ? INTVAL (x) & 0xffff 2039 : (INTVAL (x) >> 16) & 0xffff)); 2040 } 2041 return; 2042 case CONST : 2043 case SYMBOL_REF : 2044 if (code == 'B' 2045 && small_data_operand (x, VOIDmode)) 2046 { 2047 fputs ("sda(", file); 2048 output_addr_const (file, x); 2049 fputc (')', file); 2050 return; 2051 } 2052 /* fall through */ 2053 case LABEL_REF : 2054 fputs (code == 'T' ? "shigh(" : "low(", file); 2055 output_addr_const (file, x); 2056 fputc (')', file); 2057 return; 2058 default : 2059 output_operand_lossage ("invalid operand to %%T/%%B code"); 2060 return; 2061 } 2062 break; 2063 2064 case 'U' : 2065 /* ??? wip */ 2066 /* Output a load/store with update indicator if appropriate. */ 2067 if (MEM_P (x)) 2068 { 2069 if (GET_CODE (XEXP (x, 0)) == PRE_INC 2070 || GET_CODE (XEXP (x, 0)) == PRE_DEC) 2071 fputs (".a", file); 2072 } 2073 else 2074 output_operand_lossage ("invalid operand to %%U code"); 2075 return; 2076 2077 case 'N' : 2078 /* Print a constant value negated. */ 2079 if (CONST_INT_P (x)) 2080 output_addr_const (file, GEN_INT (- INTVAL (x))); 2081 else 2082 output_operand_lossage ("invalid operand to %%N code"); 2083 return; 2084 2085 case 'X' : 2086 /* Print a const_int in hex. Used in comments. */ 2087 if (CONST_INT_P (x)) 2088 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x)); 2089 return; 2090 2091 case '#' : 2092 fputs (IMMEDIATE_PREFIX, file); 2093 return; 2094 2095 case 0 : 2096 /* Do nothing special. */ 2097 break; 2098 2099 default : 2100 /* Unknown flag. */ 2101 output_operand_lossage ("invalid operand output code"); 2102 } 2103 2104 switch (GET_CODE (x)) 2105 { 2106 case REG : 2107 fputs (reg_names[REGNO (x)], file); 2108 break; 2109 2110 case MEM : 2111 addr = XEXP (x, 0); 2112 if (GET_CODE (addr) == PRE_INC) 2113 { 2114 if (!REG_P (XEXP (addr, 0))) 2115 fatal_insn ("pre-increment address is not a register", x); 2116 2117 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]); 2118 } 2119 else if (GET_CODE (addr) == PRE_DEC) 2120 { 2121 if (!REG_P (XEXP (addr, 0))) 2122 fatal_insn ("pre-decrement address is not a register", x); 2123 2124 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]); 2125 } 2126 else if (GET_CODE (addr) == POST_INC) 2127 { 2128 if (!REG_P (XEXP (addr, 0))) 2129 fatal_insn ("post-increment address is not a register", x); 2130 2131 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]); 2132 } 2133 else 2134 { 2135 fputs ("@(", file); 2136 output_address (XEXP (x, 0)); 2137 fputc (')', file); 2138 } 2139 break; 2140 2141 case CONST_DOUBLE : 2142 /* We handle SFmode constants here as output_addr_const doesn't. */ 2143 if (GET_MODE (x) == SFmode) 2144 { 2145 REAL_VALUE_TYPE d; 2146 long l; 2147 2148 REAL_VALUE_FROM_CONST_DOUBLE (d, x); 2149 REAL_VALUE_TO_TARGET_SINGLE (d, l); 2150 fprintf (file, "0x%08lx", l); 2151 break; 2152 } 2153 2154 /* Fall through. Let output_addr_const deal with it. */ 2155 2156 default : 2157 output_addr_const (file, x); 2158 break; 2159 } 2160 } 2161 2162 /* Print a memory address as an operand to reference that memory location. 
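   For example, an address of the form (plus (reg rB) (const_int 8)) is
   printed below as "8,rB", which the MEM case of m32r_print_operand above
   wraps as "@(8,rB)"; LO_SUM addresses come out as "low(sym),rB", or as
   "sda(sym),rB" for small-data references (register name rB is only a
   placeholder).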
*/ 2163 2164 void 2165 m32r_print_operand_address (FILE * file, rtx addr) 2166 { 2167 rtx base; 2168 rtx index = 0; 2169 int offset = 0; 2170 2171 switch (GET_CODE (addr)) 2172 { 2173 case REG : 2174 fputs (reg_names[REGNO (addr)], file); 2175 break; 2176 2177 case PLUS : 2178 if (CONST_INT_P (XEXP (addr, 0))) 2179 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1); 2180 else if (CONST_INT_P (XEXP (addr, 1))) 2181 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0); 2182 else 2183 base = XEXP (addr, 0), index = XEXP (addr, 1); 2184 if (REG_P (base)) 2185 { 2186 /* Print the offset first (if present) to conform to the manual. */ 2187 if (index == 0) 2188 { 2189 if (offset != 0) 2190 fprintf (file, "%d,", offset); 2191 fputs (reg_names[REGNO (base)], file); 2192 } 2193 /* The chip doesn't support this, but left in for generality. */ 2194 else if (REG_P (index)) 2195 fprintf (file, "%s,%s", 2196 reg_names[REGNO (base)], reg_names[REGNO (index)]); 2197 /* Not sure this can happen, but leave in for now. */ 2198 else if (GET_CODE (index) == SYMBOL_REF) 2199 { 2200 output_addr_const (file, index); 2201 fputc (',', file); 2202 fputs (reg_names[REGNO (base)], file); 2203 } 2204 else 2205 fatal_insn ("bad address", addr); 2206 } 2207 else if (GET_CODE (base) == LO_SUM) 2208 { 2209 gcc_assert (!index && REG_P (XEXP (base, 0))); 2210 if (small_data_operand (XEXP (base, 1), VOIDmode)) 2211 fputs ("sda(", file); 2212 else 2213 fputs ("low(", file); 2214 output_addr_const (file, plus_constant (XEXP (base, 1), offset)); 2215 fputs ("),", file); 2216 fputs (reg_names[REGNO (XEXP (base, 0))], file); 2217 } 2218 else 2219 fatal_insn ("bad address", addr); 2220 break; 2221 2222 case LO_SUM : 2223 if (!REG_P (XEXP (addr, 0))) 2224 fatal_insn ("lo_sum not of register", addr); 2225 if (small_data_operand (XEXP (addr, 1), VOIDmode)) 2226 fputs ("sda(", file); 2227 else 2228 fputs ("low(", file); 2229 output_addr_const (file, XEXP (addr, 1)); 2230 fputs ("),", file); 2231 fputs (reg_names[REGNO (XEXP (addr, 0))], file); 2232 break; 2233 2234 case PRE_INC : /* Assume SImode. */ 2235 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]); 2236 break; 2237 2238 case PRE_DEC : /* Assume SImode. */ 2239 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]); 2240 break; 2241 2242 case POST_INC : /* Assume SImode. */ 2243 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]); 2244 break; 2245 2246 default : 2247 output_addr_const (file, addr); 2248 break; 2249 } 2250 } 2251 2252 /* Return true if the operands are the constants 0 and 1. */ 2253 2254 int 2255 zero_and_one (rtx operand1, rtx operand2) 2256 { 2257 return 2258 CONST_INT_P (operand1) 2259 && CONST_INT_P (operand2) 2260 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1)) 2261 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0))); 2262 } 2263 2264 /* Generate the correct assembler code to handle the conditional loading of a 2265 value into a register. It is known that the operands satisfy the 2266 conditional_move_operand() function above. The destination is operand[0]. 2267 The condition is operand [1]. The 'true' value is operand [2] and the 2268 'false' value is operand [3]. */ 2269 2270 char * 2271 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED) 2272 { 2273 static char buffer [100]; 2274 const char * dest = reg_names [REGNO (operands [0])]; 2275 2276 buffer [0] = 0; 2277 2278 /* Destination must be a register. 
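   In outline, the code below emits "mvfc DEST, cbr" and, when the (possibly
   swapped) 'true' value is zero, appends "xor3 DEST, DEST, #1" so that the
   condition-bit value, inverted if necessary, becomes the result.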
*/ 2279 gcc_assert (REG_P (operands [0])); 2280 gcc_assert (conditional_move_operand (operands [2], SImode)); 2281 gcc_assert (conditional_move_operand (operands [3], SImode)); 2282 2283 /* Check to see if the test is reversed. */ 2284 if (GET_CODE (operands [1]) == NE) 2285 { 2286 rtx tmp = operands [2]; 2287 operands [2] = operands [3]; 2288 operands [3] = tmp; 2289 } 2290 2291 sprintf (buffer, "mvfc %s, cbr", dest); 2292 2293 /* If the true value was '0' then we need to invert the results of the move. */ 2294 if (INTVAL (operands [2]) == 0) 2295 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1", 2296 dest, dest); 2297 2298 return buffer; 2299 } 2300 2301 /* Returns true if the registers contained in the two 2302 rtl expressions are different. */ 2303 2304 int 2305 m32r_not_same_reg (rtx a, rtx b) 2306 { 2307 int reg_a = -1; 2308 int reg_b = -2; 2309 2310 while (GET_CODE (a) == SUBREG) 2311 a = SUBREG_REG (a); 2312 2313 if (REG_P (a)) 2314 reg_a = REGNO (a); 2315 2316 while (GET_CODE (b) == SUBREG) 2317 b = SUBREG_REG (b); 2318 2319 if (REG_P (b)) 2320 reg_b = REGNO (b); 2321 2322 return reg_a != reg_b; 2323 } 2324 2325 2326 rtx 2327 m32r_function_symbol (const char *name) 2328 { 2329 int extra_flags = 0; 2330 enum m32r_model model; 2331 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name); 2332 2333 if (TARGET_MODEL_SMALL) 2334 model = M32R_MODEL_SMALL; 2335 else if (TARGET_MODEL_MEDIUM) 2336 model = M32R_MODEL_MEDIUM; 2337 else if (TARGET_MODEL_LARGE) 2338 model = M32R_MODEL_LARGE; 2339 else 2340 gcc_unreachable (); /* Shouldn't happen. */ 2341 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT; 2342 2343 if (extra_flags) 2344 SYMBOL_REF_FLAGS (sym) |= extra_flags; 2345 2346 return sym; 2347 } 2348 2349 /* Use a library function to move some bytes. */ 2350 2351 static void 2352 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx) 2353 { 2354 /* We want to pass the size as Pmode, which will normally be SImode 2355 but will be DImode if we are using 64-bit longs and pointers. */ 2356 if (GET_MODE (bytes_rtx) != VOIDmode 2357 && GET_MODE (bytes_rtx) != Pmode) 2358 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1); 2359 2360 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL, 2361 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode, 2362 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx, 2363 TYPE_UNSIGNED (sizetype)), 2364 TYPE_MODE (sizetype)); 2365 } 2366 2367 /* Expand string/block move operations. 2368 2369 operands[0] is the pointer to the destination. 2370 operands[1] is the pointer to the source. 2371 operands[2] is the number of bytes to move. 2372 operands[3] is the alignment. 2373 2374 Returns 1 upon success, 0 otherwise. */ 2375 2376 int 2377 m32r_expand_block_move (rtx operands[]) 2378 { 2379 rtx orig_dst = operands[0]; 2380 rtx orig_src = operands[1]; 2381 rtx bytes_rtx = operands[2]; 2382 rtx align_rtx = operands[3]; 2383 int constp = CONST_INT_P (bytes_rtx); 2384 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0; 2385 int align = INTVAL (align_rtx); 2386 int leftover; 2387 rtx src_reg; 2388 rtx dst_reg; 2389 2390 if (constp && bytes <= 0) 2391 return 1; 2392 2393 /* Move the address into scratch registers. */ 2394 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0)); 2395 src_reg = copy_addr_to_reg (XEXP (orig_src, 0)); 2396 2397 if (align > UNITS_PER_WORD) 2398 align = UNITS_PER_WORD; 2399 2400 /* If we prefer size over speed, always use a function call. 2401 If we do not know the size, use a function call. 2402 If the blocks are not word aligned, use a function call. 
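   (Concretely, the test just below falls back to block_move_call, i.e. a
   memcpy library call, when optimize_size is set, when the byte count is
   not a compile-time constant, or when the alignment is below
   UNITS_PER_WORD; otherwise the copy is expanded inline in
   MAX_MOVE_BYTES-sized movmemsi_internal chunks, with a loop when more than
   one full chunk is needed and one final chunk for any leftover bytes.)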
*/ 2403 if (optimize_size || ! constp || align != UNITS_PER_WORD) 2404 { 2405 block_move_call (dst_reg, src_reg, bytes_rtx); 2406 return 0; 2407 } 2408 2409 leftover = bytes % MAX_MOVE_BYTES; 2410 bytes -= leftover; 2411 2412 /* If necessary, generate a loop to handle the bulk of the copy. */ 2413 if (bytes) 2414 { 2415 rtx label = NULL_RTX; 2416 rtx final_src = NULL_RTX; 2417 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES); 2418 rtx rounded_total = GEN_INT (bytes); 2419 rtx new_dst_reg = gen_reg_rtx (SImode); 2420 rtx new_src_reg = gen_reg_rtx (SImode); 2421 2422 /* If we are going to have to perform this loop more than 2423 once, then generate a label and compute the address the 2424 source register will contain upon completion of the final 2425 iteration. */ 2426 if (bytes > MAX_MOVE_BYTES) 2427 { 2428 final_src = gen_reg_rtx (Pmode); 2429 2430 if (INT16_P(bytes)) 2431 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total)); 2432 else 2433 { 2434 emit_insn (gen_movsi (final_src, rounded_total)); 2435 emit_insn (gen_addsi3 (final_src, final_src, src_reg)); 2436 } 2437 2438 label = gen_label_rtx (); 2439 emit_label (label); 2440 } 2441 2442 /* It is known that output_block_move() will update src_reg to point 2443 to the word after the end of the source block, and dst_reg to point 2444 to the last word of the destination block, provided that the block 2445 is MAX_MOVE_BYTES long. */ 2446 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time, 2447 new_dst_reg, new_src_reg)); 2448 emit_move_insn (dst_reg, new_dst_reg); 2449 emit_move_insn (src_reg, new_src_reg); 2450 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4))); 2451 2452 if (bytes > MAX_MOVE_BYTES) 2453 { 2454 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src); 2455 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label)); 2456 } 2457 } 2458 2459 if (leftover) 2460 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover), 2461 gen_reg_rtx (SImode), 2462 gen_reg_rtx (SImode))); 2463 return 1; 2464 } 2465 2466 2467 /* Emit load/stores for a small constant word aligned block_move. 2468 2469 operands[0] is the memory address of the destination. 2470 operands[1] is the memory address of the source. 2471 operands[2] is the number of bytes to move. 2472 operands[3] is a temp register. 2473 operands[4] is a temp register. */ 2474 2475 void 2476 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[]) 2477 { 2478 HOST_WIDE_INT bytes = INTVAL (operands[2]); 2479 int first_time; 2480 int got_extra = 0; 2481 2482 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES); 2483 2484 /* We do not have a post-increment store available, so the first set of 2485 stores are done without any increment, then the remaining ones can use 2486 the pre-increment addressing mode. 2487 2488 Note: expand_block_move() also relies upon this behavior when building 2489 loops to copy large blocks. 
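   As a sketch (register assignments here are only illustrative), the first
   8-byte chunk below therefore comes out as
       ld r5,@r1+
       ld r6,@r1+
       st r5,@r0
       st r6,@+r0
   while later chunks use the pre-increment form for both stores:
       ld r5,@r1+
       ld r6,@r1+
       st r5,@+r0
       st r6,@+r0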
*/
2490   first_time = 1;
2491
2492   while (bytes > 0)
2493     {
2494       if (bytes >= 8)
2495         {
2496           if (first_time)
2497             {
2498               output_asm_insn ("ld\t%5, %p1", operands);
2499               output_asm_insn ("ld\t%6, %p1", operands);
2500               output_asm_insn ("st\t%5, @%0", operands);
2501               output_asm_insn ("st\t%6, %s0", operands);
2502             }
2503           else
2504             {
2505               output_asm_insn ("ld\t%5, %p1", operands);
2506               output_asm_insn ("ld\t%6, %p1", operands);
2507               output_asm_insn ("st\t%5, %s0", operands);
2508               output_asm_insn ("st\t%6, %s0", operands);
2509             }
2510
2511           bytes -= 8;
2512         }
2513       else if (bytes >= 4)
2514         {
2515           if (bytes > 4)
2516             got_extra = 1;
2517
2518           output_asm_insn ("ld\t%5, %p1", operands);
2519
2520           if (got_extra)
2521             output_asm_insn ("ld\t%6, %p1", operands);
2522
2523           if (first_time)
2524             output_asm_insn ("st\t%5, @%0", operands);
2525           else
2526             output_asm_insn ("st\t%5, %s0", operands);
2527
2528           bytes -= 4;
2529         }
2530       else
2531         {
2532           /* Get the entire next word, even though we do not want all of it.
2533              This saves us from doing several smaller loads, and we assume that
2534              we cannot cause a page fault when at least part of the word is in
2535              valid memory [since we don't get called if things aren't properly
2536              aligned].  */
2537           int dst_offset = first_time ? 0 : 4;
2538           /* The amount of increment we have to make to the
2539              destination pointer.  */
2540           int dst_inc_amount = dst_offset + bytes - 4;
2541           /* The same for the source pointer.  */
2542           int src_inc_amount = bytes;
2543           int last_shift;
2544           rtx my_operands[3];
2545
2546           /* If got_extra is true then we have already loaded
2547              the next word as part of loading and storing the previous word.  */
2548           if (! got_extra)
2549             output_asm_insn ("ld\t%6, @%1", operands);
2550
2551           if (bytes >= 2)
2552             {
2553               bytes -= 2;
2554
2555               output_asm_insn ("sra3\t%5, %6, #16", operands);
2556               my_operands[0] = operands[5];
2557               my_operands[1] = GEN_INT (dst_offset);
2558               my_operands[2] = operands[0];
2559               output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2560
2561               /* If there is a byte left to store then increment the
2562                  destination address and shift the contents of the source
2563                  register down by 8 bits.  We could not do the address
2564                  increment in the store half word instruction, because it does
2565                  not have an auto increment mode.  */
2566               if (bytes > 0)  /* assert (bytes == 1) */
2567                 {
2568                   dst_offset += 2;
2569                   last_shift = 8;
2570                 }
2571             }
2572           else
2573             last_shift = 24;
2574
2575           if (bytes > 0)
2576             {
2577               my_operands[0] = operands[6];
2578               my_operands[1] = GEN_INT (last_shift);
2579               output_asm_insn ("srai\t%0, #%1", my_operands);
2580               my_operands[0] = operands[6];
2581               my_operands[1] = GEN_INT (dst_offset);
2582               my_operands[2] = operands[0];
2583               output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2584             }
2585
2586           /* Update the destination pointer if needed.  We have to do
2587              this so that the pattern matches what we output in this
2588              function.  */
2589           if (dst_inc_amount
2590               && !find_reg_note (insn, REG_UNUSED, operands[0]))
2591             {
2592               my_operands[0] = operands[0];
2593               my_operands[1] = GEN_INT (dst_inc_amount);
2594               output_asm_insn ("addi\t%0, #%1", my_operands);
2595             }
2596
2597           /* Update the source pointer if needed.  We have to do this
2598              so that the pattern matches what we output in this
2599              function.
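             (As with the destination pointer above, the addi below is
             skipped when a REG_UNUSED note on this insn shows the pointer
             register is not needed afterwards.)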
*/ 2600 if (src_inc_amount 2601 && !find_reg_note (insn, REG_UNUSED, operands[1])) 2602 { 2603 my_operands[0] = operands[1]; 2604 my_operands[1] = GEN_INT (src_inc_amount); 2605 output_asm_insn ("addi\t%0, #%1", my_operands); 2606 } 2607 2608 bytes = 0; 2609 } 2610 2611 first_time = 0; 2612 } 2613 } 2614 2615 /* Return true if using NEW_REG in place of OLD_REG is ok. */ 2616 2617 int 2618 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED, 2619 unsigned int new_reg) 2620 { 2621 /* Interrupt routines can't clobber any register that isn't already used. */ 2622 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) 2623 && !df_regs_ever_live_p (new_reg)) 2624 return 0; 2625 2626 return 1; 2627 } 2628 2629 rtx 2630 m32r_return_addr (int count) 2631 { 2632 if (count != 0) 2633 return const0_rtx; 2634 2635 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM); 2636 } 2637 2638 static void 2639 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value) 2640 { 2641 emit_move_insn (adjust_address (m_tramp, SImode, 0), 2642 gen_int_mode (TARGET_LITTLE_ENDIAN ? 2643 0x017e8e17 : 0x178e7e01, SImode)); 2644 emit_move_insn (adjust_address (m_tramp, SImode, 4), 2645 gen_int_mode (TARGET_LITTLE_ENDIAN ? 2646 0x0c00ae86 : 0x86ae000c, SImode)); 2647 emit_move_insn (adjust_address (m_tramp, SImode, 8), 2648 gen_int_mode (TARGET_LITTLE_ENDIAN ? 2649 0xe627871e : 0x1e8727e6, SImode)); 2650 emit_move_insn (adjust_address (m_tramp, SImode, 12), 2651 gen_int_mode (TARGET_LITTLE_ENDIAN ? 2652 0xc616c626 : 0x26c61fc6, SImode)); 2653 emit_move_insn (adjust_address (m_tramp, SImode, 16), 2654 chain_value); 2655 emit_move_insn (adjust_address (m_tramp, SImode, 20), 2656 XEXP (DECL_RTL (fndecl), 0)); 2657 2658 if (m32r_cache_flush_trap >= 0) 2659 emit_insn (gen_flush_icache 2660 (validize_mem (adjust_address (m_tramp, SImode, 0)), 2661 gen_int_mode (m32r_cache_flush_trap, SImode))); 2662 else if (m32r_cache_flush_func && m32r_cache_flush_func[0]) 2663 emit_library_call (m32r_function_symbol (m32r_cache_flush_func), 2664 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode, 2665 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode, 2666 GEN_INT (3), SImode); 2667 } 2668
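
/* Note on m32r_trampoline_init above: bytes 0-15 of the trampoline receive
   the fixed, endianness-dependent code words, byte 16 the static chain
   value and byte 20 the address of the target function; the instruction
   cache covering the trampoline is then flushed, either through the
   configured trap number or by calling the configured cache-flush library
   function.  */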