/* Subroutines used for code generation on the Renesas M32R cpu.
   Copyright (C) 1996-2016 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "stor-layout.h"
#include "varasm.h"
#include "calls.h"
#include "output.h"
#include "insn-attr.h"
#include "explow.h"
#include "expr.h"
#include "tm-constrs.h"
#include "builtins.h"

/* This file should be included last.  */
#include "target-def.h"

/* Array of valid operand punctuation characters.  */
static char m32r_punct_chars[256];

/* Machine-specific symbol_ref flags.  */
#define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
#define SYMBOL_REF_MODEL(X) \
  ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))

/* For string literals, etc.  */
#define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')

/* Forward declarations.  */
static void m32r_option_override (void);
static void init_reg_tables (void);
static void block_move_call (rtx, rtx, rtx);
static int m32r_is_insn (rtx);
static bool m32r_legitimate_address_p (machine_mode, rtx, bool);
static rtx m32r_legitimize_address (rtx, rtx, machine_mode);
static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
static void m32r_print_operand (FILE *, rtx, int);
static void m32r_print_operand_address (FILE *, machine_mode, rtx);
static bool m32r_print_operand_punct_valid_p (unsigned char code);
static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);

static void m32r_file_start (void);

static int m32r_adjust_priority (rtx_insn *, int);
static int m32r_issue_rate (void);

static void m32r_encode_section_info (tree, rtx, int);
static bool m32r_in_small_data_p (const_tree);
static bool m32r_return_in_memory (const_tree, const_tree);
static rtx m32r_function_value (const_tree, const_tree, bool);
static rtx m32r_libcall_value (machine_mode, const_rtx);
static bool m32r_function_value_regno_p (const unsigned int);
static void m32r_setup_incoming_varargs (cumulative_args_t, machine_mode,
					 tree, int *, int);
static void init_idents (void);
static bool m32r_rtx_costs (rtx, machine_mode, int, int, int *, bool speed);
static int m32r_memory_move_cost (machine_mode, reg_class_t, bool);
static bool m32r_pass_by_reference (cumulative_args_t, machine_mode,
				    const_tree, bool);
static int m32r_arg_partial_bytes (cumulative_args_t, machine_mode,
				   tree, bool);
static rtx m32r_function_arg (cumulative_args_t, machine_mode,
			      const_tree, bool);
static void m32r_function_arg_advance (cumulative_args_t, machine_mode,
				       const_tree, bool);
static bool m32r_can_eliminate (const int, const int);
static void m32r_conditional_register_usage (void);
static void m32r_trampoline_init (rtx, tree, rtx);
static bool m32r_legitimate_constant_p (machine_mode, rtx);
static bool m32r_attribute_identifier (const_tree);

/* M32R specific attributes.  */

static const struct attribute_spec m32r_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "interrupt", 0, 0, true, false, false, NULL, false },
  { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
    false },
  { NULL, 0, 0, false, false, false, NULL, false }
};

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
#undef TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P
#define TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P m32r_attribute_identifier

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND m32r_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START m32r_file_start

#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE m32r_issue_rate

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE m32r_option_override

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p


#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS m32r_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY m32r_return_in_memory

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE m32r_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE m32r_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG m32r_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE m32r_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT m32r_trampoline_init

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p

struct gcc_target targetm = TARGET_INITIALIZER;

/* Called by m32r_option_override to initialize various things.  */

void
m32r_init (void)
{
  init_reg_tables ();

  /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P.  */
  memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
  m32r_punct_chars['#'] = 1;
  m32r_punct_chars['@'] = 1; /* ??? no longer used */

  /* Provide default value if not specified.  */
  if (!global_options_set.x_g_switch_value)
    g_switch_value = SDATA_DEFAULT_SIZE;
}

static void
m32r_option_override (void)
{
  /* These need to be done at start up.
     It's convenient to do them here.  */
  m32r_init ();
  SUBTARGET_OVERRIDE_OPTIONS;
}

/* Vectors to keep interesting information about registers where it can easily
   be got.  We used to use the actual mode value as the bit number, but there
   is (or may be) more than 32 modes now.  Instead we use two tables: one
   indexed by hard register number, and one indexed by mode.  */

/* The purpose of m32r_mode_class is to shrink the range of modes so that
   they all fit (as bit numbers) in a 32-bit word (again).  Each real mode is
   mapped into one m32r_mode_class mode.  */

enum m32r_mode_class
{
  C_MODE,
  S_MODE, D_MODE, T_MODE, O_MODE,
  SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
};

/* Modes for condition codes.  */
#define C_MODES (1 << (int) C_MODE)

/* Modes for single-word and smaller quantities.  */
#define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))

/* Modes for double-word and smaller quantities.  */
#define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))

/* Modes for quad-word and smaller quantities.  */
#define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))

/* Modes for accumulators.  */
#define A_MODES (1 << (int) A_MODE)

/* Value is 1 if the register/mode pair is acceptable on the M32R.  */

const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
{
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, C_MODES, A_MODES, A_MODES
};

unsigned int m32r_mode_class [NUM_MACHINE_MODES];

enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];

static void
init_reg_tables (void)
{
  int i;

  for (i = 0; i < NUM_MACHINE_MODES; i++)
    {
      machine_mode m = (machine_mode) i;

      switch (GET_MODE_CLASS (m))
        {
        case MODE_INT:
        case MODE_PARTIAL_INT:
        case MODE_COMPLEX_INT:
          if (GET_MODE_SIZE (m) <= 4)
            m32r_mode_class[i] = 1 << (int) S_MODE;
          else if (GET_MODE_SIZE (m) == 8)
            m32r_mode_class[i] = 1 << (int) D_MODE;
          else if (GET_MODE_SIZE (m) == 16)
            m32r_mode_class[i] = 1 << (int) T_MODE;
          else if (GET_MODE_SIZE (m) == 32)
            m32r_mode_class[i] = 1 << (int) O_MODE;
          else
            m32r_mode_class[i] = 0;
          break;
        case MODE_FLOAT:
        case MODE_COMPLEX_FLOAT:
          if (GET_MODE_SIZE (m) <= 4)
            m32r_mode_class[i] = 1 << (int) SF_MODE;
          else if (GET_MODE_SIZE (m) == 8)
            m32r_mode_class[i] = 1 << (int) DF_MODE;
          else if (GET_MODE_SIZE (m) == 16)
            m32r_mode_class[i] = 1 << (int) TF_MODE;
          else if (GET_MODE_SIZE (m) == 32)
            m32r_mode_class[i] = 1 << (int) OF_MODE;
          else
            m32r_mode_class[i] = 0;
          break;
        case MODE_CC:
          m32r_mode_class[i] = 1 << (int) C_MODE;
          break;
        default:
          m32r_mode_class[i] = 0;
          break;
        }
    }

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (GPR_P (i))
        m32r_regno_reg_class[i] = GENERAL_REGS;
      else if (i == ARG_POINTER_REGNUM)
        m32r_regno_reg_class[i] = GENERAL_REGS;
      else
        m32r_regno_reg_class[i] = NO_REGS;
    }
}

/* M32R specific attribute support.

   interrupt - for interrupt functions

   model - select code model used to access object

	small: addresses use 24 bits, use bl to make calls
	medium: addresses use 32 bits, use bl to make calls
	large: addresses use 32 bits, use seth/add3/jl to make calls

   Grep for MODEL in m32r.h for more info.  */

static tree small_ident1;
static tree small_ident2;
static tree medium_ident1;
static tree medium_ident2;
static tree large_ident1;
static tree large_ident2;

static void
init_idents (void)
{
  if (small_ident1 == 0)
    {
      small_ident1 = get_identifier ("small");
      small_ident2 = get_identifier ("__small__");
      medium_ident1 = get_identifier ("medium");
      medium_ident2 = get_identifier ("__medium__");
      large_ident1 = get_identifier ("large");
      large_ident2 = get_identifier ("__large__");
    }
}
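
/* Illustrative usage of the attributes handled here (this example is not
   part of the original source; it follows the GCC documentation for the
   M32R/D "model" and "interrupt" attributes):

     extern int remote_object __attribute__ ((model (large)));
     void handler (void) __attribute__ ((interrupt));

   The model argument is an identifier - small, medium or large, or the
   underscored forms - matching the identifiers created in init_idents.  */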

/* Handle a "model" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
			     tree args, int flags ATTRIBUTE_UNUSED,
			     bool *no_add_attrs)
{
  tree arg;

  init_idents ();
  arg = TREE_VALUE (args);

  if (arg != small_ident1
      && arg != small_ident2
      && arg != medium_ident1
      && arg != medium_ident2
      && arg != large_ident1
      && arg != large_ident2)
    {
      warning (OPT_Wattributes, "invalid argument of %qs attribute",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

static bool
m32r_attribute_identifier (const_tree name)
{
  return strcmp (IDENTIFIER_POINTER (name), "model") == 0
	 || strcmp (IDENTIFIER_POINTER (name), "__model__") == 0;
}

/* Encode section information of DECL, which is either a VAR_DECL,
   FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.

   For the M32R we want to record:

   - whether the object lives in .sdata/.sbss.
   - what code model should be used to access the object.  */

static void
m32r_encode_section_info (tree decl, rtx rtl, int first)
{
  int extra_flags = 0;
  tree model_attr;
  enum m32r_model model;

  default_encode_section_info (decl, rtl, first);

  if (!DECL_P (decl))
    return;

  model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
  if (model_attr)
    {
      tree id;

      init_idents ();

      id = TREE_VALUE (TREE_VALUE (model_attr));

      if (id == small_ident1 || id == small_ident2)
	model = M32R_MODEL_SMALL;
      else if (id == medium_ident1 || id == medium_ident2)
	model = M32R_MODEL_MEDIUM;
      else if (id == large_ident1 || id == large_ident2)
	model = M32R_MODEL_LARGE;
      else
	gcc_unreachable (); /* shouldn't happen */
    }
  else
    {
      if (TARGET_MODEL_SMALL)
	model = M32R_MODEL_SMALL;
      else if (TARGET_MODEL_MEDIUM)
	model = M32R_MODEL_MEDIUM;
      else if (TARGET_MODEL_LARGE)
	model = M32R_MODEL_LARGE;
      else
	gcc_unreachable (); /* shouldn't happen */
    }
  extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;

  if (extra_flags)
    SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
}

/* Only mark the object as being small data area addressable if
   it hasn't been explicitly marked with a code model.

   The user can explicitly put an object in the small data area with the
   section attribute.  If the object is in sdata/sbss and marked with a
   code model do both [put the object in .sdata and mark it as being
   addressed with a specific code model - don't mark it as being addressed
   with an SDA reloc though].  This is ok and might be useful at times.  If
   the object doesn't fit the linker will give an error.  */

static bool
m32r_in_small_data_p (const_tree decl)
{
  const char *section;

  if (TREE_CODE (decl) != VAR_DECL)
    return false;

  if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
    return false;

  section = DECL_SECTION_NAME (decl);
  if (section)
    {
      if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
	return true;
    }
  else
    {
      if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
	{
	  int size = int_size_in_bytes (TREE_TYPE (decl));

	  if (size > 0 && size <= g_switch_value)
	    return true;
	}
    }

  return false;
}
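
/* For reference (not part of the original source): given the hook above,
   an object lands in small data either explicitly, e.g.

     int counter __attribute__ ((section (".sdata")));

   or implicitly, when it is writable, -msdata is not "none", and its size
   does not exceed the -G threshold recorded in g_switch_value.  */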

/* Do anything needed before RTL is emitted for each function.  */

void
m32r_init_expanders (void)
{
  /* ??? At one point there was code here.  The function is left in
     to make it easy to experiment.  */
}

int
call_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;
  op = XEXP (op, 0);
  return call_address_operand (op, mode);
}

/* Return 1 if OP is a reference to an object in .sdata/.sbss.  */

int
small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  if (! TARGET_SDATA_USE)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    return SYMBOL_REF_SMALL_P (op);

  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
      && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
    return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));

  return 0;
}

/* Return 1 if OP is a symbol that can use 24-bit addressing.  */

int
addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx sym;

  if (flag_pic)
    return 0;

  if (GET_CODE (op) == LABEL_REF)
    return TARGET_ADDR24;

  if (GET_CODE (op) == SYMBOL_REF)
    sym = op;
  else if (GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
	   && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
    sym = XEXP (XEXP (op, 0), 0);
  else
    return 0;

  if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
    return 1;

  if (TARGET_ADDR24
      && (CONSTANT_POOL_ADDRESS_P (sym)
	  || LIT_NAME_P (XSTR (sym, 0))))
    return 1;

  return 0;
}

/* Return 1 if OP is a symbol that needs 32-bit addressing.  */

int
addr32_operand (rtx op, machine_mode mode)
{
  rtx sym;

  if (GET_CODE (op) == LABEL_REF)
    return TARGET_ADDR32;

  if (GET_CODE (op) == SYMBOL_REF)
    sym = op;
  else if (GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
	   && CONST_INT_P (XEXP (XEXP (op, 0), 1))
	   && ! flag_pic)
    sym = XEXP (XEXP (op, 0), 0);
  else
    return 0;

  return (! addr24_operand (sym, mode)
	  && ! small_data_operand (sym, mode));
}

/* Return 1 if OP is a function that can be called with the `bl' insn.  */

int
call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  if (flag_pic)
    return 1;

  if (GET_CODE (op) == SYMBOL_REF)
    return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;

  return TARGET_CALL26;
}

/* Return 1 if OP is a DImode const we want to handle inline.
   This must match the code in the movdi pattern.
   It is used by the 'G' constraint.  */

int
easy_di_const (rtx op)
{
  rtx high_rtx, low_rtx;
  HOST_WIDE_INT high, low;

  split_double (op, &high_rtx, &low_rtx);
  high = INTVAL (high_rtx);
  low = INTVAL (low_rtx);
  /* Pick constants loadable with 2 16-bit `ldi' insns.  */
  if (high >= -128 && high <= 127
      && low >= -128 && low <= 127)
    return 1;
  return 0;
}

/* Return 1 if OP is a DFmode const we want to handle inline.
   This must match the code in the movdf pattern.
   It is used by the 'H' constraint.  */

int
easy_df_const (rtx op)
{
  long l[2];

  REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (op), l);
  if (l[0] == 0 && l[1] == 0)
    return 1;
  if ((l[0] & 0xffff) == 0 && l[1] == 0)
    return 1;
  return 0;
}

/* Return 1 if OP is (mem (reg ...)).
   This is used in insn length calcs.  */

int
memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  return MEM_P (op) && REG_P (XEXP (op, 0));
}

/* Return nonzero if TYPE must be passed by indirect reference.  */

static bool
m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
			machine_mode mode, const_tree type,
			bool named ATTRIBUTE_UNUSED)
{
  int size;

  if (type)
    size = int_size_in_bytes (type);
  else
    size = GET_MODE_SIZE (mode);

  return (size < 0 || size > 8);
}

/* Comparisons.  */

/* X and Y are two things to compare using CODE.  Emit the compare insn and
   return the rtx for compare [arg0 of the if_then_else].
   If need_compare is true then the comparison insn must be generated, rather
   than being subsumed into the following branch instruction.  */

rtx
gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
{
  enum rtx_code compare_code;
  enum rtx_code branch_code;
  rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
  int must_swap = 0;

  switch (code)
    {
    case EQ:  compare_code = EQ;  branch_code = NE; break;
    case NE:  compare_code = EQ;  branch_code = EQ; break;
    case LT:  compare_code = LT;  branch_code = NE; break;
    case LE:  compare_code = LT;  branch_code = EQ; must_swap = 1; break;
    case GT:  compare_code = LT;  branch_code = NE; must_swap = 1; break;
    case GE:  compare_code = LT;  branch_code = EQ; break;
    case LTU: compare_code = LTU; branch_code = NE; break;
    case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
    case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
    case GEU: compare_code = LTU; branch_code = EQ; break;

    default:
      gcc_unreachable ();
    }

  if (need_compare)
    {
      switch (compare_code)
	{
	case EQ:
	  if (satisfies_constraint_P (y)	/* Reg equal to small const.  */
	      && y != const0_rtx)
	    {
	      rtx tmp = gen_reg_rtx (SImode);

	      emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
	      x = tmp;
	      y = const0_rtx;
	    }
	  else if (CONSTANT_P (y))		/* Reg equal to const.  */
	    {
	      rtx tmp = force_reg (GET_MODE (x), y);
	      y = tmp;
	    }

	  if (register_operand (y, SImode)	/* Reg equal to reg.  */
	      || y == const0_rtx)		/* Reg equal to zero.  */
	    {
	      emit_insn (gen_cmp_eqsi_insn (x, y));

	      return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
	    }
	  break;

	case LT:
	  if (register_operand (y, SImode)
	      || satisfies_constraint_P (y))
	    {
	      rtx tmp = gen_reg_rtx (SImode);	/* Reg compared to reg.  */

	      switch (code)
		{
		case LT:
		  emit_insn (gen_cmp_ltsi_insn (x, y));
		  code = EQ;
		  break;
		case LE:
		  if (y == const0_rtx)
		    tmp = const1_rtx;
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltsi_insn (x, tmp));
		  code = EQ;
		  break;
		case GT:
		  if (CONST_INT_P (y))
		    tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltsi_insn (x, tmp));
		  code = NE;
		  break;
		case GE:
		  emit_insn (gen_cmp_ltsi_insn (x, y));
		  code = NE;
		  break;
		default:
		  gcc_unreachable ();
		}

	      return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
	    }
	  break;
	case LTU:
	  if (register_operand (y, SImode)
	      || satisfies_constraint_P (y))
	    {
	      rtx tmp = gen_reg_rtx (SImode);	/* Reg (unsigned) compared to reg.  */

	      switch (code)
		{
		case LTU:
		  emit_insn (gen_cmp_ltusi_insn (x, y));
		  code = EQ;
		  break;
		case LEU:
		  if (y == const0_rtx)
		    tmp = const1_rtx;
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltusi_insn (x, tmp));
		  code = EQ;
		  break;
		case GTU:
		  if (CONST_INT_P (y))
		    tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
		  else
		    emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
		  emit_insn (gen_cmp_ltusi_insn (x, tmp));
		  code = NE;
		  break;
		case GEU:
		  emit_insn (gen_cmp_ltusi_insn (x, y));
		  code = NE;
		  break;
		default:
		  gcc_unreachable ();
		}

	      return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
	    }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* Reg/reg equal comparison.  */
      if (compare_code == EQ
	  && register_operand (y, SImode))
	return gen_rtx_fmt_ee (code, CCmode, x, y);

      /* Reg/zero signed comparison.  */
      if ((compare_code == EQ || compare_code == LT)
	  && y == const0_rtx)
	return gen_rtx_fmt_ee (code, CCmode, x, y);

      /* Reg/smallconst equal comparison.  */
      if (compare_code == EQ
	  && satisfies_constraint_P (y))
	{
	  rtx tmp = gen_reg_rtx (SImode);

	  emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
	  return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
	}

      /* Reg/const equal comparison.  */
      if (compare_code == EQ
	  && CONSTANT_P (y))
	{
	  rtx tmp = force_reg (GET_MODE (x), y);

	  return gen_rtx_fmt_ee (code, CCmode, x, tmp);
	}
    }

  if (CONSTANT_P (y))
    {
      if (must_swap)
	y = force_reg (GET_MODE (x), y);
      else
	{
	  int ok_const = reg_or_int16_operand (y, GET_MODE (y));

	  if (! ok_const)
	    y = force_reg (GET_MODE (x), y);
	}
    }

  switch (compare_code)
    {
    case EQ :
      emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
      break;
    case LT :
      emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
      break;
    case LTU :
      emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
}

bool
gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
{
  machine_mode mode = GET_MODE (op0);

  gcc_assert (mode == SImode);
  switch (code)
    {
    case EQ:
      if (!register_operand (op1, mode))
	op1 = force_reg (mode, op1);

      if (TARGET_M32RX || TARGET_M32R2)
	{
	  if (!reg_or_zero_operand (op2, mode))
	    op2 = force_reg (mode, op2);

	  emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
	  return true;
	}
      if (CONST_INT_P (op2) && INTVAL (op2) == 0)
	{
	  emit_insn (gen_seq_zero_insn (op0, op1));
	  return true;
	}

      if (!reg_or_eq_int16_operand (op2, mode))
	op2 = force_reg (mode, op2);

      emit_insn (gen_seq_insn (op0, op1, op2));
      return true;

    case NE:
      if (!CONST_INT_P (op2)
	  || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
	{
	  rtx reg;

	  if (reload_completed || reload_in_progress)
	    return false;

	  reg = gen_reg_rtx (SImode);
	  emit_insn (gen_xorsi3 (reg, op1, op2));
	  op1 = reg;

	  if (!register_operand (op1, mode))
	    op1 = force_reg (mode, op1);

	  emit_insn (gen_sne_zero_insn (op0, op1));
	  return true;
	}
      return false;

    case LT:
    case GT:
      if (code == GT)
	{
	  rtx tmp = op2;
	  op2 = op1;
	  op1 = tmp;
	  code = LT;
	}

      if (!register_operand (op1, mode))
	op1 = force_reg (mode, op1);

      if (!reg_or_int16_operand (op2, mode))
	op2 = force_reg (mode, op2);

      emit_insn (gen_slt_insn (op0, op1, op2));
      return true;

    case LTU:
    case GTU:
      if (code == GTU)
	{
	  rtx tmp = op2;
	  op2 = op1;
	  op1 = tmp;
	  code = LTU;
	}

      if (!register_operand (op1, mode))
	op1 = force_reg (mode, op1);

      if (!reg_or_int16_operand (op2, mode))
	op2 = force_reg (mode, op2);

      emit_insn (gen_sltu_insn (op0, op1, op2));
      return true;

    case GE:
    case GEU:
      if (!register_operand (op1, mode))
	op1 = force_reg (mode, op1);

      if (!reg_or_int16_operand (op2, mode))
	op2 = force_reg (mode, op2);

      if (code == GE)
	emit_insn (gen_sge_insn (op0, op1, op2));
      else
	emit_insn (gen_sgeu_insn (op0, op1, op2));
      return true;

    case LE:
    case LEU:
      if (!register_operand (op1, mode))
	op1 = force_reg (mode, op1);

      if (CONST_INT_P (op2))
	{
	  HOST_WIDE_INT value = INTVAL (op2);
	  if (value >= 2147483647)
	    {
	      emit_move_insn (op0, const1_rtx);
	      return true;
	    }

	  op2 = GEN_INT (value + 1);
	  if (value < -32768 || value >= 32767)
	    op2 = force_reg (mode, op2);

	  if (code == LEU)
	    emit_insn (gen_sltu_insn (op0, op1, op2));
	  else
	    emit_insn (gen_slt_insn (op0, op1, op2));
	  return true;
	}

      if (!register_operand (op2, mode))
	op2 = force_reg (mode, op2);

      if (code == LEU)
	emit_insn (gen_sleu_insn (op0, op1, op2));
      else
	emit_insn (gen_sle_insn (op0, op1, op2));
      return true;

    default:
      gcc_unreachable ();
    }
}


/* Split a 2 word move (DI or DF) into component parts.
*/ 1030 1031 rtx 1032 gen_split_move_double (rtx operands[]) 1033 { 1034 machine_mode mode = GET_MODE (operands[0]); 1035 rtx dest = operands[0]; 1036 rtx src = operands[1]; 1037 rtx val; 1038 1039 /* We might have (SUBREG (MEM)) here, so just get rid of the 1040 subregs to make this code simpler. It is safe to call 1041 alter_subreg any time after reload. */ 1042 if (GET_CODE (dest) == SUBREG) 1043 alter_subreg (&dest, true); 1044 if (GET_CODE (src) == SUBREG) 1045 alter_subreg (&src, true); 1046 1047 start_sequence (); 1048 if (REG_P (dest)) 1049 { 1050 int dregno = REGNO (dest); 1051 1052 /* Reg = reg. */ 1053 if (REG_P (src)) 1054 { 1055 int sregno = REGNO (src); 1056 1057 int reverse = (dregno == sregno + 1); 1058 1059 /* We normally copy the low-numbered register first. However, if 1060 the first register operand 0 is the same as the second register of 1061 operand 1, we must copy in the opposite order. */ 1062 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode), 1063 operand_subword (src, reverse, TRUE, mode))); 1064 1065 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode), 1066 operand_subword (src, !reverse, TRUE, mode))); 1067 } 1068 1069 /* Reg = constant. */ 1070 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE) 1071 { 1072 rtx words[2]; 1073 split_double (src, &words[0], &words[1]); 1074 emit_insn (gen_rtx_SET (operand_subword (dest, 0, TRUE, mode), 1075 words[0])); 1076 1077 emit_insn (gen_rtx_SET (operand_subword (dest, 1, TRUE, mode), 1078 words[1])); 1079 } 1080 1081 /* Reg = mem. */ 1082 else if (MEM_P (src)) 1083 { 1084 /* If the high-address word is used in the address, we must load it 1085 last. Otherwise, load it first. */ 1086 int reverse = refers_to_regno_p (dregno, XEXP (src, 0)); 1087 1088 /* We used to optimize loads from single registers as 1089 1090 ld r1,r3+; ld r2,r3 1091 1092 if r3 were not used subsequently. However, the REG_NOTES aren't 1093 propagated correctly by the reload phase, and it can cause bad 1094 code to be generated. We could still try: 1095 1096 ld r1,r3+; ld r2,r3; addi r3,-4 1097 1098 which saves 2 bytes and doesn't force longword alignment. */ 1099 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode), 1100 adjust_address (src, SImode, 1101 reverse * UNITS_PER_WORD))); 1102 1103 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode), 1104 adjust_address (src, SImode, 1105 !reverse * UNITS_PER_WORD))); 1106 } 1107 else 1108 gcc_unreachable (); 1109 } 1110 1111 /* Mem = reg. */ 1112 /* We used to optimize loads from single registers as 1113 1114 st r1,r3; st r2,+r3 1115 1116 if r3 were not used subsequently. However, the REG_NOTES aren't 1117 propagated correctly by the reload phase, and it can cause bad 1118 code to be generated. We could still try: 1119 1120 st r1,r3; st r2,+r3; addi r3,-4 1121 1122 which saves 2 bytes and doesn't force longword alignment. 
*/ 1123 else if (MEM_P (dest) && REG_P (src)) 1124 { 1125 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, 0), 1126 operand_subword (src, 0, TRUE, mode))); 1127 1128 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, UNITS_PER_WORD), 1129 operand_subword (src, 1, TRUE, mode))); 1130 } 1131 1132 else 1133 gcc_unreachable (); 1134 1135 val = get_insns (); 1136 end_sequence (); 1137 return val; 1138 } 1139 1140 1141 static int 1142 m32r_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode, 1143 tree type, bool named ATTRIBUTE_UNUSED) 1144 { 1145 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); 1146 1147 int words; 1148 unsigned int size = 1149 (((mode == BLKmode && type) 1150 ? (unsigned int) int_size_in_bytes (type) 1151 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1) 1152 / UNITS_PER_WORD; 1153 1154 if (*cum >= M32R_MAX_PARM_REGS) 1155 words = 0; 1156 else if (*cum + size > M32R_MAX_PARM_REGS) 1157 words = (*cum + size) - M32R_MAX_PARM_REGS; 1158 else 1159 words = 0; 1160 1161 return words * UNITS_PER_WORD; 1162 } 1163 1164 /* The ROUND_ADVANCE* macros are local to this file. */ 1165 /* Round SIZE up to a word boundary. */ 1166 #define ROUND_ADVANCE(SIZE) \ 1167 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD) 1168 1169 /* Round arg MODE/TYPE up to the next word boundary. */ 1170 #define ROUND_ADVANCE_ARG(MODE, TYPE) \ 1171 ((MODE) == BLKmode \ 1172 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \ 1173 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE))) 1174 1175 /* Round CUM up to the necessary point for argument MODE/TYPE. */ 1176 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM) 1177 1178 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in 1179 a reg. This includes arguments that have to be passed by reference as the 1180 pointer to them is passed in a reg if one is available (and that is what 1181 we're given). 1182 This macro is only used in this file. */ 1183 #define PASS_IN_REG_P(CUM, MODE, TYPE) \ 1184 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS) 1185 1186 /* Determine where to put an argument to a function. 1187 Value is zero to push the argument on the stack, 1188 or a hard register in which to store the argument. 1189 1190 MODE is the argument's machine mode. 1191 TYPE is the data type of the argument (as a tree). 1192 This is null for libcalls where that information may 1193 not be available. 1194 CUM is a variable of type CUMULATIVE_ARGS which gives info about 1195 the preceding args and about the function being called. 1196 NAMED is nonzero if this argument is a named parameter 1197 (otherwise it is an extra parameter matching an ellipsis). */ 1198 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers 1199 and the rest are pushed. */ 1200 1201 static rtx 1202 m32r_function_arg (cumulative_args_t cum_v, machine_mode mode, 1203 const_tree type ATTRIBUTE_UNUSED, 1204 bool named ATTRIBUTE_UNUSED) 1205 { 1206 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); 1207 1208 return (PASS_IN_REG_P (*cum, mode, type) 1209 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type)) 1210 : NULL_RTX); 1211 } 1212 1213 /* Update the data in CUM to advance over an argument 1214 of mode MODE and data type TYPE. 1215 (TYPE is null for libcalls where that information may not be available.) 
*/ 1216 1217 static void 1218 m32r_function_arg_advance (cumulative_args_t cum_v, machine_mode mode, 1219 const_tree type, bool named ATTRIBUTE_UNUSED) 1220 { 1221 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); 1222 1223 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type) 1224 + ROUND_ADVANCE_ARG (mode, type)); 1225 } 1226 1227 /* Worker function for TARGET_RETURN_IN_MEMORY. */ 1228 1229 static bool 1230 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED) 1231 { 1232 cumulative_args_t dummy = pack_cumulative_args (NULL); 1233 1234 return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false); 1235 } 1236 1237 /* Worker function for TARGET_FUNCTION_VALUE. */ 1238 1239 static rtx 1240 m32r_function_value (const_tree valtype, 1241 const_tree fn_decl_or_type ATTRIBUTE_UNUSED, 1242 bool outgoing ATTRIBUTE_UNUSED) 1243 { 1244 return gen_rtx_REG (TYPE_MODE (valtype), 0); 1245 } 1246 1247 /* Worker function for TARGET_LIBCALL_VALUE. */ 1248 1249 static rtx 1250 m32r_libcall_value (machine_mode mode, 1251 const_rtx fun ATTRIBUTE_UNUSED) 1252 { 1253 return gen_rtx_REG (mode, 0); 1254 } 1255 1256 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. 1257 1258 ??? What about r1 in DI/DF values. */ 1259 1260 static bool 1261 m32r_function_value_regno_p (const unsigned int regno) 1262 { 1263 return (regno == 0); 1264 } 1265 1266 /* Do any needed setup for a variadic function. For the M32R, we must 1267 create a register parameter block, and then copy any anonymous arguments 1268 in registers to memory. 1269 1270 CUM has not been updated for the last named argument which has type TYPE 1271 and mode MODE, and we rely on this fact. */ 1272 1273 static void 1274 m32r_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode, 1275 tree type, int *pretend_size, int no_rtl) 1276 { 1277 int first_anon_arg; 1278 1279 if (no_rtl) 1280 return; 1281 1282 /* All BLKmode values are passed by reference. */ 1283 gcc_assert (mode != BLKmode); 1284 1285 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type) 1286 + ROUND_ADVANCE_ARG (mode, type)); 1287 1288 if (first_anon_arg < M32R_MAX_PARM_REGS) 1289 { 1290 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */ 1291 int first_reg_offset = first_anon_arg; 1292 /* Size in words to "pretend" allocate. */ 1293 int size = M32R_MAX_PARM_REGS - first_reg_offset; 1294 rtx regblock; 1295 1296 regblock = gen_frame_mem (BLKmode, 1297 plus_constant (Pmode, arg_pointer_rtx, 1298 FIRST_PARM_OFFSET (0))); 1299 set_mem_alias_set (regblock, get_varargs_alias_set ()); 1300 move_block_from_reg (first_reg_offset, regblock, size); 1301 1302 *pretend_size = (size * UNITS_PER_WORD); 1303 } 1304 } 1305 1306 1307 /* Return true if INSN is real instruction bearing insn. */ 1308 1309 static int 1310 m32r_is_insn (rtx insn) 1311 { 1312 return (NONDEBUG_INSN_P (insn) 1313 && GET_CODE (PATTERN (insn)) != USE 1314 && GET_CODE (PATTERN (insn)) != CLOBBER); 1315 } 1316 1317 /* Increase the priority of long instructions so that the 1318 short instructions are scheduled ahead of the long ones. */ 1319 1320 static int 1321 m32r_adjust_priority (rtx_insn *insn, int priority) 1322 { 1323 if (m32r_is_insn (insn) 1324 && get_attr_insn_size (insn) != INSN_SIZE_SHORT) 1325 priority <<= 3; 1326 1327 return priority; 1328 } 1329 1330 1331 /* Indicate how many instructions can be issued at the same time. 1332 This is sort of a lie. The m32r can issue only 1 long insn at 1333 once, but it can issue 2 short insns. 
The default therefore is 1334 set at 2, but this can be overridden by the command line option 1335 -missue-rate=1. */ 1336 1337 static int 1338 m32r_issue_rate (void) 1339 { 1340 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2); 1341 } 1342 1343 /* Cost functions. */ 1344 /* Memory is 3 times as expensive as registers. 1345 ??? Is that the right way to look at it? */ 1346 1347 static int 1348 m32r_memory_move_cost (machine_mode mode, 1349 reg_class_t rclass ATTRIBUTE_UNUSED, 1350 bool in ATTRIBUTE_UNUSED) 1351 { 1352 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD) 1353 return 6; 1354 else 1355 return 12; 1356 } 1357 1358 static bool 1359 m32r_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED, 1360 int outer_code ATTRIBUTE_UNUSED, 1361 int opno ATTRIBUTE_UNUSED, int *total, 1362 bool speed ATTRIBUTE_UNUSED) 1363 { 1364 int code = GET_CODE (x); 1365 1366 switch (code) 1367 { 1368 /* Small integers are as cheap as registers. 4 byte values can be 1369 fetched as immediate constants - let's give that the cost of an 1370 extra insn. */ 1371 case CONST_INT: 1372 if (INT16_P (INTVAL (x))) 1373 { 1374 *total = 0; 1375 return true; 1376 } 1377 /* FALLTHRU */ 1378 1379 case CONST: 1380 case LABEL_REF: 1381 case SYMBOL_REF: 1382 *total = COSTS_N_INSNS (1); 1383 return true; 1384 1385 case CONST_DOUBLE: 1386 { 1387 rtx high, low; 1388 1389 split_double (x, &high, &low); 1390 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high)) 1391 + !INT16_P (INTVAL (low))); 1392 return true; 1393 } 1394 1395 case MULT: 1396 *total = COSTS_N_INSNS (3); 1397 return true; 1398 1399 case DIV: 1400 case UDIV: 1401 case MOD: 1402 case UMOD: 1403 *total = COSTS_N_INSNS (10); 1404 return true; 1405 1406 default: 1407 return false; 1408 } 1409 } 1410 1411 /* Type of function DECL. 1412 1413 The result is cached. To reset the cache at the end of a function, 1414 call with DECL = NULL_TREE. */ 1415 1416 enum m32r_function_type 1417 m32r_compute_function_type (tree decl) 1418 { 1419 /* Cached value. */ 1420 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN; 1421 /* Last function we were called for. */ 1422 static tree last_fn = NULL_TREE; 1423 1424 /* Resetting the cached value? */ 1425 if (decl == NULL_TREE) 1426 { 1427 fn_type = M32R_FUNCTION_UNKNOWN; 1428 last_fn = NULL_TREE; 1429 return fn_type; 1430 } 1431 1432 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN) 1433 return fn_type; 1434 1435 /* Compute function type. */ 1436 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE 1437 ? M32R_FUNCTION_INTERRUPT 1438 : M32R_FUNCTION_NORMAL); 1439 1440 last_fn = decl; 1441 return fn_type; 1442 } 1443 /* Function prologue/epilogue handlers. */ 1444 1445 /* M32R stack frames look like: 1446 1447 Before call After call 1448 +-----------------------+ +-----------------------+ 1449 | | | | 1450 high | local variables, | | local variables, | 1451 mem | reg save area, etc. | | reg save area, etc. | 1452 | | | | 1453 +-----------------------+ +-----------------------+ 1454 | | | | 1455 | arguments on stack. | | arguments on stack. 
| 1456 | | | | 1457 SP+0->+-----------------------+ +-----------------------+ 1458 | reg parm save area, | 1459 | only created for | 1460 | variable argument | 1461 | functions | 1462 +-----------------------+ 1463 | previous frame ptr | 1464 +-----------------------+ 1465 | | 1466 | register save area | 1467 | | 1468 +-----------------------+ 1469 | return address | 1470 +-----------------------+ 1471 | | 1472 | local variables | 1473 | | 1474 +-----------------------+ 1475 | | 1476 | alloca allocations | 1477 | | 1478 +-----------------------+ 1479 | | 1480 low | arguments on stack | 1481 memory | | 1482 SP+0->+-----------------------+ 1483 1484 Notes: 1485 1) The "reg parm save area" does not exist for non variable argument fns. 1486 2) The "reg parm save area" can be eliminated completely if we saved regs 1487 containing anonymous args separately but that complicates things too 1488 much (so it's not done). 1489 3) The return address is saved after the register save area so as to have as 1490 many insns as possible between the restoration of `lr' and the `jmp lr'. */ 1491 1492 /* Structure to be filled in by m32r_compute_frame_size with register 1493 save masks, and offsets for the current function. */ 1494 struct m32r_frame_info 1495 { 1496 unsigned int total_size; /* # bytes that the entire frame takes up. */ 1497 unsigned int extra_size; /* # bytes of extra stuff. */ 1498 unsigned int pretend_size; /* # bytes we push and pretend caller did. */ 1499 unsigned int args_size; /* # bytes that outgoing arguments take up. */ 1500 unsigned int reg_size; /* # bytes needed to store regs. */ 1501 unsigned int var_size; /* # bytes that variables take up. */ 1502 unsigned int gmask; /* Mask of saved gp registers. */ 1503 unsigned int save_fp; /* Nonzero if fp must be saved. */ 1504 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */ 1505 int initialized; /* Nonzero if frame size already calculated. */ 1506 }; 1507 1508 /* Current frame information calculated by m32r_compute_frame_size. */ 1509 static struct m32r_frame_info current_frame_info; 1510 1511 /* Zero structure to initialize current_frame_info. */ 1512 static struct m32r_frame_info zero_frame_info; 1513 1514 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM)) 1515 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM)) 1516 1517 /* Tell prologue and epilogue if register REGNO should be saved / restored. 1518 The return address and frame pointer are treated separately. 1519 Don't consider them here. */ 1520 #define MUST_SAVE_REGISTER(regno, interrupt_p) \ 1521 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \ 1522 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p))) 1523 1524 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM)) 1525 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile) 1526 1527 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */ 1528 #define LONG_INSN_SIZE 4 /* Size of long instructions. */ 1529 1530 /* Return the bytes needed to compute the frame pointer from the current 1531 stack pointer. 1532 1533 SIZE is the size needed for local variables. */ 1534 1535 unsigned int 1536 m32r_compute_frame_size (int size) /* # of var. bytes allocated. 
*/ 1537 { 1538 unsigned int regno; 1539 unsigned int total_size, var_size, args_size, pretend_size, extra_size; 1540 unsigned int reg_size; 1541 unsigned int gmask; 1542 enum m32r_function_type fn_type; 1543 int interrupt_p; 1544 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table 1545 | crtl->profile); 1546 1547 var_size = M32R_STACK_ALIGN (size); 1548 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size); 1549 pretend_size = crtl->args.pretend_args_size; 1550 extra_size = FIRST_PARM_OFFSET (0); 1551 total_size = extra_size + pretend_size + args_size + var_size; 1552 reg_size = 0; 1553 gmask = 0; 1554 1555 /* See if this is an interrupt handler. Call used registers must be saved 1556 for them too. */ 1557 fn_type = m32r_compute_function_type (current_function_decl); 1558 interrupt_p = M32R_INTERRUPT_P (fn_type); 1559 1560 /* Calculate space needed for registers. */ 1561 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++) 1562 { 1563 if (MUST_SAVE_REGISTER (regno, interrupt_p) 1564 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used)) 1565 { 1566 reg_size += UNITS_PER_WORD; 1567 gmask |= 1 << regno; 1568 } 1569 } 1570 1571 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER; 1572 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used; 1573 1574 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr) 1575 * UNITS_PER_WORD); 1576 total_size += reg_size; 1577 1578 /* ??? Not sure this is necessary, and I don't think the epilogue 1579 handler will do the right thing if this changes total_size. */ 1580 total_size = M32R_STACK_ALIGN (total_size); 1581 1582 /* frame_size = total_size - (pretend_size + reg_size); */ 1583 1584 /* Save computed information. */ 1585 current_frame_info.total_size = total_size; 1586 current_frame_info.extra_size = extra_size; 1587 current_frame_info.pretend_size = pretend_size; 1588 current_frame_info.var_size = var_size; 1589 current_frame_info.args_size = args_size; 1590 current_frame_info.reg_size = reg_size; 1591 current_frame_info.gmask = gmask; 1592 current_frame_info.initialized = reload_completed; 1593 1594 /* Ok, we're done. */ 1595 return total_size; 1596 } 1597 1598 /* Worker function for TARGET_CAN_ELIMINATE. */ 1599 1600 bool 1601 m32r_can_eliminate (const int from, const int to) 1602 { 1603 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM 1604 ? ! frame_pointer_needed 1605 : true); 1606 } 1607 1608 1609 /* The table we use to reference PIC data. */ 1610 static rtx global_offset_table; 1611 1612 static void 1613 m32r_reload_lr (rtx sp, int size) 1614 { 1615 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM); 1616 1617 if (size == 0) 1618 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp))); 1619 else if (size < 32768) 1620 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, 1621 gen_rtx_PLUS (Pmode, sp, 1622 GEN_INT (size))))); 1623 else 1624 { 1625 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1626 1627 emit_insn (gen_movsi (tmp, GEN_INT (size))); 1628 emit_insn (gen_addsi3 (tmp, tmp, sp)); 1629 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp))); 1630 } 1631 1632 emit_use (lr); 1633 } 1634 1635 void 1636 m32r_load_pic_register (void) 1637 { 1638 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_"); 1639 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table, 1640 GEN_INT (TARGET_MODEL_SMALL))); 1641 1642 /* Need to emit this whether or not we obey regdecls, 1643 since setjmp/longjmp can cause life info to screw up. 
*/ 1644 emit_use (pic_offset_table_rtx); 1645 } 1646 1647 /* Expand the m32r prologue as a series of insns. */ 1648 1649 void 1650 m32r_expand_prologue (void) 1651 { 1652 int regno; 1653 int frame_size; 1654 unsigned int gmask; 1655 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table 1656 | crtl->profile); 1657 1658 if (! current_frame_info.initialized) 1659 m32r_compute_frame_size (get_frame_size ()); 1660 1661 if (flag_stack_usage_info) 1662 current_function_static_stack_size = current_frame_info.total_size; 1663 1664 gmask = current_frame_info.gmask; 1665 1666 /* These cases shouldn't happen. Catch them now. */ 1667 gcc_assert (current_frame_info.total_size || !gmask); 1668 1669 /* Allocate space for register arguments if this is a variadic function. */ 1670 if (current_frame_info.pretend_size != 0) 1671 { 1672 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives 1673 the wrong result on a 64-bit host. */ 1674 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size; 1675 emit_insn (gen_addsi3 (stack_pointer_rtx, 1676 stack_pointer_rtx, 1677 GEN_INT (-pretend_size))); 1678 } 1679 1680 /* Save any registers we need to and set up fp. */ 1681 if (current_frame_info.save_fp) 1682 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx)); 1683 1684 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK); 1685 1686 /* Save any needed call-saved regs (and call-used if this is an 1687 interrupt handler). */ 1688 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno) 1689 { 1690 if ((gmask & (1 << regno)) != 0) 1691 emit_insn (gen_movsi_push (stack_pointer_rtx, 1692 gen_rtx_REG (Pmode, regno))); 1693 } 1694 1695 if (current_frame_info.save_lr) 1696 emit_insn (gen_movsi_push (stack_pointer_rtx, 1697 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM))); 1698 1699 /* Allocate the stack frame. */ 1700 frame_size = (current_frame_info.total_size 1701 - (current_frame_info.pretend_size 1702 + current_frame_info.reg_size)); 1703 1704 if (frame_size == 0) 1705 ; /* Nothing to do. */ 1706 else if (frame_size <= 32768) 1707 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1708 GEN_INT (-frame_size))); 1709 else 1710 { 1711 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1712 1713 emit_insn (gen_movsi (tmp, GEN_INT (frame_size))); 1714 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp)); 1715 } 1716 1717 if (frame_pointer_needed) 1718 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx)); 1719 1720 if (crtl->profile) 1721 /* Push lr for mcount (form_pc, x). */ 1722 emit_insn (gen_movsi_push (stack_pointer_rtx, 1723 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM))); 1724 1725 if (pic_reg_used) 1726 { 1727 m32r_load_pic_register (); 1728 m32r_reload_lr (stack_pointer_rtx, 1729 (crtl->profile ? 0 : frame_size)); 1730 } 1731 1732 if (crtl->profile && !pic_reg_used) 1733 emit_insn (gen_blockage ()); 1734 } 1735 1736 1737 /* Set up the stack and frame pointer (if desired) for the function. 1738 Note, if this is changed, you need to mirror the changes in 1739 m32r_compute_frame_size which calculates the prolog size. */ 1740 1741 static void 1742 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size) 1743 { 1744 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl); 1745 1746 /* If this is an interrupt handler, mark it as such. */ 1747 if (M32R_INTERRUPT_P (fn_type)) 1748 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START); 1749 1750 if (! 
current_frame_info.initialized) 1751 m32r_compute_frame_size (size); 1752 1753 /* This is only for the human reader. */ 1754 fprintf (file, 1755 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n", 1756 ASM_COMMENT_START, 1757 current_frame_info.var_size, 1758 current_frame_info.reg_size / 4, 1759 current_frame_info.args_size, 1760 current_frame_info.extra_size); 1761 } 1762 1763 /* Output RTL to pop register REGNO from the stack. */ 1764 1765 static void 1766 pop (int regno) 1767 { 1768 rtx x; 1769 1770 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno), 1771 stack_pointer_rtx)); 1772 add_reg_note (x, REG_INC, stack_pointer_rtx); 1773 } 1774 1775 /* Expand the m32r epilogue as a series of insns. */ 1776 1777 void 1778 m32r_expand_epilogue (void) 1779 { 1780 int regno; 1781 int noepilogue = FALSE; 1782 int total_size; 1783 1784 gcc_assert (current_frame_info.initialized); 1785 total_size = current_frame_info.total_size; 1786 1787 if (total_size == 0) 1788 { 1789 rtx insn = get_last_insn (); 1790 1791 /* If the last insn was a BARRIER, we don't have to write any code 1792 because a jump (aka return) was put there. */ 1793 if (insn && NOTE_P (insn)) 1794 insn = prev_nonnote_insn (insn); 1795 if (insn && BARRIER_P (insn)) 1796 noepilogue = TRUE; 1797 } 1798 1799 if (!noepilogue) 1800 { 1801 unsigned int var_size = current_frame_info.var_size; 1802 unsigned int args_size = current_frame_info.args_size; 1803 unsigned int gmask = current_frame_info.gmask; 1804 int can_trust_sp_p = !cfun->calls_alloca; 1805 1806 if (flag_exceptions) 1807 emit_insn (gen_blockage ()); 1808 1809 /* The first thing to do is point the sp at the bottom of the register 1810 save area. */ 1811 if (can_trust_sp_p) 1812 { 1813 unsigned int reg_offset = var_size + args_size; 1814 1815 if (reg_offset == 0) 1816 ; /* Nothing to do. */ 1817 else if (reg_offset < 32768) 1818 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1819 GEN_INT (reg_offset))); 1820 else 1821 { 1822 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1823 1824 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset))); 1825 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1826 tmp)); 1827 } 1828 } 1829 else if (frame_pointer_needed) 1830 { 1831 unsigned int reg_offset = var_size + args_size; 1832 1833 if (reg_offset == 0) 1834 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx)); 1835 else if (reg_offset < 32768) 1836 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx, 1837 GEN_INT (reg_offset))); 1838 else 1839 { 1840 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1841 1842 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset))); 1843 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx)); 1844 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1845 tmp)); 1846 } 1847 } 1848 else 1849 gcc_unreachable (); 1850 1851 if (current_frame_info.save_lr) 1852 pop (RETURN_ADDR_REGNUM); 1853 1854 /* Restore any saved registers, in reverse order of course. */ 1855 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK); 1856 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno) 1857 { 1858 if ((gmask & (1L << regno)) != 0) 1859 pop (regno); 1860 } 1861 1862 if (current_frame_info.save_fp) 1863 pop (FRAME_POINTER_REGNUM); 1864 1865 /* Remove varargs area if present. 
*/ 1866 if (current_frame_info.pretend_size != 0) 1867 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1868 GEN_INT (current_frame_info.pretend_size))); 1869 1870 emit_insn (gen_blockage ()); 1871 } 1872 } 1873 1874 /* Do any necessary cleanup after a function to restore stack, frame, 1875 and regs. */ 1876 1877 static void 1878 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED, 1879 HOST_WIDE_INT size ATTRIBUTE_UNUSED) 1880 { 1881 /* Reset state info for each function. */ 1882 current_frame_info = zero_frame_info; 1883 m32r_compute_function_type (NULL_TREE); 1884 } 1885 1886 /* Return nonzero if this function is known to have a null or 1 instruction 1887 epilogue. */ 1888 1889 int 1890 direct_return (void) 1891 { 1892 if (!reload_completed) 1893 return FALSE; 1894 1895 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl))) 1896 return FALSE; 1897 1898 if (! current_frame_info.initialized) 1899 m32r_compute_frame_size (get_frame_size ()); 1900 1901 return current_frame_info.total_size == 0; 1902 } 1903 1904 1905 /* PIC. */ 1906 1907 int 1908 m32r_legitimate_pic_operand_p (rtx x) 1909 { 1910 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF) 1911 return 0; 1912 1913 if (GET_CODE (x) == CONST 1914 && GET_CODE (XEXP (x, 0)) == PLUS 1915 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF 1916 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF) 1917 && (CONST_INT_P (XEXP (XEXP (x, 0), 1)))) 1918 return 0; 1919 1920 return 1; 1921 } 1922 1923 rtx 1924 m32r_legitimize_pic_address (rtx orig, rtx reg) 1925 { 1926 #ifdef DEBUG_PIC 1927 printf("m32r_legitimize_pic_address()\n"); 1928 #endif 1929 1930 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF) 1931 { 1932 rtx pic_ref, address; 1933 int subregs = 0; 1934 1935 if (reg == 0) 1936 { 1937 gcc_assert (!reload_in_progress && !reload_completed); 1938 reg = gen_reg_rtx (Pmode); 1939 1940 subregs = 1; 1941 } 1942 1943 if (subregs) 1944 address = gen_reg_rtx (Pmode); 1945 else 1946 address = reg; 1947 1948 crtl->uses_pic_offset_table = 1; 1949 1950 if (GET_CODE (orig) == LABEL_REF 1951 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig))) 1952 { 1953 emit_insn (gen_gotoff_load_addr (reg, orig)); 1954 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx)); 1955 return reg; 1956 } 1957 1958 emit_insn (gen_pic_load_addr (address, orig)); 1959 1960 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx)); 1961 pic_ref = gen_const_mem (Pmode, address); 1962 emit_move_insn (reg, pic_ref); 1963 return reg; 1964 } 1965 else if (GET_CODE (orig) == CONST) 1966 { 1967 rtx base, offset; 1968 1969 if (GET_CODE (XEXP (orig, 0)) == PLUS 1970 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx) 1971 return orig; 1972 1973 if (reg == 0) 1974 { 1975 gcc_assert (!reload_in_progress && !reload_completed); 1976 reg = gen_reg_rtx (Pmode); 1977 } 1978 1979 if (GET_CODE (XEXP (orig, 0)) == PLUS) 1980 { 1981 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg); 1982 if (base == reg) 1983 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX); 1984 else 1985 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg); 1986 } 1987 else 1988 return orig; 1989 1990 if (CONST_INT_P (offset)) 1991 { 1992 if (INT16_P (INTVAL (offset))) 1993 return plus_constant (Pmode, base, INTVAL (offset)); 1994 else 1995 { 1996 gcc_assert (! reload_in_progress && ! 
reload_completed); 1997 offset = force_reg (Pmode, offset); 1998 } 1999 } 2000 2001 return gen_rtx_PLUS (Pmode, base, offset); 2002 } 2003 2004 return orig; 2005 } 2006 2007 static rtx 2008 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED, 2009 machine_mode mode ATTRIBUTE_UNUSED) 2010 { 2011 if (flag_pic) 2012 return m32r_legitimize_pic_address (x, NULL_RTX); 2013 else 2014 return x; 2015 } 2016 2017 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */ 2018 2019 static bool 2020 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED) 2021 { 2022 if (GET_CODE (addr) == LO_SUM) 2023 return true; 2024 2025 return false; 2026 } 2027 2028 /* Nested function support. */ 2029 2030 /* Emit RTL insns to initialize the variable parts of a trampoline. 2031 FNADDR is an RTX for the address of the function's pure code. 2032 CXT is an RTX for the static chain value for the function. */ 2033 2034 void 2035 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED, 2036 rtx fnaddr ATTRIBUTE_UNUSED, 2037 rtx cxt ATTRIBUTE_UNUSED) 2038 { 2039 } 2040 2041 static void 2042 m32r_file_start (void) 2043 { 2044 default_file_start (); 2045 2046 if (flag_verbose_asm) 2047 fprintf (asm_out_file, 2048 "%s M32R/D special options: -G %d\n", 2049 ASM_COMMENT_START, g_switch_value); 2050 2051 if (TARGET_LITTLE_ENDIAN) 2052 fprintf (asm_out_file, "\t.little\n"); 2053 } 2054 2055 /* Print operand X (an rtx) in assembler syntax to file FILE. 2056 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified. 2057 For `%' followed by punctuation, CODE is the punctuation and X is null. */ 2058 2059 static void 2060 m32r_print_operand (FILE * file, rtx x, int code) 2061 { 2062 rtx addr; 2063 2064 switch (code) 2065 { 2066 /* The 's' and 'p' codes are used by output_block_move() to 2067 indicate post-increment 's'tores and 'p're-increment loads. */ 2068 case 's': 2069 if (REG_P (x)) 2070 fprintf (file, "@+%s", reg_names [REGNO (x)]); 2071 else 2072 output_operand_lossage ("invalid operand to %%s code"); 2073 return; 2074 2075 case 'p': 2076 if (REG_P (x)) 2077 fprintf (file, "@%s+", reg_names [REGNO (x)]); 2078 else 2079 output_operand_lossage ("invalid operand to %%p code"); 2080 return; 2081 2082 case 'R' : 2083 /* Write second word of DImode or DFmode reference, 2084 register or memory. */ 2085 if (REG_P (x)) 2086 fputs (reg_names[REGNO (x)+1], file); 2087 else if (MEM_P (x)) 2088 { 2089 machine_mode mode = GET_MODE (x); 2090 2091 fprintf (file, "@("); 2092 /* Handle possible auto-increment. Since it is pre-increment and 2093 we have already done it, we can just use an offset of four. */ 2094 /* ??? This is taken from rs6000.c I think. I don't think it is 2095 currently necessary, but keep it around. */ 2096 if (GET_CODE (XEXP (x, 0)) == PRE_INC 2097 || GET_CODE (XEXP (x, 0)) == PRE_DEC) 2098 output_address (mode, plus_constant (Pmode, 2099 XEXP (XEXP (x, 0), 0), 4)); 2100 else 2101 output_address (mode, plus_constant (Pmode, XEXP (x, 0), 4)); 2102 fputc (')', file); 2103 } 2104 else 2105 output_operand_lossage ("invalid operand to %%R code"); 2106 return; 2107 2108 case 'H' : /* High word. */ 2109 case 'L' : /* Low word. */ 2110 if (REG_P (x)) 2111 { 2112 /* L = least significant word, H = most significant word. 
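For example (register numbers purely illustrative): on the default big-endian target the most significant word of a pair lives in the lower-numbered register, so %H of a value held in r4/r5 prints r4 and %L prints r5; a little-endian target swaps the two.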
*/ 2113 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L')) 2114 fputs (reg_names[REGNO (x)], file); 2115 else 2116 fputs (reg_names[REGNO (x)+1], file); 2117 } 2118 else if (CONST_INT_P (x) 2119 || GET_CODE (x) == CONST_DOUBLE) 2120 { 2121 rtx first, second; 2122 2123 split_double (x, &first, &second); 2124 fprintf (file, HOST_WIDE_INT_PRINT_HEX, 2125 code == 'L' ? INTVAL (first) : INTVAL (second)); 2126 } 2127 else 2128 output_operand_lossage ("invalid operand to %%H/%%L code"); 2129 return; 2130 2131 case 'A' : 2132 { 2133 char str[30]; 2134 2135 if (GET_CODE (x) != CONST_DOUBLE 2136 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT) 2137 fatal_insn ("bad insn for 'A'", x); 2138 2139 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1); 2140 fprintf (file, "%s", str); 2141 return; 2142 } 2143 2144 case 'B' : /* Bottom half. */ 2145 case 'T' : /* Top half. */ 2146 /* Output the argument to a `seth' insn (sets the Top half-word). 2147 For constants output arguments to a seth/or3 pair to set Top and 2148 Bottom halves. For symbols output arguments to a seth/add3 pair to 2149 set Top and Bottom halves. The difference exists because for 2150 constants seth/or3 is more readable but for symbols we need to use 2151 the same scheme as `ld' and `st' insns (16-bit addend is signed). */ 2152 switch (GET_CODE (x)) 2153 { 2154 case CONST_INT : 2155 case CONST_DOUBLE : 2156 { 2157 rtx first, second; 2158 2159 split_double (x, &first, &second); 2160 x = WORDS_BIG_ENDIAN ? second : first; 2161 fprintf (file, HOST_WIDE_INT_PRINT_HEX, 2162 (code == 'B' 2163 ? INTVAL (x) & 0xffff 2164 : (INTVAL (x) >> 16) & 0xffff)); 2165 } 2166 return; 2167 case CONST : 2168 case SYMBOL_REF : 2169 if (code == 'B' 2170 && small_data_operand (x, VOIDmode)) 2171 { 2172 fputs ("sda(", file); 2173 output_addr_const (file, x); 2174 fputc (')', file); 2175 return; 2176 } 2177 /* fall through */ 2178 case LABEL_REF : 2179 fputs (code == 'T' ? "shigh(" : "low(", file); 2180 output_addr_const (file, x); 2181 fputc (')', file); 2182 return; 2183 default : 2184 output_operand_lossage ("invalid operand to %%T/%%B code"); 2185 return; 2186 } 2187 break; 2188 2189 case 'U' : 2190 /* ??? wip */ 2191 /* Output a load/store with update indicator if appropriate. */ 2192 if (MEM_P (x)) 2193 { 2194 if (GET_CODE (XEXP (x, 0)) == PRE_INC 2195 || GET_CODE (XEXP (x, 0)) == PRE_DEC) 2196 fputs (".a", file); 2197 } 2198 else 2199 output_operand_lossage ("invalid operand to %%U code"); 2200 return; 2201 2202 case 'N' : 2203 /* Print a constant value negated. */ 2204 if (CONST_INT_P (x)) 2205 output_addr_const (file, GEN_INT (- INTVAL (x))); 2206 else 2207 output_operand_lossage ("invalid operand to %%N code"); 2208 return; 2209 2210 case 'X' : 2211 /* Print a const_int in hex. Used in comments. */ 2212 if (CONST_INT_P (x)) 2213 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x)); 2214 return; 2215 2216 case '#' : 2217 fputs (IMMEDIATE_PREFIX, file); 2218 return; 2219 2220 case 0 : 2221 /* Do nothing special. */ 2222 break; 2223 2224 default : 2225 /* Unknown flag. 
*/ 2226 output_operand_lossage ("invalid operand output code"); 2227 } 2228 2229 switch (GET_CODE (x)) 2230 { 2231 case REG : 2232 fputs (reg_names[REGNO (x)], file); 2233 break; 2234 2235 case MEM : 2236 addr = XEXP (x, 0); 2237 if (GET_CODE (addr) == PRE_INC) 2238 { 2239 if (!REG_P (XEXP (addr, 0))) 2240 fatal_insn ("pre-increment address is not a register", x); 2241 2242 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]); 2243 } 2244 else if (GET_CODE (addr) == PRE_DEC) 2245 { 2246 if (!REG_P (XEXP (addr, 0))) 2247 fatal_insn ("pre-decrement address is not a register", x); 2248 2249 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]); 2250 } 2251 else if (GET_CODE (addr) == POST_INC) 2252 { 2253 if (!REG_P (XEXP (addr, 0))) 2254 fatal_insn ("post-increment address is not a register", x); 2255 2256 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]); 2257 } 2258 else 2259 { 2260 fputs ("@(", file); 2261 output_address (GET_MODE (x), addr); 2262 fputc (')', file); 2263 } 2264 break; 2265 2266 case CONST_DOUBLE : 2267 /* We handle SFmode constants here as output_addr_const doesn't. */ 2268 if (GET_MODE (x) == SFmode) 2269 { 2270 long l; 2271 2272 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l); 2273 fprintf (file, "0x%08lx", l); 2274 break; 2275 } 2276 2277 /* Fall through. Let output_addr_const deal with it. */ 2278 2279 default : 2280 output_addr_const (file, x); 2281 break; 2282 } 2283 } 2284 2285 /* Print a memory address as an operand to reference that memory location. */ 2286 2287 static void 2288 m32r_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr) 2289 { 2290 rtx base; 2291 rtx index = 0; 2292 int offset = 0; 2293 2294 switch (GET_CODE (addr)) 2295 { 2296 case REG : 2297 fputs (reg_names[REGNO (addr)], file); 2298 break; 2299 2300 case PLUS : 2301 if (CONST_INT_P (XEXP (addr, 0))) 2302 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1); 2303 else if (CONST_INT_P (XEXP (addr, 1))) 2304 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0); 2305 else 2306 base = XEXP (addr, 0), index = XEXP (addr, 1); 2307 if (REG_P (base)) 2308 { 2309 /* Print the offset first (if present) to conform to the manual. */ 2310 if (index == 0) 2311 { 2312 if (offset != 0) 2313 fprintf (file, "%d,", offset); 2314 fputs (reg_names[REGNO (base)], file); 2315 } 2316 /* The chip doesn't support this, but left in for generality. */ 2317 else if (REG_P (index)) 2318 fprintf (file, "%s,%s", 2319 reg_names[REGNO (base)], reg_names[REGNO (index)]); 2320 /* Not sure this can happen, but leave in for now. 
*/ 2321 else if (GET_CODE (index) == SYMBOL_REF) 2322 { 2323 output_addr_const (file, index); 2324 fputc (',', file); 2325 fputs (reg_names[REGNO (base)], file); 2326 } 2327 else 2328 fatal_insn ("bad address", addr); 2329 } 2330 else if (GET_CODE (base) == LO_SUM) 2331 { 2332 gcc_assert (!index && REG_P (XEXP (base, 0))); 2333 if (small_data_operand (XEXP (base, 1), VOIDmode)) 2334 fputs ("sda(", file); 2335 else 2336 fputs ("low(", file); 2337 output_addr_const (file, plus_constant (Pmode, XEXP (base, 1), 2338 offset)); 2339 fputs ("),", file); 2340 fputs (reg_names[REGNO (XEXP (base, 0))], file); 2341 } 2342 else 2343 fatal_insn ("bad address", addr); 2344 break; 2345 2346 case LO_SUM : 2347 if (!REG_P (XEXP (addr, 0))) 2348 fatal_insn ("lo_sum not of register", addr); 2349 if (small_data_operand (XEXP (addr, 1), VOIDmode)) 2350 fputs ("sda(", file); 2351 else 2352 fputs ("low(", file); 2353 output_addr_const (file, XEXP (addr, 1)); 2354 fputs ("),", file); 2355 fputs (reg_names[REGNO (XEXP (addr, 0))], file); 2356 break; 2357 2358 case PRE_INC : /* Assume SImode. */ 2359 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]); 2360 break; 2361 2362 case PRE_DEC : /* Assume SImode. */ 2363 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]); 2364 break; 2365 2366 case POST_INC : /* Assume SImode. */ 2367 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]); 2368 break; 2369 2370 default : 2371 output_addr_const (file, addr); 2372 break; 2373 } 2374 } 2375 2376 static bool 2377 m32r_print_operand_punct_valid_p (unsigned char code) 2378 { 2379 return m32r_punct_chars[code]; 2380 } 2381 2382 /* Return true if the operands are the constants 0 and 1. */ 2383 2384 int 2385 zero_and_one (rtx operand1, rtx operand2) 2386 { 2387 return 2388 CONST_INT_P (operand1) 2389 && CONST_INT_P (operand2) 2390 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1)) 2391 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0))); 2392 } 2393 2394 /* Generate the correct assembler code to handle the conditional loading of a 2395 value into a register. It is known that the operands satisfy the 2396 conditional_move_operand() function above. The destination is operand[0]. 2397 The condition is operand [1]. The 'true' value is operand [2] and the 2398 'false' value is operand [3]. */ 2399 2400 char * 2401 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED) 2402 { 2403 static char buffer [100]; 2404 const char * dest = reg_names [REGNO (operands [0])]; 2405 2406 buffer [0] = 0; 2407 2408 /* Destination must be a register. */ 2409 gcc_assert (REG_P (operands [0])); 2410 gcc_assert (conditional_move_operand (operands [2], SImode)); 2411 gcc_assert (conditional_move_operand (operands [3], SImode)); 2412 2413 /* Check to see if the test is reversed. */ 2414 if (GET_CODE (operands [1]) == NE) 2415 { 2416 rtx tmp = operands [2]; 2417 operands [2] = operands [3]; 2418 operands [3] = tmp; 2419 } 2420 2421 sprintf (buffer, "mvfc %s, cbr", dest); 2422 2423 /* If the true value was '0' then we need to invert the results of the move. */ 2424 if (INTVAL (operands [2]) == 0) 2425 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1", 2426 dest, dest); 2427 2428 return buffer; 2429 } 2430 2431 /* Returns true if the registers contained in the two 2432 rtl expressions are different. 
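Both operands are stripped of any enclosing SUBREGs first.  An operand that is not a register at all is treated as different from everything, including another non-register operand.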
*/ 2433 2434 int 2435 m32r_not_same_reg (rtx a, rtx b) 2436 { 2437 int reg_a = -1; 2438 int reg_b = -2; 2439 2440 while (GET_CODE (a) == SUBREG) 2441 a = SUBREG_REG (a); 2442 2443 if (REG_P (a)) 2444 reg_a = REGNO (a); 2445 2446 while (GET_CODE (b) == SUBREG) 2447 b = SUBREG_REG (b); 2448 2449 if (REG_P (b)) 2450 reg_b = REGNO (b); 2451 2452 return reg_a != reg_b; 2453 } 2454 2455 2456 rtx 2457 m32r_function_symbol (const char *name) 2458 { 2459 int extra_flags = 0; 2460 enum m32r_model model; 2461 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name); 2462 2463 if (TARGET_MODEL_SMALL) 2464 model = M32R_MODEL_SMALL; 2465 else if (TARGET_MODEL_MEDIUM) 2466 model = M32R_MODEL_MEDIUM; 2467 else if (TARGET_MODEL_LARGE) 2468 model = M32R_MODEL_LARGE; 2469 else 2470 gcc_unreachable (); /* Shouldn't happen. */ 2471 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT; 2472 2473 if (extra_flags) 2474 SYMBOL_REF_FLAGS (sym) |= extra_flags; 2475 2476 return sym; 2477 } 2478 2479 /* Use a library function to move some bytes. */ 2480 2481 static void 2482 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx) 2483 { 2484 /* We want to pass the size as Pmode, which will normally be SImode 2485 but will be DImode if we are using 64-bit longs and pointers. */ 2486 if (GET_MODE (bytes_rtx) != VOIDmode 2487 && GET_MODE (bytes_rtx) != Pmode) 2488 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1); 2489 2490 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL, 2491 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode, 2492 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx, 2493 TYPE_UNSIGNED (sizetype)), 2494 TYPE_MODE (sizetype)); 2495 } 2496 2497 /* Expand string/block move operations. 2498 2499 operands[0] is the pointer to the destination. 2500 operands[1] is the pointer to the source. 2501 operands[2] is the number of bytes to move. 2502 operands[3] is the alignment. 2503 2504 Returns 1 upon success, 0 otherwise. */ 2505 2506 int 2507 m32r_expand_block_move (rtx operands[]) 2508 { 2509 rtx orig_dst = operands[0]; 2510 rtx orig_src = operands[1]; 2511 rtx bytes_rtx = operands[2]; 2512 rtx align_rtx = operands[3]; 2513 int constp = CONST_INT_P (bytes_rtx); 2514 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0; 2515 int align = INTVAL (align_rtx); 2516 int leftover; 2517 rtx src_reg; 2518 rtx dst_reg; 2519 2520 if (constp && bytes <= 0) 2521 return 1; 2522 2523 /* Move the address into scratch registers. */ 2524 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0)); 2525 src_reg = copy_addr_to_reg (XEXP (orig_src, 0)); 2526 2527 if (align > UNITS_PER_WORD) 2528 align = UNITS_PER_WORD; 2529 2530 /* If we prefer size over speed, always use a function call. 2531 If we do not know the size, use a function call. 2532 If the blocks are not word aligned, use a function call. */ 2533 if (optimize_size || ! constp || align != UNITS_PER_WORD) 2534 { 2535 block_move_call (dst_reg, src_reg, bytes_rtx); 2536 return 0; 2537 } 2538 2539 leftover = bytes % MAX_MOVE_BYTES; 2540 bytes -= leftover; 2541 2542 /* If necessary, generate a loop to handle the bulk of the copy. 
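Schematically (names are illustrative, not the emitted RTL), when more than one MAX_MOVE_BYTES chunk is needed the sequence below amounts to

	final_src = src + rounded_total
     loop:
	copy MAX_MOVE_BYTES from @src to @dst	(movmemsi_internal)
	dst += 4
	if (src != final_src) goto loop

   Any remainder smaller than MAX_MOVE_BYTES is then copied by one further movmemsi_internal after the loop.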
*/ 2543 if (bytes) 2544 { 2545 rtx_code_label *label = NULL; 2546 rtx final_src = NULL_RTX; 2547 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES); 2548 rtx rounded_total = GEN_INT (bytes); 2549 rtx new_dst_reg = gen_reg_rtx (SImode); 2550 rtx new_src_reg = gen_reg_rtx (SImode); 2551 2552 /* If we are going to have to perform this loop more than 2553 once, then generate a label and compute the address the 2554 source register will contain upon completion of the final 2555 iteration. */ 2556 if (bytes > MAX_MOVE_BYTES) 2557 { 2558 final_src = gen_reg_rtx (Pmode); 2559 2560 if (INT16_P(bytes)) 2561 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total)); 2562 else 2563 { 2564 emit_insn (gen_movsi (final_src, rounded_total)); 2565 emit_insn (gen_addsi3 (final_src, final_src, src_reg)); 2566 } 2567 2568 label = gen_label_rtx (); 2569 emit_label (label); 2570 } 2571 2572 /* It is known that output_block_move() will update src_reg to point 2573 to the word after the end of the source block, and dst_reg to point 2574 to the last word of the destination block, provided that the block 2575 is MAX_MOVE_BYTES long. */ 2576 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time, 2577 new_dst_reg, new_src_reg)); 2578 emit_move_insn (dst_reg, new_dst_reg); 2579 emit_move_insn (src_reg, new_src_reg); 2580 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4))); 2581 2582 if (bytes > MAX_MOVE_BYTES) 2583 { 2584 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src); 2585 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label)); 2586 } 2587 } 2588 2589 if (leftover) 2590 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover), 2591 gen_reg_rtx (SImode), 2592 gen_reg_rtx (SImode))); 2593 return 1; 2594 } 2595 2596 2597 /* Emit load/stores for a small constant word aligned block_move. 2598 2599 operands[0] is the memory address of the destination. 2600 operands[1] is the memory address of the source. 2601 operands[2] is the number of bytes to move. 2602 operands[3] is a temp register. 2603 operands[4] is a temp register. */ 2604 2605 void 2606 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[]) 2607 { 2608 HOST_WIDE_INT bytes = INTVAL (operands[2]); 2609 int first_time; 2610 int got_extra = 0; 2611 2612 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES); 2613 2614 /* We do not have a post-increment store available, so the first set of 2615 stores are done without any increment, then the remaining ones can use 2616 the pre-increment addressing mode. 2617 2618 Note: expand_block_move() also relies upon this behavior when building 2619 loops to copy large blocks. 
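For instance, with illustrative register choices (r1 = source pointer, r2 = destination pointer, r5/r6 the two temporaries), a 16-byte copy comes out roughly as

	ld	r5,@r1+
	ld	r6,@r1+
	st	r5,@r2
	st	r6,@+r2
	ld	r5,@r1+
	ld	r6,@r1+
	st	r5,@+r2
	st	r6,@+r2

   where only the very first store uses the plain @Rdest form.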
*/ 2620 first_time = 1; 2621 2622 while (bytes > 0) 2623 { 2624 if (bytes >= 8) 2625 { 2626 if (first_time) 2627 { 2628 output_asm_insn ("ld\t%5, %p1", operands); 2629 output_asm_insn ("ld\t%6, %p1", operands); 2630 output_asm_insn ("st\t%5, @%0", operands); 2631 output_asm_insn ("st\t%6, %s0", operands); 2632 } 2633 else 2634 { 2635 output_asm_insn ("ld\t%5, %p1", operands); 2636 output_asm_insn ("ld\t%6, %p1", operands); 2637 output_asm_insn ("st\t%5, %s0", operands); 2638 output_asm_insn ("st\t%6, %s0", operands); 2639 } 2640 2641 bytes -= 8; 2642 } 2643 else if (bytes >= 4) 2644 { 2645 if (bytes > 4) 2646 got_extra = 1; 2647 2648 output_asm_insn ("ld\t%5, %p1", operands); 2649 2650 if (got_extra) 2651 output_asm_insn ("ld\t%6, %p1", operands); 2652 2653 if (first_time) 2654 output_asm_insn ("st\t%5, @%0", operands); 2655 else 2656 output_asm_insn ("st\t%5, %s0", operands); 2657 2658 bytes -= 4; 2659 } 2660 else 2661 { 2662 /* Get the entire next word, even though we do not want all of it. 2663 The saves us from doing several smaller loads, and we assume that 2664 we cannot cause a page fault when at least part of the word is in 2665 valid memory [since we don't get called if things aren't properly 2666 aligned]. */ 2667 int dst_offset = first_time ? 0 : 4; 2668 /* The amount of increment we have to make to the 2669 destination pointer. */ 2670 int dst_inc_amount = dst_offset + bytes - 4; 2671 /* The same for the source pointer. */ 2672 int src_inc_amount = bytes; 2673 int last_shift; 2674 rtx my_operands[3]; 2675 2676 /* If got_extra is true then we have already loaded 2677 the next word as part of loading and storing the previous word. */ 2678 if (! got_extra) 2679 output_asm_insn ("ld\t%6, @%1", operands); 2680 2681 if (bytes >= 2) 2682 { 2683 bytes -= 2; 2684 2685 output_asm_insn ("sra3\t%5, %6, #16", operands); 2686 my_operands[0] = operands[5]; 2687 my_operands[1] = GEN_INT (dst_offset); 2688 my_operands[2] = operands[0]; 2689 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands); 2690 2691 /* If there is a byte left to store then increment the 2692 destination address and shift the contents of the source 2693 register down by 8 bits. We could not do the address 2694 increment in the store half word instruction, because it does 2695 not have an auto increment mode. */ 2696 if (bytes > 0) /* assert (bytes == 1) */ 2697 { 2698 dst_offset += 2; 2699 last_shift = 8; 2700 } 2701 } 2702 else 2703 last_shift = 24; 2704 2705 if (bytes > 0) 2706 { 2707 my_operands[0] = operands[6]; 2708 my_operands[1] = GEN_INT (last_shift); 2709 output_asm_insn ("srai\t%0, #%1", my_operands); 2710 my_operands[0] = operands[6]; 2711 my_operands[1] = GEN_INT (dst_offset); 2712 my_operands[2] = operands[0]; 2713 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands); 2714 } 2715 2716 /* Update the destination pointer if needed. We have to do 2717 this so that the patterns matches what we output in this 2718 function. */ 2719 if (dst_inc_amount 2720 && !find_reg_note (insn, REG_UNUSED, operands[0])) 2721 { 2722 my_operands[0] = operands[0]; 2723 my_operands[1] = GEN_INT (dst_inc_amount); 2724 output_asm_insn ("addi\t%0, #%1", my_operands); 2725 } 2726 2727 /* Update the source pointer if needed. We have to do this 2728 so that the patterns matches what we output in this 2729 function. 
*/ 2730 if (src_inc_amount 2731 && !find_reg_note (insn, REG_UNUSED, operands[1])) 2732 { 2733 my_operands[0] = operands[1]; 2734 my_operands[1] = GEN_INT (src_inc_amount); 2735 output_asm_insn ("addi\t%0, #%1", my_operands); 2736 } 2737 2738 bytes = 0; 2739 } 2740 2741 first_time = 0; 2742 } 2743 } 2744 2745 /* Return true if using NEW_REG in place of OLD_REG is ok. */ 2746 2747 int 2748 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED, 2749 unsigned int new_reg) 2750 { 2751 /* Interrupt routines can't clobber any register that isn't already used. */ 2752 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) 2753 && !df_regs_ever_live_p (new_reg)) 2754 return 0; 2755 2756 return 1; 2757 } 2758 2759 rtx 2760 m32r_return_addr (int count) 2761 { 2762 if (count != 0) 2763 return const0_rtx; 2764 2765 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM); 2766 } 2767 2768 static void 2769 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value) 2770 { 2771 emit_move_insn (adjust_address (m_tramp, SImode, 0), 2772 gen_int_mode (TARGET_LITTLE_ENDIAN ? 2773 0x017e8e17 : 0x178e7e01, SImode)); 2774 emit_move_insn (adjust_address (m_tramp, SImode, 4), 2775 gen_int_mode (TARGET_LITTLE_ENDIAN ? 2776 0x0c00ae86 : 0x86ae000c, SImode)); 2777 emit_move_insn (adjust_address (m_tramp, SImode, 8), 2778 gen_int_mode (TARGET_LITTLE_ENDIAN ? 2779 0xe627871e : 0x1e8727e6, SImode)); 2780 emit_move_insn (adjust_address (m_tramp, SImode, 12), 2781 gen_int_mode (TARGET_LITTLE_ENDIAN ? 2782 0xc616c626 : 0x26c61fc6, SImode)); 2783 emit_move_insn (adjust_address (m_tramp, SImode, 16), 2784 chain_value); 2785 emit_move_insn (adjust_address (m_tramp, SImode, 20), 2786 XEXP (DECL_RTL (fndecl), 0)); 2787 2788 if (m32r_cache_flush_trap >= 0) 2789 emit_insn (gen_flush_icache 2790 (validize_mem (adjust_address (m_tramp, SImode, 0)), 2791 gen_int_mode (m32r_cache_flush_trap, SImode))); 2792 else if (m32r_cache_flush_func && m32r_cache_flush_func[0]) 2793 emit_library_call (m32r_function_symbol (m32r_cache_flush_func), 2794 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode, 2795 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode, 2796 GEN_INT (3), SImode); 2797 } 2798 2799 /* True if X is a reg that can be used as a base reg. */ 2800 2801 static bool 2802 m32r_rtx_ok_for_base_p (const_rtx x, bool strict) 2803 { 2804 if (! REG_P (x)) 2805 return false; 2806 2807 if (strict) 2808 { 2809 if (GPR_P (REGNO (x))) 2810 return true; 2811 } 2812 else 2813 { 2814 if (GPR_P (REGNO (x)) 2815 || REGNO (x) == ARG_POINTER_REGNUM 2816 || ! HARD_REGISTER_P (x)) 2817 return true; 2818 } 2819 2820 return false; 2821 } 2822 2823 static inline bool 2824 m32r_rtx_ok_for_offset_p (const_rtx x) 2825 { 2826 return (CONST_INT_P (x) && INT16_P (INTVAL (x))); 2827 } 2828 2829 static inline bool 2830 m32r_legitimate_offset_addres_p (machine_mode mode ATTRIBUTE_UNUSED, 2831 const_rtx x, bool strict) 2832 { 2833 if (GET_CODE (x) == PLUS 2834 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict) 2835 && m32r_rtx_ok_for_offset_p (XEXP (x, 1))) 2836 return true; 2837 2838 return false; 2839 } 2840 2841 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word, 2842 since more than one instruction will be required. 
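For example, (lo_sum (reg) (symbol_ref)) is accepted as an address for a word-sized (or smaller) access such as SImode or SFmode, but a DImode or DFmode access through it would need two separate loads or stores and is therefore rejected.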
*/ 2843 2844 static inline bool 2845 m32r_legitimate_lo_sum_addres_p (machine_mode mode, const_rtx x, 2846 bool strict) 2847 { 2848 if (GET_CODE (x) == LO_SUM 2849 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD) 2850 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict) 2851 && CONSTANT_P (XEXP (x, 1))) 2852 return true; 2853 2854 return false; 2855 } 2856 2857 /* Is this a load and increment operation. */ 2858 2859 static inline bool 2860 m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict) 2861 { 2862 if ((mode == SImode || mode == SFmode) 2863 && GET_CODE (x) == POST_INC 2864 && REG_P (XEXP (x, 0)) 2865 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)) 2866 return true; 2867 2868 return false; 2869 } 2870 2871 /* Is this an increment/decrement and store operation. */ 2872 2873 static inline bool 2874 m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict) 2875 { 2876 if ((mode == SImode || mode == SFmode) 2877 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC) 2878 && REG_P (XEXP (x, 0)) \ 2879 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)) 2880 return true; 2881 2882 return false; 2883 } 2884 2885 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */ 2886 2887 static bool 2888 m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict) 2889 { 2890 if (m32r_rtx_ok_for_base_p (x, strict) 2891 || m32r_legitimate_offset_addres_p (mode, x, strict) 2892 || m32r_legitimate_lo_sum_addres_p (mode, x, strict) 2893 || m32r_load_postinc_p (mode, x, strict) 2894 || m32r_store_preinc_predec_p (mode, x, strict)) 2895 return true; 2896 2897 return false; 2898 } 2899 2900 static void 2901 m32r_conditional_register_usage (void) 2902 { 2903 if (flag_pic) 2904 { 2905 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1; 2906 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1; 2907 } 2908 } 2909 2910 /* Implement TARGET_LEGITIMATE_CONSTANT_P 2911 2912 We don't allow (plus symbol large-constant) as the relocations can't 2913 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations. 2914 We allow all CONST_DOUBLE's as the md file patterns will force the 2915 constant to memory if they can't handle them. */ 2916 2917 static bool 2918 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x) 2919 { 2920 return !(GET_CODE (x) == CONST 2921 && GET_CODE (XEXP (x, 0)) == PLUS 2922 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF 2923 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF) 2924 && CONST_INT_P (XEXP (XEXP (x, 0), 1)) 2925 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767); 2926 } 2927
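/* For illustration only (the symbol name is hypothetical):
   (const (plus (symbol_ref "foo") (const_int 4))) remains a legitimate
   constant, whereas the same expression with an addend of 0x10000 is
   rejected here, since the 16- and 24-bit relocations cannot describe
   such an offset.  */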