/* Subroutines used for code generation on the Renesas M32R cpu.
   Copyright (C) 1996-2019 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "attribs.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "stor-layout.h"
#include "varasm.h"
#include "calls.h"
#include "output.h"
#include "insn-attr.h"
#include "explow.h"
#include "expr.h"
#include "tm-constrs.h"
#include "builtins.h"

/* This file should be included last.  */
#include "target-def.h"

/* Array of valid operand punctuation characters.  */
static char m32r_punct_chars[256];

/* Machine-specific symbol_ref flags.  */
#define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
#define SYMBOL_REF_MODEL(X) \
  ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))

/* For string literals, etc.  */
#define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')

/* Forward declarations.  */
static void m32r_option_override (void);
static void init_reg_tables (void);
static void block_move_call (rtx, rtx, rtx);
static int m32r_is_insn (rtx);
static bool m32r_legitimate_address_p (machine_mode, rtx, bool);
static rtx m32r_legitimize_address (rtx, rtx, machine_mode);
static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
static void m32r_print_operand (FILE *, rtx, int);
static void m32r_print_operand_address (FILE *, machine_mode, rtx);
static bool m32r_print_operand_punct_valid_p (unsigned char code);
static void m32r_output_function_prologue (FILE *);
static void m32r_output_function_epilogue (FILE *);

static void m32r_file_start (void);

static int m32r_adjust_priority (rtx_insn *, int);
static int m32r_issue_rate (void);

static void m32r_encode_section_info (tree, rtx, int);
static bool m32r_in_small_data_p (const_tree);
static bool m32r_return_in_memory (const_tree, const_tree);
static rtx m32r_function_value (const_tree, const_tree, bool);
static rtx m32r_libcall_value (machine_mode, const_rtx);
static bool m32r_function_value_regno_p (const unsigned int);
static void m32r_setup_incoming_varargs (cumulative_args_t, machine_mode,
					 tree, int *, int);
static void init_idents (void);
static bool m32r_rtx_costs (rtx, machine_mode, int, int, int *, bool speed);
static int m32r_memory_move_cost (machine_mode, reg_class_t, bool);
static bool m32r_pass_by_reference (cumulative_args_t, machine_mode,
				    const_tree, bool);
static int m32r_arg_partial_bytes (cumulative_args_t, machine_mode,
				   tree, bool);
static rtx m32r_function_arg (cumulative_args_t, machine_mode,
			      const_tree, bool);
static void m32r_function_arg_advance (cumulative_args_t, machine_mode,
				       const_tree, bool);
static bool m32r_can_eliminate (const int, const int);
static void m32r_conditional_register_usage (void);
static void m32r_trampoline_init (rtx, tree, rtx);
static bool m32r_legitimate_constant_p (machine_mode, rtx);
static bool m32r_attribute_identifier (const_tree);
static bool m32r_hard_regno_mode_ok (unsigned int, machine_mode);
static bool m32r_modes_tieable_p (machine_mode, machine_mode);
static HOST_WIDE_INT m32r_starting_frame_offset (void);

/* M32R specific attributes.  */

static const struct attribute_spec m32r_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
       affects_type_identity, handler, exclude } */
  { "interrupt", 0, 0, true, false, false, false, NULL, NULL },
  { "model", 1, 1, true, false, false, false, m32r_handle_model_attribute,
    NULL },
  { NULL, 0, 0, false, false, false, false, NULL, NULL }
};

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
#undef TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P
#define TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P m32r_attribute_identifier

#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND m32r_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START m32r_file_start

#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE m32r_issue_rate

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE m32r_option_override

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS m32r_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY m32r_return_in_memory

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE m32r_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE m32r_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG m32r_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE m32r_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT m32r_trampoline_init

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p

#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK m32r_hard_regno_mode_ok

#undef TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P m32r_modes_tieable_p

#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings

#undef TARGET_STARTING_FRAME_OFFSET
#define TARGET_STARTING_FRAME_OFFSET m32r_starting_frame_offset

#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed

struct gcc_target targetm = TARGET_INITIALIZER;

/* Called by m32r_option_override to initialize various things.  */

void
m32r_init (void)
{
  init_reg_tables ();

  /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P.  */
  memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
  m32r_punct_chars['#'] = 1;
  m32r_punct_chars['@'] = 1; /* ??? no longer used */

  /* Provide default value if not specified.  */
  if (!global_options_set.x_g_switch_value)
    g_switch_value = SDATA_DEFAULT_SIZE;
}

static void
m32r_option_override (void)
{
  /* These need to be done at start up.
     It's convenient to do them here.  */
  m32r_init ();
  SUBTARGET_OVERRIDE_OPTIONS;
}

/* Vectors to keep interesting information about registers where it can easily
   be got.  We used to use the actual mode value as the bit number, but there
   is (or may be) more than 32 modes now.  Instead we use two tables: one
   indexed by hard register number, and one indexed by mode.  */

/* The purpose of m32r_mode_class is to shrink the range of modes so that
   they all fit (as bit numbers) in a 32-bit word (again).  Each real mode is
   mapped into one m32r_mode_class mode.  */

enum m32r_mode_class
{
  C_MODE,
  S_MODE, D_MODE, T_MODE, O_MODE,
  SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
};

/* Modes for condition codes.  */
#define C_MODES (1 << (int) C_MODE)

/* Modes for single-word and smaller quantities.  */
#define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))

/* Modes for double-word and smaller quantities.  */
#define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))

/* Modes for quad-word and smaller quantities.  */
#define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))

/* Modes for accumulators.  */
#define A_MODES (1 << (int) A_MODE)

/* Value is 1 if register/mode pair is acceptable on the M32R.
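   The table below is presumably what m32r_hard_regno_mode_ok (defined later
   in this file, outside this excerpt) consults, roughly as

     (m32r_hard_regno_modes[regno] & m32r_mode_class[mode]) != 0

   so, for example, the low general registers accept anything up to quad-word
   values (T_MODES) while the accumulators accept only A_MODE.  This is an
   illustrative sketch, not a definitive statement of the hook body.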
*/ 292 293 static const unsigned int m32r_hard_regno_modes[FIRST_PSEUDO_REGISTER] = 294 { 295 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, 296 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES, 297 S_MODES, C_MODES, A_MODES, A_MODES 298 }; 299 300 static unsigned int m32r_mode_class [NUM_MACHINE_MODES]; 301 302 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER]; 303 304 static void 305 init_reg_tables (void) 306 { 307 int i; 308 309 for (i = 0; i < NUM_MACHINE_MODES; i++) 310 { 311 machine_mode m = (machine_mode) i; 312 313 switch (GET_MODE_CLASS (m)) 314 { 315 case MODE_INT: 316 case MODE_PARTIAL_INT: 317 case MODE_COMPLEX_INT: 318 if (GET_MODE_SIZE (m) <= 4) 319 m32r_mode_class[i] = 1 << (int) S_MODE; 320 else if (GET_MODE_SIZE (m) == 8) 321 m32r_mode_class[i] = 1 << (int) D_MODE; 322 else if (GET_MODE_SIZE (m) == 16) 323 m32r_mode_class[i] = 1 << (int) T_MODE; 324 else if (GET_MODE_SIZE (m) == 32) 325 m32r_mode_class[i] = 1 << (int) O_MODE; 326 else 327 m32r_mode_class[i] = 0; 328 break; 329 case MODE_FLOAT: 330 case MODE_COMPLEX_FLOAT: 331 if (GET_MODE_SIZE (m) <= 4) 332 m32r_mode_class[i] = 1 << (int) SF_MODE; 333 else if (GET_MODE_SIZE (m) == 8) 334 m32r_mode_class[i] = 1 << (int) DF_MODE; 335 else if (GET_MODE_SIZE (m) == 16) 336 m32r_mode_class[i] = 1 << (int) TF_MODE; 337 else if (GET_MODE_SIZE (m) == 32) 338 m32r_mode_class[i] = 1 << (int) OF_MODE; 339 else 340 m32r_mode_class[i] = 0; 341 break; 342 case MODE_CC: 343 m32r_mode_class[i] = 1 << (int) C_MODE; 344 break; 345 default: 346 m32r_mode_class[i] = 0; 347 break; 348 } 349 } 350 351 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 352 { 353 if (GPR_P (i)) 354 m32r_regno_reg_class[i] = GENERAL_REGS; 355 else if (i == ARG_POINTER_REGNUM) 356 m32r_regno_reg_class[i] = GENERAL_REGS; 357 else 358 m32r_regno_reg_class[i] = NO_REGS; 359 } 360 } 361 362 /* M32R specific attribute support. 363 364 interrupt - for interrupt functions 365 366 model - select code model used to access object 367 368 small: addresses use 24 bits, use bl to make calls 369 medium: addresses use 32 bits, use bl to make calls 370 large: addresses use 32 bits, use seth/add3/jl to make calls 371 372 Grep for MODEL in m32r.h for more info. */ 373 374 static tree small_ident1; 375 static tree small_ident2; 376 static tree medium_ident1; 377 static tree medium_ident2; 378 static tree large_ident1; 379 static tree large_ident2; 380 381 static void 382 init_idents (void) 383 { 384 if (small_ident1 == 0) 385 { 386 small_ident1 = get_identifier ("small"); 387 small_ident2 = get_identifier ("__small__"); 388 medium_ident1 = get_identifier ("medium"); 389 medium_ident2 = get_identifier ("__medium__"); 390 large_ident1 = get_identifier ("large"); 391 large_ident2 = get_identifier ("__large__"); 392 } 393 } 394 395 /* Handle an "model" attribute; arguments as in 396 struct attribute_spec.handler. 
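   As an illustrative (hypothetical) example, user code would request a code
   model with a bare identifier argument, e.g.

     int sdata_var __attribute__ ((model (small)));
     void far_func (void) __attribute__ ((model (large)));

   The argument is an identifier rather than a string because
   TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P (m32r_attribute_identifier) accepts
   "model"; the __small__ / __medium__ / __large__ spellings registered by
   init_idents are also recognized.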
*/ 397 static tree 398 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name, 399 tree args, int flags ATTRIBUTE_UNUSED, 400 bool *no_add_attrs) 401 { 402 tree arg; 403 404 init_idents (); 405 arg = TREE_VALUE (args); 406 407 if (arg != small_ident1 408 && arg != small_ident2 409 && arg != medium_ident1 410 && arg != medium_ident2 411 && arg != large_ident1 412 && arg != large_ident2) 413 { 414 warning (OPT_Wattributes, "invalid argument of %qs attribute", 415 IDENTIFIER_POINTER (name)); 416 *no_add_attrs = true; 417 } 418 419 return NULL_TREE; 420 } 421 422 static bool 423 m32r_attribute_identifier (const_tree name) 424 { 425 return strcmp (IDENTIFIER_POINTER (name), "model") == 0 426 || strcmp (IDENTIFIER_POINTER (name), "__model__") == 0; 427 } 428 429 /* Encode section information of DECL, which is either a VAR_DECL, 430 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???. 431 432 For the M32R we want to record: 433 434 - whether the object lives in .sdata/.sbss. 435 - what code model should be used to access the object 436 */ 437 438 static void 439 m32r_encode_section_info (tree decl, rtx rtl, int first) 440 { 441 int extra_flags = 0; 442 tree model_attr; 443 enum m32r_model model; 444 445 default_encode_section_info (decl, rtl, first); 446 447 if (!DECL_P (decl)) 448 return; 449 450 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl)); 451 if (model_attr) 452 { 453 tree id; 454 455 init_idents (); 456 457 id = TREE_VALUE (TREE_VALUE (model_attr)); 458 459 if (id == small_ident1 || id == small_ident2) 460 model = M32R_MODEL_SMALL; 461 else if (id == medium_ident1 || id == medium_ident2) 462 model = M32R_MODEL_MEDIUM; 463 else if (id == large_ident1 || id == large_ident2) 464 model = M32R_MODEL_LARGE; 465 else 466 gcc_unreachable (); /* shouldn't happen */ 467 } 468 else 469 { 470 if (TARGET_MODEL_SMALL) 471 model = M32R_MODEL_SMALL; 472 else if (TARGET_MODEL_MEDIUM) 473 model = M32R_MODEL_MEDIUM; 474 else if (TARGET_MODEL_LARGE) 475 model = M32R_MODEL_LARGE; 476 else 477 gcc_unreachable (); /* shouldn't happen */ 478 } 479 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT; 480 481 if (extra_flags) 482 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags; 483 } 484 485 /* Only mark the object as being small data area addressable if 486 it hasn't been explicitly marked with a code model. 487 488 The user can explicitly put an object in the small data area with the 489 section attribute. If the object is in sdata/sbss and marked with a 490 code model do both [put the object in .sdata and mark it as being 491 addressed with a specific code model - don't mark it as being addressed 492 with an SDA reloc though]. This is ok and might be useful at times. If 493 the object doesn't fit the linker will give an error. */ 494 495 static bool 496 m32r_in_small_data_p (const_tree decl) 497 { 498 const char *section; 499 500 if (TREE_CODE (decl) != VAR_DECL) 501 return false; 502 503 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl))) 504 return false; 505 506 section = DECL_SECTION_NAME (decl); 507 if (section) 508 { 509 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0) 510 return true; 511 } 512 else 513 { 514 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE) 515 { 516 int size = int_size_in_bytes (TREE_TYPE (decl)); 517 518 if (size > 0 && size <= g_switch_value) 519 return true; 520 } 521 } 522 523 return false; 524 } 525 526 /* Do anything needed before RTL is emitted for each function. */ 527 528 void 529 m32r_init_expanders (void) 530 { 531 /* ??? 
At one point there was code here. The function is left in 532 to make it easy to experiment. */ 533 } 534 535 int 536 call_operand (rtx op, machine_mode mode) 537 { 538 if (!MEM_P (op)) 539 return 0; 540 op = XEXP (op, 0); 541 return call_address_operand (op, mode); 542 } 543 544 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */ 545 546 int 547 small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED) 548 { 549 if (! TARGET_SDATA_USE) 550 return 0; 551 552 if (GET_CODE (op) == SYMBOL_REF) 553 return SYMBOL_REF_SMALL_P (op); 554 555 if (GET_CODE (op) == CONST 556 && GET_CODE (XEXP (op, 0)) == PLUS 557 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF 558 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1))) 559 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0)); 560 561 return 0; 562 } 563 564 /* Return 1 if OP is a symbol that can use 24-bit addressing. */ 565 566 int 567 addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED) 568 { 569 rtx sym; 570 571 if (flag_pic) 572 return 0; 573 574 if (GET_CODE (op) == LABEL_REF) 575 return TARGET_ADDR24; 576 577 if (GET_CODE (op) == SYMBOL_REF) 578 sym = op; 579 else if (GET_CODE (op) == CONST 580 && GET_CODE (XEXP (op, 0)) == PLUS 581 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF 582 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1))) 583 sym = XEXP (XEXP (op, 0), 0); 584 else 585 return 0; 586 587 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL) 588 return 1; 589 590 if (TARGET_ADDR24 591 && (CONSTANT_POOL_ADDRESS_P (sym) 592 || LIT_NAME_P (XSTR (sym, 0)))) 593 return 1; 594 595 return 0; 596 } 597 598 /* Return 1 if OP is a symbol that needs 32-bit addressing. */ 599 600 int 601 addr32_operand (rtx op, machine_mode mode) 602 { 603 rtx sym; 604 605 if (GET_CODE (op) == LABEL_REF) 606 return TARGET_ADDR32; 607 608 if (GET_CODE (op) == SYMBOL_REF) 609 sym = op; 610 else if (GET_CODE (op) == CONST 611 && GET_CODE (XEXP (op, 0)) == PLUS 612 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF 613 && CONST_INT_P (XEXP (XEXP (op, 0), 1)) 614 && ! flag_pic) 615 sym = XEXP (XEXP (op, 0), 0); 616 else 617 return 0; 618 619 return (! addr24_operand (sym, mode) 620 && ! small_data_operand (sym, mode)); 621 } 622 623 /* Return 1 if OP is a function that can be called with the `bl' insn. */ 624 625 int 626 call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED) 627 { 628 if (flag_pic) 629 return 1; 630 631 if (GET_CODE (op) == SYMBOL_REF) 632 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE; 633 634 return TARGET_CALL26; 635 } 636 637 /* Return 1 if OP is a DImode const we want to handle inline. 638 This must match the code in the movdi pattern. 639 It is used by the 'G' constraint. */ 640 641 int 642 easy_di_const (rtx op) 643 { 644 rtx high_rtx, low_rtx; 645 HOST_WIDE_INT high, low; 646 647 split_double (op, &high_rtx, &low_rtx); 648 high = INTVAL (high_rtx); 649 low = INTVAL (low_rtx); 650 /* Pick constants loadable with 2 16-bit `ldi' insns. */ 651 if (high >= -128 && high <= 127 652 && low >= -128 && low <= 127) 653 return 1; 654 return 0; 655 } 656 657 /* Return 1 if OP is a DFmode const we want to handle inline. 658 This must match the code in the movdf pattern. 659 It is used by the 'H' constraint. */ 660 661 int 662 easy_df_const (rtx op) 663 { 664 long l[2]; 665 666 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (op), l); 667 if (l[0] == 0 && l[1] == 0) 668 return 1; 669 if ((l[0] & 0xffff) == 0 && l[1] == 0) 670 return 1; 671 return 0; 672 } 673 674 /* Return 1 if OP is (mem (reg ...)). 
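   E.g. a plain register-indirect reference such as @r4 matches, while a
   reg+displacement address such as @(disp,r4) does not (illustrative
   example only).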
675 This is used in insn length calcs. */ 676 677 int 678 memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED) 679 { 680 return MEM_P (op) && REG_P (XEXP (op, 0)); 681 } 682 683 /* Return nonzero if TYPE must be passed by indirect reference. */ 684 685 static bool 686 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED, 687 machine_mode mode, const_tree type, 688 bool named ATTRIBUTE_UNUSED) 689 { 690 int size; 691 692 if (type) 693 size = int_size_in_bytes (type); 694 else 695 size = GET_MODE_SIZE (mode); 696 697 return (size < 0 || size > 8); 698 } 699 700 /* Comparisons. */ 701 702 /* X and Y are two things to compare using CODE. Emit the compare insn and 703 return the rtx for compare [arg0 of the if_then_else]. 704 If need_compare is true then the comparison insn must be generated, rather 705 than being subsumed into the following branch instruction. */ 706 707 rtx 708 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare) 709 { 710 enum rtx_code compare_code; 711 enum rtx_code branch_code; 712 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM); 713 int must_swap = 0; 714 715 switch (code) 716 { 717 case EQ: compare_code = EQ; branch_code = NE; break; 718 case NE: compare_code = EQ; branch_code = EQ; break; 719 case LT: compare_code = LT; branch_code = NE; break; 720 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break; 721 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break; 722 case GE: compare_code = LT; branch_code = EQ; break; 723 case LTU: compare_code = LTU; branch_code = NE; break; 724 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break; 725 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break; 726 case GEU: compare_code = LTU; branch_code = EQ; break; 727 728 default: 729 gcc_unreachable (); 730 } 731 732 if (need_compare) 733 { 734 switch (compare_code) 735 { 736 case EQ: 737 if (satisfies_constraint_P (y) /* Reg equal to small const. */ 738 && y != const0_rtx) 739 { 740 rtx tmp = gen_reg_rtx (SImode); 741 742 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y)))); 743 x = tmp; 744 y = const0_rtx; 745 } 746 else if (CONSTANT_P (y)) /* Reg equal to const. */ 747 { 748 rtx tmp = force_reg (GET_MODE (x), y); 749 y = tmp; 750 } 751 752 if (register_operand (y, SImode) /* Reg equal to reg. */ 753 || y == const0_rtx) /* Reg equal to zero. */ 754 { 755 emit_insn (gen_cmp_eqsi_insn (x, y)); 756 757 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx); 758 } 759 break; 760 761 case LT: 762 if (register_operand (y, SImode) 763 || satisfies_constraint_P (y)) 764 { 765 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */ 766 767 switch (code) 768 { 769 case LT: 770 emit_insn (gen_cmp_ltsi_insn (x, y)); 771 code = EQ; 772 break; 773 case LE: 774 if (y == const0_rtx) 775 tmp = const1_rtx; 776 else 777 emit_insn (gen_addsi3 (tmp, y, constm1_rtx)); 778 emit_insn (gen_cmp_ltsi_insn (x, tmp)); 779 code = EQ; 780 break; 781 case GT: 782 if (CONST_INT_P (y)) 783 tmp = gen_rtx_PLUS (SImode, y, const1_rtx); 784 else 785 emit_insn (gen_addsi3 (tmp, y, constm1_rtx)); 786 emit_insn (gen_cmp_ltsi_insn (x, tmp)); 787 code = NE; 788 break; 789 case GE: 790 emit_insn (gen_cmp_ltsi_insn (x, y)); 791 code = NE; 792 break; 793 default: 794 gcc_unreachable (); 795 } 796 797 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx); 798 } 799 break; 800 801 case LTU: 802 if (register_operand (y, SImode) 803 || satisfies_constraint_P (y)) 804 { 805 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. 
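	   The LTU handling below mirrors the signed LT case above, but uses
	   the cmp_ltusi patterns instead of the cmp_ltsi ones.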
*/ 806 807 switch (code) 808 { 809 case LTU: 810 emit_insn (gen_cmp_ltusi_insn (x, y)); 811 code = EQ; 812 break; 813 case LEU: 814 if (y == const0_rtx) 815 tmp = const1_rtx; 816 else 817 emit_insn (gen_addsi3 (tmp, y, constm1_rtx)); 818 emit_insn (gen_cmp_ltusi_insn (x, tmp)); 819 code = EQ; 820 break; 821 case GTU: 822 if (CONST_INT_P (y)) 823 tmp = gen_rtx_PLUS (SImode, y, const1_rtx); 824 else 825 emit_insn (gen_addsi3 (tmp, y, constm1_rtx)); 826 emit_insn (gen_cmp_ltusi_insn (x, tmp)); 827 code = NE; 828 break; 829 case GEU: 830 emit_insn (gen_cmp_ltusi_insn (x, y)); 831 code = NE; 832 break; 833 default: 834 gcc_unreachable (); 835 } 836 837 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx); 838 } 839 break; 840 841 default: 842 gcc_unreachable (); 843 } 844 } 845 else 846 { 847 /* Reg/reg equal comparison. */ 848 if (compare_code == EQ 849 && register_operand (y, SImode)) 850 return gen_rtx_fmt_ee (code, CCmode, x, y); 851 852 /* Reg/zero signed comparison. */ 853 if ((compare_code == EQ || compare_code == LT) 854 && y == const0_rtx) 855 return gen_rtx_fmt_ee (code, CCmode, x, y); 856 857 /* Reg/smallconst equal comparison. */ 858 if (compare_code == EQ 859 && satisfies_constraint_P (y)) 860 { 861 rtx tmp = gen_reg_rtx (SImode); 862 863 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y)))); 864 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx); 865 } 866 867 /* Reg/const equal comparison. */ 868 if (compare_code == EQ 869 && CONSTANT_P (y)) 870 { 871 rtx tmp = force_reg (GET_MODE (x), y); 872 873 return gen_rtx_fmt_ee (code, CCmode, x, tmp); 874 } 875 } 876 877 if (CONSTANT_P (y)) 878 { 879 if (must_swap) 880 y = force_reg (GET_MODE (x), y); 881 else 882 { 883 int ok_const = reg_or_int16_operand (y, GET_MODE (y)); 884 885 if (! ok_const) 886 y = force_reg (GET_MODE (x), y); 887 } 888 } 889 890 switch (compare_code) 891 { 892 case EQ : 893 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y)); 894 break; 895 case LT : 896 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y)); 897 break; 898 case LTU : 899 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? 
x : y)); 900 break; 901 902 default: 903 gcc_unreachable (); 904 } 905 906 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode)); 907 } 908 909 bool 910 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2) 911 { 912 machine_mode mode = GET_MODE (op0); 913 914 gcc_assert (mode == SImode); 915 switch (code) 916 { 917 case EQ: 918 if (!register_operand (op1, mode)) 919 op1 = force_reg (mode, op1); 920 921 if (TARGET_M32RX || TARGET_M32R2) 922 { 923 if (!reg_or_zero_operand (op2, mode)) 924 op2 = force_reg (mode, op2); 925 926 emit_insn (gen_seq_insn_m32rx (op0, op1, op2)); 927 return true; 928 } 929 if (CONST_INT_P (op2) && INTVAL (op2) == 0) 930 { 931 emit_insn (gen_seq_zero_insn (op0, op1)); 932 return true; 933 } 934 935 if (!reg_or_eq_int16_operand (op2, mode)) 936 op2 = force_reg (mode, op2); 937 938 emit_insn (gen_seq_insn (op0, op1, op2)); 939 return true; 940 941 case NE: 942 if (!CONST_INT_P (op2) 943 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2))) 944 { 945 rtx reg; 946 947 if (reload_completed || reload_in_progress) 948 return false; 949 950 reg = gen_reg_rtx (SImode); 951 emit_insn (gen_xorsi3 (reg, op1, op2)); 952 op1 = reg; 953 954 if (!register_operand (op1, mode)) 955 op1 = force_reg (mode, op1); 956 957 emit_insn (gen_sne_zero_insn (op0, op1)); 958 return true; 959 } 960 return false; 961 962 case LT: 963 case GT: 964 if (code == GT) 965 { 966 rtx tmp = op2; 967 op2 = op1; 968 op1 = tmp; 969 code = LT; 970 } 971 972 if (!register_operand (op1, mode)) 973 op1 = force_reg (mode, op1); 974 975 if (!reg_or_int16_operand (op2, mode)) 976 op2 = force_reg (mode, op2); 977 978 emit_insn (gen_slt_insn (op0, op1, op2)); 979 return true; 980 981 case LTU: 982 case GTU: 983 if (code == GTU) 984 { 985 rtx tmp = op2; 986 op2 = op1; 987 op1 = tmp; 988 code = LTU; 989 } 990 991 if (!register_operand (op1, mode)) 992 op1 = force_reg (mode, op1); 993 994 if (!reg_or_int16_operand (op2, mode)) 995 op2 = force_reg (mode, op2); 996 997 emit_insn (gen_sltu_insn (op0, op1, op2)); 998 return true; 999 1000 case GE: 1001 case GEU: 1002 if (!register_operand (op1, mode)) 1003 op1 = force_reg (mode, op1); 1004 1005 if (!reg_or_int16_operand (op2, mode)) 1006 op2 = force_reg (mode, op2); 1007 1008 if (code == GE) 1009 emit_insn (gen_sge_insn (op0, op1, op2)); 1010 else 1011 emit_insn (gen_sgeu_insn (op0, op1, op2)); 1012 return true; 1013 1014 case LE: 1015 case LEU: 1016 if (!register_operand (op1, mode)) 1017 op1 = force_reg (mode, op1); 1018 1019 if (CONST_INT_P (op2)) 1020 { 1021 HOST_WIDE_INT value = INTVAL (op2); 1022 if (value >= 2147483647) 1023 { 1024 emit_move_insn (op0, const1_rtx); 1025 return true; 1026 } 1027 1028 op2 = GEN_INT (value + 1); 1029 if (value < -32768 || value >= 32767) 1030 op2 = force_reg (mode, op2); 1031 1032 if (code == LEU) 1033 emit_insn (gen_sltu_insn (op0, op1, op2)); 1034 else 1035 emit_insn (gen_slt_insn (op0, op1, op2)); 1036 return true; 1037 } 1038 1039 if (!register_operand (op2, mode)) 1040 op2 = force_reg (mode, op2); 1041 1042 if (code == LEU) 1043 emit_insn (gen_sleu_insn (op0, op1, op2)); 1044 else 1045 emit_insn (gen_sle_insn (op0, op1, op2)); 1046 return true; 1047 1048 default: 1049 gcc_unreachable (); 1050 } 1051 } 1052 1053 1054 /* Split a 2 word move (DI or DF) into component parts. 
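   As a sketch (not taken from the code below): after reload a DImode
   register copy such as (set (reg:DI 0) (reg:DI 2)) is rewritten into two
   SImode SETs, r0 = r2 followed by r1 = r3, and the copy order is reversed
   when copying the first destination word would clobber a source word that
   is still needed (see the `reverse' handling below).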
*/ 1055 1056 rtx 1057 gen_split_move_double (rtx operands[]) 1058 { 1059 machine_mode mode = GET_MODE (operands[0]); 1060 rtx dest = operands[0]; 1061 rtx src = operands[1]; 1062 rtx val; 1063 1064 /* We might have (SUBREG (MEM)) here, so just get rid of the 1065 subregs to make this code simpler. It is safe to call 1066 alter_subreg any time after reload. */ 1067 if (GET_CODE (dest) == SUBREG) 1068 alter_subreg (&dest, true); 1069 if (GET_CODE (src) == SUBREG) 1070 alter_subreg (&src, true); 1071 1072 start_sequence (); 1073 if (REG_P (dest)) 1074 { 1075 int dregno = REGNO (dest); 1076 1077 /* Reg = reg. */ 1078 if (REG_P (src)) 1079 { 1080 int sregno = REGNO (src); 1081 1082 int reverse = (dregno == sregno + 1); 1083 1084 /* We normally copy the low-numbered register first. However, if 1085 the first register operand 0 is the same as the second register of 1086 operand 1, we must copy in the opposite order. */ 1087 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode), 1088 operand_subword (src, reverse, TRUE, mode))); 1089 1090 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode), 1091 operand_subword (src, !reverse, TRUE, mode))); 1092 } 1093 1094 /* Reg = constant. */ 1095 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE) 1096 { 1097 rtx words[2]; 1098 split_double (src, &words[0], &words[1]); 1099 emit_insn (gen_rtx_SET (operand_subword (dest, 0, TRUE, mode), 1100 words[0])); 1101 1102 emit_insn (gen_rtx_SET (operand_subword (dest, 1, TRUE, mode), 1103 words[1])); 1104 } 1105 1106 /* Reg = mem. */ 1107 else if (MEM_P (src)) 1108 { 1109 /* If the high-address word is used in the address, we must load it 1110 last. Otherwise, load it first. */ 1111 int reverse = refers_to_regno_p (dregno, XEXP (src, 0)); 1112 1113 /* We used to optimize loads from single registers as 1114 1115 ld r1,r3+; ld r2,r3 1116 1117 if r3 were not used subsequently. However, the REG_NOTES aren't 1118 propagated correctly by the reload phase, and it can cause bad 1119 code to be generated. We could still try: 1120 1121 ld r1,r3+; ld r2,r3; addi r3,-4 1122 1123 which saves 2 bytes and doesn't force longword alignment. */ 1124 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode), 1125 adjust_address (src, SImode, 1126 reverse * UNITS_PER_WORD))); 1127 1128 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode), 1129 adjust_address (src, SImode, 1130 !reverse * UNITS_PER_WORD))); 1131 } 1132 else 1133 gcc_unreachable (); 1134 } 1135 1136 /* Mem = reg. */ 1137 /* We used to optimize loads from single registers as 1138 1139 st r1,r3; st r2,+r3 1140 1141 if r3 were not used subsequently. However, the REG_NOTES aren't 1142 propagated correctly by the reload phase, and it can cause bad 1143 code to be generated. We could still try: 1144 1145 st r1,r3; st r2,+r3; addi r3,-4 1146 1147 which saves 2 bytes and doesn't force longword alignment. 
*/ 1148 else if (MEM_P (dest) && REG_P (src)) 1149 { 1150 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, 0), 1151 operand_subword (src, 0, TRUE, mode))); 1152 1153 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, UNITS_PER_WORD), 1154 operand_subword (src, 1, TRUE, mode))); 1155 } 1156 1157 else 1158 gcc_unreachable (); 1159 1160 val = get_insns (); 1161 end_sequence (); 1162 return val; 1163 } 1164 1165 1166 static int 1167 m32r_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode, 1168 tree type, bool named ATTRIBUTE_UNUSED) 1169 { 1170 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); 1171 1172 int words; 1173 unsigned int size = 1174 (((mode == BLKmode && type) 1175 ? (unsigned int) int_size_in_bytes (type) 1176 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1) 1177 / UNITS_PER_WORD; 1178 1179 if (*cum >= M32R_MAX_PARM_REGS) 1180 words = 0; 1181 else if (*cum + size > M32R_MAX_PARM_REGS) 1182 words = (*cum + size) - M32R_MAX_PARM_REGS; 1183 else 1184 words = 0; 1185 1186 return words * UNITS_PER_WORD; 1187 } 1188 1189 /* The ROUND_ADVANCE* macros are local to this file. */ 1190 /* Round SIZE up to a word boundary. */ 1191 #define ROUND_ADVANCE(SIZE) \ 1192 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD) 1193 1194 /* Round arg MODE/TYPE up to the next word boundary. */ 1195 #define ROUND_ADVANCE_ARG(MODE, TYPE) \ 1196 ((MODE) == BLKmode \ 1197 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \ 1198 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE))) 1199 1200 /* Round CUM up to the necessary point for argument MODE/TYPE. */ 1201 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM) 1202 1203 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in 1204 a reg. This includes arguments that have to be passed by reference as the 1205 pointer to them is passed in a reg if one is available (and that is what 1206 we're given). 1207 This macro is only used in this file. */ 1208 #define PASS_IN_REG_P(CUM, MODE, TYPE) \ 1209 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS) 1210 1211 /* Determine where to put an argument to a function. 1212 Value is zero to push the argument on the stack, 1213 or a hard register in which to store the argument. 1214 1215 MODE is the argument's machine mode. 1216 TYPE is the data type of the argument (as a tree). 1217 This is null for libcalls where that information may 1218 not be available. 1219 CUM is a variable of type CUMULATIVE_ARGS which gives info about 1220 the preceding args and about the function being called. 1221 NAMED is nonzero if this argument is a named parameter 1222 (otherwise it is an extra parameter matching an ellipsis). */ 1223 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers 1224 and the rest are pushed. */ 1225 1226 static rtx 1227 m32r_function_arg (cumulative_args_t cum_v, machine_mode mode, 1228 const_tree type ATTRIBUTE_UNUSED, 1229 bool named ATTRIBUTE_UNUSED) 1230 { 1231 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); 1232 1233 return (PASS_IN_REG_P (*cum, mode, type) 1234 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type)) 1235 : NULL_RTX); 1236 } 1237 1238 /* Update the data in CUM to advance over an argument 1239 of mode MODE and data type TYPE. 1240 (TYPE is null for libcalls where that information may not be available.) 
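   As an illustration (not from the original sources): a 4-byte int advances
   CUM by one word while an 8-byte long long or double advances it by two,
   so leading arguments occupy consecutive registers until M32R_MAX_PARM_REGS
   words have been used and the remainder go on the stack.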
*/ 1241 1242 static void 1243 m32r_function_arg_advance (cumulative_args_t cum_v, machine_mode mode, 1244 const_tree type, bool named ATTRIBUTE_UNUSED) 1245 { 1246 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); 1247 1248 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type) 1249 + ROUND_ADVANCE_ARG (mode, type)); 1250 } 1251 1252 /* Worker function for TARGET_RETURN_IN_MEMORY. */ 1253 1254 static bool 1255 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED) 1256 { 1257 cumulative_args_t dummy = pack_cumulative_args (NULL); 1258 1259 return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false); 1260 } 1261 1262 /* Worker function for TARGET_FUNCTION_VALUE. */ 1263 1264 static rtx 1265 m32r_function_value (const_tree valtype, 1266 const_tree fn_decl_or_type ATTRIBUTE_UNUSED, 1267 bool outgoing ATTRIBUTE_UNUSED) 1268 { 1269 return gen_rtx_REG (TYPE_MODE (valtype), 0); 1270 } 1271 1272 /* Worker function for TARGET_LIBCALL_VALUE. */ 1273 1274 static rtx 1275 m32r_libcall_value (machine_mode mode, 1276 const_rtx fun ATTRIBUTE_UNUSED) 1277 { 1278 return gen_rtx_REG (mode, 0); 1279 } 1280 1281 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. 1282 1283 ??? What about r1 in DI/DF values. */ 1284 1285 static bool 1286 m32r_function_value_regno_p (const unsigned int regno) 1287 { 1288 return (regno == 0); 1289 } 1290 1291 /* Do any needed setup for a variadic function. For the M32R, we must 1292 create a register parameter block, and then copy any anonymous arguments 1293 in registers to memory. 1294 1295 CUM has not been updated for the last named argument which has type TYPE 1296 and mode MODE, and we rely on this fact. */ 1297 1298 static void 1299 m32r_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode, 1300 tree type, int *pretend_size, int no_rtl) 1301 { 1302 int first_anon_arg; 1303 1304 if (no_rtl) 1305 return; 1306 1307 /* All BLKmode values are passed by reference. */ 1308 gcc_assert (mode != BLKmode); 1309 1310 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type) 1311 + ROUND_ADVANCE_ARG (mode, type)); 1312 1313 if (first_anon_arg < M32R_MAX_PARM_REGS) 1314 { 1315 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */ 1316 int first_reg_offset = first_anon_arg; 1317 /* Size in words to "pretend" allocate. */ 1318 int size = M32R_MAX_PARM_REGS - first_reg_offset; 1319 rtx regblock; 1320 1321 regblock = gen_frame_mem (BLKmode, 1322 plus_constant (Pmode, arg_pointer_rtx, 1323 FIRST_PARM_OFFSET (0))); 1324 set_mem_alias_set (regblock, get_varargs_alias_set ()); 1325 move_block_from_reg (first_reg_offset, regblock, size); 1326 1327 *pretend_size = (size * UNITS_PER_WORD); 1328 } 1329 } 1330 1331 1332 /* Return true if INSN is real instruction bearing insn. */ 1333 1334 static int 1335 m32r_is_insn (rtx insn) 1336 { 1337 return (NONDEBUG_INSN_P (insn) 1338 && GET_CODE (PATTERN (insn)) != USE 1339 && GET_CODE (PATTERN (insn)) != CLOBBER); 1340 } 1341 1342 /* Increase the priority of long instructions so that the 1343 short instructions are scheduled ahead of the long ones. */ 1344 1345 static int 1346 m32r_adjust_priority (rtx_insn *insn, int priority) 1347 { 1348 if (m32r_is_insn (insn) 1349 && get_attr_insn_size (insn) != INSN_SIZE_SHORT) 1350 priority <<= 3; 1351 1352 return priority; 1353 } 1354 1355 1356 /* Indicate how many instructions can be issued at the same time. 1357 This is sort of a lie. The m32r can issue only 1 long insn at 1358 once, but it can issue 2 short insns. 
The default therefore is 1359 set at 2, but this can be overridden by the command line option 1360 -missue-rate=1. */ 1361 1362 static int 1363 m32r_issue_rate (void) 1364 { 1365 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2); 1366 } 1367 1368 /* Cost functions. */ 1369 /* Memory is 3 times as expensive as registers. 1370 ??? Is that the right way to look at it? */ 1371 1372 static int 1373 m32r_memory_move_cost (machine_mode mode, 1374 reg_class_t rclass ATTRIBUTE_UNUSED, 1375 bool in ATTRIBUTE_UNUSED) 1376 { 1377 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD) 1378 return 6; 1379 else 1380 return 12; 1381 } 1382 1383 static bool 1384 m32r_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED, 1385 int outer_code ATTRIBUTE_UNUSED, 1386 int opno ATTRIBUTE_UNUSED, int *total, 1387 bool speed ATTRIBUTE_UNUSED) 1388 { 1389 int code = GET_CODE (x); 1390 1391 switch (code) 1392 { 1393 /* Small integers are as cheap as registers. 4 byte values can be 1394 fetched as immediate constants - let's give that the cost of an 1395 extra insn. */ 1396 case CONST_INT: 1397 if (INT16_P (INTVAL (x))) 1398 { 1399 *total = 0; 1400 return true; 1401 } 1402 /* FALLTHRU */ 1403 1404 case CONST: 1405 case LABEL_REF: 1406 case SYMBOL_REF: 1407 *total = COSTS_N_INSNS (1); 1408 return true; 1409 1410 case CONST_DOUBLE: 1411 { 1412 rtx high, low; 1413 1414 split_double (x, &high, &low); 1415 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high)) 1416 + !INT16_P (INTVAL (low))); 1417 return true; 1418 } 1419 1420 case MULT: 1421 *total = COSTS_N_INSNS (3); 1422 return true; 1423 1424 case DIV: 1425 case UDIV: 1426 case MOD: 1427 case UMOD: 1428 *total = COSTS_N_INSNS (10); 1429 return true; 1430 1431 default: 1432 return false; 1433 } 1434 } 1435 1436 /* Type of function DECL. 1437 1438 The result is cached. To reset the cache at the end of a function, 1439 call with DECL = NULL_TREE. */ 1440 1441 enum m32r_function_type 1442 m32r_compute_function_type (tree decl) 1443 { 1444 /* Cached value. */ 1445 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN; 1446 /* Last function we were called for. */ 1447 static tree last_fn = NULL_TREE; 1448 1449 /* Resetting the cached value? */ 1450 if (decl == NULL_TREE) 1451 { 1452 fn_type = M32R_FUNCTION_UNKNOWN; 1453 last_fn = NULL_TREE; 1454 return fn_type; 1455 } 1456 1457 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN) 1458 return fn_type; 1459 1460 /* Compute function type. */ 1461 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE 1462 ? M32R_FUNCTION_INTERRUPT 1463 : M32R_FUNCTION_NORMAL); 1464 1465 last_fn = decl; 1466 return fn_type; 1467 } 1468 /* Function prologue/epilogue handlers. */ 1469 1470 /* M32R stack frames look like: 1471 1472 Before call After call 1473 +-----------------------+ +-----------------------+ 1474 | | | | 1475 high | local variables, | | local variables, | 1476 mem | reg save area, etc. | | reg save area, etc. | 1477 | | | | 1478 +-----------------------+ +-----------------------+ 1479 | | | | 1480 | arguments on stack. | | arguments on stack. 
| 1481 | | | | 1482 SP+0->+-----------------------+ +-----------------------+ 1483 | reg parm save area, | 1484 | only created for | 1485 | variable argument | 1486 | functions | 1487 +-----------------------+ 1488 | previous frame ptr | 1489 +-----------------------+ 1490 | | 1491 | register save area | 1492 | | 1493 +-----------------------+ 1494 | return address | 1495 +-----------------------+ 1496 | | 1497 | local variables | 1498 | | 1499 +-----------------------+ 1500 | | 1501 | alloca allocations | 1502 | | 1503 +-----------------------+ 1504 | | 1505 low | arguments on stack | 1506 memory | | 1507 SP+0->+-----------------------+ 1508 1509 Notes: 1510 1) The "reg parm save area" does not exist for non variable argument fns. 1511 2) The "reg parm save area" can be eliminated completely if we saved regs 1512 containing anonymous args separately but that complicates things too 1513 much (so it's not done). 1514 3) The return address is saved after the register save area so as to have as 1515 many insns as possible between the restoration of `lr' and the `jmp lr'. */ 1516 1517 /* Structure to be filled in by m32r_compute_frame_size with register 1518 save masks, and offsets for the current function. */ 1519 struct m32r_frame_info 1520 { 1521 unsigned int total_size; /* # bytes that the entire frame takes up. */ 1522 unsigned int extra_size; /* # bytes of extra stuff. */ 1523 unsigned int pretend_size; /* # bytes we push and pretend caller did. */ 1524 unsigned int args_size; /* # bytes that outgoing arguments take up. */ 1525 unsigned int reg_size; /* # bytes needed to store regs. */ 1526 unsigned int var_size; /* # bytes that variables take up. */ 1527 unsigned int gmask; /* Mask of saved gp registers. */ 1528 unsigned int save_fp; /* Nonzero if fp must be saved. */ 1529 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */ 1530 int initialized; /* Nonzero if frame size already calculated. */ 1531 }; 1532 1533 /* Current frame information calculated by m32r_compute_frame_size. */ 1534 static struct m32r_frame_info current_frame_info; 1535 1536 /* Zero structure to initialize current_frame_info. */ 1537 static struct m32r_frame_info zero_frame_info; 1538 1539 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM)) 1540 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM)) 1541 1542 /* Tell prologue and epilogue if register REGNO should be saved / restored. 1543 The return address and frame pointer are treated separately. 1544 Don't consider them here. */ 1545 #define MUST_SAVE_REGISTER(regno, interrupt_p) \ 1546 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \ 1547 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p))) 1548 1549 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM)) 1550 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile) 1551 1552 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */ 1553 #define LONG_INSN_SIZE 4 /* Size of long instructions. */ 1554 1555 /* Return the bytes needed to compute the frame pointer from the current 1556 stack pointer. 1557 1558 SIZE is the size needed for local variables. */ 1559 1560 unsigned int 1561 m32r_compute_frame_size (poly_int64 size) /* # of var. bytes allocated. 
*/ 1562 { 1563 unsigned int regno; 1564 unsigned int total_size, var_size, args_size, pretend_size, extra_size; 1565 unsigned int reg_size; 1566 unsigned int gmask; 1567 enum m32r_function_type fn_type; 1568 int interrupt_p; 1569 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table 1570 | crtl->profile); 1571 1572 var_size = M32R_STACK_ALIGN (size); 1573 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size); 1574 pretend_size = crtl->args.pretend_args_size; 1575 extra_size = FIRST_PARM_OFFSET (0); 1576 total_size = extra_size + pretend_size + args_size + var_size; 1577 reg_size = 0; 1578 gmask = 0; 1579 1580 /* See if this is an interrupt handler. Call used registers must be saved 1581 for them too. */ 1582 fn_type = m32r_compute_function_type (current_function_decl); 1583 interrupt_p = M32R_INTERRUPT_P (fn_type); 1584 1585 /* Calculate space needed for registers. */ 1586 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++) 1587 { 1588 if (MUST_SAVE_REGISTER (regno, interrupt_p) 1589 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used)) 1590 { 1591 reg_size += UNITS_PER_WORD; 1592 gmask |= 1 << regno; 1593 } 1594 } 1595 1596 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER; 1597 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used; 1598 1599 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr) 1600 * UNITS_PER_WORD); 1601 total_size += reg_size; 1602 1603 /* ??? Not sure this is necessary, and I don't think the epilogue 1604 handler will do the right thing if this changes total_size. */ 1605 total_size = M32R_STACK_ALIGN (total_size); 1606 1607 /* frame_size = total_size - (pretend_size + reg_size); */ 1608 1609 /* Save computed information. */ 1610 current_frame_info.total_size = total_size; 1611 current_frame_info.extra_size = extra_size; 1612 current_frame_info.pretend_size = pretend_size; 1613 current_frame_info.var_size = var_size; 1614 current_frame_info.args_size = args_size; 1615 current_frame_info.reg_size = reg_size; 1616 current_frame_info.gmask = gmask; 1617 current_frame_info.initialized = reload_completed; 1618 1619 /* Ok, we're done. */ 1620 return total_size; 1621 } 1622 1623 /* Worker function for TARGET_CAN_ELIMINATE. */ 1624 1625 bool 1626 m32r_can_eliminate (const int from, const int to) 1627 { 1628 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM 1629 ? ! frame_pointer_needed 1630 : true); 1631 } 1632 1633 1634 /* The table we use to reference PIC data. */ 1635 static rtx global_offset_table; 1636 1637 static void 1638 m32r_reload_lr (rtx sp, int size) 1639 { 1640 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM); 1641 1642 if (size == 0) 1643 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp))); 1644 else if (size < 32768) 1645 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, 1646 gen_rtx_PLUS (Pmode, sp, 1647 GEN_INT (size))))); 1648 else 1649 { 1650 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1651 1652 emit_insn (gen_movsi (tmp, GEN_INT (size))); 1653 emit_insn (gen_addsi3 (tmp, tmp, sp)); 1654 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp))); 1655 } 1656 1657 emit_use (lr); 1658 } 1659 1660 void 1661 m32r_load_pic_register (void) 1662 { 1663 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_"); 1664 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table, 1665 GEN_INT (TARGET_MODEL_SMALL))); 1666 1667 /* Need to emit this whether or not we obey regdecls, 1668 since setjmp/longjmp can cause life info to screw up. 
*/ 1669 emit_use (pic_offset_table_rtx); 1670 } 1671 1672 /* Expand the m32r prologue as a series of insns. */ 1673 1674 void 1675 m32r_expand_prologue (void) 1676 { 1677 int regno; 1678 int frame_size; 1679 unsigned int gmask; 1680 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table 1681 | crtl->profile); 1682 1683 if (! current_frame_info.initialized) 1684 m32r_compute_frame_size (get_frame_size ()); 1685 1686 if (flag_stack_usage_info) 1687 current_function_static_stack_size = current_frame_info.total_size; 1688 1689 gmask = current_frame_info.gmask; 1690 1691 /* These cases shouldn't happen. Catch them now. */ 1692 gcc_assert (current_frame_info.total_size || !gmask); 1693 1694 /* Allocate space for register arguments if this is a variadic function. */ 1695 if (current_frame_info.pretend_size != 0) 1696 { 1697 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives 1698 the wrong result on a 64-bit host. */ 1699 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size; 1700 emit_insn (gen_addsi3 (stack_pointer_rtx, 1701 stack_pointer_rtx, 1702 GEN_INT (-pretend_size))); 1703 } 1704 1705 /* Save any registers we need to and set up fp. */ 1706 if (current_frame_info.save_fp) 1707 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx)); 1708 1709 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK); 1710 1711 /* Save any needed call-saved regs (and call-used if this is an 1712 interrupt handler). */ 1713 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno) 1714 { 1715 if ((gmask & (1 << regno)) != 0) 1716 emit_insn (gen_movsi_push (stack_pointer_rtx, 1717 gen_rtx_REG (Pmode, regno))); 1718 } 1719 1720 if (current_frame_info.save_lr) 1721 emit_insn (gen_movsi_push (stack_pointer_rtx, 1722 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM))); 1723 1724 /* Allocate the stack frame. */ 1725 frame_size = (current_frame_info.total_size 1726 - (current_frame_info.pretend_size 1727 + current_frame_info.reg_size)); 1728 1729 if (frame_size == 0) 1730 ; /* Nothing to do. */ 1731 else if (frame_size <= 32768) 1732 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1733 GEN_INT (-frame_size))); 1734 else 1735 { 1736 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1737 1738 emit_insn (gen_movsi (tmp, GEN_INT (frame_size))); 1739 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp)); 1740 } 1741 1742 if (frame_pointer_needed) 1743 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx)); 1744 1745 if (crtl->profile) 1746 /* Push lr for mcount (form_pc, x). */ 1747 emit_insn (gen_movsi_push (stack_pointer_rtx, 1748 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM))); 1749 1750 if (pic_reg_used) 1751 { 1752 m32r_load_pic_register (); 1753 m32r_reload_lr (stack_pointer_rtx, 1754 (crtl->profile ? 0 : frame_size)); 1755 } 1756 1757 if (crtl->profile && !pic_reg_used) 1758 emit_insn (gen_blockage ()); 1759 } 1760 1761 1762 /* Set up the stack and frame pointer (if desired) for the function. 1763 Note, if this is changed, you need to mirror the changes in 1764 m32r_compute_frame_size which calculates the prolog size. */ 1765 1766 static void 1767 m32r_output_function_prologue (FILE * file) 1768 { 1769 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl); 1770 1771 /* If this is an interrupt handler, mark it as such. */ 1772 if (M32R_INTERRUPT_P (fn_type)) 1773 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START); 1774 1775 if (! 
current_frame_info.initialized) 1776 m32r_compute_frame_size (get_frame_size ()); 1777 1778 /* This is only for the human reader. */ 1779 fprintf (file, 1780 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n", 1781 ASM_COMMENT_START, 1782 current_frame_info.var_size, 1783 current_frame_info.reg_size / 4, 1784 current_frame_info.args_size, 1785 current_frame_info.extra_size); 1786 } 1787 1788 /* Output RTL to pop register REGNO from the stack. */ 1789 1790 static void 1791 pop (int regno) 1792 { 1793 rtx x; 1794 1795 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno), 1796 stack_pointer_rtx)); 1797 add_reg_note (x, REG_INC, stack_pointer_rtx); 1798 } 1799 1800 /* Expand the m32r epilogue as a series of insns. */ 1801 1802 void 1803 m32r_expand_epilogue (void) 1804 { 1805 int regno; 1806 int noepilogue = FALSE; 1807 int total_size; 1808 1809 gcc_assert (current_frame_info.initialized); 1810 total_size = current_frame_info.total_size; 1811 1812 if (total_size == 0) 1813 { 1814 rtx_insn *insn = get_last_insn (); 1815 1816 /* If the last insn was a BARRIER, we don't have to write any code 1817 because a jump (aka return) was put there. */ 1818 if (insn && NOTE_P (insn)) 1819 insn = prev_nonnote_insn (insn); 1820 if (insn && BARRIER_P (insn)) 1821 noepilogue = TRUE; 1822 } 1823 1824 if (!noepilogue) 1825 { 1826 unsigned int var_size = current_frame_info.var_size; 1827 unsigned int args_size = current_frame_info.args_size; 1828 unsigned int gmask = current_frame_info.gmask; 1829 int can_trust_sp_p = !cfun->calls_alloca; 1830 1831 if (flag_exceptions) 1832 emit_insn (gen_blockage ()); 1833 1834 /* The first thing to do is point the sp at the bottom of the register 1835 save area. */ 1836 if (can_trust_sp_p) 1837 { 1838 unsigned int reg_offset = var_size + args_size; 1839 1840 if (reg_offset == 0) 1841 ; /* Nothing to do. */ 1842 else if (reg_offset < 32768) 1843 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1844 GEN_INT (reg_offset))); 1845 else 1846 { 1847 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1848 1849 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset))); 1850 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1851 tmp)); 1852 } 1853 } 1854 else if (frame_pointer_needed) 1855 { 1856 unsigned int reg_offset = var_size + args_size; 1857 1858 if (reg_offset == 0) 1859 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx)); 1860 else if (reg_offset < 32768) 1861 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx, 1862 GEN_INT (reg_offset))); 1863 else 1864 { 1865 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1866 1867 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset))); 1868 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx)); 1869 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1870 tmp)); 1871 } 1872 } 1873 else 1874 gcc_unreachable (); 1875 1876 if (current_frame_info.save_lr) 1877 pop (RETURN_ADDR_REGNUM); 1878 1879 /* Restore any saved registers, in reverse order of course. */ 1880 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK); 1881 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno) 1882 { 1883 if ((gmask & (1L << regno)) != 0) 1884 pop (regno); 1885 } 1886 1887 if (current_frame_info.save_fp) 1888 pop (FRAME_POINTER_REGNUM); 1889 1890 /* Remove varargs area if present. 
*/ 1891 if (current_frame_info.pretend_size != 0) 1892 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1893 GEN_INT (current_frame_info.pretend_size))); 1894 1895 emit_insn (gen_blockage ()); 1896 } 1897 } 1898 1899 /* Do any necessary cleanup after a function to restore stack, frame, 1900 and regs. */ 1901 1902 static void 1903 m32r_output_function_epilogue (FILE *) 1904 { 1905 /* Reset state info for each function. */ 1906 current_frame_info = zero_frame_info; 1907 m32r_compute_function_type (NULL_TREE); 1908 } 1909 1910 /* Return nonzero if this function is known to have a null or 1 instruction 1911 epilogue. */ 1912 1913 int 1914 direct_return (void) 1915 { 1916 if (!reload_completed) 1917 return FALSE; 1918 1919 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl))) 1920 return FALSE; 1921 1922 if (! current_frame_info.initialized) 1923 m32r_compute_frame_size (get_frame_size ()); 1924 1925 return current_frame_info.total_size == 0; 1926 } 1927 1928 1929 /* PIC. */ 1930 1931 int 1932 m32r_legitimate_pic_operand_p (rtx x) 1933 { 1934 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF) 1935 return 0; 1936 1937 if (GET_CODE (x) == CONST 1938 && GET_CODE (XEXP (x, 0)) == PLUS 1939 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF 1940 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF) 1941 && (CONST_INT_P (XEXP (XEXP (x, 0), 1)))) 1942 return 0; 1943 1944 return 1; 1945 } 1946 1947 rtx 1948 m32r_legitimize_pic_address (rtx orig, rtx reg) 1949 { 1950 #ifdef DEBUG_PIC 1951 printf("m32r_legitimize_pic_address()\n"); 1952 #endif 1953 1954 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF) 1955 { 1956 rtx pic_ref, address; 1957 int subregs = 0; 1958 1959 if (reg == 0) 1960 { 1961 gcc_assert (!reload_in_progress && !reload_completed); 1962 reg = gen_reg_rtx (Pmode); 1963 1964 subregs = 1; 1965 } 1966 1967 if (subregs) 1968 address = gen_reg_rtx (Pmode); 1969 else 1970 address = reg; 1971 1972 crtl->uses_pic_offset_table = 1; 1973 1974 if (GET_CODE (orig) == LABEL_REF 1975 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig))) 1976 { 1977 emit_insn (gen_gotoff_load_addr (reg, orig)); 1978 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx)); 1979 return reg; 1980 } 1981 1982 emit_insn (gen_pic_load_addr (address, orig)); 1983 1984 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx)); 1985 pic_ref = gen_const_mem (Pmode, address); 1986 emit_move_insn (reg, pic_ref); 1987 return reg; 1988 } 1989 else if (GET_CODE (orig) == CONST) 1990 { 1991 rtx base, offset; 1992 1993 if (GET_CODE (XEXP (orig, 0)) == PLUS 1994 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx) 1995 return orig; 1996 1997 if (reg == 0) 1998 { 1999 gcc_assert (!reload_in_progress && !reload_completed); 2000 reg = gen_reg_rtx (Pmode); 2001 } 2002 2003 if (GET_CODE (XEXP (orig, 0)) == PLUS) 2004 { 2005 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg); 2006 if (base == reg) 2007 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX); 2008 else 2009 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg); 2010 } 2011 else 2012 return orig; 2013 2014 if (CONST_INT_P (offset)) 2015 { 2016 if (INT16_P (INTVAL (offset))) 2017 return plus_constant (Pmode, base, INTVAL (offset)); 2018 else 2019 { 2020 gcc_assert (! reload_in_progress && ! 
reload_completed); 2021 offset = force_reg (Pmode, offset); 2022 } 2023 } 2024 2025 return gen_rtx_PLUS (Pmode, base, offset); 2026 } 2027 2028 return orig; 2029 } 2030 2031 static rtx 2032 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED, 2033 machine_mode mode ATTRIBUTE_UNUSED) 2034 { 2035 if (flag_pic) 2036 return m32r_legitimize_pic_address (x, NULL_RTX); 2037 else 2038 return x; 2039 } 2040 2041 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */ 2042 2043 static bool 2044 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED) 2045 { 2046 if (GET_CODE (addr) == LO_SUM) 2047 return true; 2048 2049 return false; 2050 } 2051 2052 /* Nested function support. */ 2053 2054 /* Emit RTL insns to initialize the variable parts of a trampoline. 2055 FNADDR is an RTX for the address of the function's pure code. 2056 CXT is an RTX for the static chain value for the function. */ 2057 2058 void 2059 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED, 2060 rtx fnaddr ATTRIBUTE_UNUSED, 2061 rtx cxt ATTRIBUTE_UNUSED) 2062 { 2063 } 2064 2065 static void 2066 m32r_file_start (void) 2067 { 2068 default_file_start (); 2069 2070 if (flag_verbose_asm) 2071 fprintf (asm_out_file, 2072 "%s M32R/D special options: -G %d\n", 2073 ASM_COMMENT_START, g_switch_value); 2074 2075 if (TARGET_LITTLE_ENDIAN) 2076 fprintf (asm_out_file, "\t.little\n"); 2077 } 2078 2079 /* Print operand X (an rtx) in assembler syntax to file FILE. 2080 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified. 2081 For `%' followed by punctuation, CODE is the punctuation and X is null. */ 2082 2083 static void 2084 m32r_print_operand (FILE * file, rtx x, int code) 2085 { 2086 rtx addr; 2087 2088 switch (code) 2089 { 2090 /* The 's' and 'p' codes are used by output_block_move() to 2091 indicate post-increment 's'tores and 'p're-increment loads. */ 2092 case 's': 2093 if (REG_P (x)) 2094 fprintf (file, "@+%s", reg_names [REGNO (x)]); 2095 else 2096 output_operand_lossage ("invalid operand to %%s code"); 2097 return; 2098 2099 case 'p': 2100 if (REG_P (x)) 2101 fprintf (file, "@%s+", reg_names [REGNO (x)]); 2102 else 2103 output_operand_lossage ("invalid operand to %%p code"); 2104 return; 2105 2106 case 'R' : 2107 /* Write second word of DImode or DFmode reference, 2108 register or memory. */ 2109 if (REG_P (x)) 2110 fputs (reg_names[REGNO (x)+1], file); 2111 else if (MEM_P (x)) 2112 { 2113 machine_mode mode = GET_MODE (x); 2114 2115 fprintf (file, "@("); 2116 /* Handle possible auto-increment. Since it is pre-increment and 2117 we have already done it, we can just use an offset of four. */ 2118 /* ??? This is taken from rs6000.c I think. I don't think it is 2119 currently necessary, but keep it around. */ 2120 if (GET_CODE (XEXP (x, 0)) == PRE_INC 2121 || GET_CODE (XEXP (x, 0)) == PRE_DEC) 2122 output_address (mode, plus_constant (Pmode, 2123 XEXP (XEXP (x, 0), 0), 4)); 2124 else 2125 output_address (mode, plus_constant (Pmode, XEXP (x, 0), 4)); 2126 fputc (')', file); 2127 } 2128 else 2129 output_operand_lossage ("invalid operand to %%R code"); 2130 return; 2131 2132 case 'H' : /* High word. */ 2133 case 'L' : /* Low word. */ 2134 if (REG_P (x)) 2135 { 2136 /* L = least significant word, H = most significant word. 
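     The XOR below selects which register of the pair to print: on a
     big-endian target (WORDS_BIG_ENDIAN) the least significant word lives
     in the second register of the pair, so 'L' prints REGNO+1 and 'H'
     prints REGNO; on a little-endian target the choice is reversed.
     For example (illustrative): for a DImode value held in r4/r5 on a
     big-endian M32R, %H prints "r4" and %L prints "r5".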
*/ 2137 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L')) 2138 fputs (reg_names[REGNO (x)], file); 2139 else 2140 fputs (reg_names[REGNO (x)+1], file); 2141 } 2142 else if (CONST_INT_P (x) 2143 || GET_CODE (x) == CONST_DOUBLE) 2144 { 2145 rtx first, second; 2146 2147 split_double (x, &first, &second); 2148 fprintf (file, HOST_WIDE_INT_PRINT_HEX, 2149 code == 'L' ? INTVAL (first) : INTVAL (second)); 2150 } 2151 else 2152 output_operand_lossage ("invalid operand to %%H/%%L code"); 2153 return; 2154 2155 case 'A' : 2156 { 2157 char str[30]; 2158 2159 if (GET_CODE (x) != CONST_DOUBLE 2160 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT) 2161 fatal_insn ("bad insn for 'A'", x); 2162 2163 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1); 2164 fprintf (file, "%s", str); 2165 return; 2166 } 2167 2168 case 'B' : /* Bottom half. */ 2169 case 'T' : /* Top half. */ 2170 /* Output the argument to a `seth' insn (sets the Top half-word). 2171 For constants output arguments to a seth/or3 pair to set Top and 2172 Bottom halves. For symbols output arguments to a seth/add3 pair to 2173 set Top and Bottom halves. The difference exists because for 2174 constants seth/or3 is more readable but for symbols we need to use 2175 the same scheme as `ld' and `st' insns (16-bit addend is signed). */ 2176 switch (GET_CODE (x)) 2177 { 2178 case CONST_INT : 2179 case CONST_DOUBLE : 2180 { 2181 rtx first, second; 2182 2183 split_double (x, &first, &second); 2184 x = WORDS_BIG_ENDIAN ? second : first; 2185 fprintf (file, HOST_WIDE_INT_PRINT_HEX, 2186 (code == 'B' 2187 ? INTVAL (x) & 0xffff 2188 : (INTVAL (x) >> 16) & 0xffff)); 2189 } 2190 return; 2191 case CONST : 2192 case SYMBOL_REF : 2193 if (code == 'B' 2194 && small_data_operand (x, VOIDmode)) 2195 { 2196 fputs ("sda(", file); 2197 output_addr_const (file, x); 2198 fputc (')', file); 2199 return; 2200 } 2201 /* fall through */ 2202 case LABEL_REF : 2203 fputs (code == 'T' ? "shigh(" : "low(", file); 2204 output_addr_const (file, x); 2205 fputc (')', file); 2206 return; 2207 default : 2208 output_operand_lossage ("invalid operand to %%T/%%B code"); 2209 return; 2210 } 2211 break; 2212 2213 case 'U' : 2214 /* ??? wip */ 2215 /* Output a load/store with update indicator if appropriate. */ 2216 if (MEM_P (x)) 2217 { 2218 if (GET_CODE (XEXP (x, 0)) == PRE_INC 2219 || GET_CODE (XEXP (x, 0)) == PRE_DEC) 2220 fputs (".a", file); 2221 } 2222 else 2223 output_operand_lossage ("invalid operand to %%U code"); 2224 return; 2225 2226 case 'N' : 2227 /* Print a constant value negated. */ 2228 if (CONST_INT_P (x)) 2229 output_addr_const (file, GEN_INT (- INTVAL (x))); 2230 else 2231 output_operand_lossage ("invalid operand to %%N code"); 2232 return; 2233 2234 case 'X' : 2235 /* Print a const_int in hex. Used in comments. */ 2236 if (CONST_INT_P (x)) 2237 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x)); 2238 return; 2239 2240 case '#' : 2241 fputs (IMMEDIATE_PREFIX, file); 2242 return; 2243 2244 case 0 : 2245 /* Do nothing special. */ 2246 break; 2247 2248 default : 2249 /* Unknown flag. 
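     Only the letters handled above are meaningful; templates in this file
     such as "ld\t%5, %p1" and "st\t%6, %s0" (see m32r_output_block_move
     below) rely on the 's' and 'p' codes, and any unrecognized code is
     reported with output_operand_lossage rather than silently ignored.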
*/ 2250 output_operand_lossage ("invalid operand output code"); 2251 } 2252 2253 switch (GET_CODE (x)) 2254 { 2255 case REG : 2256 fputs (reg_names[REGNO (x)], file); 2257 break; 2258 2259 case MEM : 2260 addr = XEXP (x, 0); 2261 if (GET_CODE (addr) == PRE_INC) 2262 { 2263 if (!REG_P (XEXP (addr, 0))) 2264 fatal_insn ("pre-increment address is not a register", x); 2265 2266 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]); 2267 } 2268 else if (GET_CODE (addr) == PRE_DEC) 2269 { 2270 if (!REG_P (XEXP (addr, 0))) 2271 fatal_insn ("pre-decrement address is not a register", x); 2272 2273 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]); 2274 } 2275 else if (GET_CODE (addr) == POST_INC) 2276 { 2277 if (!REG_P (XEXP (addr, 0))) 2278 fatal_insn ("post-increment address is not a register", x); 2279 2280 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]); 2281 } 2282 else 2283 { 2284 fputs ("@(", file); 2285 output_address (GET_MODE (x), addr); 2286 fputc (')', file); 2287 } 2288 break; 2289 2290 case CONST_DOUBLE : 2291 /* We handle SFmode constants here as output_addr_const doesn't. */ 2292 if (GET_MODE (x) == SFmode) 2293 { 2294 long l; 2295 2296 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l); 2297 fprintf (file, "0x%08lx", l); 2298 break; 2299 } 2300 2301 /* FALLTHRU */ 2302 /* Let output_addr_const deal with it. */ 2303 2304 default : 2305 output_addr_const (file, x); 2306 break; 2307 } 2308 } 2309 2310 /* Print a memory address as an operand to reference that memory location. */ 2311 2312 static void 2313 m32r_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr) 2314 { 2315 rtx base; 2316 rtx index = 0; 2317 int offset = 0; 2318 2319 switch (GET_CODE (addr)) 2320 { 2321 case REG : 2322 fputs (reg_names[REGNO (addr)], file); 2323 break; 2324 2325 case PLUS : 2326 if (CONST_INT_P (XEXP (addr, 0))) 2327 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1); 2328 else if (CONST_INT_P (XEXP (addr, 1))) 2329 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0); 2330 else 2331 base = XEXP (addr, 0), index = XEXP (addr, 1); 2332 if (REG_P (base)) 2333 { 2334 /* Print the offset first (if present) to conform to the manual. */ 2335 if (index == 0) 2336 { 2337 if (offset != 0) 2338 fprintf (file, "%d,", offset); 2339 fputs (reg_names[REGNO (base)], file); 2340 } 2341 /* The chip doesn't support this, but left in for generality. */ 2342 else if (REG_P (index)) 2343 fprintf (file, "%s,%s", 2344 reg_names[REGNO (base)], reg_names[REGNO (index)]); 2345 /* Not sure this can happen, but leave in for now. 
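     If it does occur, e.g. for an address of the form
     (plus (reg) (symbol_ref)), the code below prints the symbol first and
     the base register second, giving something like "foo,r4"
     (register name illustrative).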
*/ 2346 else if (GET_CODE (index) == SYMBOL_REF) 2347 { 2348 output_addr_const (file, index); 2349 fputc (',', file); 2350 fputs (reg_names[REGNO (base)], file); 2351 } 2352 else 2353 fatal_insn ("bad address", addr); 2354 } 2355 else if (GET_CODE (base) == LO_SUM) 2356 { 2357 gcc_assert (!index && REG_P (XEXP (base, 0))); 2358 if (small_data_operand (XEXP (base, 1), VOIDmode)) 2359 fputs ("sda(", file); 2360 else 2361 fputs ("low(", file); 2362 output_addr_const (file, plus_constant (Pmode, XEXP (base, 1), 2363 offset)); 2364 fputs ("),", file); 2365 fputs (reg_names[REGNO (XEXP (base, 0))], file); 2366 } 2367 else 2368 fatal_insn ("bad address", addr); 2369 break; 2370 2371 case LO_SUM : 2372 if (!REG_P (XEXP (addr, 0))) 2373 fatal_insn ("lo_sum not of register", addr); 2374 if (small_data_operand (XEXP (addr, 1), VOIDmode)) 2375 fputs ("sda(", file); 2376 else 2377 fputs ("low(", file); 2378 output_addr_const (file, XEXP (addr, 1)); 2379 fputs ("),", file); 2380 fputs (reg_names[REGNO (XEXP (addr, 0))], file); 2381 break; 2382 2383 case PRE_INC : /* Assume SImode. */ 2384 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]); 2385 break; 2386 2387 case PRE_DEC : /* Assume SImode. */ 2388 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]); 2389 break; 2390 2391 case POST_INC : /* Assume SImode. */ 2392 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]); 2393 break; 2394 2395 default : 2396 output_addr_const (file, addr); 2397 break; 2398 } 2399 } 2400 2401 static bool 2402 m32r_print_operand_punct_valid_p (unsigned char code) 2403 { 2404 return m32r_punct_chars[code]; 2405 } 2406 2407 /* Return true if the operands are the constants 0 and 1. */ 2408 2409 int 2410 zero_and_one (rtx operand1, rtx operand2) 2411 { 2412 return 2413 CONST_INT_P (operand1) 2414 && CONST_INT_P (operand2) 2415 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1)) 2416 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0))); 2417 } 2418 2419 /* Generate the correct assembler code to handle the conditional loading of a 2420 value into a register. It is known that the operands satisfy the 2421 conditional_move_operand() function above. The destination is operand[0]. 2422 The condition is operand [1]. The 'true' value is operand [2] and the 2423 'false' value is operand [3]. */ 2424 2425 char * 2426 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED) 2427 { 2428 static char buffer [100]; 2429 const char * dest = reg_names [REGNO (operands [0])]; 2430 2431 buffer [0] = 0; 2432 2433 /* Destination must be a register. */ 2434 gcc_assert (REG_P (operands [0])); 2435 gcc_assert (conditional_move_operand (operands [2], SImode)); 2436 gcc_assert (conditional_move_operand (operands [3], SImode)); 2437 2438 /* Check to see if the test is reversed. */ 2439 if (GET_CODE (operands [1]) == NE) 2440 { 2441 rtx tmp = operands [2]; 2442 operands [2] = operands [3]; 2443 operands [3] = tmp; 2444 } 2445 2446 sprintf (buffer, "mvfc %s, cbr", dest); 2447 2448 /* If the true value was '0' then we need to invert the results of the move. */ 2449 if (INTVAL (operands [2]) == 0) 2450 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1", 2451 dest, dest); 2452 2453 return buffer; 2454 } 2455 2456 /* Returns true if the registers contained in the two 2457 rtl expressions are different. 
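     SUBREGs are looked through first, so (subreg (reg N)) and (reg N)
     compare as the same register.  Anything that is not a register keeps
     its sentinel value (-1 for A, -2 for B), so two non-register operands
     are always reported as different.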
*/ 2458 2459 int 2460 m32r_not_same_reg (rtx a, rtx b) 2461 { 2462 int reg_a = -1; 2463 int reg_b = -2; 2464 2465 while (GET_CODE (a) == SUBREG) 2466 a = SUBREG_REG (a); 2467 2468 if (REG_P (a)) 2469 reg_a = REGNO (a); 2470 2471 while (GET_CODE (b) == SUBREG) 2472 b = SUBREG_REG (b); 2473 2474 if (REG_P (b)) 2475 reg_b = REGNO (b); 2476 2477 return reg_a != reg_b; 2478 } 2479 2480 2481 rtx 2482 m32r_function_symbol (const char *name) 2483 { 2484 int extra_flags = 0; 2485 enum m32r_model model; 2486 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name); 2487 2488 if (TARGET_MODEL_SMALL) 2489 model = M32R_MODEL_SMALL; 2490 else if (TARGET_MODEL_MEDIUM) 2491 model = M32R_MODEL_MEDIUM; 2492 else if (TARGET_MODEL_LARGE) 2493 model = M32R_MODEL_LARGE; 2494 else 2495 gcc_unreachable (); /* Shouldn't happen. */ 2496 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT; 2497 2498 if (extra_flags) 2499 SYMBOL_REF_FLAGS (sym) |= extra_flags; 2500 2501 return sym; 2502 } 2503 2504 /* Use a library function to move some bytes. */ 2505 2506 static void 2507 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx) 2508 { 2509 /* We want to pass the size as Pmode, which will normally be SImode 2510 but will be DImode if we are using 64-bit longs and pointers. */ 2511 if (GET_MODE (bytes_rtx) != VOIDmode 2512 && GET_MODE (bytes_rtx) != Pmode) 2513 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1); 2514 2515 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL, 2516 VOIDmode, dest_reg, Pmode, src_reg, Pmode, 2517 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx, 2518 TYPE_UNSIGNED (sizetype)), 2519 TYPE_MODE (sizetype)); 2520 } 2521 2522 /* Expand string/block move operations. 2523 2524 operands[0] is the pointer to the destination. 2525 operands[1] is the pointer to the source. 2526 operands[2] is the number of bytes to move. 2527 operands[3] is the alignment. 2528 2529 Returns 1 upon success, 0 otherwise. */ 2530 2531 int 2532 m32r_expand_block_move (rtx operands[]) 2533 { 2534 rtx orig_dst = operands[0]; 2535 rtx orig_src = operands[1]; 2536 rtx bytes_rtx = operands[2]; 2537 rtx align_rtx = operands[3]; 2538 int constp = CONST_INT_P (bytes_rtx); 2539 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0; 2540 int align = INTVAL (align_rtx); 2541 int leftover; 2542 rtx src_reg; 2543 rtx dst_reg; 2544 2545 if (constp && bytes <= 0) 2546 return 1; 2547 2548 /* Move the address into scratch registers. */ 2549 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0)); 2550 src_reg = copy_addr_to_reg (XEXP (orig_src, 0)); 2551 2552 if (align > UNITS_PER_WORD) 2553 align = UNITS_PER_WORD; 2554 2555 /* If we prefer size over speed, always use a function call. 2556 If we do not know the size, use a function call. 2557 If the blocks are not word aligned, use a function call. */ 2558 if (optimize_size || ! constp || align != UNITS_PER_WORD) 2559 { 2560 block_move_call (dst_reg, src_reg, bytes_rtx); 2561 return 0; 2562 } 2563 2564 leftover = bytes % MAX_MOVE_BYTES; 2565 bytes -= leftover; 2566 2567 /* If necessary, generate a loop to handle the bulk of the copy. 
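     The shape of the generated code is roughly (illustrative sketch only):

         final_src = src_reg + rounded_total;   /- only if more than one
                                                   iteration is needed -/
       loop:
         copy MAX_MOVE_BYTES via movmemsi_internal, updating the pointer
         registers;
         if (src_reg != final_src) goto loop;

     with any remainder smaller than MAX_MOVE_BYTES copied afterwards.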
*/ 2568 if (bytes) 2569 { 2570 rtx_code_label *label = NULL; 2571 rtx final_src = NULL_RTX; 2572 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES); 2573 rtx rounded_total = GEN_INT (bytes); 2574 rtx new_dst_reg = gen_reg_rtx (SImode); 2575 rtx new_src_reg = gen_reg_rtx (SImode); 2576 2577 /* If we are going to have to perform this loop more than 2578 once, then generate a label and compute the address the 2579 source register will contain upon completion of the final 2580 iteration. */ 2581 if (bytes > MAX_MOVE_BYTES) 2582 { 2583 final_src = gen_reg_rtx (Pmode); 2584 2585 if (INT16_P(bytes)) 2586 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total)); 2587 else 2588 { 2589 emit_insn (gen_movsi (final_src, rounded_total)); 2590 emit_insn (gen_addsi3 (final_src, final_src, src_reg)); 2591 } 2592 2593 label = gen_label_rtx (); 2594 emit_label (label); 2595 } 2596 2597 /* It is known that output_block_move() will update src_reg to point 2598 to the word after the end of the source block, and dst_reg to point 2599 to the last word of the destination block, provided that the block 2600 is MAX_MOVE_BYTES long. */ 2601 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time, 2602 new_dst_reg, new_src_reg)); 2603 emit_move_insn (dst_reg, new_dst_reg); 2604 emit_move_insn (src_reg, new_src_reg); 2605 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4))); 2606 2607 if (bytes > MAX_MOVE_BYTES) 2608 { 2609 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src); 2610 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label)); 2611 } 2612 } 2613 2614 if (leftover) 2615 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover), 2616 gen_reg_rtx (SImode), 2617 gen_reg_rtx (SImode))); 2618 return 1; 2619 } 2620 2621 2622 /* Emit load/stores for a small constant word aligned block_move. 2623 2624 operands[0] is the memory address of the destination. 2625 operands[1] is the memory address of the source. 2626 operands[2] is the number of bytes to move. 2627 operands[3] is a temp register. 2628 operands[4] is a temp register. */ 2629 2630 void 2631 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[]) 2632 { 2633 HOST_WIDE_INT bytes = INTVAL (operands[2]); 2634 int first_time; 2635 int got_extra = 0; 2636 2637 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES); 2638 2639 /* We do not have a post-increment store available, so the first set of 2640 stores are done without any increment, then the remaining ones can use 2641 the pre-increment addressing mode. 2642 2643 Note: expand_block_move() also relies upon this behavior when building 2644 loops to copy large blocks. 
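     With the 's' and 'p' operand codes defined in m32r_print_operand above,
     the first 8-byte chunk comes out roughly as (register numbers purely
     illustrative; r5/r6 stand for the temporaries, r8/r9 for the
     destination and source pointers):

         ld   r5, @r9+        ; ld %5, %p1  (post-increment load)
         ld   r6, @r9+        ; ld %6, %p1
         st   r5, @r8         ; st %5, @%0  (first store, no update)
         st   r6, @+r8        ; st %6, %s0  (pre-increment store)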
*/
  first_time = 1;

  while (bytes > 0)
    {
      if (bytes >= 8)
	{
	  if (first_time)
	    {
	      output_asm_insn ("ld\t%5, %p1", operands);
	      output_asm_insn ("ld\t%6, %p1", operands);
	      output_asm_insn ("st\t%5, @%0", operands);
	      output_asm_insn ("st\t%6, %s0", operands);
	    }
	  else
	    {
	      output_asm_insn ("ld\t%5, %p1", operands);
	      output_asm_insn ("ld\t%6, %p1", operands);
	      output_asm_insn ("st\t%5, %s0", operands);
	      output_asm_insn ("st\t%6, %s0", operands);
	    }

	  bytes -= 8;
	}
      else if (bytes >= 4)
	{
	  if (bytes > 4)
	    got_extra = 1;

	  output_asm_insn ("ld\t%5, %p1", operands);

	  if (got_extra)
	    output_asm_insn ("ld\t%6, %p1", operands);

	  if (first_time)
	    output_asm_insn ("st\t%5, @%0", operands);
	  else
	    output_asm_insn ("st\t%5, %s0", operands);

	  bytes -= 4;
	}
      else
	{
	  /* Get the entire next word, even though we do not want all of it.
	     This saves us from doing several smaller loads, and we assume that
	     we cannot cause a page fault when at least part of the word is in
	     valid memory [since we don't get called if things aren't properly
	     aligned].  */
	  int dst_offset = first_time ? 0 : 4;
	  /* The amount of increment we have to make to the
	     destination pointer.  */
	  int dst_inc_amount = dst_offset + bytes - 4;
	  /* The same for the source pointer.  */
	  int src_inc_amount = bytes;
	  int last_shift;
	  rtx my_operands[3];

	  /* If got_extra is true then we have already loaded
	     the next word as part of loading and storing the previous word.  */
	  if (! got_extra)
	    output_asm_insn ("ld\t%6, @%1", operands);

	  if (bytes >= 2)
	    {
	      bytes -= 2;

	      output_asm_insn ("sra3\t%5, %6, #16", operands);
	      my_operands[0] = operands[5];
	      my_operands[1] = GEN_INT (dst_offset);
	      my_operands[2] = operands[0];
	      output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);

	      /* If there is a byte left to store then increment the
		 destination address and shift the contents of the source
		 register down by 8 bits.  We could not do the address
		 increment in the store half word instruction, because it does
		 not have an auto increment mode.  */
	      if (bytes > 0)	/* assert (bytes == 1) */
		{
		  dst_offset += 2;
		  last_shift = 8;
		}
	    }
	  else
	    last_shift = 24;

	  if (bytes > 0)
	    {
	      my_operands[0] = operands[6];
	      my_operands[1] = GEN_INT (last_shift);
	      output_asm_insn ("srai\t%0, #%1", my_operands);
	      my_operands[0] = operands[6];
	      my_operands[1] = GEN_INT (dst_offset);
	      my_operands[2] = operands[0];
	      output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
	    }

	  /* Update the destination pointer if needed.  We have to do
	     this so that the pattern matches what we output in this
	     function.  */
	  if (dst_inc_amount
	      && !find_reg_note (insn, REG_UNUSED, operands[0]))
	    {
	      my_operands[0] = operands[0];
	      my_operands[1] = GEN_INT (dst_inc_amount);
	      output_asm_insn ("addi\t%0, #%1", my_operands);
	    }

	  /* Update the source pointer if needed.  We have to do this
	     so that the pattern matches what we output in this
	     function.
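	     If the final value of the pointer is not needed (the insn
	     carries a REG_UNUSED note for that register), the correction is
	     skipped, since nothing after this insn looks at where the
	     pointer ended up.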
*/ 2755 if (src_inc_amount 2756 && !find_reg_note (insn, REG_UNUSED, operands[1])) 2757 { 2758 my_operands[0] = operands[1]; 2759 my_operands[1] = GEN_INT (src_inc_amount); 2760 output_asm_insn ("addi\t%0, #%1", my_operands); 2761 } 2762 2763 bytes = 0; 2764 } 2765 2766 first_time = 0; 2767 } 2768 } 2769 2770 /* Implement TARGET_HARD_REGNO_MODE_OK. */ 2771 2772 static bool 2773 m32r_hard_regno_mode_ok (unsigned int regno, machine_mode mode) 2774 { 2775 return (m32r_hard_regno_modes[regno] & m32r_mode_class[mode]) != 0; 2776 } 2777 2778 /* Implement TARGET_MODES_TIEABLE_P. Tie QI/HI/SI modes together. */ 2779 2780 static bool 2781 m32r_modes_tieable_p (machine_mode mode1, machine_mode mode2) 2782 { 2783 return (GET_MODE_CLASS (mode1) == MODE_INT 2784 && GET_MODE_CLASS (mode2) == MODE_INT 2785 && GET_MODE_SIZE (mode1) <= UNITS_PER_WORD 2786 && GET_MODE_SIZE (mode2) <= UNITS_PER_WORD); 2787 } 2788 2789 /* Return true if using NEW_REG in place of OLD_REG is ok. */ 2790 2791 int 2792 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED, 2793 unsigned int new_reg) 2794 { 2795 /* Interrupt routines can't clobber any register that isn't already used. */ 2796 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) 2797 && !df_regs_ever_live_p (new_reg)) 2798 return 0; 2799 2800 return 1; 2801 } 2802 2803 rtx 2804 m32r_return_addr (int count) 2805 { 2806 if (count != 0) 2807 return const0_rtx; 2808 2809 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM); 2810 } 2811 2812 static void 2813 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value) 2814 { 2815 emit_move_insn (adjust_address (m_tramp, SImode, 0), 2816 gen_int_mode (TARGET_LITTLE_ENDIAN ? 2817 0x017e8e17 : 0x178e7e01, SImode)); 2818 emit_move_insn (adjust_address (m_tramp, SImode, 4), 2819 gen_int_mode (TARGET_LITTLE_ENDIAN ? 2820 0x0c00ae86 : 0x86ae000c, SImode)); 2821 emit_move_insn (adjust_address (m_tramp, SImode, 8), 2822 gen_int_mode (TARGET_LITTLE_ENDIAN ? 2823 0xe627871e : 0x1e8727e6, SImode)); 2824 emit_move_insn (adjust_address (m_tramp, SImode, 12), 2825 gen_int_mode (TARGET_LITTLE_ENDIAN ? 2826 0xc616c626 : 0x26c61fc6, SImode)); 2827 emit_move_insn (adjust_address (m_tramp, SImode, 16), 2828 chain_value); 2829 emit_move_insn (adjust_address (m_tramp, SImode, 20), 2830 XEXP (DECL_RTL (fndecl), 0)); 2831 2832 if (m32r_cache_flush_trap >= 0) 2833 emit_insn (gen_flush_icache 2834 (validize_mem (adjust_address (m_tramp, SImode, 0)), 2835 gen_int_mode (m32r_cache_flush_trap, SImode))); 2836 else if (m32r_cache_flush_func && m32r_cache_flush_func[0]) 2837 emit_library_call (m32r_function_symbol (m32r_cache_flush_func), 2838 LCT_NORMAL, VOIDmode, XEXP (m_tramp, 0), Pmode, 2839 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode, 2840 GEN_INT (3), SImode); 2841 } 2842 2843 /* True if X is a reg that can be used as a base reg. */ 2844 2845 static bool 2846 m32r_rtx_ok_for_base_p (const_rtx x, bool strict) 2847 { 2848 if (! REG_P (x)) 2849 return false; 2850 2851 if (strict) 2852 { 2853 if (GPR_P (REGNO (x))) 2854 return true; 2855 } 2856 else 2857 { 2858 if (GPR_P (REGNO (x)) 2859 || REGNO (x) == ARG_POINTER_REGNUM 2860 || ! 
HARD_REGISTER_P (x)) 2861 return true; 2862 } 2863 2864 return false; 2865 } 2866 2867 static inline bool 2868 m32r_rtx_ok_for_offset_p (const_rtx x) 2869 { 2870 return (CONST_INT_P (x) && INT16_P (INTVAL (x))); 2871 } 2872 2873 static inline bool 2874 m32r_legitimate_offset_addres_p (machine_mode mode ATTRIBUTE_UNUSED, 2875 const_rtx x, bool strict) 2876 { 2877 if (GET_CODE (x) == PLUS 2878 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict) 2879 && m32r_rtx_ok_for_offset_p (XEXP (x, 1))) 2880 return true; 2881 2882 return false; 2883 } 2884 2885 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word, 2886 since more than one instruction will be required. */ 2887 2888 static inline bool 2889 m32r_legitimate_lo_sum_addres_p (machine_mode mode, const_rtx x, 2890 bool strict) 2891 { 2892 if (GET_CODE (x) == LO_SUM 2893 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD) 2894 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict) 2895 && CONSTANT_P (XEXP (x, 1))) 2896 return true; 2897 2898 return false; 2899 } 2900 2901 /* Is this a load and increment operation. */ 2902 2903 static inline bool 2904 m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict) 2905 { 2906 if ((mode == SImode || mode == SFmode) 2907 && GET_CODE (x) == POST_INC 2908 && REG_P (XEXP (x, 0)) 2909 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)) 2910 return true; 2911 2912 return false; 2913 } 2914 2915 /* Is this an increment/decrement and store operation. */ 2916 2917 static inline bool 2918 m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict) 2919 { 2920 if ((mode == SImode || mode == SFmode) 2921 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC) 2922 && REG_P (XEXP (x, 0)) \ 2923 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)) 2924 return true; 2925 2926 return false; 2927 } 2928 2929 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */ 2930 2931 static bool 2932 m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict) 2933 { 2934 if (m32r_rtx_ok_for_base_p (x, strict) 2935 || m32r_legitimate_offset_addres_p (mode, x, strict) 2936 || m32r_legitimate_lo_sum_addres_p (mode, x, strict) 2937 || m32r_load_postinc_p (mode, x, strict) 2938 || m32r_store_preinc_predec_p (mode, x, strict)) 2939 return true; 2940 2941 return false; 2942 } 2943 2944 static void 2945 m32r_conditional_register_usage (void) 2946 { 2947 if (flag_pic) 2948 { 2949 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1; 2950 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1; 2951 } 2952 } 2953 2954 /* Implement TARGET_LEGITIMATE_CONSTANT_P 2955 2956 We don't allow (plus symbol large-constant) as the relocations can't 2957 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations. 2958 We allow all CONST_DOUBLE's as the md file patterns will force the 2959 constant to memory if they can't handle them. */ 2960 2961 static bool 2962 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x) 2963 { 2964 return !(GET_CODE (x) == CONST 2965 && GET_CODE (XEXP (x, 0)) == PLUS 2966 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF 2967 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF) 2968 && CONST_INT_P (XEXP (XEXP (x, 0), 1)) 2969 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767); 2970 } 2971 2972 /* Implement TARGET_STARTING_FRAME_OFFSET. The frame pointer points at 2973 the same place as the stack pointer, except if alloca has been called. */ 2974 2975 static HOST_WIDE_INT 2976 m32r_starting_frame_offset (void) 2977 { 2978 return M32R_STACK_ALIGN (crtl->outgoing_args_size); 2979 } 2980