1 /* Subroutines used for code generation on the Renesas M32R cpu. 2 Copyright (C) 1996-2015 Free Software Foundation, Inc. 3 4 This file is part of GCC. 5 6 GCC is free software; you can redistribute it and/or modify it 7 under the terms of the GNU General Public License as published 8 by the Free Software Foundation; either version 3, or (at your 9 option) any later version. 10 11 GCC is distributed in the hope that it will be useful, but WITHOUT 12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY 13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public 14 License for more details. 15 16 You should have received a copy of the GNU General Public License 17 along with GCC; see the file COPYING3. If not see 18 <http://www.gnu.org/licenses/>. */ 19 20 #include "config.h" 21 #include "system.h" 22 #include "coretypes.h" 23 #include "tm.h" 24 #include "hash-set.h" 25 #include "machmode.h" 26 #include "vec.h" 27 #include "double-int.h" 28 #include "input.h" 29 #include "alias.h" 30 #include "symtab.h" 31 #include "wide-int.h" 32 #include "inchash.h" 33 #include "tree.h" 34 #include "stor-layout.h" 35 #include "varasm.h" 36 #include "stringpool.h" 37 #include "calls.h" 38 #include "rtl.h" 39 #include "regs.h" 40 #include "hard-reg-set.h" 41 #include "insn-config.h" 42 #include "conditions.h" 43 #include "output.h" 44 #include "dbxout.h" 45 #include "insn-attr.h" 46 #include "flags.h" 47 #include "hashtab.h" 48 #include "function.h" 49 #include "statistics.h" 50 #include "real.h" 51 #include "fixed-value.h" 52 #include "expmed.h" 53 #include "dojump.h" 54 #include "explow.h" 55 #include "emit-rtl.h" 56 #include "stmt.h" 57 #include "expr.h" 58 #include "recog.h" 59 #include "diagnostic-core.h" 60 #include "ggc.h" 61 #include "dominance.h" 62 #include "cfg.h" 63 #include "cfgrtl.h" 64 #include "cfganal.h" 65 #include "lcm.h" 66 #include "cfgbuild.h" 67 #include "cfgcleanup.h" 68 #include "predict.h" 69 #include "basic-block.h" 70 #include "df.h" 71 #include "tm_p.h" 72 #include "target.h" 73 #include "target-def.h" 74 #include "tm-constrs.h" 75 #include "opts.h" 76 #include "builtins.h" 77 78 /* Array of valid operand punctuation characters. */ 79 static char m32r_punct_chars[256]; 80 81 /* Machine-specific symbol_ref flags. */ 82 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT 83 #define SYMBOL_REF_MODEL(X) \ 84 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3)) 85 86 /* For string literals, etc. */ 87 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.') 88 89 /* Forward declaration. 
*/ 90 static void m32r_option_override (void); 91 static void init_reg_tables (void); 92 static void block_move_call (rtx, rtx, rtx); 93 static int m32r_is_insn (rtx); 94 static bool m32r_legitimate_address_p (machine_mode, rtx, bool); 95 static rtx m32r_legitimize_address (rtx, rtx, machine_mode); 96 static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t); 97 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *); 98 static void m32r_print_operand (FILE *, rtx, int); 99 static void m32r_print_operand_address (FILE *, rtx); 100 static bool m32r_print_operand_punct_valid_p (unsigned char code); 101 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT); 102 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT); 103 104 static void m32r_file_start (void); 105 106 static int m32r_adjust_priority (rtx_insn *, int); 107 static int m32r_issue_rate (void); 108 109 static void m32r_encode_section_info (tree, rtx, int); 110 static bool m32r_in_small_data_p (const_tree); 111 static bool m32r_return_in_memory (const_tree, const_tree); 112 static rtx m32r_function_value (const_tree, const_tree, bool); 113 static rtx m32r_libcall_value (machine_mode, const_rtx); 114 static bool m32r_function_value_regno_p (const unsigned int); 115 static void m32r_setup_incoming_varargs (cumulative_args_t, machine_mode, 116 tree, int *, int); 117 static void init_idents (void); 118 static bool m32r_rtx_costs (rtx, int, int, int, int *, bool speed); 119 static int m32r_memory_move_cost (machine_mode, reg_class_t, bool); 120 static bool m32r_pass_by_reference (cumulative_args_t, machine_mode, 121 const_tree, bool); 122 static int m32r_arg_partial_bytes (cumulative_args_t, machine_mode, 123 tree, bool); 124 static rtx m32r_function_arg (cumulative_args_t, machine_mode, 125 const_tree, bool); 126 static void m32r_function_arg_advance (cumulative_args_t, machine_mode, 127 const_tree, bool); 128 static bool m32r_can_eliminate (const int, const int); 129 static void m32r_conditional_register_usage (void); 130 static void m32r_trampoline_init (rtx, tree, rtx); 131 static bool m32r_legitimate_constant_p (machine_mode, rtx); 132 133 /* M32R specific attributes. */ 134 135 static const struct attribute_spec m32r_attribute_table[] = 136 { 137 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler, 138 affects_type_identity } */ 139 { "interrupt", 0, 0, true, false, false, NULL, false }, 140 { "model", 1, 1, true, false, false, m32r_handle_model_attribute, 141 false }, 142 { NULL, 0, 0, false, false, false, NULL, false } 143 }; 144 145 /* Initialize the GCC target structure. 
*/ 146 #undef TARGET_ATTRIBUTE_TABLE 147 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table 148 149 #undef TARGET_LEGITIMATE_ADDRESS_P 150 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p 151 #undef TARGET_LEGITIMIZE_ADDRESS 152 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address 153 #undef TARGET_MODE_DEPENDENT_ADDRESS_P 154 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p 155 156 #undef TARGET_ASM_ALIGNED_HI_OP 157 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t" 158 #undef TARGET_ASM_ALIGNED_SI_OP 159 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t" 160 161 #undef TARGET_PRINT_OPERAND 162 #define TARGET_PRINT_OPERAND m32r_print_operand 163 #undef TARGET_PRINT_OPERAND_ADDRESS 164 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address 165 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P 166 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p 167 168 #undef TARGET_ASM_FUNCTION_PROLOGUE 169 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue 170 #undef TARGET_ASM_FUNCTION_EPILOGUE 171 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue 172 173 #undef TARGET_ASM_FILE_START 174 #define TARGET_ASM_FILE_START m32r_file_start 175 176 #undef TARGET_SCHED_ADJUST_PRIORITY 177 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority 178 #undef TARGET_SCHED_ISSUE_RATE 179 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate 180 181 #undef TARGET_OPTION_OVERRIDE 182 #define TARGET_OPTION_OVERRIDE m32r_option_override 183 184 #undef TARGET_ENCODE_SECTION_INFO 185 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info 186 #undef TARGET_IN_SMALL_DATA_P 187 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p 188 189 190 #undef TARGET_MEMORY_MOVE_COST 191 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost 192 #undef TARGET_RTX_COSTS 193 #define TARGET_RTX_COSTS m32r_rtx_costs 194 #undef TARGET_ADDRESS_COST 195 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0 196 197 #undef TARGET_PROMOTE_PROTOTYPES 198 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true 199 #undef TARGET_RETURN_IN_MEMORY 200 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory 201 202 #undef TARGET_FUNCTION_VALUE 203 #define TARGET_FUNCTION_VALUE m32r_function_value 204 #undef TARGET_LIBCALL_VALUE 205 #define TARGET_LIBCALL_VALUE m32r_libcall_value 206 #undef TARGET_FUNCTION_VALUE_REGNO_P 207 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p 208 209 #undef TARGET_SETUP_INCOMING_VARARGS 210 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs 211 #undef TARGET_MUST_PASS_IN_STACK 212 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size 213 #undef TARGET_PASS_BY_REFERENCE 214 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference 215 #undef TARGET_ARG_PARTIAL_BYTES 216 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes 217 #undef TARGET_FUNCTION_ARG 218 #define TARGET_FUNCTION_ARG m32r_function_arg 219 #undef TARGET_FUNCTION_ARG_ADVANCE 220 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance 221 222 #undef TARGET_CAN_ELIMINATE 223 #define TARGET_CAN_ELIMINATE m32r_can_eliminate 224 225 #undef TARGET_CONDITIONAL_REGISTER_USAGE 226 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage 227 228 #undef TARGET_TRAMPOLINE_INIT 229 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init 230 231 #undef TARGET_LEGITIMATE_CONSTANT_P 232 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p 233 234 struct gcc_target targetm = TARGET_INITIALIZER; 235 236 /* Called by 
m32r_option_override to initialize various things. */ 237 238 void 239 m32r_init (void) 240 { 241 init_reg_tables (); 242 243 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */ 244 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars)); 245 m32r_punct_chars['#'] = 1; 246 m32r_punct_chars['@'] = 1; /* ??? no longer used */ 247 248 /* Provide default value if not specified. */ 249 if (!global_options_set.x_g_switch_value) 250 g_switch_value = SDATA_DEFAULT_SIZE; 251 } 252 253 static void 254 m32r_option_override (void) 255 { 256 /* These need to be done at start up. 257 It's convenient to do them here. */ 258 m32r_init (); 259 SUBTARGET_OVERRIDE_OPTIONS; 260 } 261 262 /* Vectors to keep interesting information about registers where it can easily 263 be got. We use to use the actual mode value as the bit number, but there 264 is (or may be) more than 32 modes now. Instead we use two tables: one 265 indexed by hard register number, and one indexed by mode. */ 266 267 /* The purpose of m32r_mode_class is to shrink the range of modes so that 268 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is 269 mapped into one m32r_mode_class mode. */ 270 271 enum m32r_mode_class 272 { 273 C_MODE, 274 S_MODE, D_MODE, T_MODE, O_MODE, 275 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE 276 }; 277 278 /* Modes for condition codes. */ 279 #define C_MODES (1 << (int) C_MODE) 280 281 /* Modes for single-word and smaller quantities. */ 282 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE)) 283 284 /* Modes for double-word and smaller quantities. */ 285 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE)) 286 287 /* Modes for quad-word and smaller quantities. */ 288 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE)) 289 290 /* Modes for accumulators. */ 291 #define A_MODES (1 << (int) A_MODE) 292 293 /* Value is 1 if register/mode pair is acceptable on arc. 
*/ 294 295 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] = 296 { 297 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, 298 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES, 299 S_MODES, C_MODES, A_MODES, A_MODES 300 }; 301 302 unsigned int m32r_mode_class [NUM_MACHINE_MODES]; 303 304 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER]; 305 306 static void 307 init_reg_tables (void) 308 { 309 int i; 310 311 for (i = 0; i < NUM_MACHINE_MODES; i++) 312 { 313 machine_mode m = (machine_mode) i; 314 315 switch (GET_MODE_CLASS (m)) 316 { 317 case MODE_INT: 318 case MODE_PARTIAL_INT: 319 case MODE_COMPLEX_INT: 320 if (GET_MODE_SIZE (m) <= 4) 321 m32r_mode_class[i] = 1 << (int) S_MODE; 322 else if (GET_MODE_SIZE (m) == 8) 323 m32r_mode_class[i] = 1 << (int) D_MODE; 324 else if (GET_MODE_SIZE (m) == 16) 325 m32r_mode_class[i] = 1 << (int) T_MODE; 326 else if (GET_MODE_SIZE (m) == 32) 327 m32r_mode_class[i] = 1 << (int) O_MODE; 328 else 329 m32r_mode_class[i] = 0; 330 break; 331 case MODE_FLOAT: 332 case MODE_COMPLEX_FLOAT: 333 if (GET_MODE_SIZE (m) <= 4) 334 m32r_mode_class[i] = 1 << (int) SF_MODE; 335 else if (GET_MODE_SIZE (m) == 8) 336 m32r_mode_class[i] = 1 << (int) DF_MODE; 337 else if (GET_MODE_SIZE (m) == 16) 338 m32r_mode_class[i] = 1 << (int) TF_MODE; 339 else if (GET_MODE_SIZE (m) == 32) 340 m32r_mode_class[i] = 1 << (int) OF_MODE; 341 else 342 m32r_mode_class[i] = 0; 343 break; 344 case MODE_CC: 345 m32r_mode_class[i] = 1 << (int) C_MODE; 346 break; 347 default: 348 m32r_mode_class[i] = 0; 349 break; 350 } 351 } 352 353 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 354 { 355 if (GPR_P (i)) 356 m32r_regno_reg_class[i] = GENERAL_REGS; 357 else if (i == ARG_POINTER_REGNUM) 358 m32r_regno_reg_class[i] = GENERAL_REGS; 359 else 360 m32r_regno_reg_class[i] = NO_REGS; 361 } 362 } 363 364 /* M32R specific attribute support. 365 366 interrupt - for interrupt functions 367 368 model - select code model used to access object 369 370 small: addresses use 24 bits, use bl to make calls 371 medium: addresses use 32 bits, use bl to make calls 372 large: addresses use 32 bits, use seth/add3/jl to make calls 373 374 Grep for MODEL in m32r.h for more info. */ 375 376 static tree small_ident1; 377 static tree small_ident2; 378 static tree medium_ident1; 379 static tree medium_ident2; 380 static tree large_ident1; 381 static tree large_ident2; 382 383 static void 384 init_idents (void) 385 { 386 if (small_ident1 == 0) 387 { 388 small_ident1 = get_identifier ("small"); 389 small_ident2 = get_identifier ("__small__"); 390 medium_ident1 = get_identifier ("medium"); 391 medium_ident2 = get_identifier ("__medium__"); 392 large_ident1 = get_identifier ("large"); 393 large_ident2 = get_identifier ("__large__"); 394 } 395 } 396 397 /* Handle an "model" attribute; arguments as in 398 struct attribute_spec.handler. 
*/ 399 static tree 400 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name, 401 tree args, int flags ATTRIBUTE_UNUSED, 402 bool *no_add_attrs) 403 { 404 tree arg; 405 406 init_idents (); 407 arg = TREE_VALUE (args); 408 409 if (arg != small_ident1 410 && arg != small_ident2 411 && arg != medium_ident1 412 && arg != medium_ident2 413 && arg != large_ident1 414 && arg != large_ident2) 415 { 416 warning (OPT_Wattributes, "invalid argument of %qs attribute", 417 IDENTIFIER_POINTER (name)); 418 *no_add_attrs = true; 419 } 420 421 return NULL_TREE; 422 } 423 424 /* Encode section information of DECL, which is either a VAR_DECL, 425 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???. 426 427 For the M32R we want to record: 428 429 - whether the object lives in .sdata/.sbss. 430 - what code model should be used to access the object 431 */ 432 433 static void 434 m32r_encode_section_info (tree decl, rtx rtl, int first) 435 { 436 int extra_flags = 0; 437 tree model_attr; 438 enum m32r_model model; 439 440 default_encode_section_info (decl, rtl, first); 441 442 if (!DECL_P (decl)) 443 return; 444 445 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl)); 446 if (model_attr) 447 { 448 tree id; 449 450 init_idents (); 451 452 id = TREE_VALUE (TREE_VALUE (model_attr)); 453 454 if (id == small_ident1 || id == small_ident2) 455 model = M32R_MODEL_SMALL; 456 else if (id == medium_ident1 || id == medium_ident2) 457 model = M32R_MODEL_MEDIUM; 458 else if (id == large_ident1 || id == large_ident2) 459 model = M32R_MODEL_LARGE; 460 else 461 gcc_unreachable (); /* shouldn't happen */ 462 } 463 else 464 { 465 if (TARGET_MODEL_SMALL) 466 model = M32R_MODEL_SMALL; 467 else if (TARGET_MODEL_MEDIUM) 468 model = M32R_MODEL_MEDIUM; 469 else if (TARGET_MODEL_LARGE) 470 model = M32R_MODEL_LARGE; 471 else 472 gcc_unreachable (); /* shouldn't happen */ 473 } 474 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT; 475 476 if (extra_flags) 477 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags; 478 } 479 480 /* Only mark the object as being small data area addressable if 481 it hasn't been explicitly marked with a code model. 482 483 The user can explicitly put an object in the small data area with the 484 section attribute. If the object is in sdata/sbss and marked with a 485 code model do both [put the object in .sdata and mark it as being 486 addressed with a specific code model - don't mark it as being addressed 487 with an SDA reloc though]. This is ok and might be useful at times. If 488 the object doesn't fit the linker will give an error. */ 489 490 static bool 491 m32r_in_small_data_p (const_tree decl) 492 { 493 const char *section; 494 495 if (TREE_CODE (decl) != VAR_DECL) 496 return false; 497 498 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl))) 499 return false; 500 501 section = DECL_SECTION_NAME (decl); 502 if (section) 503 { 504 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0) 505 return true; 506 } 507 else 508 { 509 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE) 510 { 511 int size = int_size_in_bytes (TREE_TYPE (decl)); 512 513 if (size > 0 && size <= g_switch_value) 514 return true; 515 } 516 } 517 518 return false; 519 } 520 521 /* Do anything needed before RTL is emitted for each function. */ 522 523 void 524 m32r_init_expanders (void) 525 { 526 /* ??? At one point there was code here. The function is left in 527 to make it easy to experiment. 
*/ 528 } 529 530 int 531 call_operand (rtx op, machine_mode mode) 532 { 533 if (!MEM_P (op)) 534 return 0; 535 op = XEXP (op, 0); 536 return call_address_operand (op, mode); 537 } 538 539 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */ 540 541 int 542 small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED) 543 { 544 if (! TARGET_SDATA_USE) 545 return 0; 546 547 if (GET_CODE (op) == SYMBOL_REF) 548 return SYMBOL_REF_SMALL_P (op); 549 550 if (GET_CODE (op) == CONST 551 && GET_CODE (XEXP (op, 0)) == PLUS 552 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF 553 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1))) 554 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0)); 555 556 return 0; 557 } 558 559 /* Return 1 if OP is a symbol that can use 24-bit addressing. */ 560 561 int 562 addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED) 563 { 564 rtx sym; 565 566 if (flag_pic) 567 return 0; 568 569 if (GET_CODE (op) == LABEL_REF) 570 return TARGET_ADDR24; 571 572 if (GET_CODE (op) == SYMBOL_REF) 573 sym = op; 574 else if (GET_CODE (op) == CONST 575 && GET_CODE (XEXP (op, 0)) == PLUS 576 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF 577 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1))) 578 sym = XEXP (XEXP (op, 0), 0); 579 else 580 return 0; 581 582 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL) 583 return 1; 584 585 if (TARGET_ADDR24 586 && (CONSTANT_POOL_ADDRESS_P (sym) 587 || LIT_NAME_P (XSTR (sym, 0)))) 588 return 1; 589 590 return 0; 591 } 592 593 /* Return 1 if OP is a symbol that needs 32-bit addressing. */ 594 595 int 596 addr32_operand (rtx op, machine_mode mode) 597 { 598 rtx sym; 599 600 if (GET_CODE (op) == LABEL_REF) 601 return TARGET_ADDR32; 602 603 if (GET_CODE (op) == SYMBOL_REF) 604 sym = op; 605 else if (GET_CODE (op) == CONST 606 && GET_CODE (XEXP (op, 0)) == PLUS 607 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF 608 && CONST_INT_P (XEXP (XEXP (op, 0), 1)) 609 && ! flag_pic) 610 sym = XEXP (XEXP (op, 0), 0); 611 else 612 return 0; 613 614 return (! addr24_operand (sym, mode) 615 && ! small_data_operand (sym, mode)); 616 } 617 618 /* Return 1 if OP is a function that can be called with the `bl' insn. */ 619 620 int 621 call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED) 622 { 623 if (flag_pic) 624 return 1; 625 626 if (GET_CODE (op) == SYMBOL_REF) 627 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE; 628 629 return TARGET_CALL26; 630 } 631 632 /* Return 1 if OP is a DImode const we want to handle inline. 633 This must match the code in the movdi pattern. 634 It is used by the 'G' constraint. */ 635 636 int 637 easy_di_const (rtx op) 638 { 639 rtx high_rtx, low_rtx; 640 HOST_WIDE_INT high, low; 641 642 split_double (op, &high_rtx, &low_rtx); 643 high = INTVAL (high_rtx); 644 low = INTVAL (low_rtx); 645 /* Pick constants loadable with 2 16-bit `ldi' insns. */ 646 if (high >= -128 && high <= 127 647 && low >= -128 && low <= 127) 648 return 1; 649 return 0; 650 } 651 652 /* Return 1 if OP is a DFmode const we want to handle inline. 653 This must match the code in the movdf pattern. 654 It is used by the 'H' constraint. */ 655 656 int 657 easy_df_const (rtx op) 658 { 659 REAL_VALUE_TYPE r; 660 long l[2]; 661 662 REAL_VALUE_FROM_CONST_DOUBLE (r, op); 663 REAL_VALUE_TO_TARGET_DOUBLE (r, l); 664 if (l[0] == 0 && l[1] == 0) 665 return 1; 666 if ((l[0] & 0xffff) == 0 && l[1] == 0) 667 return 1; 668 return 0; 669 } 670 671 /* Return 1 if OP is (mem (reg ...)). 672 This is used in insn length calcs. 
*/ 673 674 int 675 memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED) 676 { 677 return MEM_P (op) && REG_P (XEXP (op, 0)); 678 } 679 680 /* Return nonzero if TYPE must be passed by indirect reference. */ 681 682 static bool 683 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED, 684 machine_mode mode, const_tree type, 685 bool named ATTRIBUTE_UNUSED) 686 { 687 int size; 688 689 if (type) 690 size = int_size_in_bytes (type); 691 else 692 size = GET_MODE_SIZE (mode); 693 694 return (size < 0 || size > 8); 695 } 696 697 /* Comparisons. */ 698 699 /* X and Y are two things to compare using CODE. Emit the compare insn and 700 return the rtx for compare [arg0 of the if_then_else]. 701 If need_compare is true then the comparison insn must be generated, rather 702 than being subsumed into the following branch instruction. */ 703 704 rtx 705 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare) 706 { 707 enum rtx_code compare_code; 708 enum rtx_code branch_code; 709 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM); 710 int must_swap = 0; 711 712 switch (code) 713 { 714 case EQ: compare_code = EQ; branch_code = NE; break; 715 case NE: compare_code = EQ; branch_code = EQ; break; 716 case LT: compare_code = LT; branch_code = NE; break; 717 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break; 718 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break; 719 case GE: compare_code = LT; branch_code = EQ; break; 720 case LTU: compare_code = LTU; branch_code = NE; break; 721 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break; 722 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break; 723 case GEU: compare_code = LTU; branch_code = EQ; break; 724 725 default: 726 gcc_unreachable (); 727 } 728 729 if (need_compare) 730 { 731 switch (compare_code) 732 { 733 case EQ: 734 if (satisfies_constraint_P (y) /* Reg equal to small const. */ 735 && y != const0_rtx) 736 { 737 rtx tmp = gen_reg_rtx (SImode); 738 739 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y)))); 740 x = tmp; 741 y = const0_rtx; 742 } 743 else if (CONSTANT_P (y)) /* Reg equal to const. */ 744 { 745 rtx tmp = force_reg (GET_MODE (x), y); 746 y = tmp; 747 } 748 749 if (register_operand (y, SImode) /* Reg equal to reg. */ 750 || y == const0_rtx) /* Reg equal to zero. */ 751 { 752 emit_insn (gen_cmp_eqsi_insn (x, y)); 753 754 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx); 755 } 756 break; 757 758 case LT: 759 if (register_operand (y, SImode) 760 || satisfies_constraint_P (y)) 761 { 762 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */ 763 764 switch (code) 765 { 766 case LT: 767 emit_insn (gen_cmp_ltsi_insn (x, y)); 768 code = EQ; 769 break; 770 case LE: 771 if (y == const0_rtx) 772 tmp = const1_rtx; 773 else 774 emit_insn (gen_addsi3 (tmp, y, constm1_rtx)); 775 emit_insn (gen_cmp_ltsi_insn (x, tmp)); 776 code = EQ; 777 break; 778 case GT: 779 if (CONST_INT_P (y)) 780 tmp = gen_rtx_PLUS (SImode, y, const1_rtx); 781 else 782 emit_insn (gen_addsi3 (tmp, y, constm1_rtx)); 783 emit_insn (gen_cmp_ltsi_insn (x, tmp)); 784 code = NE; 785 break; 786 case GE: 787 emit_insn (gen_cmp_ltsi_insn (x, y)); 788 code = NE; 789 break; 790 default: 791 gcc_unreachable (); 792 } 793 794 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx); 795 } 796 break; 797 798 case LTU: 799 if (register_operand (y, SImode) 800 || satisfies_constraint_P (y)) 801 { 802 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. 
*/ 803 804 switch (code) 805 { 806 case LTU: 807 emit_insn (gen_cmp_ltusi_insn (x, y)); 808 code = EQ; 809 break; 810 case LEU: 811 if (y == const0_rtx) 812 tmp = const1_rtx; 813 else 814 emit_insn (gen_addsi3 (tmp, y, constm1_rtx)); 815 emit_insn (gen_cmp_ltusi_insn (x, tmp)); 816 code = EQ; 817 break; 818 case GTU: 819 if (CONST_INT_P (y)) 820 tmp = gen_rtx_PLUS (SImode, y, const1_rtx); 821 else 822 emit_insn (gen_addsi3 (tmp, y, constm1_rtx)); 823 emit_insn (gen_cmp_ltusi_insn (x, tmp)); 824 code = NE; 825 break; 826 case GEU: 827 emit_insn (gen_cmp_ltusi_insn (x, y)); 828 code = NE; 829 break; 830 default: 831 gcc_unreachable (); 832 } 833 834 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx); 835 } 836 break; 837 838 default: 839 gcc_unreachable (); 840 } 841 } 842 else 843 { 844 /* Reg/reg equal comparison. */ 845 if (compare_code == EQ 846 && register_operand (y, SImode)) 847 return gen_rtx_fmt_ee (code, CCmode, x, y); 848 849 /* Reg/zero signed comparison. */ 850 if ((compare_code == EQ || compare_code == LT) 851 && y == const0_rtx) 852 return gen_rtx_fmt_ee (code, CCmode, x, y); 853 854 /* Reg/smallconst equal comparison. */ 855 if (compare_code == EQ 856 && satisfies_constraint_P (y)) 857 { 858 rtx tmp = gen_reg_rtx (SImode); 859 860 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y)))); 861 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx); 862 } 863 864 /* Reg/const equal comparison. */ 865 if (compare_code == EQ 866 && CONSTANT_P (y)) 867 { 868 rtx tmp = force_reg (GET_MODE (x), y); 869 870 return gen_rtx_fmt_ee (code, CCmode, x, tmp); 871 } 872 } 873 874 if (CONSTANT_P (y)) 875 { 876 if (must_swap) 877 y = force_reg (GET_MODE (x), y); 878 else 879 { 880 int ok_const = reg_or_int16_operand (y, GET_MODE (y)); 881 882 if (! ok_const) 883 y = force_reg (GET_MODE (x), y); 884 } 885 } 886 887 switch (compare_code) 888 { 889 case EQ : 890 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y)); 891 break; 892 case LT : 893 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y)); 894 break; 895 case LTU : 896 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? 
x : y)); 897 break; 898 899 default: 900 gcc_unreachable (); 901 } 902 903 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode)); 904 } 905 906 bool 907 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2) 908 { 909 machine_mode mode = GET_MODE (op0); 910 911 gcc_assert (mode == SImode); 912 switch (code) 913 { 914 case EQ: 915 if (!register_operand (op1, mode)) 916 op1 = force_reg (mode, op1); 917 918 if (TARGET_M32RX || TARGET_M32R2) 919 { 920 if (!reg_or_zero_operand (op2, mode)) 921 op2 = force_reg (mode, op2); 922 923 emit_insn (gen_seq_insn_m32rx (op0, op1, op2)); 924 return true; 925 } 926 if (CONST_INT_P (op2) && INTVAL (op2) == 0) 927 { 928 emit_insn (gen_seq_zero_insn (op0, op1)); 929 return true; 930 } 931 932 if (!reg_or_eq_int16_operand (op2, mode)) 933 op2 = force_reg (mode, op2); 934 935 emit_insn (gen_seq_insn (op0, op1, op2)); 936 return true; 937 938 case NE: 939 if (!CONST_INT_P (op2) 940 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2))) 941 { 942 rtx reg; 943 944 if (reload_completed || reload_in_progress) 945 return false; 946 947 reg = gen_reg_rtx (SImode); 948 emit_insn (gen_xorsi3 (reg, op1, op2)); 949 op1 = reg; 950 951 if (!register_operand (op1, mode)) 952 op1 = force_reg (mode, op1); 953 954 emit_insn (gen_sne_zero_insn (op0, op1)); 955 return true; 956 } 957 return false; 958 959 case LT: 960 case GT: 961 if (code == GT) 962 { 963 rtx tmp = op2; 964 op2 = op1; 965 op1 = tmp; 966 code = LT; 967 } 968 969 if (!register_operand (op1, mode)) 970 op1 = force_reg (mode, op1); 971 972 if (!reg_or_int16_operand (op2, mode)) 973 op2 = force_reg (mode, op2); 974 975 emit_insn (gen_slt_insn (op0, op1, op2)); 976 return true; 977 978 case LTU: 979 case GTU: 980 if (code == GTU) 981 { 982 rtx tmp = op2; 983 op2 = op1; 984 op1 = tmp; 985 code = LTU; 986 } 987 988 if (!register_operand (op1, mode)) 989 op1 = force_reg (mode, op1); 990 991 if (!reg_or_int16_operand (op2, mode)) 992 op2 = force_reg (mode, op2); 993 994 emit_insn (gen_sltu_insn (op0, op1, op2)); 995 return true; 996 997 case GE: 998 case GEU: 999 if (!register_operand (op1, mode)) 1000 op1 = force_reg (mode, op1); 1001 1002 if (!reg_or_int16_operand (op2, mode)) 1003 op2 = force_reg (mode, op2); 1004 1005 if (code == GE) 1006 emit_insn (gen_sge_insn (op0, op1, op2)); 1007 else 1008 emit_insn (gen_sgeu_insn (op0, op1, op2)); 1009 return true; 1010 1011 case LE: 1012 case LEU: 1013 if (!register_operand (op1, mode)) 1014 op1 = force_reg (mode, op1); 1015 1016 if (CONST_INT_P (op2)) 1017 { 1018 HOST_WIDE_INT value = INTVAL (op2); 1019 if (value >= 2147483647) 1020 { 1021 emit_move_insn (op0, const1_rtx); 1022 return true; 1023 } 1024 1025 op2 = GEN_INT (value + 1); 1026 if (value < -32768 || value >= 32767) 1027 op2 = force_reg (mode, op2); 1028 1029 if (code == LEU) 1030 emit_insn (gen_sltu_insn (op0, op1, op2)); 1031 else 1032 emit_insn (gen_slt_insn (op0, op1, op2)); 1033 return true; 1034 } 1035 1036 if (!register_operand (op2, mode)) 1037 op2 = force_reg (mode, op2); 1038 1039 if (code == LEU) 1040 emit_insn (gen_sleu_insn (op0, op1, op2)); 1041 else 1042 emit_insn (gen_sle_insn (op0, op1, op2)); 1043 return true; 1044 1045 default: 1046 gcc_unreachable (); 1047 } 1048 } 1049 1050 1051 /* Split a 2 word move (DI or DF) into component parts. 
*/ 1052 1053 rtx 1054 gen_split_move_double (rtx operands[]) 1055 { 1056 machine_mode mode = GET_MODE (operands[0]); 1057 rtx dest = operands[0]; 1058 rtx src = operands[1]; 1059 rtx val; 1060 1061 /* We might have (SUBREG (MEM)) here, so just get rid of the 1062 subregs to make this code simpler. It is safe to call 1063 alter_subreg any time after reload. */ 1064 if (GET_CODE (dest) == SUBREG) 1065 alter_subreg (&dest, true); 1066 if (GET_CODE (src) == SUBREG) 1067 alter_subreg (&src, true); 1068 1069 start_sequence (); 1070 if (REG_P (dest)) 1071 { 1072 int dregno = REGNO (dest); 1073 1074 /* Reg = reg. */ 1075 if (REG_P (src)) 1076 { 1077 int sregno = REGNO (src); 1078 1079 int reverse = (dregno == sregno + 1); 1080 1081 /* We normally copy the low-numbered register first. However, if 1082 the first register operand 0 is the same as the second register of 1083 operand 1, we must copy in the opposite order. */ 1084 emit_insn (gen_rtx_SET (VOIDmode, 1085 operand_subword (dest, reverse, TRUE, mode), 1086 operand_subword (src, reverse, TRUE, mode))); 1087 1088 emit_insn (gen_rtx_SET (VOIDmode, 1089 operand_subword (dest, !reverse, TRUE, mode), 1090 operand_subword (src, !reverse, TRUE, mode))); 1091 } 1092 1093 /* Reg = constant. */ 1094 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE) 1095 { 1096 rtx words[2]; 1097 split_double (src, &words[0], &words[1]); 1098 emit_insn (gen_rtx_SET (VOIDmode, 1099 operand_subword (dest, 0, TRUE, mode), 1100 words[0])); 1101 1102 emit_insn (gen_rtx_SET (VOIDmode, 1103 operand_subword (dest, 1, TRUE, mode), 1104 words[1])); 1105 } 1106 1107 /* Reg = mem. */ 1108 else if (MEM_P (src)) 1109 { 1110 /* If the high-address word is used in the address, we must load it 1111 last. Otherwise, load it first. */ 1112 int reverse = refers_to_regno_p (dregno, XEXP (src, 0)); 1113 1114 /* We used to optimize loads from single registers as 1115 1116 ld r1,r3+; ld r2,r3 1117 1118 if r3 were not used subsequently. However, the REG_NOTES aren't 1119 propagated correctly by the reload phase, and it can cause bad 1120 code to be generated. We could still try: 1121 1122 ld r1,r3+; ld r2,r3; addi r3,-4 1123 1124 which saves 2 bytes and doesn't force longword alignment. */ 1125 emit_insn (gen_rtx_SET (VOIDmode, 1126 operand_subword (dest, reverse, TRUE, mode), 1127 adjust_address (src, SImode, 1128 reverse * UNITS_PER_WORD))); 1129 1130 emit_insn (gen_rtx_SET (VOIDmode, 1131 operand_subword (dest, !reverse, TRUE, mode), 1132 adjust_address (src, SImode, 1133 !reverse * UNITS_PER_WORD))); 1134 } 1135 else 1136 gcc_unreachable (); 1137 } 1138 1139 /* Mem = reg. */ 1140 /* We used to optimize loads from single registers as 1141 1142 st r1,r3; st r2,+r3 1143 1144 if r3 were not used subsequently. However, the REG_NOTES aren't 1145 propagated correctly by the reload phase, and it can cause bad 1146 code to be generated. We could still try: 1147 1148 st r1,r3; st r2,+r3; addi r3,-4 1149 1150 which saves 2 bytes and doesn't force longword alignment. 
*/ 1151 else if (MEM_P (dest) && REG_P (src)) 1152 { 1153 emit_insn (gen_rtx_SET (VOIDmode, 1154 adjust_address (dest, SImode, 0), 1155 operand_subword (src, 0, TRUE, mode))); 1156 1157 emit_insn (gen_rtx_SET (VOIDmode, 1158 adjust_address (dest, SImode, UNITS_PER_WORD), 1159 operand_subword (src, 1, TRUE, mode))); 1160 } 1161 1162 else 1163 gcc_unreachable (); 1164 1165 val = get_insns (); 1166 end_sequence (); 1167 return val; 1168 } 1169 1170 1171 static int 1172 m32r_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode, 1173 tree type, bool named ATTRIBUTE_UNUSED) 1174 { 1175 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); 1176 1177 int words; 1178 unsigned int size = 1179 (((mode == BLKmode && type) 1180 ? (unsigned int) int_size_in_bytes (type) 1181 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1) 1182 / UNITS_PER_WORD; 1183 1184 if (*cum >= M32R_MAX_PARM_REGS) 1185 words = 0; 1186 else if (*cum + size > M32R_MAX_PARM_REGS) 1187 words = (*cum + size) - M32R_MAX_PARM_REGS; 1188 else 1189 words = 0; 1190 1191 return words * UNITS_PER_WORD; 1192 } 1193 1194 /* The ROUND_ADVANCE* macros are local to this file. */ 1195 /* Round SIZE up to a word boundary. */ 1196 #define ROUND_ADVANCE(SIZE) \ 1197 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD) 1198 1199 /* Round arg MODE/TYPE up to the next word boundary. */ 1200 #define ROUND_ADVANCE_ARG(MODE, TYPE) \ 1201 ((MODE) == BLKmode \ 1202 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \ 1203 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE))) 1204 1205 /* Round CUM up to the necessary point for argument MODE/TYPE. */ 1206 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM) 1207 1208 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in 1209 a reg. This includes arguments that have to be passed by reference as the 1210 pointer to them is passed in a reg if one is available (and that is what 1211 we're given). 1212 This macro is only used in this file. */ 1213 #define PASS_IN_REG_P(CUM, MODE, TYPE) \ 1214 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS) 1215 1216 /* Determine where to put an argument to a function. 1217 Value is zero to push the argument on the stack, 1218 or a hard register in which to store the argument. 1219 1220 MODE is the argument's machine mode. 1221 TYPE is the data type of the argument (as a tree). 1222 This is null for libcalls where that information may 1223 not be available. 1224 CUM is a variable of type CUMULATIVE_ARGS which gives info about 1225 the preceding args and about the function being called. 1226 NAMED is nonzero if this argument is a named parameter 1227 (otherwise it is an extra parameter matching an ellipsis). */ 1228 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers 1229 and the rest are pushed. */ 1230 1231 static rtx 1232 m32r_function_arg (cumulative_args_t cum_v, machine_mode mode, 1233 const_tree type ATTRIBUTE_UNUSED, 1234 bool named ATTRIBUTE_UNUSED) 1235 { 1236 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); 1237 1238 return (PASS_IN_REG_P (*cum, mode, type) 1239 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type)) 1240 : NULL_RTX); 1241 } 1242 1243 /* Update the data in CUM to advance over an argument 1244 of mode MODE and data type TYPE. 1245 (TYPE is null for libcalls where that information may not be available.) 
*/ 1246 1247 static void 1248 m32r_function_arg_advance (cumulative_args_t cum_v, machine_mode mode, 1249 const_tree type, bool named ATTRIBUTE_UNUSED) 1250 { 1251 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); 1252 1253 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type) 1254 + ROUND_ADVANCE_ARG (mode, type)); 1255 } 1256 1257 /* Worker function for TARGET_RETURN_IN_MEMORY. */ 1258 1259 static bool 1260 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED) 1261 { 1262 cumulative_args_t dummy = pack_cumulative_args (NULL); 1263 1264 return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false); 1265 } 1266 1267 /* Worker function for TARGET_FUNCTION_VALUE. */ 1268 1269 static rtx 1270 m32r_function_value (const_tree valtype, 1271 const_tree fn_decl_or_type ATTRIBUTE_UNUSED, 1272 bool outgoing ATTRIBUTE_UNUSED) 1273 { 1274 return gen_rtx_REG (TYPE_MODE (valtype), 0); 1275 } 1276 1277 /* Worker function for TARGET_LIBCALL_VALUE. */ 1278 1279 static rtx 1280 m32r_libcall_value (machine_mode mode, 1281 const_rtx fun ATTRIBUTE_UNUSED) 1282 { 1283 return gen_rtx_REG (mode, 0); 1284 } 1285 1286 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. 1287 1288 ??? What about r1 in DI/DF values. */ 1289 1290 static bool 1291 m32r_function_value_regno_p (const unsigned int regno) 1292 { 1293 return (regno == 0); 1294 } 1295 1296 /* Do any needed setup for a variadic function. For the M32R, we must 1297 create a register parameter block, and then copy any anonymous arguments 1298 in registers to memory. 1299 1300 CUM has not been updated for the last named argument which has type TYPE 1301 and mode MODE, and we rely on this fact. */ 1302 1303 static void 1304 m32r_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode, 1305 tree type, int *pretend_size, int no_rtl) 1306 { 1307 int first_anon_arg; 1308 1309 if (no_rtl) 1310 return; 1311 1312 /* All BLKmode values are passed by reference. */ 1313 gcc_assert (mode != BLKmode); 1314 1315 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type) 1316 + ROUND_ADVANCE_ARG (mode, type)); 1317 1318 if (first_anon_arg < M32R_MAX_PARM_REGS) 1319 { 1320 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */ 1321 int first_reg_offset = first_anon_arg; 1322 /* Size in words to "pretend" allocate. */ 1323 int size = M32R_MAX_PARM_REGS - first_reg_offset; 1324 rtx regblock; 1325 1326 regblock = gen_frame_mem (BLKmode, 1327 plus_constant (Pmode, arg_pointer_rtx, 1328 FIRST_PARM_OFFSET (0))); 1329 set_mem_alias_set (regblock, get_varargs_alias_set ()); 1330 move_block_from_reg (first_reg_offset, regblock, size); 1331 1332 *pretend_size = (size * UNITS_PER_WORD); 1333 } 1334 } 1335 1336 1337 /* Return true if INSN is real instruction bearing insn. */ 1338 1339 static int 1340 m32r_is_insn (rtx insn) 1341 { 1342 return (NONDEBUG_INSN_P (insn) 1343 && GET_CODE (PATTERN (insn)) != USE 1344 && GET_CODE (PATTERN (insn)) != CLOBBER); 1345 } 1346 1347 /* Increase the priority of long instructions so that the 1348 short instructions are scheduled ahead of the long ones. */ 1349 1350 static int 1351 m32r_adjust_priority (rtx_insn *insn, int priority) 1352 { 1353 if (m32r_is_insn (insn) 1354 && get_attr_insn_size (insn) != INSN_SIZE_SHORT) 1355 priority <<= 3; 1356 1357 return priority; 1358 } 1359 1360 1361 /* Indicate how many instructions can be issued at the same time. 1362 This is sort of a lie. The m32r can issue only 1 long insn at 1363 once, but it can issue 2 short insns. 
The default therefore is 1364 set at 2, but this can be overridden by the command line option 1365 -missue-rate=1. */ 1366 1367 static int 1368 m32r_issue_rate (void) 1369 { 1370 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2); 1371 } 1372 1373 /* Cost functions. */ 1374 /* Memory is 3 times as expensive as registers. 1375 ??? Is that the right way to look at it? */ 1376 1377 static int 1378 m32r_memory_move_cost (machine_mode mode, 1379 reg_class_t rclass ATTRIBUTE_UNUSED, 1380 bool in ATTRIBUTE_UNUSED) 1381 { 1382 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD) 1383 return 6; 1384 else 1385 return 12; 1386 } 1387 1388 static bool 1389 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, 1390 int opno ATTRIBUTE_UNUSED, int *total, 1391 bool speed ATTRIBUTE_UNUSED) 1392 { 1393 switch (code) 1394 { 1395 /* Small integers are as cheap as registers. 4 byte values can be 1396 fetched as immediate constants - let's give that the cost of an 1397 extra insn. */ 1398 case CONST_INT: 1399 if (INT16_P (INTVAL (x))) 1400 { 1401 *total = 0; 1402 return true; 1403 } 1404 /* FALLTHRU */ 1405 1406 case CONST: 1407 case LABEL_REF: 1408 case SYMBOL_REF: 1409 *total = COSTS_N_INSNS (1); 1410 return true; 1411 1412 case CONST_DOUBLE: 1413 { 1414 rtx high, low; 1415 1416 split_double (x, &high, &low); 1417 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high)) 1418 + !INT16_P (INTVAL (low))); 1419 return true; 1420 } 1421 1422 case MULT: 1423 *total = COSTS_N_INSNS (3); 1424 return true; 1425 1426 case DIV: 1427 case UDIV: 1428 case MOD: 1429 case UMOD: 1430 *total = COSTS_N_INSNS (10); 1431 return true; 1432 1433 default: 1434 return false; 1435 } 1436 } 1437 1438 /* Type of function DECL. 1439 1440 The result is cached. To reset the cache at the end of a function, 1441 call with DECL = NULL_TREE. */ 1442 1443 enum m32r_function_type 1444 m32r_compute_function_type (tree decl) 1445 { 1446 /* Cached value. */ 1447 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN; 1448 /* Last function we were called for. */ 1449 static tree last_fn = NULL_TREE; 1450 1451 /* Resetting the cached value? */ 1452 if (decl == NULL_TREE) 1453 { 1454 fn_type = M32R_FUNCTION_UNKNOWN; 1455 last_fn = NULL_TREE; 1456 return fn_type; 1457 } 1458 1459 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN) 1460 return fn_type; 1461 1462 /* Compute function type. */ 1463 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE 1464 ? M32R_FUNCTION_INTERRUPT 1465 : M32R_FUNCTION_NORMAL); 1466 1467 last_fn = decl; 1468 return fn_type; 1469 } 1470 /* Function prologue/epilogue handlers. */ 1471 1472 /* M32R stack frames look like: 1473 1474 Before call After call 1475 +-----------------------+ +-----------------------+ 1476 | | | | 1477 high | local variables, | | local variables, | 1478 mem | reg save area, etc. | | reg save area, etc. | 1479 | | | | 1480 +-----------------------+ +-----------------------+ 1481 | | | | 1482 | arguments on stack. | | arguments on stack. 
| 1483 | | | | 1484 SP+0->+-----------------------+ +-----------------------+ 1485 | reg parm save area, | 1486 | only created for | 1487 | variable argument | 1488 | functions | 1489 +-----------------------+ 1490 | previous frame ptr | 1491 +-----------------------+ 1492 | | 1493 | register save area | 1494 | | 1495 +-----------------------+ 1496 | return address | 1497 +-----------------------+ 1498 | | 1499 | local variables | 1500 | | 1501 +-----------------------+ 1502 | | 1503 | alloca allocations | 1504 | | 1505 +-----------------------+ 1506 | | 1507 low | arguments on stack | 1508 memory | | 1509 SP+0->+-----------------------+ 1510 1511 Notes: 1512 1) The "reg parm save area" does not exist for non variable argument fns. 1513 2) The "reg parm save area" can be eliminated completely if we saved regs 1514 containing anonymous args separately but that complicates things too 1515 much (so it's not done). 1516 3) The return address is saved after the register save area so as to have as 1517 many insns as possible between the restoration of `lr' and the `jmp lr'. */ 1518 1519 /* Structure to be filled in by m32r_compute_frame_size with register 1520 save masks, and offsets for the current function. */ 1521 struct m32r_frame_info 1522 { 1523 unsigned int total_size; /* # bytes that the entire frame takes up. */ 1524 unsigned int extra_size; /* # bytes of extra stuff. */ 1525 unsigned int pretend_size; /* # bytes we push and pretend caller did. */ 1526 unsigned int args_size; /* # bytes that outgoing arguments take up. */ 1527 unsigned int reg_size; /* # bytes needed to store regs. */ 1528 unsigned int var_size; /* # bytes that variables take up. */ 1529 unsigned int gmask; /* Mask of saved gp registers. */ 1530 unsigned int save_fp; /* Nonzero if fp must be saved. */ 1531 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */ 1532 int initialized; /* Nonzero if frame size already calculated. */ 1533 }; 1534 1535 /* Current frame information calculated by m32r_compute_frame_size. */ 1536 static struct m32r_frame_info current_frame_info; 1537 1538 /* Zero structure to initialize current_frame_info. */ 1539 static struct m32r_frame_info zero_frame_info; 1540 1541 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM)) 1542 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM)) 1543 1544 /* Tell prologue and epilogue if register REGNO should be saved / restored. 1545 The return address and frame pointer are treated separately. 1546 Don't consider them here. */ 1547 #define MUST_SAVE_REGISTER(regno, interrupt_p) \ 1548 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \ 1549 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p))) 1550 1551 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM)) 1552 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile) 1553 1554 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */ 1555 #define LONG_INSN_SIZE 4 /* Size of long instructions. */ 1556 1557 /* Return the bytes needed to compute the frame pointer from the current 1558 stack pointer. 1559 1560 SIZE is the size needed for local variables. */ 1561 1562 unsigned int 1563 m32r_compute_frame_size (int size) /* # of var. bytes allocated. 
*/ 1564 { 1565 unsigned int regno; 1566 unsigned int total_size, var_size, args_size, pretend_size, extra_size; 1567 unsigned int reg_size; 1568 unsigned int gmask; 1569 enum m32r_function_type fn_type; 1570 int interrupt_p; 1571 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table 1572 | crtl->profile); 1573 1574 var_size = M32R_STACK_ALIGN (size); 1575 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size); 1576 pretend_size = crtl->args.pretend_args_size; 1577 extra_size = FIRST_PARM_OFFSET (0); 1578 total_size = extra_size + pretend_size + args_size + var_size; 1579 reg_size = 0; 1580 gmask = 0; 1581 1582 /* See if this is an interrupt handler. Call used registers must be saved 1583 for them too. */ 1584 fn_type = m32r_compute_function_type (current_function_decl); 1585 interrupt_p = M32R_INTERRUPT_P (fn_type); 1586 1587 /* Calculate space needed for registers. */ 1588 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++) 1589 { 1590 if (MUST_SAVE_REGISTER (regno, interrupt_p) 1591 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used)) 1592 { 1593 reg_size += UNITS_PER_WORD; 1594 gmask |= 1 << regno; 1595 } 1596 } 1597 1598 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER; 1599 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used; 1600 1601 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr) 1602 * UNITS_PER_WORD); 1603 total_size += reg_size; 1604 1605 /* ??? Not sure this is necessary, and I don't think the epilogue 1606 handler will do the right thing if this changes total_size. */ 1607 total_size = M32R_STACK_ALIGN (total_size); 1608 1609 /* frame_size = total_size - (pretend_size + reg_size); */ 1610 1611 /* Save computed information. */ 1612 current_frame_info.total_size = total_size; 1613 current_frame_info.extra_size = extra_size; 1614 current_frame_info.pretend_size = pretend_size; 1615 current_frame_info.var_size = var_size; 1616 current_frame_info.args_size = args_size; 1617 current_frame_info.reg_size = reg_size; 1618 current_frame_info.gmask = gmask; 1619 current_frame_info.initialized = reload_completed; 1620 1621 /* Ok, we're done. */ 1622 return total_size; 1623 } 1624 1625 /* Worker function for TARGET_CAN_ELIMINATE. */ 1626 1627 bool 1628 m32r_can_eliminate (const int from, const int to) 1629 { 1630 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM 1631 ? ! frame_pointer_needed 1632 : true); 1633 } 1634 1635 1636 /* The table we use to reference PIC data. */ 1637 static rtx global_offset_table; 1638 1639 static void 1640 m32r_reload_lr (rtx sp, int size) 1641 { 1642 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM); 1643 1644 if (size == 0) 1645 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp))); 1646 else if (size < 32768) 1647 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, 1648 gen_rtx_PLUS (Pmode, sp, 1649 GEN_INT (size))))); 1650 else 1651 { 1652 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1653 1654 emit_insn (gen_movsi (tmp, GEN_INT (size))); 1655 emit_insn (gen_addsi3 (tmp, tmp, sp)); 1656 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp))); 1657 } 1658 1659 emit_use (lr); 1660 } 1661 1662 void 1663 m32r_load_pic_register (void) 1664 { 1665 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_"); 1666 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table, 1667 GEN_INT (TARGET_MODEL_SMALL))); 1668 1669 /* Need to emit this whether or not we obey regdecls, 1670 since setjmp/longjmp can cause life info to screw up. 
*/ 1671 emit_use (pic_offset_table_rtx); 1672 } 1673 1674 /* Expand the m32r prologue as a series of insns. */ 1675 1676 void 1677 m32r_expand_prologue (void) 1678 { 1679 int regno; 1680 int frame_size; 1681 unsigned int gmask; 1682 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table 1683 | crtl->profile); 1684 1685 if (! current_frame_info.initialized) 1686 m32r_compute_frame_size (get_frame_size ()); 1687 1688 gmask = current_frame_info.gmask; 1689 1690 /* These cases shouldn't happen. Catch them now. */ 1691 gcc_assert (current_frame_info.total_size || !gmask); 1692 1693 /* Allocate space for register arguments if this is a variadic function. */ 1694 if (current_frame_info.pretend_size != 0) 1695 { 1696 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives 1697 the wrong result on a 64-bit host. */ 1698 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size; 1699 emit_insn (gen_addsi3 (stack_pointer_rtx, 1700 stack_pointer_rtx, 1701 GEN_INT (-pretend_size))); 1702 } 1703 1704 /* Save any registers we need to and set up fp. */ 1705 if (current_frame_info.save_fp) 1706 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx)); 1707 1708 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK); 1709 1710 /* Save any needed call-saved regs (and call-used if this is an 1711 interrupt handler). */ 1712 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno) 1713 { 1714 if ((gmask & (1 << regno)) != 0) 1715 emit_insn (gen_movsi_push (stack_pointer_rtx, 1716 gen_rtx_REG (Pmode, regno))); 1717 } 1718 1719 if (current_frame_info.save_lr) 1720 emit_insn (gen_movsi_push (stack_pointer_rtx, 1721 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM))); 1722 1723 /* Allocate the stack frame. */ 1724 frame_size = (current_frame_info.total_size 1725 - (current_frame_info.pretend_size 1726 + current_frame_info.reg_size)); 1727 1728 if (frame_size == 0) 1729 ; /* Nothing to do. */ 1730 else if (frame_size <= 32768) 1731 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1732 GEN_INT (-frame_size))); 1733 else 1734 { 1735 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1736 1737 emit_insn (gen_movsi (tmp, GEN_INT (frame_size))); 1738 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp)); 1739 } 1740 1741 if (frame_pointer_needed) 1742 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx)); 1743 1744 if (crtl->profile) 1745 /* Push lr for mcount (form_pc, x). */ 1746 emit_insn (gen_movsi_push (stack_pointer_rtx, 1747 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM))); 1748 1749 if (pic_reg_used) 1750 { 1751 m32r_load_pic_register (); 1752 m32r_reload_lr (stack_pointer_rtx, 1753 (crtl->profile ? 0 : frame_size)); 1754 } 1755 1756 if (crtl->profile && !pic_reg_used) 1757 emit_insn (gen_blockage ()); 1758 } 1759 1760 1761 /* Set up the stack and frame pointer (if desired) for the function. 1762 Note, if this is changed, you need to mirror the changes in 1763 m32r_compute_frame_size which calculates the prolog size. */ 1764 1765 static void 1766 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size) 1767 { 1768 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl); 1769 1770 /* If this is an interrupt handler, mark it as such. */ 1771 if (M32R_INTERRUPT_P (fn_type)) 1772 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START); 1773 1774 if (! current_frame_info.initialized) 1775 m32r_compute_frame_size (size); 1776 1777 /* This is only for the human reader. 
*/ 1778 fprintf (file, 1779 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n", 1780 ASM_COMMENT_START, 1781 current_frame_info.var_size, 1782 current_frame_info.reg_size / 4, 1783 current_frame_info.args_size, 1784 current_frame_info.extra_size); 1785 } 1786 1787 /* Output RTL to pop register REGNO from the stack. */ 1788 1789 static void 1790 pop (int regno) 1791 { 1792 rtx x; 1793 1794 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno), 1795 stack_pointer_rtx)); 1796 add_reg_note (x, REG_INC, stack_pointer_rtx); 1797 } 1798 1799 /* Expand the m32r epilogue as a series of insns. */ 1800 1801 void 1802 m32r_expand_epilogue (void) 1803 { 1804 int regno; 1805 int noepilogue = FALSE; 1806 int total_size; 1807 1808 gcc_assert (current_frame_info.initialized); 1809 total_size = current_frame_info.total_size; 1810 1811 if (total_size == 0) 1812 { 1813 rtx insn = get_last_insn (); 1814 1815 /* If the last insn was a BARRIER, we don't have to write any code 1816 because a jump (aka return) was put there. */ 1817 if (insn && NOTE_P (insn)) 1818 insn = prev_nonnote_insn (insn); 1819 if (insn && BARRIER_P (insn)) 1820 noepilogue = TRUE; 1821 } 1822 1823 if (!noepilogue) 1824 { 1825 unsigned int var_size = current_frame_info.var_size; 1826 unsigned int args_size = current_frame_info.args_size; 1827 unsigned int gmask = current_frame_info.gmask; 1828 int can_trust_sp_p = !cfun->calls_alloca; 1829 1830 if (flag_exceptions) 1831 emit_insn (gen_blockage ()); 1832 1833 /* The first thing to do is point the sp at the bottom of the register 1834 save area. */ 1835 if (can_trust_sp_p) 1836 { 1837 unsigned int reg_offset = var_size + args_size; 1838 1839 if (reg_offset == 0) 1840 ; /* Nothing to do. */ 1841 else if (reg_offset < 32768) 1842 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1843 GEN_INT (reg_offset))); 1844 else 1845 { 1846 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1847 1848 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset))); 1849 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1850 tmp)); 1851 } 1852 } 1853 else if (frame_pointer_needed) 1854 { 1855 unsigned int reg_offset = var_size + args_size; 1856 1857 if (reg_offset == 0) 1858 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx)); 1859 else if (reg_offset < 32768) 1860 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx, 1861 GEN_INT (reg_offset))); 1862 else 1863 { 1864 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM); 1865 1866 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset))); 1867 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx)); 1868 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1869 tmp)); 1870 } 1871 } 1872 else 1873 gcc_unreachable (); 1874 1875 if (current_frame_info.save_lr) 1876 pop (RETURN_ADDR_REGNUM); 1877 1878 /* Restore any saved registers, in reverse order of course. */ 1879 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK); 1880 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno) 1881 { 1882 if ((gmask & (1L << regno)) != 0) 1883 pop (regno); 1884 } 1885 1886 if (current_frame_info.save_fp) 1887 pop (FRAME_POINTER_REGNUM); 1888 1889 /* Remove varargs area if present. */ 1890 if (current_frame_info.pretend_size != 0) 1891 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, 1892 GEN_INT (current_frame_info.pretend_size))); 1893 1894 emit_insn (gen_blockage ()); 1895 } 1896 } 1897 1898 /* Do any necessary cleanup after a function to restore stack, frame, 1899 and regs. 
*/ 1900 1901 static void 1902 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED, 1903 HOST_WIDE_INT size ATTRIBUTE_UNUSED) 1904 { 1905 /* Reset state info for each function. */ 1906 current_frame_info = zero_frame_info; 1907 m32r_compute_function_type (NULL_TREE); 1908 } 1909 1910 /* Return nonzero if this function is known to have a null or 1 instruction 1911 epilogue. */ 1912 1913 int 1914 direct_return (void) 1915 { 1916 if (!reload_completed) 1917 return FALSE; 1918 1919 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl))) 1920 return FALSE; 1921 1922 if (! current_frame_info.initialized) 1923 m32r_compute_frame_size (get_frame_size ()); 1924 1925 return current_frame_info.total_size == 0; 1926 } 1927 1928 1929 /* PIC. */ 1930 1931 int 1932 m32r_legitimate_pic_operand_p (rtx x) 1933 { 1934 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF) 1935 return 0; 1936 1937 if (GET_CODE (x) == CONST 1938 && GET_CODE (XEXP (x, 0)) == PLUS 1939 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF 1940 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF) 1941 && (CONST_INT_P (XEXP (XEXP (x, 0), 1)))) 1942 return 0; 1943 1944 return 1; 1945 } 1946 1947 rtx 1948 m32r_legitimize_pic_address (rtx orig, rtx reg) 1949 { 1950 #ifdef DEBUG_PIC 1951 printf("m32r_legitimize_pic_address()\n"); 1952 #endif 1953 1954 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF) 1955 { 1956 rtx pic_ref, address; 1957 int subregs = 0; 1958 1959 if (reg == 0) 1960 { 1961 gcc_assert (!reload_in_progress && !reload_completed); 1962 reg = gen_reg_rtx (Pmode); 1963 1964 subregs = 1; 1965 } 1966 1967 if (subregs) 1968 address = gen_reg_rtx (Pmode); 1969 else 1970 address = reg; 1971 1972 crtl->uses_pic_offset_table = 1; 1973 1974 if (GET_CODE (orig) == LABEL_REF 1975 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig))) 1976 { 1977 emit_insn (gen_gotoff_load_addr (reg, orig)); 1978 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx)); 1979 return reg; 1980 } 1981 1982 emit_insn (gen_pic_load_addr (address, orig)); 1983 1984 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx)); 1985 pic_ref = gen_const_mem (Pmode, address); 1986 emit_move_insn (reg, pic_ref); 1987 return reg; 1988 } 1989 else if (GET_CODE (orig) == CONST) 1990 { 1991 rtx base, offset; 1992 1993 if (GET_CODE (XEXP (orig, 0)) == PLUS 1994 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx) 1995 return orig; 1996 1997 if (reg == 0) 1998 { 1999 gcc_assert (!reload_in_progress && !reload_completed); 2000 reg = gen_reg_rtx (Pmode); 2001 } 2002 2003 if (GET_CODE (XEXP (orig, 0)) == PLUS) 2004 { 2005 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg); 2006 if (base == reg) 2007 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX); 2008 else 2009 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg); 2010 } 2011 else 2012 return orig; 2013 2014 if (CONST_INT_P (offset)) 2015 { 2016 if (INT16_P (INTVAL (offset))) 2017 return plus_constant (Pmode, base, INTVAL (offset)); 2018 else 2019 { 2020 gcc_assert (! reload_in_progress && ! 
reload_completed);
2021 offset = force_reg (Pmode, offset);
2022 }
2023 }
2024
2025 return gen_rtx_PLUS (Pmode, base, offset);
2026 }
2027
2028 return orig;
2029 }
2030
2031 static rtx
2032 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2033 machine_mode mode ATTRIBUTE_UNUSED)
2034 {
2035 if (flag_pic)
2036 return m32r_legitimize_pic_address (x, NULL_RTX);
2037 else
2038 return x;
2039 }
2040
2041 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2042
2043 static bool
2044 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
2045 {
2046 if (GET_CODE (addr) == LO_SUM)
2047 return true;
2048
2049 return false;
2050 }
2051
2052 /* Nested function support. */
2053
2054 /* Emit RTL insns to initialize the variable parts of a trampoline.
2055 FNADDR is an RTX for the address of the function's pure code.
2056 CXT is an RTX for the static chain value for the function. */
2057
2058 void
2059 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2060 rtx fnaddr ATTRIBUTE_UNUSED,
2061 rtx cxt ATTRIBUTE_UNUSED)
2062 {
2063 }
2064
2065 static void
2066 m32r_file_start (void)
2067 {
2068 default_file_start ();
2069
2070 if (flag_verbose_asm)
2071 fprintf (asm_out_file,
2072 "%s M32R/D special options: -G %d\n",
2073 ASM_COMMENT_START, g_switch_value);
2074
2075 if (TARGET_LITTLE_ENDIAN)
2076 fprintf (asm_out_file, "\t.little\n");
2077 }
2078
2079 /* Print operand X (an rtx) in assembler syntax to file FILE.
2080 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2081 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2082
2083 static void
2084 m32r_print_operand (FILE * file, rtx x, int code)
2085 {
2086 rtx addr;
2087
2088 switch (code)
2089 {
2090 /* The 's' and 'p' codes are used by output_block_move() to
2091 indicate pre-increment 's'tores and 'p'ost-increment loads. */
2092 case 's':
2093 if (REG_P (x))
2094 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2095 else
2096 output_operand_lossage ("invalid operand to %%s code");
2097 return;
2098
2099 case 'p':
2100 if (REG_P (x))
2101 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2102 else
2103 output_operand_lossage ("invalid operand to %%p code");
2104 return;
2105
2106 case 'R' :
2107 /* Write second word of DImode or DFmode reference,
2108 register or memory. */
2109 if (REG_P (x))
2110 fputs (reg_names[REGNO (x)+1], file);
2111 else if (MEM_P (x))
2112 {
2113 fprintf (file, "@(");
2114 /* Handle possible auto-increment. Since it is pre-increment and
2115 we have already done it, we can just use an offset of four. */
2116 /* ??? This is taken from rs6000.c I think. I don't think it is
2117 currently necessary, but keep it around. */
2118 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2119 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2120 output_address (plus_constant (Pmode, XEXP (XEXP (x, 0), 0), 4));
2121 else
2122 output_address (plus_constant (Pmode, XEXP (x, 0), 4));
2123 fputc (')', file);
2124 }
2125 else
2126 output_operand_lossage ("invalid operand to %%R code");
2127 return;
2128
2129 case 'H' : /* High word. */
2130 case 'L' : /* Low word. */
2131 if (REG_P (x))
2132 {
2133 /* L = least significant word, H = most significant word.
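The WORDS_BIG_ENDIAN test below is xor'ed with the code so that the
same expression picks REGNO (x) or REGNO (x) + 1 for either endianness.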
*/ 2134 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L')) 2135 fputs (reg_names[REGNO (x)], file); 2136 else 2137 fputs (reg_names[REGNO (x)+1], file); 2138 } 2139 else if (CONST_INT_P (x) 2140 || GET_CODE (x) == CONST_DOUBLE) 2141 { 2142 rtx first, second; 2143 2144 split_double (x, &first, &second); 2145 fprintf (file, HOST_WIDE_INT_PRINT_HEX, 2146 code == 'L' ? INTVAL (first) : INTVAL (second)); 2147 } 2148 else 2149 output_operand_lossage ("invalid operand to %%H/%%L code"); 2150 return; 2151 2152 case 'A' : 2153 { 2154 char str[30]; 2155 2156 if (GET_CODE (x) != CONST_DOUBLE 2157 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT) 2158 fatal_insn ("bad insn for 'A'", x); 2159 2160 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1); 2161 fprintf (file, "%s", str); 2162 return; 2163 } 2164 2165 case 'B' : /* Bottom half. */ 2166 case 'T' : /* Top half. */ 2167 /* Output the argument to a `seth' insn (sets the Top half-word). 2168 For constants output arguments to a seth/or3 pair to set Top and 2169 Bottom halves. For symbols output arguments to a seth/add3 pair to 2170 set Top and Bottom halves. The difference exists because for 2171 constants seth/or3 is more readable but for symbols we need to use 2172 the same scheme as `ld' and `st' insns (16-bit addend is signed). */ 2173 switch (GET_CODE (x)) 2174 { 2175 case CONST_INT : 2176 case CONST_DOUBLE : 2177 { 2178 rtx first, second; 2179 2180 split_double (x, &first, &second); 2181 x = WORDS_BIG_ENDIAN ? second : first; 2182 fprintf (file, HOST_WIDE_INT_PRINT_HEX, 2183 (code == 'B' 2184 ? INTVAL (x) & 0xffff 2185 : (INTVAL (x) >> 16) & 0xffff)); 2186 } 2187 return; 2188 case CONST : 2189 case SYMBOL_REF : 2190 if (code == 'B' 2191 && small_data_operand (x, VOIDmode)) 2192 { 2193 fputs ("sda(", file); 2194 output_addr_const (file, x); 2195 fputc (')', file); 2196 return; 2197 } 2198 /* fall through */ 2199 case LABEL_REF : 2200 fputs (code == 'T' ? "shigh(" : "low(", file); 2201 output_addr_const (file, x); 2202 fputc (')', file); 2203 return; 2204 default : 2205 output_operand_lossage ("invalid operand to %%T/%%B code"); 2206 return; 2207 } 2208 break; 2209 2210 case 'U' : 2211 /* ??? wip */ 2212 /* Output a load/store with update indicator if appropriate. */ 2213 if (MEM_P (x)) 2214 { 2215 if (GET_CODE (XEXP (x, 0)) == PRE_INC 2216 || GET_CODE (XEXP (x, 0)) == PRE_DEC) 2217 fputs (".a", file); 2218 } 2219 else 2220 output_operand_lossage ("invalid operand to %%U code"); 2221 return; 2222 2223 case 'N' : 2224 /* Print a constant value negated. */ 2225 if (CONST_INT_P (x)) 2226 output_addr_const (file, GEN_INT (- INTVAL (x))); 2227 else 2228 output_operand_lossage ("invalid operand to %%N code"); 2229 return; 2230 2231 case 'X' : 2232 /* Print a const_int in hex. Used in comments. */ 2233 if (CONST_INT_P (x)) 2234 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x)); 2235 return; 2236 2237 case '#' : 2238 fputs (IMMEDIATE_PREFIX, file); 2239 return; 2240 2241 case 0 : 2242 /* Do nothing special. */ 2243 break; 2244 2245 default : 2246 /* Unknown flag. 
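Complain; an unrecognized code here means a bug in an output
template rather than in the user's source.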
*/ 2247 output_operand_lossage ("invalid operand output code"); 2248 } 2249 2250 switch (GET_CODE (x)) 2251 { 2252 case REG : 2253 fputs (reg_names[REGNO (x)], file); 2254 break; 2255 2256 case MEM : 2257 addr = XEXP (x, 0); 2258 if (GET_CODE (addr) == PRE_INC) 2259 { 2260 if (!REG_P (XEXP (addr, 0))) 2261 fatal_insn ("pre-increment address is not a register", x); 2262 2263 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]); 2264 } 2265 else if (GET_CODE (addr) == PRE_DEC) 2266 { 2267 if (!REG_P (XEXP (addr, 0))) 2268 fatal_insn ("pre-decrement address is not a register", x); 2269 2270 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]); 2271 } 2272 else if (GET_CODE (addr) == POST_INC) 2273 { 2274 if (!REG_P (XEXP (addr, 0))) 2275 fatal_insn ("post-increment address is not a register", x); 2276 2277 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]); 2278 } 2279 else 2280 { 2281 fputs ("@(", file); 2282 output_address (XEXP (x, 0)); 2283 fputc (')', file); 2284 } 2285 break; 2286 2287 case CONST_DOUBLE : 2288 /* We handle SFmode constants here as output_addr_const doesn't. */ 2289 if (GET_MODE (x) == SFmode) 2290 { 2291 REAL_VALUE_TYPE d; 2292 long l; 2293 2294 REAL_VALUE_FROM_CONST_DOUBLE (d, x); 2295 REAL_VALUE_TO_TARGET_SINGLE (d, l); 2296 fprintf (file, "0x%08lx", l); 2297 break; 2298 } 2299 2300 /* Fall through. Let output_addr_const deal with it. */ 2301 2302 default : 2303 output_addr_const (file, x); 2304 break; 2305 } 2306 } 2307 2308 /* Print a memory address as an operand to reference that memory location. */ 2309 2310 static void 2311 m32r_print_operand_address (FILE * file, rtx addr) 2312 { 2313 rtx base; 2314 rtx index = 0; 2315 int offset = 0; 2316 2317 switch (GET_CODE (addr)) 2318 { 2319 case REG : 2320 fputs (reg_names[REGNO (addr)], file); 2321 break; 2322 2323 case PLUS : 2324 if (CONST_INT_P (XEXP (addr, 0))) 2325 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1); 2326 else if (CONST_INT_P (XEXP (addr, 1))) 2327 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0); 2328 else 2329 base = XEXP (addr, 0), index = XEXP (addr, 1); 2330 if (REG_P (base)) 2331 { 2332 /* Print the offset first (if present) to conform to the manual. */ 2333 if (index == 0) 2334 { 2335 if (offset != 0) 2336 fprintf (file, "%d,", offset); 2337 fputs (reg_names[REGNO (base)], file); 2338 } 2339 /* The chip doesn't support this, but left in for generality. */ 2340 else if (REG_P (index)) 2341 fprintf (file, "%s,%s", 2342 reg_names[REGNO (base)], reg_names[REGNO (index)]); 2343 /* Not sure this can happen, but leave in for now. 
*/ 2344 else if (GET_CODE (index) == SYMBOL_REF) 2345 { 2346 output_addr_const (file, index); 2347 fputc (',', file); 2348 fputs (reg_names[REGNO (base)], file); 2349 } 2350 else 2351 fatal_insn ("bad address", addr); 2352 } 2353 else if (GET_CODE (base) == LO_SUM) 2354 { 2355 gcc_assert (!index && REG_P (XEXP (base, 0))); 2356 if (small_data_operand (XEXP (base, 1), VOIDmode)) 2357 fputs ("sda(", file); 2358 else 2359 fputs ("low(", file); 2360 output_addr_const (file, plus_constant (Pmode, XEXP (base, 1), 2361 offset)); 2362 fputs ("),", file); 2363 fputs (reg_names[REGNO (XEXP (base, 0))], file); 2364 } 2365 else 2366 fatal_insn ("bad address", addr); 2367 break; 2368 2369 case LO_SUM : 2370 if (!REG_P (XEXP (addr, 0))) 2371 fatal_insn ("lo_sum not of register", addr); 2372 if (small_data_operand (XEXP (addr, 1), VOIDmode)) 2373 fputs ("sda(", file); 2374 else 2375 fputs ("low(", file); 2376 output_addr_const (file, XEXP (addr, 1)); 2377 fputs ("),", file); 2378 fputs (reg_names[REGNO (XEXP (addr, 0))], file); 2379 break; 2380 2381 case PRE_INC : /* Assume SImode. */ 2382 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]); 2383 break; 2384 2385 case PRE_DEC : /* Assume SImode. */ 2386 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]); 2387 break; 2388 2389 case POST_INC : /* Assume SImode. */ 2390 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]); 2391 break; 2392 2393 default : 2394 output_addr_const (file, addr); 2395 break; 2396 } 2397 } 2398 2399 static bool 2400 m32r_print_operand_punct_valid_p (unsigned char code) 2401 { 2402 return m32r_punct_chars[code]; 2403 } 2404 2405 /* Return true if the operands are the constants 0 and 1. */ 2406 2407 int 2408 zero_and_one (rtx operand1, rtx operand2) 2409 { 2410 return 2411 CONST_INT_P (operand1) 2412 && CONST_INT_P (operand2) 2413 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1)) 2414 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0))); 2415 } 2416 2417 /* Generate the correct assembler code to handle the conditional loading of a 2418 value into a register. It is known that the operands satisfy the 2419 conditional_move_operand() function above. The destination is operand[0]. 2420 The condition is operand [1]. The 'true' value is operand [2] and the 2421 'false' value is operand [3]. */ 2422 2423 char * 2424 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED) 2425 { 2426 static char buffer [100]; 2427 const char * dest = reg_names [REGNO (operands [0])]; 2428 2429 buffer [0] = 0; 2430 2431 /* Destination must be a register. */ 2432 gcc_assert (REG_P (operands [0])); 2433 gcc_assert (conditional_move_operand (operands [2], SImode)); 2434 gcc_assert (conditional_move_operand (operands [3], SImode)); 2435 2436 /* Check to see if the test is reversed. */ 2437 if (GET_CODE (operands [1]) == NE) 2438 { 2439 rtx tmp = operands [2]; 2440 operands [2] = operands [3]; 2441 operands [3] = tmp; 2442 } 2443 2444 sprintf (buffer, "mvfc %s, cbr", dest); 2445 2446 /* If the true value was '0' then we need to invert the results of the move. */ 2447 if (INTVAL (operands [2]) == 0) 2448 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1", 2449 dest, dest); 2450 2451 return buffer; 2452 } 2453 2454 /* Returns true if the registers contained in the two 2455 rtl expressions are different. 
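SUBREGs are stripped first, so a register and a SUBREG of the same
register are treated as equal.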
*/ 2456 2457 int 2458 m32r_not_same_reg (rtx a, rtx b) 2459 { 2460 int reg_a = -1; 2461 int reg_b = -2; 2462 2463 while (GET_CODE (a) == SUBREG) 2464 a = SUBREG_REG (a); 2465 2466 if (REG_P (a)) 2467 reg_a = REGNO (a); 2468 2469 while (GET_CODE (b) == SUBREG) 2470 b = SUBREG_REG (b); 2471 2472 if (REG_P (b)) 2473 reg_b = REGNO (b); 2474 2475 return reg_a != reg_b; 2476 } 2477 2478 2479 rtx 2480 m32r_function_symbol (const char *name) 2481 { 2482 int extra_flags = 0; 2483 enum m32r_model model; 2484 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name); 2485 2486 if (TARGET_MODEL_SMALL) 2487 model = M32R_MODEL_SMALL; 2488 else if (TARGET_MODEL_MEDIUM) 2489 model = M32R_MODEL_MEDIUM; 2490 else if (TARGET_MODEL_LARGE) 2491 model = M32R_MODEL_LARGE; 2492 else 2493 gcc_unreachable (); /* Shouldn't happen. */ 2494 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT; 2495 2496 if (extra_flags) 2497 SYMBOL_REF_FLAGS (sym) |= extra_flags; 2498 2499 return sym; 2500 } 2501 2502 /* Use a library function to move some bytes. */ 2503 2504 static void 2505 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx) 2506 { 2507 /* We want to pass the size as Pmode, which will normally be SImode 2508 but will be DImode if we are using 64-bit longs and pointers. */ 2509 if (GET_MODE (bytes_rtx) != VOIDmode 2510 && GET_MODE (bytes_rtx) != Pmode) 2511 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1); 2512 2513 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL, 2514 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode, 2515 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx, 2516 TYPE_UNSIGNED (sizetype)), 2517 TYPE_MODE (sizetype)); 2518 } 2519 2520 /* Expand string/block move operations. 2521 2522 operands[0] is the pointer to the destination. 2523 operands[1] is the pointer to the source. 2524 operands[2] is the number of bytes to move. 2525 operands[3] is the alignment. 2526 2527 Returns 1 upon success, 0 otherwise. */ 2528 2529 int 2530 m32r_expand_block_move (rtx operands[]) 2531 { 2532 rtx orig_dst = operands[0]; 2533 rtx orig_src = operands[1]; 2534 rtx bytes_rtx = operands[2]; 2535 rtx align_rtx = operands[3]; 2536 int constp = CONST_INT_P (bytes_rtx); 2537 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0; 2538 int align = INTVAL (align_rtx); 2539 int leftover; 2540 rtx src_reg; 2541 rtx dst_reg; 2542 2543 if (constp && bytes <= 0) 2544 return 1; 2545 2546 /* Move the address into scratch registers. */ 2547 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0)); 2548 src_reg = copy_addr_to_reg (XEXP (orig_src, 0)); 2549 2550 if (align > UNITS_PER_WORD) 2551 align = UNITS_PER_WORD; 2552 2553 /* If we prefer size over speed, always use a function call. 2554 If we do not know the size, use a function call. 2555 If the blocks are not word aligned, use a function call. */ 2556 if (optimize_size || ! constp || align != UNITS_PER_WORD) 2557 { 2558 block_move_call (dst_reg, src_reg, bytes_rtx); 2559 return 0; 2560 } 2561 2562 leftover = bytes % MAX_MOVE_BYTES; 2563 bytes -= leftover; 2564 2565 /* If necessary, generate a loop to handle the bulk of the copy. 
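Each pass through the loop copies MAX_MOVE_BYTES; the leftover tail
computed above is handled by the final movmemsi_internal emitted after
the loop.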
*/ 2566 if (bytes) 2567 { 2568 rtx_code_label *label = NULL; 2569 rtx final_src = NULL_RTX; 2570 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES); 2571 rtx rounded_total = GEN_INT (bytes); 2572 rtx new_dst_reg = gen_reg_rtx (SImode); 2573 rtx new_src_reg = gen_reg_rtx (SImode); 2574 2575 /* If we are going to have to perform this loop more than 2576 once, then generate a label and compute the address the 2577 source register will contain upon completion of the final 2578 iteration. */ 2579 if (bytes > MAX_MOVE_BYTES) 2580 { 2581 final_src = gen_reg_rtx (Pmode); 2582 2583 if (INT16_P(bytes)) 2584 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total)); 2585 else 2586 { 2587 emit_insn (gen_movsi (final_src, rounded_total)); 2588 emit_insn (gen_addsi3 (final_src, final_src, src_reg)); 2589 } 2590 2591 label = gen_label_rtx (); 2592 emit_label (label); 2593 } 2594 2595 /* It is known that output_block_move() will update src_reg to point 2596 to the word after the end of the source block, and dst_reg to point 2597 to the last word of the destination block, provided that the block 2598 is MAX_MOVE_BYTES long. */ 2599 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time, 2600 new_dst_reg, new_src_reg)); 2601 emit_move_insn (dst_reg, new_dst_reg); 2602 emit_move_insn (src_reg, new_src_reg); 2603 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4))); 2604 2605 if (bytes > MAX_MOVE_BYTES) 2606 { 2607 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src); 2608 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label)); 2609 } 2610 } 2611 2612 if (leftover) 2613 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover), 2614 gen_reg_rtx (SImode), 2615 gen_reg_rtx (SImode))); 2616 return 1; 2617 } 2618 2619 2620 /* Emit load/stores for a small constant word aligned block_move. 2621 2622 operands[0] is the memory address of the destination. 2623 operands[1] is the memory address of the source. 2624 operands[2] is the number of bytes to move. 2625 operands[3] is a temp register. 2626 operands[4] is a temp register. */ 2627 2628 void 2629 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[]) 2630 { 2631 HOST_WIDE_INT bytes = INTVAL (operands[2]); 2632 int first_time; 2633 int got_extra = 0; 2634 2635 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES); 2636 2637 /* We do not have a post-increment store available, so the first set of 2638 stores are done without any increment, then the remaining ones can use 2639 the pre-increment addressing mode. 2640 2641 Note: expand_block_move() also relies upon this behavior when building 2642 loops to copy large blocks. 
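In particular, because the stores use pre-increment addressing, dst_reg
is left pointing at the last word written, which is why
m32r_expand_block_move adds 4 to dst_reg after each iteration.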
*/
2643 first_time = 1;
2644
2645 while (bytes > 0)
2646 {
2647 if (bytes >= 8)
2648 {
2649 if (first_time)
2650 {
2651 output_asm_insn ("ld\t%5, %p1", operands);
2652 output_asm_insn ("ld\t%6, %p1", operands);
2653 output_asm_insn ("st\t%5, @%0", operands);
2654 output_asm_insn ("st\t%6, %s0", operands);
2655 }
2656 else
2657 {
2658 output_asm_insn ("ld\t%5, %p1", operands);
2659 output_asm_insn ("ld\t%6, %p1", operands);
2660 output_asm_insn ("st\t%5, %s0", operands);
2661 output_asm_insn ("st\t%6, %s0", operands);
2662 }
2663
2664 bytes -= 8;
2665 }
2666 else if (bytes >= 4)
2667 {
2668 if (bytes > 4)
2669 got_extra = 1;
2670
2671 output_asm_insn ("ld\t%5, %p1", operands);
2672
2673 if (got_extra)
2674 output_asm_insn ("ld\t%6, %p1", operands);
2675
2676 if (first_time)
2677 output_asm_insn ("st\t%5, @%0", operands);
2678 else
2679 output_asm_insn ("st\t%5, %s0", operands);
2680
2681 bytes -= 4;
2682 }
2683 else
2684 {
2685 /* Get the entire next word, even though we do not want all of it.
2686 This saves us from doing several smaller loads, and we assume that
2687 we cannot cause a page fault when at least part of the word is in
2688 valid memory [since we don't get called if things aren't properly
2689 aligned]. */
2690 int dst_offset = first_time ? 0 : 4;
2691 /* The amount of increment we have to make to the
2692 destination pointer. */
2693 int dst_inc_amount = dst_offset + bytes - 4;
2694 /* The same for the source pointer. */
2695 int src_inc_amount = bytes;
2696 int last_shift;
2697 rtx my_operands[3];
2698
2699 /* If got_extra is true then we have already loaded
2700 the next word as part of loading and storing the previous word. */
2701 if (! got_extra)
2702 output_asm_insn ("ld\t%6, @%1", operands);
2703
2704 if (bytes >= 2)
2705 {
2706 bytes -= 2;
2707
2708 output_asm_insn ("sra3\t%5, %6, #16", operands);
2709 my_operands[0] = operands[5];
2710 my_operands[1] = GEN_INT (dst_offset);
2711 my_operands[2] = operands[0];
2712 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2713
2714 /* If there is a byte left to store then increment the
2715 destination address and shift the contents of the source
2716 register down by 8 bits. We could not do the address
2717 increment in the store half word instruction, because it does
2718 not have an auto increment mode. */
2719 if (bytes > 0) /* assert (bytes == 1) */
2720 {
2721 dst_offset += 2;
2722 last_shift = 8;
2723 }
2724 }
2725 else
2726 last_shift = 24;
2727
2728 if (bytes > 0)
2729 {
2730 my_operands[0] = operands[6];
2731 my_operands[1] = GEN_INT (last_shift);
2732 output_asm_insn ("srai\t%0, #%1", my_operands);
2733 my_operands[0] = operands[6];
2734 my_operands[1] = GEN_INT (dst_offset);
2735 my_operands[2] = operands[0];
2736 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2737 }
2738
2739 /* Update the destination pointer if needed. We have to do
2740 this so that the pattern matches what we output in this
2741 function. */
2742 if (dst_inc_amount
2743 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2744 {
2745 my_operands[0] = operands[0];
2746 my_operands[1] = GEN_INT (dst_inc_amount);
2747 output_asm_insn ("addi\t%0, #%1", my_operands);
2748 }
2749
2750 /* Update the source pointer if needed. We have to do this
2751 so that the pattern matches what we output in this
2752 function.
*/
2753 if (src_inc_amount
2754 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2755 {
2756 my_operands[0] = operands[1];
2757 my_operands[1] = GEN_INT (src_inc_amount);
2758 output_asm_insn ("addi\t%0, #%1", my_operands);
2759 }
2760
2761 bytes = 0;
2762 }
2763
2764 first_time = 0;
2765 }
2766 }
2767
2768 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2769
2770 int
2771 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2772 unsigned int new_reg)
2773 {
2774 /* Interrupt routines can't clobber any register that isn't already used. */
2775 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2776 && !df_regs_ever_live_p (new_reg))
2777 return 0;
2778
2779 return 1;
2780 }
2781
2782 rtx
2783 m32r_return_addr (int count)
2784 {
2785 if (count != 0)
2786 return const0_rtx;
2787
2788 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2789 }
2790
2791 static void
2792 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2793 {
2794 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2795 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2796 0x017e8e17 : 0x178e7e01, SImode));
2797 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2798 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2799 0x0c00ae86 : 0x86ae000c, SImode));
2800 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2801 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2802 0xe627871e : 0x1e8727e6, SImode));
2803 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2804 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2805 0xc616c626 : 0x26c616c6, SImode));
2806 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2807 chain_value);
2808 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2809 XEXP (DECL_RTL (fndecl), 0));
2810
2811 if (m32r_cache_flush_trap >= 0)
2812 emit_insn (gen_flush_icache
2813 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2814 gen_int_mode (m32r_cache_flush_trap, SImode)));
2815 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2816 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2817 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2818 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2819 GEN_INT (3), SImode);
2820 }
2821
2822 /* True if X is a reg that can be used as a base reg. */
2823
2824 static bool
2825 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2826 {
2827 if (! REG_P (x))
2828 return false;
2829
2830 if (strict)
2831 {
2832 if (GPR_P (REGNO (x)))
2833 return true;
2834 }
2835 else
2836 {
2837 if (GPR_P (REGNO (x))
2838 || REGNO (x) == ARG_POINTER_REGNUM
2839 || ! HARD_REGISTER_P (x))
2840 return true;
2841 }
2842
2843 return false;
2844 }
2845
2846 static inline bool
2847 m32r_rtx_ok_for_offset_p (const_rtx x)
2848 {
2849 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2850 }
2851
2852 static inline bool
2853 m32r_legitimate_offset_address_p (machine_mode mode ATTRIBUTE_UNUSED,
2854 const_rtx x, bool strict)
2855 {
2856 if (GET_CODE (x) == PLUS
2857 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2858 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2859 return true;
2860
2861 return false;
2862 }
2863
2864 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2865 since more than one instruction will be required.
*/
2866
2867 static inline bool
2868 m32r_legitimate_lo_sum_address_p (machine_mode mode, const_rtx x,
2869 bool strict)
2870 {
2871 if (GET_CODE (x) == LO_SUM
2872 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2873 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2874 && CONSTANT_P (XEXP (x, 1)))
2875 return true;
2876
2877 return false;
2878 }
2879
2880 /* Is this a load and increment operation? */
2881
2882 static inline bool
2883 m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict)
2884 {
2885 if ((mode == SImode || mode == SFmode)
2886 && GET_CODE (x) == POST_INC
2887 && REG_P (XEXP (x, 0))
2888 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2889 return true;
2890
2891 return false;
2892 }
2893
2894 /* Is this an increment/decrement and store operation? */
2895
2896 static inline bool
2897 m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict)
2898 {
2899 if ((mode == SImode || mode == SFmode)
2900 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2901 && REG_P (XEXP (x, 0))
2902 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2903 return true;
2904
2905 return false;
2906 }
2907
2908 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2909
2910 static bool
2911 m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict)
2912 {
2913 if (m32r_rtx_ok_for_base_p (x, strict)
2914 || m32r_legitimate_offset_address_p (mode, x, strict)
2915 || m32r_legitimate_lo_sum_address_p (mode, x, strict)
2916 || m32r_load_postinc_p (mode, x, strict)
2917 || m32r_store_preinc_predec_p (mode, x, strict))
2918 return true;
2919
2920 return false;
2921 }
2922
2923 static void
2924 m32r_conditional_register_usage (void)
2925 {
2926 if (flag_pic)
2927 {
2928 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2929 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2930 }
2931 }
2932
2933 /* Implement TARGET_LEGITIMATE_CONSTANT_P.
2934
2935 We don't allow (plus symbol large-constant) as the relocations can't
2936 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2937 We allow all CONST_DOUBLE's as the md file patterns will force the
2938 constant to memory if they can't handle them. */
2939
2940 static bool
2941 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2942 {
2943 return !(GET_CODE (x) == CONST
2944 && GET_CODE (XEXP (x, 0)) == PLUS
2945 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2946 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2947 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2948 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2949 }
2950
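/* Editorial illustration (not from the original sources): under the
   predicate above,

     (symbol_ref "x")                                   remains legitimate,
     (const (plus (symbol_ref "x") (const_int 100)))    remains legitimate,
     (const (plus (symbol_ref "x") (const_int 40000)))  is rejected,

   because neither the 16-bit nor the 24-bit relocation can express the
   large addend.  */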