/* Copyright (C) 1997-2018 Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "attribs.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "varasm.h"
#include "stor-layout.h"
#include "output.h"
#include "insn-attr.h"
#include "explow.h"
#include "expr.h"
#include "cfgrtl.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "builtins.h"
#include "ifcvt.h"
#include "rtl-iter.h"

/* This file should be included last.  */
#include "target-def.h"

#ifndef FRV_INLINE
#define FRV_INLINE inline
#endif

/* The maximum number of distinct NOP patterns.  There are three:
   nop, fnop and mnop.  */
#define NUM_NOP_PATTERNS 3

/* Classification of instructions and units: integer, floating-point/media,
   branch and control.  */
enum frv_insn_group { GROUP_I, GROUP_FM, GROUP_B, GROUP_C, NUM_GROUPS };

/* The DFA names of the units, in packet order.  */
static const char *const frv_unit_names[] =
{
  "c",
  "i0", "f0",
  "i1", "f1",
  "i2", "f2",
  "i3", "f3",
  "b0", "b1"
};

/* The classification of each unit in frv_unit_names[].  */
static const enum frv_insn_group frv_unit_groups[ARRAY_SIZE (frv_unit_names)] =
{
  GROUP_C,
  GROUP_I, GROUP_FM,
  GROUP_I, GROUP_FM,
  GROUP_I, GROUP_FM,
  GROUP_I, GROUP_FM,
  GROUP_B, GROUP_B
};

/* Return the DFA unit code associated with the Nth unit of integer
   or floating-point group GROUP.  */
#define NTH_UNIT(GROUP, N) frv_unit_codes[(GROUP) + (N) * 2 + 1]

/* Return the number of integer or floating-point unit UNIT
   (1 for I1, 2 for F2, etc.).  */
#define UNIT_NUMBER(UNIT) (((UNIT) - 1) / 2)

/* The DFA unit number for each unit in frv_unit_names[].  */
static int frv_unit_codes[ARRAY_SIZE (frv_unit_names)];

/* FRV_TYPE_TO_UNIT[T] is the last unit in frv_unit_names[] that can issue
   an instruction of type T.  The value is ARRAY_SIZE (frv_unit_names) if
   no instruction of type T has been seen.  */
static unsigned int frv_type_to_unit[TYPE_UNKNOWN + 1];

/* An array of dummy nop INSNs, one for each type of nop that the
   target supports.  */
static GTY(()) rtx_insn *frv_nops[NUM_NOP_PATTERNS];

/* The number of nop instructions in frv_nops[].  */
static unsigned int frv_num_nops;

/* The type of access.  FRV_IO_UNKNOWN means the access can be either
   a read or a write.  */
enum frv_io_type { FRV_IO_UNKNOWN, FRV_IO_READ, FRV_IO_WRITE };

/* Information about one __builtin_read or __builtin_write access, or
   the combination of several such accesses.  The most general value
   is all-zeros (an unknown access to an unknown address).  */
struct frv_io {
  enum frv_io_type type;

  /* The constant address being accessed, or zero if not known.  */
  HOST_WIDE_INT const_address;

  /* The run-time address, as used in operand 0 of the membar pattern.  */
  rtx var_address;
};

/* Return true if instruction INSN should be packed with the following
   instruction.  */
#define PACKING_FLAG_P(INSN) (GET_MODE (INSN) == TImode)

/* Set the value of PACKING_FLAG_P(INSN).  */
#define SET_PACKING_FLAG(INSN) PUT_MODE (INSN, TImode)
#define CLEAR_PACKING_FLAG(INSN) PUT_MODE (INSN, VOIDmode)

/* Loop with REG set to each hard register in rtx X.  */
#define FOR_EACH_REGNO(REG, X) \
  for (REG = REGNO (X); REG < END_REGNO (X); REG++)

/* This structure contains machine specific function data.  */
struct GTY(()) machine_function
{
  /* True if we have created an rtx that relies on the stack frame.  */
  int frame_needed;

  /* True if this function contains at least one __builtin_{read,write}*.  */
  bool has_membar_p;
};

/* Temporary register allocation support structure.  */
typedef struct frv_tmp_reg_struct
{
  HARD_REG_SET regs;            /* possible registers to allocate */
  int next_reg[N_REG_CLASSES];  /* next register to allocate per class */
}
frv_tmp_reg_t;

/* Register state information for VLIW re-packing phase.  */
#define REGSTATE_CC_MASK    0x07  /* Mask to isolate CCn for cond exec */
#define REGSTATE_MODIFIED   0x08  /* reg modified in current VLIW insn */
#define REGSTATE_IF_TRUE    0x10  /* reg modified in cond exec true */
#define REGSTATE_IF_FALSE   0x20  /* reg modified in cond exec false */

#define REGSTATE_IF_EITHER  (REGSTATE_IF_TRUE | REGSTATE_IF_FALSE)

typedef unsigned char regstate_t;

/* Used in frv_frame_accessor_t to indicate the direction of a register-to-
   memory move.  */
enum frv_stack_op
{
  FRV_LOAD,
  FRV_STORE
};

/* Information required by frv_frame_access.  */
typedef struct
{
  /* This field is FRV_LOAD if registers are to be loaded from the stack and
     FRV_STORE if they should be stored onto the stack.  FRV_STORE implies
     the move is being done by the prologue code while FRV_LOAD implies it
     is being done by the epilogue.  */
  enum frv_stack_op op;

  /* The base register to use when accessing the stack.  This may be the
     frame pointer, stack pointer, or a temporary.  The choice of register
     depends on which part of the frame is being accessed and how big the
     frame is.  */
  rtx base;

  /* The offset of BASE from the bottom of the current frame, in bytes.  */
  int base_offset;
} frv_frame_accessor_t;

/* Conditional execution support gathered together in one structure.  */
typedef struct
{
  /* Linked list of insns to add if the conditional execution conversion was
     successful.  Each link points to an EXPR_LIST which points to the pattern
     of the insn to add, and the insn to be inserted before.  */
  rtx added_insns_list;

  /* Identify which registers are safe to allocate for if conversions to
     conditional execution.
     We keep the last allocated register in the
     register classes between COND_EXEC statements.  This will mean we allocate
     different registers for each different COND_EXEC group if we can.  This
     might allow the scheduler to intermix two different COND_EXEC sections.  */
  frv_tmp_reg_t tmp_reg;

  /* For nested IFs, identify which CC registers are used outside of setting
     via a compare insn, and using via a check insn.  This will allow us to
     know if we can rewrite the register to use a different register that will
     be paired with the CR register controlling the nested IF-THEN blocks.  */
  HARD_REG_SET nested_cc_ok_rewrite;

  /* Temporary registers allocated to hold constants during conditional
     execution.  */
  rtx scratch_regs[FIRST_PSEUDO_REGISTER];

  /* Current number of temp registers available.  */
  int cur_scratch_regs;

  /* Number of nested conditional execution blocks.  */
  int num_nested_cond_exec;

  /* Map of insns that set up constants in scratch registers.  */
  bitmap scratch_insns_bitmap;

  /* Conditional execution test register (CC0..CC7).  */
  rtx cr_reg;

  /* Conditional execution compare register that is paired with cr_reg, so that
     nested compares can be done.  The csubcc and caddcc instructions don't
     have enough bits to specify both a CC register to be set and a CR register
     to do the test on, so the same bit number is used for both.  Needless to
     say, this is rather inconvenient for GCC.  */
  rtx nested_cc_reg;

  /* Extra CR registers used for &&, ||.  */
  rtx extra_int_cr;
  rtx extra_fp_cr;

  /* Previous CR used in nested if, to make sure we are dealing with the same
     nested if as the previous statement.  */
  rtx last_nested_if_cr;
}
frv_ifcvt_t;

static /* GTY(()) */ frv_ifcvt_t frv_ifcvt;

/* Map register number to smallest register class.  */
enum reg_class regno_reg_class[FIRST_PSEUDO_REGISTER];

/* Cached value of frv_stack_info.  */
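/* The cache is only filled in once reload has completed (see frv_stack_info
   below) and is cleared again at the end of each function by
   frv_function_epilogue.  */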
static frv_stack_t *frv_stack_cache = (frv_stack_t *)0;

/* Forward references */

static void frv_option_override (void);
static bool frv_legitimate_address_p (machine_mode, rtx, bool);
static int frv_default_flags_for_cpu (void);
static int frv_string_begins_with (const char *, const char *);
static FRV_INLINE bool frv_small_data_reloc_p (rtx, int);
static void frv_print_operand (FILE *, rtx, int);
static void frv_print_operand_address (FILE *, machine_mode, rtx);
static bool frv_print_operand_punct_valid_p (unsigned char code);
static void frv_print_operand_memory_reference_reg (FILE *, rtx);
static void frv_print_operand_memory_reference (FILE *, rtx, int);
static int frv_print_operand_jump_hint (rtx_insn *);
static const char *comparison_string (enum rtx_code, rtx);
static rtx frv_function_value (const_tree, const_tree, bool);
static rtx frv_libcall_value (machine_mode, const_rtx);
static FRV_INLINE int frv_regno_ok_for_base_p (int, int);
static rtx single_set_pattern (rtx);
static int frv_function_contains_far_jump (void);
static rtx frv_alloc_temp_reg (frv_tmp_reg_t *, enum reg_class,
			       machine_mode, int, int);
static rtx frv_frame_offset_rtx (int);
static rtx frv_frame_mem (machine_mode, rtx, int);
static rtx frv_dwarf_store (rtx, int);
static void frv_frame_insn (rtx, rtx);
static void frv_frame_access (frv_frame_accessor_t*, rtx, int);
static void frv_frame_access_multi (frv_frame_accessor_t*, frv_stack_t *, int);
static void frv_frame_access_standard_regs (enum frv_stack_op, frv_stack_t *);
static struct machine_function *frv_init_machine_status (void);
static rtx frv_int_to_acc (enum insn_code, int, rtx);
static machine_mode frv_matching_accg_mode (machine_mode);
static rtx frv_read_argument (tree, unsigned int);
static rtx frv_read_iacc_argument (machine_mode, tree, unsigned int);
static int frv_check_constant_argument (enum insn_code, int, rtx);
static rtx frv_legitimize_target (enum insn_code, rtx);
static rtx frv_legitimize_argument (enum insn_code, int, rtx);
static rtx frv_legitimize_tls_address (rtx, enum tls_model);
static rtx frv_legitimize_address (rtx, rtx, machine_mode);
static rtx frv_expand_set_builtin (enum insn_code, tree, rtx);
static rtx frv_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx frv_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx frv_expand_cut_builtin (enum insn_code, tree, rtx);
static rtx frv_expand_binopimm_builtin (enum insn_code, tree, rtx);
static rtx frv_expand_voidbinop_builtin (enum insn_code, tree);
static rtx frv_expand_int_void2arg (enum insn_code, tree);
static rtx frv_expand_prefetches (enum insn_code, tree);
static rtx frv_expand_voidtriop_builtin (enum insn_code, tree);
static rtx frv_expand_voidaccop_builtin (enum insn_code, tree);
static rtx frv_expand_mclracc_builtin (tree);
static rtx frv_expand_mrdacc_builtin (enum insn_code, tree);
static rtx frv_expand_mwtacc_builtin (enum insn_code, tree);
static rtx frv_expand_noargs_builtin (enum insn_code);
static void frv_split_iacc_move (rtx, rtx);
static rtx frv_emit_comparison (enum rtx_code, rtx, rtx);
static void frv_ifcvt_add_insn (rtx, rtx, int);
static rtx frv_ifcvt_rewrite_mem (rtx, machine_mode, rtx);
static rtx frv_ifcvt_load_value (rtx, rtx);
static unsigned int frv_insn_unit (rtx_insn *);
static bool frv_issues_to_branch_unit_p (rtx_insn *);
static int frv_cond_flags (rtx);
static bool frv_regstate_conflict_p (regstate_t, regstate_t);
static bool frv_registers_conflict_p (rtx);
static void frv_registers_update_1 (rtx, const_rtx, void *);
static void frv_registers_update (rtx);
static void frv_start_packet (void);
static void frv_start_packet_block (void);
static void frv_finish_packet (void (*) (void));
static bool frv_pack_insn_p (rtx_insn *);
static void frv_add_insn_to_packet (rtx_insn *);
static void frv_insert_nop_in_packet (rtx_insn *);
static bool frv_for_each_packet (void (*) (void));
static bool frv_sort_insn_group_1 (enum frv_insn_group,
				   unsigned int, unsigned int,
				   unsigned int, unsigned int,
				   state_t);
static int frv_compare_insns (const void *, const void *);
static void frv_sort_insn_group (enum frv_insn_group);
static void frv_reorder_packet (void);
static void frv_fill_unused_units (enum frv_insn_group);
static void frv_align_label (void);
static void frv_reorg_packet (void);
static void frv_register_nop (rtx);
static void frv_reorg (void);
static void frv_pack_insns (void);
static void frv_function_prologue (FILE *);
static void frv_function_epilogue (FILE *);
static bool frv_assemble_integer (rtx, unsigned, int);
static void frv_init_builtins (void);
static rtx frv_expand_builtin (tree, rtx, rtx, machine_mode, int);
static void frv_init_libfuncs (void);
static bool frv_in_small_data_p (const_tree);
static void frv_asm_output_mi_thunk
  (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree);
static void frv_setup_incoming_varargs (cumulative_args_t, machine_mode,
					tree, int *, int);
static rtx frv_expand_builtin_saveregs (void);
static void frv_expand_builtin_va_start (tree, rtx);
static bool frv_rtx_costs (rtx, machine_mode, int, int, int*, bool);
static int frv_register_move_cost (machine_mode, reg_class_t, reg_class_t);
static int frv_memory_move_cost (machine_mode, reg_class_t, bool);
static void frv_asm_out_constructor (rtx, int);
static void frv_asm_out_destructor (rtx, int);
static bool frv_function_symbol_referenced_p (rtx);
static bool frv_legitimate_constant_p (machine_mode, rtx);
static bool frv_cannot_force_const_mem (machine_mode, rtx);
static const char *unspec_got_name (int);
static void frv_output_const_unspec (FILE *, const struct frv_unspec *);
static bool frv_function_ok_for_sibcall (tree, tree);
static rtx frv_struct_value_rtx (tree, int);
static bool frv_must_pass_in_stack (machine_mode mode, const_tree type);
static int frv_arg_partial_bytes (cumulative_args_t, machine_mode,
				  tree, bool);
static rtx frv_function_arg (cumulative_args_t, machine_mode,
			     const_tree, bool);
static rtx frv_function_incoming_arg (cumulative_args_t, machine_mode,
				      const_tree, bool);
static void frv_function_arg_advance (cumulative_args_t, machine_mode,
				      const_tree, bool);
static unsigned int frv_function_arg_boundary (machine_mode, const_tree);
static void frv_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static reg_class_t frv_secondary_reload (bool, rtx, reg_class_t,
					 machine_mode,
					 secondary_reload_info *);
static bool frv_frame_pointer_required (void);
static bool frv_can_eliminate (const int, const int);
static void frv_conditional_register_usage (void);
static void frv_trampoline_init (rtx, tree, rtx);
static bool frv_class_likely_spilled_p (reg_class_t);
static unsigned int frv_hard_regno_nregs (unsigned int, machine_mode);
static bool frv_hard_regno_mode_ok (unsigned int, machine_mode);
static bool frv_modes_tieable_p (machine_mode, machine_mode);

/* Initialize the GCC target structure.  */
#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND frv_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS frv_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P frv_print_operand_punct_valid_p
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE frv_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE frv_function_epilogue
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER frv_assemble_integer
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE frv_option_override
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS frv_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN frv_expand_builtin
#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS frv_init_libfuncs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P frv_in_small_data_p
#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST frv_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST frv_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS frv_rtx_costs
#undef TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR frv_asm_out_constructor
#undef TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR frv_asm_out_destructor

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK frv_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE frv_issue_rate

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS frv_legitimize_address

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL frv_function_ok_for_sibcall
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P frv_legitimate_constant_p
#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM frv_cannot_force_const_mem

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX frv_struct_value_rtx
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK frv_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE hook_pass_by_reference_must_pass_in_stack
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES frv_arg_partial_bytes
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG frv_function_arg
#undef TARGET_FUNCTION_INCOMING_ARG
#define TARGET_FUNCTION_INCOMING_ARG frv_function_incoming_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE frv_function_arg_advance
#undef TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY frv_function_arg_boundary

#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS frv_expand_builtin_saveregs
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS frv_setup_incoming_varargs
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG frv_reorg

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START frv_expand_builtin_va_start

#if HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL frv_output_dwarf_dtprel
#endif

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P frv_class_likely_spilled_p

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD frv_secondary_reload

#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P frv_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED frv_frame_pointer_required

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE frv_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE frv_conditional_register_usage

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT frv_trampoline_init

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE frv_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE frv_libcall_value

#undef TARGET_HARD_REGNO_NREGS
#define TARGET_HARD_REGNO_NREGS frv_hard_regno_nregs
#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK frv_hard_regno_mode_ok
#undef TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P frv_modes_tieable_p
#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings

struct gcc_target targetm = TARGET_INITIALIZER;

#define FRV_SYMBOL_REF_TLS_P(RTX) \
  (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)


/* Any function call that satisfies the machine-independent
   requirements is eligible on FR-V.  */

static bool
frv_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
			     tree exp ATTRIBUTE_UNUSED)
{
  return true;
}

/* Return true if SYMBOL is a small data symbol and relocation RELOC
   can be used to access it directly in a load or store.  */

static FRV_INLINE bool
frv_small_data_reloc_p (rtx symbol, int reloc)
{
  return (GET_CODE (symbol) == SYMBOL_REF
	  && SYMBOL_REF_SMALL_P (symbol)
	  && (!TARGET_FDPIC || flag_pic == 1)
	  && (reloc == R_FRV_GOTOFF12 || reloc == R_FRV_GPREL12));
}

/* Return true if X is a valid relocation unspec.  If it is, fill in UNSPEC
   appropriately.  */
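/* Such a constant is a CONST wrapping an UNSPEC whose index is UNSPEC_GOT
   and whose operands are the symbol and the relocation number, possibly
   with a CONST_INT offset added inside the CONST.  The code below unwraps
   exactly that shape.  */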
bool
frv_const_unspec_p (rtx x, struct frv_unspec *unspec)
{
  if (GET_CODE (x) == CONST)
    {
      unspec->offset = 0;
      x = XEXP (x, 0);
      if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  unspec->offset += INTVAL (XEXP (x, 1));
	  x = XEXP (x, 0);
	}
      if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_GOT)
	{
	  unspec->symbol = XVECEXP (x, 0, 0);
	  unspec->reloc = INTVAL (XVECEXP (x, 0, 1));

	  if (unspec->offset == 0)
	    return true;

	  if (frv_small_data_reloc_p (unspec->symbol, unspec->reloc)
	      && unspec->offset > 0
	      && unspec->offset < g_switch_value)
	    return true;
	}
    }
  return false;
}

/* Decide whether we can force certain constants to memory.  If we
   decide we can't, the caller should be able to cope with it in
   another way.

   We never allow constants to be forced into memory for TARGET_FDPIC.
   This is necessary for several reasons:

   1. Since frv_legitimate_constant_p rejects constant pool addresses, the
      target-independent code will try to force them into the constant
      pool, thus leading to infinite recursion.

   2. We can never introduce new constant pool references during reload.
      Any such reference would require use of the pseudo FDPIC register.

   3. We can't represent a constant added to a function pointer (which is
      not the same as a pointer to a function+constant).

   4. In many cases, it's more efficient to calculate the constant in-line.  */

static bool
frv_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
			    rtx x ATTRIBUTE_UNUSED)
{
  return TARGET_FDPIC;
}

static int
frv_default_flags_for_cpu (void)
{
  switch (frv_cpu_type)
    {
    case FRV_CPU_GENERIC:
      return MASK_DEFAULT_FRV;

    case FRV_CPU_FR550:
      return MASK_DEFAULT_FR550;

    case FRV_CPU_FR500:
    case FRV_CPU_TOMCAT:
      return MASK_DEFAULT_FR500;

    case FRV_CPU_FR450:
      return MASK_DEFAULT_FR450;

    case FRV_CPU_FR405:
    case FRV_CPU_FR400:
      return MASK_DEFAULT_FR400;

    case FRV_CPU_FR300:
    case FRV_CPU_SIMPLE:
      return MASK_DEFAULT_SIMPLE;

    default:
      gcc_unreachable ();
    }
}

/* Implement TARGET_OPTION_OVERRIDE.  */

static void
frv_option_override (void)
{
  int regno;
  unsigned int i;

  target_flags |= (frv_default_flags_for_cpu () & ~target_flags_explicit);

  /* -mlibrary-pic sets -fPIC and -G0 and also suppresses warnings from the
     linker about linking pic and non-pic code.  */
  if (TARGET_LIBPIC)
    {
      if (!flag_pic)		/* -fPIC */
	flag_pic = 2;

      if (!global_options_set.x_g_switch_value)	/* -G0 */
	{
	  g_switch_value = 0;
	}
    }

  /* A C expression whose value is a register class containing hard
     register REGNO.  In general there is more than one such class;
     choose a class which is "minimal", meaning that no smaller class
     also contains the register.  */
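  /* For example, GR8 is placed in the singleton class GR8_REGS rather than
     GPR_REGS, and a quad-aligned GPR goes in QUAD_REGS in preference to
     EVEN_REGS or GPR_REGS.  */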
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    {
      enum reg_class rclass;

      if (GPR_P (regno))
	{
	  int gpr_reg = regno - GPR_FIRST;

	  if (gpr_reg == GR8_REG)
	    rclass = GR8_REGS;

	  else if (gpr_reg == GR9_REG)
	    rclass = GR9_REGS;

	  else if (gpr_reg == GR14_REG)
	    rclass = FDPIC_FPTR_REGS;

	  else if (gpr_reg == FDPIC_REGNO)
	    rclass = FDPIC_REGS;

	  else if ((gpr_reg & 3) == 0)
	    rclass = QUAD_REGS;

	  else if ((gpr_reg & 1) == 0)
	    rclass = EVEN_REGS;

	  else
	    rclass = GPR_REGS;
	}

      else if (FPR_P (regno))
	{
	  int fpr_reg = regno - GPR_FIRST;
	  if ((fpr_reg & 3) == 0)
	    rclass = QUAD_FPR_REGS;

	  else if ((fpr_reg & 1) == 0)
	    rclass = FEVEN_REGS;

	  else
	    rclass = FPR_REGS;
	}

      else if (regno == LR_REGNO)
	rclass = LR_REG;

      else if (regno == LCR_REGNO)
	rclass = LCR_REG;

      else if (ICC_P (regno))
	rclass = ICC_REGS;

      else if (FCC_P (regno))
	rclass = FCC_REGS;

      else if (ICR_P (regno))
	rclass = ICR_REGS;

      else if (FCR_P (regno))
	rclass = FCR_REGS;

      else if (ACC_P (regno))
	{
	  int r = regno - ACC_FIRST;
	  if ((r & 3) == 0)
	    rclass = QUAD_ACC_REGS;
	  else if ((r & 1) == 0)
	    rclass = EVEN_ACC_REGS;
	  else
	    rclass = ACC_REGS;
	}

      else if (ACCG_P (regno))
	rclass = ACCG_REGS;

      else
	rclass = NO_REGS;

      regno_reg_class[regno] = rclass;
    }

  /* Check for small data option */
  if (!global_options_set.x_g_switch_value && !TARGET_LIBPIC)
    g_switch_value = SDATA_DEFAULT_SIZE;

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See frv_assemble_integer for details.  */
  if (flag_pic || TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  if ((target_flags_explicit & MASK_LINKED_FP) == 0)
    target_flags |= MASK_LINKED_FP;

  if ((target_flags_explicit & MASK_OPTIMIZE_MEMBAR) == 0)
    target_flags |= MASK_OPTIMIZE_MEMBAR;

  for (i = 0; i < ARRAY_SIZE (frv_unit_names); i++)
    frv_unit_codes[i] = get_cpu_unit_code (frv_unit_names[i]);

  for (i = 0; i < ARRAY_SIZE (frv_type_to_unit); i++)
    frv_type_to_unit[i] = ARRAY_SIZE (frv_unit_codes);

  init_machine_status = frv_init_machine_status;
}


/* Return true if NAME (a STRING_CST node) begins with PREFIX.  */

static int
frv_string_begins_with (const char *name, const char *prefix)
{
  const int prefix_len = strlen (prefix);

  /* Remember: NAME's length includes the null terminator.  */
  return (strncmp (name, prefix, prefix_len) == 0);
}

/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  */

static void
frv_conditional_register_usage (void)
{
  int i;

  for (i = GPR_FIRST + NUM_GPRS; i <= GPR_LAST; i++)
    fixed_regs[i] = call_used_regs[i] = 1;

  for (i = FPR_FIRST + NUM_FPRS; i <= FPR_LAST; i++)
    fixed_regs[i] = call_used_regs[i] = 1;

  /* Reserve the registers used for conditional execution.  At present, we need
     1 ICC and 1 ICR register.  */
  fixed_regs[ICC_TEMP] = call_used_regs[ICC_TEMP] = 1;
  fixed_regs[ICR_TEMP] = call_used_regs[ICR_TEMP] = 1;

  if (TARGET_FIXED_CC)
    {
      fixed_regs[ICC_FIRST] = call_used_regs[ICC_FIRST] = 1;
      fixed_regs[FCC_FIRST] = call_used_regs[FCC_FIRST] = 1;
      fixed_regs[ICR_FIRST] = call_used_regs[ICR_FIRST] = 1;
      fixed_regs[FCR_FIRST] = call_used_regs[FCR_FIRST] = 1;
    }

  if (TARGET_FDPIC)
    fixed_regs[GPR_FIRST + 16] = fixed_regs[GPR_FIRST + 17] =
      call_used_regs[GPR_FIRST + 16] = call_used_regs[GPR_FIRST + 17] = 0;

#if 0
  /* If -fpic, SDA_BASE_REG is the PIC register.  */
  if (g_switch_value == 0 && !flag_pic)
    fixed_regs[SDA_BASE_REG] = call_used_regs[SDA_BASE_REG] = 0;

  if (!flag_pic)
    fixed_regs[PIC_REGNO] = call_used_regs[PIC_REGNO] = 0;
#endif
}


/*
 * Compute the stack frame layout
 *
 * Register setup:
 * +---------------+-----------------------+-----------------------+
 * |Register       |type                   |caller-save/callee-save|
 * +---------------+-----------------------+-----------------------+
 * |GR0            |Zero register          |          -            |
 * |GR1            |Stack pointer(SP)      |          -            |
 * |GR2            |Frame pointer(FP)      |          -            |
 * |GR3            |Hidden parameter       |     caller save       |
 * |GR4-GR7        |         -             |     caller save       |
 * |GR8-GR13       |Argument register      |     caller save       |
 * |GR14-GR15      |         -             |     caller save       |
 * |GR16-GR31      |         -             |     callee save       |
 * |GR32-GR47      |         -             |     caller save       |
 * |GR48-GR63      |         -             |     callee save       |
 * |FR0-FR15       |         -             |     caller save       |
 * |FR16-FR31      |         -             |     callee save       |
 * |FR32-FR47      |         -             |     caller save       |
 * |FR48-FR63      |         -             |     callee save       |
 * +---------------+-----------------------+-----------------------+
 *
 * Stack frame setup:
 * Low
 *     SP-> |-----------------------------------|
 *          | Argument area                     |
 *          |-----------------------------------|
 *          | Register save area                |
 *          |-----------------------------------|
 *          | Local variable save area          |
 *     FP-> |-----------------------------------|
 *          | Old FP                            |
 *          |-----------------------------------|
 *          | Hidden parameter save area        |
 *          |-----------------------------------|
 *          | Return address(LR) storage area   |
 *          |-----------------------------------|
 *          | Padding for alignment             |
 *          |-----------------------------------|
 *          | Register argument area            |
 * OLD SP-> |-----------------------------------|
 *          | Parameter area                    |
 *          |-----------------------------------|
 * High
 *
 * Argument area/Parameter area:
 *
 * When a function is called, this area is used for argument transfer.  When
 * the argument is set up by the caller function, this area is referred to as
 * the argument area.  When the argument is referenced by the callee function,
 * this area is referred to as the parameter area.  The area is allocated when
 * all arguments cannot be placed on the argument register at the time of
 * argument transfer.
 *
 * Register save area:
 *
 * This is a register save area that must be guaranteed for the caller
 * function.  This area is not secured when the register save operation is not
 * needed.
 *
 * Local variable save area:
 *
 * This is the area for local variables and temporary variables.
 *
 * Old FP:
 *
 * This area stores the FP value of the caller function.
 *
 * Hidden parameter save area:
 *
 * This area stores the start address of the return value storage
 * area for a struct/union return function.
 * When a struct/union is used as the return value, the caller
 * function stores the return value storage area start address in
 * register GR3 and passes it to the callee function.
 * The callee function interprets the address stored in the GR3
 * as the return value storage area start address.
 * When register GR3 needs to be saved into memory, the callee
 * function saves it in the hidden parameter save area.  This
 * area is not secured when the save operation is not needed.
 *
 * Return address(LR) storage area:
 *
 * This area saves the LR.  The LR stores the address of a return to the caller
 * function for the purpose of function calling.
 *
 * Argument register area:
 *
 * This area saves the argument register.  This area is not secured when the
 * save operation is not needed.
 *
 * Argument:
 *
 * Arguments, the count of which equals the count of argument registers (6
 * words), are positioned in registers GR8 to GR13 and delivered to the callee
 * function.  When a struct/union return function is called, the return value
 * area address is stored in register GR3.  Arguments not placed in the
 * argument registers will be stored in the stack argument area for transfer
 * purposes.  When an 8-byte type argument is to be delivered using registers,
 * it is divided into two and placed in two registers for transfer.  When
 * argument registers must be saved to memory, the callee function secures an
 * argument register save area in the stack.  In this case, a continuous
 * argument register save area must be established in the parameter area.  The
 * argument register save area must be allocated as needed to cover the size of
 * the argument register to be saved.  If the function has a variable count of
 * arguments, it saves all argument registers in the argument register save
 * area.
 *
 * Argument Extension Format:
 *
 * When an argument is to be stored in the stack, its type is converted to an
 * extended type in accordance with the individual argument type.  The argument
 * is freed by the caller function after the return from the callee function is
 * made.
 *
 * +-----------------------+---------------+------------------------+
 * | Argument Type         |Extended Type  |Stack Storage Size(byte)|
 * +-----------------------+---------------+------------------------+
 * |char                   |int            |          4             |
 * |signed char            |int            |          4             |
 * |unsigned char          |int            |          4             |
 * |[signed] short int     |int            |          4             |
 * |unsigned short int     |int            |          4             |
 * |[signed] int           |No extension   |          4             |
 * |unsigned int           |No extension   |          4             |
 * |[signed] long int      |No extension   |          4             |
 * |unsigned long int      |No extension   |          4             |
 * |[signed] long long int |No extension   |          8             |
 * |unsigned long long int |No extension   |          8             |
 * |float                  |double         |          8             |
 * |double                 |No extension   |          8             |
 * |long double            |No extension   |          8             |
 * |pointer                |No extension   |          4             |
 * |struct/union           |-              |          4 (*1)        |
 * +-----------------------+---------------+------------------------+
 *
 * When a struct/union is to be delivered as an argument, the caller copies it
 * to the local variable area and delivers the address of that area.
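 *
 * For example, a char argument that must go on the stack is therefore
 * promoted to int and occupies a full 4-byte slot, and a float is promoted
 * to double and occupies 8 bytes, as shown in the table above.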
 *
 * Return Value:
 *
 * +-------------------------------+----------------------+
 * |Return Value Type              |Return Value Interface|
 * +-------------------------------+----------------------+
 * |void                           |None                  |
 * |[signed|unsigned] char         |GR8                   |
 * |[signed|unsigned] short int    |GR8                   |
 * |[signed|unsigned] int          |GR8                   |
 * |[signed|unsigned] long int     |GR8                   |
 * |pointer                        |GR8                   |
 * |[signed|unsigned] long long int|GR8 & GR9             |
 * |float                          |GR8                   |
 * |double                         |GR8 & GR9             |
 * |long double                    |GR8 & GR9             |
 * |struct/union                   |(*1)                  |
 * +-------------------------------+----------------------+
 *
 * When a struct/union is used as the return value, the caller function stores
 * the start address of the return value storage area into GR3 and then passes
 * it to the callee function.  The callee function interprets GR3 as the start
 * address of the return value storage area.  When this address needs to be
 * saved in memory, the callee function secures the hidden parameter save area
 * and saves the address in that area.
 */

frv_stack_t *
frv_stack_info (void)
{
  static frv_stack_t info, zero_info;
  frv_stack_t *info_ptr = &info;
  tree fndecl = current_function_decl;
  int varargs_p = 0;
  tree cur_arg;
  tree next_arg;
  int range;
  int alignment;
  int offset;

  /* If we've already calculated the values and reload is complete,
     just return now.  */
  if (frv_stack_cache)
    return frv_stack_cache;

  /* Zero all fields.  */
  info = zero_info;

  /* Set up the register range information.  */
  info_ptr->regs[STACK_REGS_GPR].name = "gpr";
  info_ptr->regs[STACK_REGS_GPR].first = LAST_ARG_REGNUM + 1;
  info_ptr->regs[STACK_REGS_GPR].last = GPR_LAST;
  info_ptr->regs[STACK_REGS_GPR].dword_p = TRUE;

  info_ptr->regs[STACK_REGS_FPR].name = "fpr";
  info_ptr->regs[STACK_REGS_FPR].first = FPR_FIRST;
  info_ptr->regs[STACK_REGS_FPR].last = FPR_LAST;
  info_ptr->regs[STACK_REGS_FPR].dword_p = TRUE;

  info_ptr->regs[STACK_REGS_LR].name = "lr";
  info_ptr->regs[STACK_REGS_LR].first = LR_REGNO;
  info_ptr->regs[STACK_REGS_LR].last = LR_REGNO;
  info_ptr->regs[STACK_REGS_LR].special_p = 1;

  info_ptr->regs[STACK_REGS_CC].name = "cc";
  info_ptr->regs[STACK_REGS_CC].first = CC_FIRST;
  info_ptr->regs[STACK_REGS_CC].last = CC_LAST;
  info_ptr->regs[STACK_REGS_CC].field_p = TRUE;

  info_ptr->regs[STACK_REGS_LCR].name = "lcr";
  info_ptr->regs[STACK_REGS_LCR].first = LCR_REGNO;
  info_ptr->regs[STACK_REGS_LCR].last = LCR_REGNO;

  info_ptr->regs[STACK_REGS_STDARG].name = "stdarg";
  info_ptr->regs[STACK_REGS_STDARG].first = FIRST_ARG_REGNUM;
  info_ptr->regs[STACK_REGS_STDARG].last = LAST_ARG_REGNUM;
  info_ptr->regs[STACK_REGS_STDARG].dword_p = 1;
  info_ptr->regs[STACK_REGS_STDARG].special_p = 1;

  info_ptr->regs[STACK_REGS_STRUCT].name = "struct";
  info_ptr->regs[STACK_REGS_STRUCT].first = FRV_STRUCT_VALUE_REGNUM;
  info_ptr->regs[STACK_REGS_STRUCT].last = FRV_STRUCT_VALUE_REGNUM;
  info_ptr->regs[STACK_REGS_STRUCT].special_p = 1;

  info_ptr->regs[STACK_REGS_FP].name = "fp";
  info_ptr->regs[STACK_REGS_FP].first = FRAME_POINTER_REGNUM;
  info_ptr->regs[STACK_REGS_FP].last = FRAME_POINTER_REGNUM;
  info_ptr->regs[STACK_REGS_FP].special_p = 1;

  /* Determine if this is a stdarg function.  If so, allocate space to store
     the 6 arguments.  */
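  /* The six argument registers are the GR8 to GR13 range shown in the
     register table above (FIRST_ARG_REGNUM to LAST_ARG_REGNUM).  */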
  if (cfun->stdarg)
    varargs_p = 1;

  else
    {
      /* Find the last argument, and see if it is __builtin_va_alist.  */
      for (cur_arg = DECL_ARGUMENTS (fndecl); cur_arg != (tree)0;
	   cur_arg = next_arg)
	{
	  next_arg = DECL_CHAIN (cur_arg);
	  if (next_arg == (tree)0)
	    {
	      if (DECL_NAME (cur_arg)
		  && !strcmp (IDENTIFIER_POINTER (DECL_NAME (cur_arg)),
			      "__builtin_va_alist"))
		varargs_p = 1;

	      break;
	    }
	}
    }

  /* Iterate over all of the register ranges.  */
  for (range = 0; range < STACK_REGS_MAX; range++)
    {
      frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
      int first = reg_ptr->first;
      int last = reg_ptr->last;
      int size_1word = 0;
      int size_2words = 0;
      int regno;

      /* Calculate which registers need to be saved & save area size.  */
      switch (range)
	{
	default:
	  for (regno = first; regno <= last; regno++)
	    {
	      if ((df_regs_ever_live_p (regno) && !call_used_regs[regno])
		  || (crtl->calls_eh_return
		      && (regno >= FIRST_EH_REGNUM && regno <= LAST_EH_REGNUM))
		  || (!TARGET_FDPIC && flag_pic
		      && crtl->uses_pic_offset_table && regno == PIC_REGNO))
		{
		  info_ptr->save_p[regno] = REG_SAVE_1WORD;
		  size_1word += UNITS_PER_WORD;
		}
	    }
	  break;

	  /* Calculate whether we need to create a frame after everything else
	     has been processed.  */
	case STACK_REGS_FP:
	  break;

	case STACK_REGS_LR:
	  if (df_regs_ever_live_p (LR_REGNO)
	      || profile_flag
	      /* This is set for __builtin_return_address, etc.  */
	      || cfun->machine->frame_needed
	      || (TARGET_LINKED_FP && frame_pointer_needed)
	      || (!TARGET_FDPIC && flag_pic
		  && crtl->uses_pic_offset_table))
	    {
	      info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
	      size_1word += UNITS_PER_WORD;
	    }
	  break;

	case STACK_REGS_STDARG:
	  if (varargs_p)
	    {
	      /* If this is a stdarg function with a non-variadic
		 argument split between registers and the stack,
		 adjust the saved registers downward.  */
	      last -= (ADDR_ALIGN (crtl->args.pretend_args_size, UNITS_PER_WORD)
		       / UNITS_PER_WORD);

	      for (regno = first; regno <= last; regno++)
		{
		  info_ptr->save_p[regno] = REG_SAVE_1WORD;
		  size_1word += UNITS_PER_WORD;
		}

	      info_ptr->stdarg_size = size_1word;
	    }
	  break;

	case STACK_REGS_STRUCT:
	  if (cfun->returns_struct)
	    {
	      info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
	      size_1word += UNITS_PER_WORD;
	    }
	  break;
	}


      if (size_1word)
	{
	  /* If this is a field, it only takes one word.  */
	  if (reg_ptr->field_p)
	    size_1word = UNITS_PER_WORD;

	  /* Determine which register pairs can be saved together.  */
	  else if (reg_ptr->dword_p && TARGET_DWORD)
	    {
	      for (regno = first; regno < last; regno += 2)
		{
		  if (info_ptr->save_p[regno] && info_ptr->save_p[regno+1])
		    {
		      size_2words += 2 * UNITS_PER_WORD;
		      size_1word -= 2 * UNITS_PER_WORD;
		      info_ptr->save_p[regno] = REG_SAVE_2WORDS;
		      info_ptr->save_p[regno+1] = REG_SAVE_NO_SAVE;
		    }
		}
	    }

	  reg_ptr->size_1word = size_1word;
	  reg_ptr->size_2words = size_2words;

	  if (! reg_ptr->special_p)
	    {
	      info_ptr->regs_size_1word += size_1word;
	      info_ptr->regs_size_2words += size_2words;
	    }
	}
    }

  /* Set up the sizes of each field in the frame body, making the sizes
     of each be divisible by the size of a dword if dword operations might
     be used, or the size of a word otherwise.  */
  alignment = (TARGET_DWORD? 2 * UNITS_PER_WORD : UNITS_PER_WORD);

  info_ptr->parameter_size = ADDR_ALIGN (crtl->outgoing_args_size, alignment);
  info_ptr->regs_size = ADDR_ALIGN (info_ptr->regs_size_2words
				    + info_ptr->regs_size_1word,
				    alignment);
  info_ptr->vars_size = ADDR_ALIGN (get_frame_size (), alignment);

  info_ptr->pretend_size = crtl->args.pretend_args_size;

  /* Work out the size of the frame, excluding the header.  Both the frame
     body and register parameter area will be dword-aligned.  */
  info_ptr->total_size
    = (ADDR_ALIGN (info_ptr->parameter_size
		   + info_ptr->regs_size
		   + info_ptr->vars_size,
		   2 * UNITS_PER_WORD)
       + ADDR_ALIGN (info_ptr->pretend_size
		     + info_ptr->stdarg_size,
		     2 * UNITS_PER_WORD));

  /* See if we need to create a frame at all, if so add header area.  */
  if (info_ptr->total_size > 0
      || frame_pointer_needed
      || info_ptr->regs[STACK_REGS_LR].size_1word > 0
      || info_ptr->regs[STACK_REGS_STRUCT].size_1word > 0)
    {
      offset = info_ptr->parameter_size;
      info_ptr->header_size = 4 * UNITS_PER_WORD;
      info_ptr->total_size += 4 * UNITS_PER_WORD;

      /* Calculate the offsets to save normal register pairs.  */
      for (range = 0; range < STACK_REGS_MAX; range++)
	{
	  frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
	  if (! reg_ptr->special_p)
	    {
	      int first = reg_ptr->first;
	      int last = reg_ptr->last;
	      int regno;

	      for (regno = first; regno <= last; regno++)
		if (info_ptr->save_p[regno] == REG_SAVE_2WORDS
		    && regno != FRAME_POINTER_REGNUM
		    && (regno < FIRST_ARG_REGNUM
			|| regno > LAST_ARG_REGNUM))
		  {
		    info_ptr->reg_offset[regno] = offset;
		    offset += 2 * UNITS_PER_WORD;
		  }
	    }
	}

      /* Calculate the offsets to save normal single registers.  */
      for (range = 0; range < STACK_REGS_MAX; range++)
	{
	  frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
	  if (! reg_ptr->special_p)
	    {
	      int first = reg_ptr->first;
	      int last = reg_ptr->last;
	      int regno;

	      for (regno = first; regno <= last; regno++)
		if (info_ptr->save_p[regno] == REG_SAVE_1WORD
		    && regno != FRAME_POINTER_REGNUM
		    && (regno < FIRST_ARG_REGNUM
			|| regno > LAST_ARG_REGNUM))
		  {
		    info_ptr->reg_offset[regno] = offset;
		    offset += UNITS_PER_WORD;
		  }
	    }
	}

      /* Calculate the offset to save the local variables at.  */
      offset = ADDR_ALIGN (offset, alignment);
      if (info_ptr->vars_size)
	{
	  info_ptr->vars_offset = offset;
	  offset += info_ptr->vars_size;
	}

      /* Align header to a dword-boundary.  */
      offset = ADDR_ALIGN (offset, 2 * UNITS_PER_WORD);

      /* Calculate the offsets in the fixed frame.  */
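      /* From low to high, the header holds the old frame pointer at OFFSET,
	 the hidden struct-value register at OFFSET + UNITS_PER_WORD and the
	 link register at OFFSET + 2 * UNITS_PER_WORD, matching the frame
	 diagram above.  */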
      info_ptr->save_p[FRAME_POINTER_REGNUM] = REG_SAVE_1WORD;
      info_ptr->reg_offset[FRAME_POINTER_REGNUM] = offset;
      info_ptr->regs[STACK_REGS_FP].size_1word = UNITS_PER_WORD;

      info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
      info_ptr->reg_offset[LR_REGNO] = offset + 2*UNITS_PER_WORD;
      info_ptr->regs[STACK_REGS_LR].size_1word = UNITS_PER_WORD;

      if (cfun->returns_struct)
	{
	  info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
	  info_ptr->reg_offset[FRV_STRUCT_VALUE_REGNUM] = offset + UNITS_PER_WORD;
	  info_ptr->regs[STACK_REGS_STRUCT].size_1word = UNITS_PER_WORD;
	}

      /* Calculate the offsets to store the arguments passed in registers
	 for stdarg functions.  The register pairs are first and the single
	 register if any is last.  The register save area starts on a
	 dword-boundary.  */
      if (info_ptr->stdarg_size)
	{
	  int first = info_ptr->regs[STACK_REGS_STDARG].first;
	  int last  = info_ptr->regs[STACK_REGS_STDARG].last;
	  int regno;

	  /* Skip the header.  */
	  offset += 4 * UNITS_PER_WORD;
	  for (regno = first; regno <= last; regno++)
	    {
	      if (info_ptr->save_p[regno] == REG_SAVE_2WORDS)
		{
		  info_ptr->reg_offset[regno] = offset;
		  offset += 2 * UNITS_PER_WORD;
		}
	      else if (info_ptr->save_p[regno] == REG_SAVE_1WORD)
		{
		  info_ptr->reg_offset[regno] = offset;
		  offset += UNITS_PER_WORD;
		}
	    }
	}
    }

  if (reload_completed)
    frv_stack_cache = info_ptr;

  return info_ptr;
}


/* Print the information about the frv stack offsets, etc. when debugging.  */

void
frv_debug_stack (frv_stack_t *info)
{
  int range;

  if (!info)
    info = frv_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  fprintf (stderr, "\ttotal_size\t= %6d\n", info->total_size);
  fprintf (stderr, "\tvars_size\t= %6d\n", info->vars_size);
  fprintf (stderr, "\tparam_size\t= %6d\n", info->parameter_size);
  fprintf (stderr, "\tregs_size\t= %6d, 1w = %3d, 2w = %3d\n",
	   info->regs_size, info->regs_size_1word, info->regs_size_2words);

  fprintf (stderr, "\theader_size\t= %6d\n", info->header_size);
  fprintf (stderr, "\tpretend_size\t= %6d\n", info->pretend_size);
  fprintf (stderr, "\tvars_offset\t= %6d\n", info->vars_offset);
  fprintf (stderr, "\tregs_offset\t= %6d\n", info->regs_offset);

  for (range = 0; range < STACK_REGS_MAX; range++)
    {
      frv_stack_regs_t *regs = &(info->regs[range]);
      if ((regs->size_1word + regs->size_2words) > 0)
	{
	  int first = regs->first;
	  int last = regs->last;
	  int regno;

	  fprintf (stderr, "\t%s\tsize\t= %6d, 1w = %3d, 2w = %3d, save =",
		   regs->name, regs->size_1word + regs->size_2words,
		   regs->size_1word, regs->size_2words);

	  for (regno = first; regno <= last; regno++)
	    {
	      if (info->save_p[regno] == REG_SAVE_1WORD)
		fprintf (stderr, " %s (%d)", reg_names[regno],
			 info->reg_offset[regno]);

	      else if (info->save_p[regno] == REG_SAVE_2WORDS)
		fprintf (stderr, " %s-%s (%d)", reg_names[regno],
			 reg_names[regno+1], info->reg_offset[regno]);
	    }

	  fputc ('\n', stderr);
	}
    }

  fflush (stderr);
}




/* Used during final to control the packing of insns.  The value is
   1 if the current instruction should be packed with the next one,
   0 if it shouldn't or -1 if packing is disabled altogether.  */

static int frv_insn_packing_flag;

/* True if the current function contains a far jump.  */

static int
frv_function_contains_far_jump (void)
{
  rtx_insn *insn = get_insns ();
  while (insn != NULL
	 && !(JUMP_P (insn)
	      && get_attr_far_jump (insn) == FAR_JUMP_YES))
    insn = NEXT_INSN (insn);
  return (insn != NULL);
}

/* For the FRV, this function makes sure that a function with far jumps
   will return correctly.  It also does the VLIW packing.  */

static void
frv_function_prologue (FILE *file)
{
  /* If no frame was created, check whether the function uses a call
     instruction to implement a far jump.  If so, save the link in gr3 and
     replace all returns to LR with returns to GR3.  GR3 is used because it
     is call-clobbered, because it is not available to the register allocator,
     and because all functions that take a hidden argument pointer will have
     a stack frame.  */
  if (frv_stack_info ()->total_size == 0 && frv_function_contains_far_jump ())
    {
      rtx_insn *insn;

      /* Just to check that the above comment is true.  */
      gcc_assert (!df_regs_ever_live_p (GPR_FIRST + 3));

      /* Generate the instruction that saves the link register.  */
      fprintf (file, "\tmovsg lr,gr3\n");

      /* Replace the LR with GR3 in *return_internal patterns.  The insn
	 will now return using jmpl @(gr3,0) rather than bralr.  We cannot
	 simply emit a different assembly directive because bralr and jmpl
	 execute in different units.  */
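      /* The loop below finds each such return pattern (a PARALLEL whose
	 second element is a USE of LR) and redirects that USE to GR3.  */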
      for (insn = get_insns (); insn != NULL; insn = NEXT_INSN (insn))
	if (JUMP_P (insn))
	  {
	    rtx pattern = PATTERN (insn);
	    if (GET_CODE (pattern) == PARALLEL
		&& XVECLEN (pattern, 0) >= 2
		&& GET_CODE (XVECEXP (pattern, 0, 0)) == RETURN
		&& GET_CODE (XVECEXP (pattern, 0, 1)) == USE)
	      {
		rtx address = XEXP (XVECEXP (pattern, 0, 1), 0);
		if (GET_CODE (address) == REG && REGNO (address) == LR_REGNO)
		  SET_REGNO (address, GPR_FIRST + 3);
	      }
	  }
    }

  frv_pack_insns ();

  /* Allow the garbage collector to free the nops created by frv_reorg.  */
  memset (frv_nops, 0, sizeof (frv_nops));
}


/* Return the next available temporary register in a given class.  */

static rtx
frv_alloc_temp_reg (
     frv_tmp_reg_t *info,	/* which registers are available */
     enum reg_class rclass,	/* register class desired */
     machine_mode mode,		/* mode to allocate register with */
     int mark_as_used,		/* register not available after allocation */
     int no_abort)		/* return NULL instead of aborting */
{
  int regno = info->next_reg[ (int)rclass ];
  int orig_regno = regno;
  HARD_REG_SET *reg_in_class = &reg_class_contents[ (int)rclass ];
  int i, nr;

  for (;;)
    {
      if (TEST_HARD_REG_BIT (*reg_in_class, regno)
	  && TEST_HARD_REG_BIT (info->regs, regno))
	break;

      if (++regno >= FIRST_PSEUDO_REGISTER)
	regno = 0;
      if (regno == orig_regno)
	{
	  gcc_assert (no_abort);
	  return NULL_RTX;
	}
    }

  nr = hard_regno_nregs (regno, mode);
  info->next_reg[ (int)rclass ] = regno + nr;

  if (mark_as_used)
    for (i = 0; i < nr; i++)
      CLEAR_HARD_REG_BIT (info->regs, regno+i);

  return gen_rtx_REG (mode, regno);
}


/* Return an rtx with the value OFFSET, which will either be a register or a
   signed 12-bit integer.  It can be used as the second operand in an "add"
   instruction, or as the index in a load or store.

   The function returns a constant rtx if OFFSET is small enough, otherwise
   it loads the constant into register OFFSET_REGNO and returns that.  */
static rtx
frv_frame_offset_rtx (int offset)
{
  rtx offset_rtx = GEN_INT (offset);
  if (IN_RANGE (offset, -2048, 2047))
    return offset_rtx;
  else
    {
      rtx reg_rtx = gen_rtx_REG (SImode, OFFSET_REGNO);
      if (IN_RANGE (offset, -32768, 32767))
	emit_insn (gen_movsi (reg_rtx, offset_rtx));
      else
	{
	  emit_insn (gen_movsi_high (reg_rtx, offset_rtx));
	  emit_insn (gen_movsi_lo_sum (reg_rtx, offset_rtx));
	}
      return reg_rtx;
    }
}

/* Generate (mem:MODE (plus:Pmode BASE (frv_frame_offset OFFSET)))).  The
   prologue and epilogue use such expressions to access the stack.  */
static rtx
frv_frame_mem (machine_mode mode, rtx base, int offset)
{
  return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode,
					  base,
					  frv_frame_offset_rtx (offset)));
}

/* Generate a frame-related expression:

	(set (mem (plus (sp) (const_int OFFSET))) REG).

   Such expressions are used in FRAME_RELATED_EXPR notes for more complex
   instructions.  Marking the expressions as frame-related is superfluous if
   the note contains just a single set.  But if the note contains a PARALLEL
   or SEQUENCE that has several sets, each set must be individually marked
   as frame-related.  */
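/* frv_frame_access below uses a PARALLEL of two such sets as the
   REG_FRAME_RELATED_EXPR note for a DImode save.  */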
static rtx
frv_dwarf_store (rtx reg, int offset)
{
  rtx set = gen_rtx_SET (gen_rtx_MEM (GET_MODE (reg),
				      plus_constant (Pmode, stack_pointer_rtx,
						     offset)),
			 reg);
  RTX_FRAME_RELATED_P (set) = 1;
  return set;
}

/* Emit a frame-related instruction whose pattern is PATTERN.  The
   instruction is the last in a sequence that cumulatively performs the
   operation described by DWARF_PATTERN.  The instruction is marked as
   frame-related and has a REG_FRAME_RELATED_EXPR note containing
   DWARF_PATTERN.  */
static void
frv_frame_insn (rtx pattern, rtx dwarf_pattern)
{
  rtx insn = emit_insn (pattern);
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR,
				      dwarf_pattern,
				      REG_NOTES (insn));
}

/* Emit instructions that transfer REG to or from the memory location (sp +
   STACK_OFFSET).  The register is stored in memory if ACCESSOR->OP is
   FRV_STORE and loaded if it is FRV_LOAD.  Only the prologue uses this
   function to store registers and only the epilogue uses it to load them.

   The caller sets up ACCESSOR so that BASE is equal to (sp + BASE_OFFSET).
   The generated instruction will use BASE as its base register.  BASE may
   simply be the stack pointer, but if several accesses are being made to a
   region far away from the stack pointer, it may be more efficient to set
   up a temporary instead.

   Store instructions will be frame-related and will be annotated with the
   overall effect of the store.  Load instructions will be followed by a
   (use) to prevent later optimizations from zapping them.

   The function takes care of the moves to and from SPRs, using TEMP_REGNO
   as a temporary in such cases.  */
static void
frv_frame_access (frv_frame_accessor_t *accessor, rtx reg, int stack_offset)
{
  machine_mode mode = GET_MODE (reg);
  rtx mem = frv_frame_mem (mode,
			   accessor->base,
			   stack_offset - accessor->base_offset);

  if (accessor->op == FRV_LOAD)
    {
      if (SPR_P (REGNO (reg)))
	{
	  rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
	  emit_insn (gen_rtx_SET (temp, mem));
	  emit_insn (gen_rtx_SET (reg, temp));
	}
      else
	{
	  /* We cannot use reg+reg addressing for DImode access.  */
	  if (mode == DImode
	      && GET_CODE (XEXP (mem, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG
	      && GET_CODE (XEXP (XEXP (mem, 0), 1)) == REG)
	    {
	      rtx temp = gen_rtx_REG (SImode, TEMP_REGNO);

	      emit_move_insn (temp,
			      gen_rtx_PLUS (SImode, XEXP (XEXP (mem, 0), 0),
					    XEXP (XEXP (mem, 0), 1)));
	      mem = gen_rtx_MEM (DImode, temp);
	    }
	  emit_insn (gen_rtx_SET (reg, mem));
	}
      emit_use (reg);
    }
  else
    {
      if (SPR_P (REGNO (reg)))
	{
	  rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
	  emit_insn (gen_rtx_SET (temp, reg));
	  frv_frame_insn (gen_rtx_SET (mem, temp),
			  frv_dwarf_store (reg, stack_offset));
	}
      else if (mode == DImode)
	{
	  /* For DImode saves, the dwarf2 version needs to be a SEQUENCE
	     with a separate save for each register.  */
	  rtx reg1 = gen_rtx_REG (SImode, REGNO (reg));
	  rtx reg2 = gen_rtx_REG (SImode, REGNO (reg) + 1);
	  rtx set1 = frv_dwarf_store (reg1, stack_offset);
	  rtx set2 = frv_dwarf_store (reg2, stack_offset + 4);

	  /* Also we cannot use reg+reg addressing.  */
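	  /* As in the load case above, form the address in TEMP_REGNO and
	     use that single register as the base instead.  */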
*/ 1644 if (GET_CODE (XEXP (mem, 0)) == PLUS 1645 && GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG 1646 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == REG) 1647 { 1648 rtx temp = gen_rtx_REG (SImode, TEMP_REGNO); 1649 emit_move_insn (temp, 1650 gen_rtx_PLUS (SImode, XEXP (XEXP (mem, 0), 0), 1651 XEXP (XEXP (mem, 0), 1))); 1652 mem = gen_rtx_MEM (DImode, temp); 1653 } 1654 1655 frv_frame_insn (gen_rtx_SET (mem, reg), 1656 gen_rtx_PARALLEL (VOIDmode, 1657 gen_rtvec (2, set1, set2))); 1658 } 1659 else 1660 frv_frame_insn (gen_rtx_SET (mem, reg), 1661 frv_dwarf_store (reg, stack_offset)); 1662 } 1663 } 1664 1665 /* A function that uses frv_frame_access to transfer a group of registers to 1666 or from the stack. ACCESSOR is passed directly to frv_frame_access, INFO 1667 is the stack information generated by frv_stack_info, and REG_SET is the 1668 number of the register set to transfer. */ 1669 static void 1670 frv_frame_access_multi (frv_frame_accessor_t *accessor, 1671 frv_stack_t *info, 1672 int reg_set) 1673 { 1674 frv_stack_regs_t *regs_info; 1675 int regno; 1676 1677 regs_info = &info->regs[reg_set]; 1678 for (regno = regs_info->first; regno <= regs_info->last; regno++) 1679 if (info->save_p[regno]) 1680 frv_frame_access (accessor, 1681 info->save_p[regno] == REG_SAVE_2WORDS 1682 ? gen_rtx_REG (DImode, regno) 1683 : gen_rtx_REG (SImode, regno), 1684 info->reg_offset[regno]); 1685 } 1686 1687 /* Save or restore callee-saved registers that are kept outside the frame 1688 header. The function saves the registers if OP is FRV_STORE and restores 1689 them if OP is FRV_LOAD. INFO is the stack information generated by 1690 frv_stack_info. */ 1691 static void 1692 frv_frame_access_standard_regs (enum frv_stack_op op, frv_stack_t *info) 1693 { 1694 frv_frame_accessor_t accessor; 1695 1696 accessor.op = op; 1697 accessor.base = stack_pointer_rtx; 1698 accessor.base_offset = 0; 1699 frv_frame_access_multi (&accessor, info, STACK_REGS_GPR); 1700 frv_frame_access_multi (&accessor, info, STACK_REGS_FPR); 1701 frv_frame_access_multi (&accessor, info, STACK_REGS_LCR); 1702 } 1703 1704 1705 /* Called after register allocation to add any instructions needed for the 1706 prologue. Using a prologue insn is favored compared to putting all of the 1707 instructions in the TARGET_ASM_FUNCTION_PROLOGUE target hook, since 1708 it allows the scheduler to intermix instructions with the saves of 1709 the caller saved registers. In some cases, it might be necessary 1710 to emit a barrier instruction as the last insn to prevent such 1711 scheduling. 1712 1713 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1 1714 so that the debug info generation code can handle them properly. */ 1715 void 1716 frv_expand_prologue (void) 1717 { 1718 frv_stack_t *info = frv_stack_info (); 1719 rtx sp = stack_pointer_rtx; 1720 rtx fp = frame_pointer_rtx; 1721 frv_frame_accessor_t accessor; 1722 1723 if (TARGET_DEBUG_STACK) 1724 frv_debug_stack (info); 1725 1726 if (flag_stack_usage_info) 1727 current_function_static_stack_size = info->total_size; 1728 1729 if (info->total_size == 0) 1730 return; 1731 1732 /* We're interested in three areas of the frame here: 1733 1734 A: the register save area 1735 B: the old FP 1736 C: the header after B 1737 1738 If the frame pointer isn't used, we'll have to set up A, B and C 1739 using the stack pointer. 
If the frame pointer is used, we'll access 1740 them as follows: 1741 1742 A: set up using sp 1743 B: set up using sp or a temporary (see below) 1744 C: set up using fp 1745 1746 We set up B using the stack pointer if the frame is small enough. 1747 Otherwise, it's more efficient to copy the old stack pointer into a 1748 temporary and use that. 1749 1750 Note that it's important to make sure the prologue and epilogue use the 1751 same registers to access A and C, since doing otherwise will confuse 1752 the aliasing code. */ 1753 1754 /* Set up ACCESSOR for accessing region B above. If the frame pointer 1755 isn't used, the same method will serve for C. */ 1756 accessor.op = FRV_STORE; 1757 if (frame_pointer_needed && info->total_size > 2048) 1758 { 1759 accessor.base = gen_rtx_REG (Pmode, OLD_SP_REGNO); 1760 accessor.base_offset = info->total_size; 1761 emit_insn (gen_movsi (accessor.base, sp)); 1762 } 1763 else 1764 { 1765 accessor.base = stack_pointer_rtx; 1766 accessor.base_offset = 0; 1767 } 1768 1769 /* Allocate the stack space. */ 1770 { 1771 rtx asm_offset = frv_frame_offset_rtx (-info->total_size); 1772 rtx dwarf_offset = GEN_INT (-info->total_size); 1773 1774 frv_frame_insn (gen_stack_adjust (sp, sp, asm_offset), 1775 gen_rtx_SET (sp, gen_rtx_PLUS (Pmode, sp, dwarf_offset))); 1776 } 1777 1778 /* If the frame pointer is needed, store the old one at (sp + FP_OFFSET) 1779 and point the new one to that location. */ 1780 if (frame_pointer_needed) 1781 { 1782 int fp_offset = info->reg_offset[FRAME_POINTER_REGNUM]; 1783 1784 /* ASM_SRC and DWARF_SRC both point to the frame header. ASM_SRC is 1785 based on ACCESSOR.BASE but DWARF_SRC is always based on the stack 1786 pointer. */ 1787 rtx asm_src = plus_constant (Pmode, accessor.base, 1788 fp_offset - accessor.base_offset); 1789 rtx dwarf_src = plus_constant (Pmode, sp, fp_offset); 1790 1791 /* Store the old frame pointer at (sp + FP_OFFSET). */ 1792 frv_frame_access (&accessor, fp, fp_offset); 1793 1794 /* Set up the new frame pointer. */ 1795 frv_frame_insn (gen_rtx_SET (fp, asm_src), 1796 gen_rtx_SET (fp, dwarf_src)); 1797 1798 /* Access region C from the frame pointer. */ 1799 accessor.base = fp; 1800 accessor.base_offset = fp_offset; 1801 } 1802 1803 /* Set up region C. */ 1804 frv_frame_access_multi (&accessor, info, STACK_REGS_STRUCT); 1805 frv_frame_access_multi (&accessor, info, STACK_REGS_LR); 1806 frv_frame_access_multi (&accessor, info, STACK_REGS_STDARG); 1807 1808 /* Set up region A. */ 1809 frv_frame_access_standard_regs (FRV_STORE, info); 1810 1811 /* If this is a varargs/stdarg function, issue a blockage to prevent the 1812 scheduler from moving loads before the stores saving the registers. */ 1813 if (info->stdarg_size > 0) 1814 emit_insn (gen_blockage ()); 1815 1816 /* Set up pic register/small data register for this function. */ 1817 if (!TARGET_FDPIC && flag_pic && crtl->uses_pic_offset_table) 1818 emit_insn (gen_pic_prologue (gen_rtx_REG (Pmode, PIC_REGNO), 1819 gen_rtx_REG (Pmode, LR_REGNO), 1820 gen_rtx_REG (SImode, OFFSET_REGNO))); 1821 } 1822 1823 1824 /* Under frv, all of the work is done via frv_expand_epilogue, but 1825 this function provides a convenient place to do cleanup. */ 1826 1827 static void 1828 frv_function_epilogue (FILE *) 1829 { 1830 frv_stack_cache = (frv_stack_t *)0; 1831 1832 /* Zap last used registers for conditional execution. */ 1833 memset (&frv_ifcvt.tmp_reg, 0, sizeof (frv_ifcvt.tmp_reg)); 1834 1835 /* Release the bitmap of created insns. 
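Freeing it here keeps per-function if-conversion state from leaking into the next function.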
*/ 1836 BITMAP_FREE (frv_ifcvt.scratch_insns_bitmap); 1837 } 1838 1839 1840 /* Called after register allocation to add any instructions needed for the 1841 epilogue. Using an epilogue insn is favored compared to putting all of the 1842 instructions in the TARGET_ASM_FUNCTION_PROLOGUE target hook, since 1843 it allows the scheduler to intermix instructions with the saves of 1844 the caller saved registers. In some cases, it might be necessary 1845 to emit a barrier instruction as the last insn to prevent such 1846 scheduling. */ 1847 1848 void 1849 frv_expand_epilogue (bool emit_return) 1850 { 1851 frv_stack_t *info = frv_stack_info (); 1852 rtx fp = frame_pointer_rtx; 1853 rtx sp = stack_pointer_rtx; 1854 rtx return_addr; 1855 int fp_offset; 1856 1857 fp_offset = info->reg_offset[FRAME_POINTER_REGNUM]; 1858 1859 /* Restore the stack pointer to its original value if alloca or the like 1860 is used. */ 1861 if (! crtl->sp_is_unchanging) 1862 emit_insn (gen_addsi3 (sp, fp, frv_frame_offset_rtx (-fp_offset))); 1863 1864 /* Restore the callee-saved registers that were used in this function. */ 1865 frv_frame_access_standard_regs (FRV_LOAD, info); 1866 1867 /* Set RETURN_ADDR to the address we should return to. Set it to NULL if 1868 no return instruction should be emitted. */ 1869 if (info->save_p[LR_REGNO]) 1870 { 1871 int lr_offset; 1872 rtx mem; 1873 1874 /* Use the same method to access the link register's slot as we did in 1875 the prologue. In other words, use the frame pointer if available, 1876 otherwise use the stack pointer. 1877 1878 LR_OFFSET is the offset of the link register's slot from the start 1879 of the frame and MEM is a memory rtx for it. */ 1880 lr_offset = info->reg_offset[LR_REGNO]; 1881 if (frame_pointer_needed) 1882 mem = frv_frame_mem (Pmode, fp, lr_offset - fp_offset); 1883 else 1884 mem = frv_frame_mem (Pmode, sp, lr_offset); 1885 1886 /* Load the old link register into a GPR. */ 1887 return_addr = gen_rtx_REG (Pmode, TEMP_REGNO); 1888 emit_insn (gen_rtx_SET (return_addr, mem)); 1889 } 1890 else 1891 return_addr = gen_rtx_REG (Pmode, LR_REGNO); 1892 1893 /* Restore the old frame pointer. Emit a USE afterwards to make sure 1894 the load is preserved. */ 1895 if (frame_pointer_needed) 1896 { 1897 emit_insn (gen_rtx_SET (fp, gen_rtx_MEM (Pmode, fp))); 1898 emit_use (fp); 1899 } 1900 1901 /* Deallocate the stack frame. */ 1902 if (info->total_size != 0) 1903 { 1904 rtx offset = frv_frame_offset_rtx (info->total_size); 1905 emit_insn (gen_stack_adjust (sp, sp, offset)); 1906 } 1907 1908 /* If this function uses eh_return, add the final stack adjustment now. */ 1909 if (crtl->calls_eh_return) 1910 emit_insn (gen_stack_adjust (sp, sp, EH_RETURN_STACKADJ_RTX)); 1911 1912 if (emit_return) 1913 emit_jump_insn (gen_epilogue_return (return_addr)); 1914 else 1915 { 1916 rtx lr = return_addr; 1917 1918 if (REGNO (return_addr) != LR_REGNO) 1919 { 1920 lr = gen_rtx_REG (Pmode, LR_REGNO); 1921 emit_move_insn (lr, return_addr); 1922 } 1923 1924 emit_use (lr); 1925 } 1926 } 1927 1928 1929 /* Worker function for TARGET_ASM_OUTPUT_MI_THUNK. */ 1930 1931 static void 1932 frv_asm_output_mi_thunk (FILE *file, 1933 tree thunk_fndecl ATTRIBUTE_UNUSED, 1934 HOST_WIDE_INT delta, 1935 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED, 1936 tree function) 1937 { 1938 const char *name_func = XSTR (XEXP (DECL_RTL (function), 0), 0); 1939 const char *name_arg0 = reg_names[FIRST_ARG_REGNUM]; 1940 const char *name_jmp = reg_names[JUMP_REGNO]; 1941 const char *parallel = (frv_issue_rate () > 1 ? 
".p" : ""); 1942 1943 /* Do the add using an addi if possible. */ 1944 if (IN_RANGE (delta, -2048, 2047)) 1945 fprintf (file, "\taddi %s,#%d,%s\n", name_arg0, (int) delta, name_arg0); 1946 else 1947 { 1948 const char *const name_add = reg_names[TEMP_REGNO]; 1949 fprintf (file, "\tsethi%s #hi(" HOST_WIDE_INT_PRINT_DEC "),%s\n", 1950 parallel, delta, name_add); 1951 fprintf (file, "\tsetlo #lo(" HOST_WIDE_INT_PRINT_DEC "),%s\n", 1952 delta, name_add); 1953 fprintf (file, "\tadd %s,%s,%s\n", name_add, name_arg0, name_arg0); 1954 } 1955 1956 if (TARGET_FDPIC) 1957 { 1958 const char *name_pic = reg_names[FDPIC_REGNO]; 1959 name_jmp = reg_names[FDPIC_FPTR_REGNO]; 1960 1961 if (flag_pic != 1) 1962 { 1963 fprintf (file, "\tsethi%s #gotofffuncdeschi(", parallel); 1964 assemble_name (file, name_func); 1965 fprintf (file, "),%s\n", name_jmp); 1966 1967 fprintf (file, "\tsetlo #gotofffuncdesclo("); 1968 assemble_name (file, name_func); 1969 fprintf (file, "),%s\n", name_jmp); 1970 1971 fprintf (file, "\tldd @(%s,%s), %s\n", name_jmp, name_pic, name_jmp); 1972 } 1973 else 1974 { 1975 fprintf (file, "\tlddo @(%s,#gotofffuncdesc12(", name_pic); 1976 assemble_name (file, name_func); 1977 fprintf (file, "\t)), %s\n", name_jmp); 1978 } 1979 } 1980 else if (!flag_pic) 1981 { 1982 fprintf (file, "\tsethi%s #hi(", parallel); 1983 assemble_name (file, name_func); 1984 fprintf (file, "),%s\n", name_jmp); 1985 1986 fprintf (file, "\tsetlo #lo("); 1987 assemble_name (file, name_func); 1988 fprintf (file, "),%s\n", name_jmp); 1989 } 1990 else 1991 { 1992 /* Use JUMP_REGNO as a temporary PIC register. */ 1993 const char *name_lr = reg_names[LR_REGNO]; 1994 const char *name_gppic = name_jmp; 1995 const char *name_tmp = reg_names[TEMP_REGNO]; 1996 1997 fprintf (file, "\tmovsg %s,%s\n", name_lr, name_tmp); 1998 fprintf (file, "\tcall 1f\n"); 1999 fprintf (file, "1:\tmovsg %s,%s\n", name_lr, name_gppic); 2000 fprintf (file, "\tmovgs %s,%s\n", name_tmp, name_lr); 2001 fprintf (file, "\tsethi%s #gprelhi(1b),%s\n", parallel, name_tmp); 2002 fprintf (file, "\tsetlo #gprello(1b),%s\n", name_tmp); 2003 fprintf (file, "\tsub %s,%s,%s\n", name_gppic, name_tmp, name_gppic); 2004 2005 fprintf (file, "\tsethi%s #gprelhi(", parallel); 2006 assemble_name (file, name_func); 2007 fprintf (file, "),%s\n", name_tmp); 2008 2009 fprintf (file, "\tsetlo #gprello("); 2010 assemble_name (file, name_func); 2011 fprintf (file, "),%s\n", name_tmp); 2012 2013 fprintf (file, "\tadd %s,%s,%s\n", name_gppic, name_tmp, name_jmp); 2014 } 2015 2016 /* Jump to the function address. */ 2017 fprintf (file, "\tjmpl @(%s,%s)\n", name_jmp, reg_names[GPR_FIRST+0]); 2018 } 2019 2020 2021 2022 /* On frv, create a frame whenever we need to create stack. */ 2023 2024 static bool 2025 frv_frame_pointer_required (void) 2026 { 2027 /* If we forgoing the usual linkage requirements, we only need 2028 a frame pointer if the stack pointer might change. */ 2029 if (!TARGET_LINKED_FP) 2030 return !crtl->sp_is_unchanging; 2031 2032 if (! crtl->is_leaf) 2033 return true; 2034 2035 if (get_frame_size () != 0) 2036 return true; 2037 2038 if (cfun->stdarg) 2039 return true; 2040 2041 if (!crtl->sp_is_unchanging) 2042 return true; 2043 2044 if (!TARGET_FDPIC && flag_pic && crtl->uses_pic_offset_table) 2045 return true; 2046 2047 if (profile_flag) 2048 return true; 2049 2050 if (cfun->machine->frame_needed) 2051 return true; 2052 2053 return false; 2054 } 2055 2056 2057 /* Worker function for TARGET_CAN_ELIMINATE. 
*/ 2058 2059 bool 2060 frv_can_eliminate (const int from, const int to) 2061 { 2062 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM 2063 ? ! frame_pointer_needed 2064 : true); 2065 } 2066 2067 /* This function returns the initial difference between the specified 2068 pair of registers. */ 2069 2070 /* See frv_stack_info for more details on the frv stack frame. */ 2071 2072 int 2073 frv_initial_elimination_offset (int from, int to) 2074 { 2075 frv_stack_t *info = frv_stack_info (); 2076 int ret = 0; 2077 2078 if (to == STACK_POINTER_REGNUM && from == ARG_POINTER_REGNUM) 2079 ret = info->total_size - info->pretend_size; 2080 2081 else if (to == STACK_POINTER_REGNUM && from == FRAME_POINTER_REGNUM) 2082 ret = info->reg_offset[FRAME_POINTER_REGNUM]; 2083 2084 else if (to == FRAME_POINTER_REGNUM && from == ARG_POINTER_REGNUM) 2085 ret = (info->total_size 2086 - info->reg_offset[FRAME_POINTER_REGNUM] 2087 - info->pretend_size); 2088 2089 else 2090 gcc_unreachable (); 2091 2092 if (TARGET_DEBUG_STACK) 2093 fprintf (stderr, "Eliminate %s to %s by adding %d\n", 2094 reg_names [from], reg_names[to], ret); 2095 2096 return ret; 2097 } 2098 2099 2100 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */ 2101 2102 static void 2103 frv_setup_incoming_varargs (cumulative_args_t cum_v, 2104 machine_mode mode, 2105 tree type ATTRIBUTE_UNUSED, 2106 int *pretend_size, 2107 int second_time) 2108 { 2109 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); 2110 2111 if (TARGET_DEBUG_ARG) 2112 fprintf (stderr, 2113 "setup_vararg: words = %2d, mode = %4s, pretend_size = %d, second_time = %d\n", 2114 *cum, GET_MODE_NAME (mode), *pretend_size, second_time); 2115 } 2116 2117 2118 /* Worker function for TARGET_EXPAND_BUILTIN_SAVEREGS. */ 2119 2120 static rtx 2121 frv_expand_builtin_saveregs (void) 2122 { 2123 int offset = UNITS_PER_WORD * FRV_NUM_ARG_REGS; 2124 2125 if (TARGET_DEBUG_ARG) 2126 fprintf (stderr, "expand_builtin_saveregs: offset from ap = %d\n", 2127 offset); 2128 2129 return gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx, GEN_INT (- offset)); 2130 } 2131 2132 2133 /* Expand __builtin_va_start to do the va_start macro. */ 2134 2135 static void 2136 frv_expand_builtin_va_start (tree valist, rtx nextarg) 2137 { 2138 tree t; 2139 int num = crtl->args.info - FIRST_ARG_REGNUM - FRV_NUM_ARG_REGS; 2140 2141 nextarg = gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx, 2142 GEN_INT (UNITS_PER_WORD * num)); 2143 2144 if (TARGET_DEBUG_ARG) 2145 { 2146 fprintf (stderr, "va_start: args_info = %d, num = %d\n", 2147 crtl->args.info, num); 2148 2149 debug_rtx (nextarg); 2150 } 2151 2152 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, 2153 fold_convert (TREE_TYPE (valist), 2154 make_tree (sizetype, nextarg))); 2155 TREE_SIDE_EFFECTS (t) = 1; 2156 2157 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); 2158 } 2159 2160 2161 /* Expand a block move operation, and return 1 if successful. Return 0 2162 if we should let the compiler generate normal code. 2163 2164 operands[0] is the destination 2165 operands[1] is the source 2166 operands[2] is the length 2167 operands[3] is the alignment */ 2168 2169 /* Maximum number of loads to do before doing the stores */ 2170 #ifndef MAX_MOVE_REG 2171 #define MAX_MOVE_REG 4 2172 #endif 2173 2174 /* Maximum number of total loads to do. 
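A constant-length move larger than TOTAL_MOVE_REG * alignment bytes is rejected below and left to the compiler's normal block-move expansion.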
*/ 2175 #ifndef TOTAL_MOVE_REG 2176 #define TOTAL_MOVE_REG 8 2177 #endif 2178 2179 int 2180 frv_expand_block_move (rtx operands[]) 2181 { 2182 rtx orig_dest = operands[0]; 2183 rtx orig_src = operands[1]; 2184 rtx bytes_rtx = operands[2]; 2185 rtx align_rtx = operands[3]; 2186 int constp = (GET_CODE (bytes_rtx) == CONST_INT); 2187 int align; 2188 int bytes; 2189 int offset; 2190 int num_reg; 2191 int i; 2192 rtx src_reg; 2193 rtx dest_reg; 2194 rtx src_addr; 2195 rtx dest_addr; 2196 rtx src_mem; 2197 rtx dest_mem; 2198 rtx tmp_reg; 2199 rtx stores[MAX_MOVE_REG]; 2200 int move_bytes; 2201 machine_mode mode; 2202 2203 /* If this is not a fixed size move, just call memcpy. */ 2204 if (! constp) 2205 return FALSE; 2206 2207 /* This should be a fixed size alignment. */ 2208 gcc_assert (GET_CODE (align_rtx) == CONST_INT); 2209 2210 align = INTVAL (align_rtx); 2211 2212 /* Anything to move? */ 2213 bytes = INTVAL (bytes_rtx); 2214 if (bytes <= 0) 2215 return TRUE; 2216 2217 /* Don't support real large moves. */ 2218 if (bytes > TOTAL_MOVE_REG*align) 2219 return FALSE; 2220 2221 /* Move the address into scratch registers. */ 2222 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0)); 2223 src_reg = copy_addr_to_reg (XEXP (orig_src, 0)); 2224 2225 num_reg = offset = 0; 2226 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes)) 2227 { 2228 /* Calculate the correct offset for src/dest. */ 2229 if (offset == 0) 2230 { 2231 src_addr = src_reg; 2232 dest_addr = dest_reg; 2233 } 2234 else 2235 { 2236 src_addr = plus_constant (Pmode, src_reg, offset); 2237 dest_addr = plus_constant (Pmode, dest_reg, offset); 2238 } 2239 2240 /* Generate the appropriate load and store, saving the stores 2241 for later. */ 2242 if (bytes >= 4 && align >= 4) 2243 mode = SImode; 2244 else if (bytes >= 2 && align >= 2) 2245 mode = HImode; 2246 else 2247 mode = QImode; 2248 2249 move_bytes = GET_MODE_SIZE (mode); 2250 tmp_reg = gen_reg_rtx (mode); 2251 src_mem = change_address (orig_src, mode, src_addr); 2252 dest_mem = change_address (orig_dest, mode, dest_addr); 2253 emit_insn (gen_rtx_SET (tmp_reg, src_mem)); 2254 stores[num_reg++] = gen_rtx_SET (dest_mem, tmp_reg); 2255 2256 if (num_reg >= MAX_MOVE_REG) 2257 { 2258 for (i = 0; i < num_reg; i++) 2259 emit_insn (stores[i]); 2260 num_reg = 0; 2261 } 2262 } 2263 2264 for (i = 0; i < num_reg; i++) 2265 emit_insn (stores[i]); 2266 2267 return TRUE; 2268 } 2269 2270 2271 /* Expand a block clear operation, and return 1 if successful. Return 0 2272 if we should let the compiler generate normal code. 2273 2274 operands[0] is the destination 2275 operands[1] is the length 2276 operands[3] is the alignment */ 2277 2278 int 2279 frv_expand_block_clear (rtx operands[]) 2280 { 2281 rtx orig_dest = operands[0]; 2282 rtx bytes_rtx = operands[1]; 2283 rtx align_rtx = operands[3]; 2284 int constp = (GET_CODE (bytes_rtx) == CONST_INT); 2285 int align; 2286 int bytes; 2287 int offset; 2288 rtx dest_reg; 2289 rtx dest_addr; 2290 rtx dest_mem; 2291 int clear_bytes; 2292 machine_mode mode; 2293 2294 /* If this is not a fixed size move, just call memcpy. */ 2295 if (! constp) 2296 return FALSE; 2297 2298 /* This should be a fixed size alignment. */ 2299 gcc_assert (GET_CODE (align_rtx) == CONST_INT); 2300 2301 align = INTVAL (align_rtx); 2302 2303 /* Anything to move? */ 2304 bytes = INTVAL (bytes_rtx); 2305 if (bytes <= 0) 2306 return TRUE; 2307 2308 /* Don't support real large clears. 
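The same TOTAL_MOVE_REG * alignment limit as for block moves applies; larger clears fall back to the generic code.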
*/ 2309 if (bytes > TOTAL_MOVE_REG*align) 2310 return FALSE; 2311 2312 /* Move the address into a scratch register. */ 2313 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0)); 2314 2315 offset = 0; 2316 for ( ; bytes > 0; (bytes -= clear_bytes), (offset += clear_bytes)) 2317 { 2318 /* Calculate the correct offset for src/dest. */ 2319 dest_addr = ((offset == 0) 2320 ? dest_reg 2321 : plus_constant (Pmode, dest_reg, offset)); 2322 2323 /* Generate the appropriate store of gr0. */ 2324 if (bytes >= 4 && align >= 4) 2325 mode = SImode; 2326 else if (bytes >= 2 && align >= 2) 2327 mode = HImode; 2328 else 2329 mode = QImode; 2330 2331 clear_bytes = GET_MODE_SIZE (mode); 2332 dest_mem = change_address (orig_dest, mode, dest_addr); 2333 emit_insn (gen_rtx_SET (dest_mem, const0_rtx)); 2334 } 2335 2336 return TRUE; 2337 } 2338 2339 2340 /* The following variable is used to output modifiers of assembler 2341 code of the current output insn. */ 2342 2343 static rtx *frv_insn_operands; 2344 2345 /* The following function is used to add assembler insn code suffix .p 2346 if it is necessary. */ 2347 2348 const char * 2349 frv_asm_output_opcode (FILE *f, const char *ptr) 2350 { 2351 int c; 2352 2353 if (frv_insn_packing_flag <= 0) 2354 return ptr; 2355 2356 for (; *ptr && *ptr != ' ' && *ptr != '\t';) 2357 { 2358 c = *ptr++; 2359 if (c == '%' && ((*ptr >= 'a' && *ptr <= 'z') 2360 || (*ptr >= 'A' && *ptr <= 'Z'))) 2361 { 2362 int letter = *ptr++; 2363 2364 c = atoi (ptr); 2365 frv_print_operand (f, frv_insn_operands [c], letter); 2366 while ((c = *ptr) >= '0' && c <= '9') 2367 ptr++; 2368 } 2369 else 2370 fputc (c, f); 2371 } 2372 2373 fprintf (f, ".p"); 2374 2375 return ptr; 2376 } 2377 2378 /* Set up the packing bit for the current output insn. Note that this 2379 function is not called for asm insns. */ 2380 2381 void 2382 frv_final_prescan_insn (rtx_insn *insn, rtx *opvec, 2383 int noperands ATTRIBUTE_UNUSED) 2384 { 2385 if (INSN_P (insn)) 2386 { 2387 if (frv_insn_packing_flag >= 0) 2388 { 2389 frv_insn_operands = opvec; 2390 frv_insn_packing_flag = PACKING_FLAG_P (insn); 2391 } 2392 else if (recog_memoized (insn) >= 0 2393 && get_attr_acc_group (insn) == ACC_GROUP_ODD) 2394 /* Packing optimizations have been disabled, but INSN can only 2395 be issued in M1. Insert an mnop in M0. */ 2396 fprintf (asm_out_file, "\tmnop.p\n"); 2397 } 2398 } 2399 2400 2401 2402 /* A C expression whose value is RTL representing the address in a stack frame 2403 where the pointer to the caller's frame is stored. Assume that FRAMEADDR is 2404 an RTL expression for the address of the stack frame itself. 2405 2406 If you don't define this macro, the default is to return the value of 2407 FRAMEADDR--that is, the stack frame address is also the address of the stack 2408 word that points to the previous frame. */ 2409 2410 /* The default is correct, but we need to make sure the frame gets created. */ 2411 rtx 2412 frv_dynamic_chain_address (rtx frame) 2413 { 2414 cfun->machine->frame_needed = 1; 2415 return frame; 2416 } 2417 2418 2419 /* A C expression whose value is RTL representing the value of the return 2420 address for the frame COUNT steps up from the current frame, after the 2421 prologue. FRAMEADDR is the frame pointer of the COUNT frame, or the frame 2422 pointer of the COUNT - 1 frame if `RETURN_ADDR_IN_PREVIOUS_FRAME' is 2423 defined. 
2424 2425 The value of the expression must always be the correct address when COUNT is 2426 zero, but may be `NULL_RTX' if there is not way to determine the return 2427 address of other frames. */ 2428 2429 rtx 2430 frv_return_addr_rtx (int count, rtx frame) 2431 { 2432 if (count != 0) 2433 return const0_rtx; 2434 cfun->machine->frame_needed = 1; 2435 return gen_rtx_MEM (Pmode, plus_constant (Pmode, frame, 8)); 2436 } 2437 2438 /* Given a memory reference MEMREF, interpret the referenced memory as 2439 an array of MODE values, and return a reference to the element 2440 specified by INDEX. Assume that any pre-modification implicit in 2441 MEMREF has already happened. 2442 2443 MEMREF must be a legitimate operand for modes larger than SImode. 2444 frv_legitimate_address_p forbids register+register addresses, which 2445 this function cannot handle. */ 2446 rtx 2447 frv_index_memory (rtx memref, machine_mode mode, int index) 2448 { 2449 rtx base = XEXP (memref, 0); 2450 if (GET_CODE (base) == PRE_MODIFY) 2451 base = XEXP (base, 0); 2452 return change_address (memref, mode, 2453 plus_constant (Pmode, base, 2454 index * GET_MODE_SIZE (mode))); 2455 } 2456 2457 2458 /* Print a memory address as an operand to reference that memory location. */ 2459 static void 2460 frv_print_operand_address (FILE * stream, machine_mode /* mode */, rtx x) 2461 { 2462 if (GET_CODE (x) == MEM) 2463 x = XEXP (x, 0); 2464 2465 switch (GET_CODE (x)) 2466 { 2467 case REG: 2468 fputs (reg_names [ REGNO (x)], stream); 2469 return; 2470 2471 case CONST_INT: 2472 fprintf (stream, "%ld", (long) INTVAL (x)); 2473 return; 2474 2475 case SYMBOL_REF: 2476 assemble_name (stream, XSTR (x, 0)); 2477 return; 2478 2479 case LABEL_REF: 2480 case CONST: 2481 output_addr_const (stream, x); 2482 return; 2483 2484 case PLUS: 2485 /* Poorly constructed asm statements can trigger this alternative. 2486 See gcc/testsuite/gcc.dg/asm-4.c for an example. */ 2487 frv_print_operand_memory_reference (stream, x, 0); 2488 return; 2489 2490 default: 2491 break; 2492 } 2493 2494 fatal_insn ("bad insn to frv_print_operand_address:", x); 2495 } 2496 2497 2498 static void 2499 frv_print_operand_memory_reference_reg (FILE * stream, rtx x) 2500 { 2501 int regno = true_regnum (x); 2502 if (GPR_P (regno)) 2503 fputs (reg_names[regno], stream); 2504 else 2505 fatal_insn ("bad register to frv_print_operand_memory_reference_reg:", x); 2506 } 2507 2508 /* Print a memory reference suitable for the ld/st instructions. 
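The output has the form @(base,index): the base and index are GPRs (gr0 when either is absent), a constant index is printed with ADDR_OFFSET added in, and a CONST index is printed as a relocation operator such as #gprel12(sym). ADDR_OFFSET is nonzero only when printing the second word of a double-word access (the 'N' modifier below). For example, (plus (reg gr5) (const_int 8)) comes out as @(gr5,8).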
*/ 2509 2510 static void 2511 frv_print_operand_memory_reference (FILE * stream, rtx x, int addr_offset) 2512 { 2513 struct frv_unspec unspec; 2514 rtx x0 = NULL_RTX; 2515 rtx x1 = NULL_RTX; 2516 2517 switch (GET_CODE (x)) 2518 { 2519 case SUBREG: 2520 case REG: 2521 x0 = x; 2522 break; 2523 2524 case PRE_MODIFY: /* (pre_modify (reg) (plus (reg) (reg))) */ 2525 x0 = XEXP (x, 0); 2526 x1 = XEXP (XEXP (x, 1), 1); 2527 break; 2528 2529 case CONST_INT: 2530 x1 = x; 2531 break; 2532 2533 case PLUS: 2534 x0 = XEXP (x, 0); 2535 x1 = XEXP (x, 1); 2536 if (GET_CODE (x0) == CONST_INT) 2537 { 2538 x0 = XEXP (x, 1); 2539 x1 = XEXP (x, 0); 2540 } 2541 break; 2542 2543 default: 2544 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x); 2545 break; 2546 2547 } 2548 2549 if (addr_offset) 2550 { 2551 if (!x1) 2552 x1 = const0_rtx; 2553 else if (GET_CODE (x1) != CONST_INT) 2554 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x); 2555 } 2556 2557 fputs ("@(", stream); 2558 if (!x0) 2559 fputs (reg_names[GPR_R0], stream); 2560 else if (GET_CODE (x0) == REG || GET_CODE (x0) == SUBREG) 2561 frv_print_operand_memory_reference_reg (stream, x0); 2562 else 2563 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x); 2564 2565 fputs (",", stream); 2566 if (!x1) 2567 fputs (reg_names [GPR_R0], stream); 2568 2569 else 2570 { 2571 switch (GET_CODE (x1)) 2572 { 2573 case SUBREG: 2574 case REG: 2575 frv_print_operand_memory_reference_reg (stream, x1); 2576 break; 2577 2578 case CONST_INT: 2579 fprintf (stream, "%ld", (long) (INTVAL (x1) + addr_offset)); 2580 break; 2581 2582 case CONST: 2583 if (!frv_const_unspec_p (x1, &unspec)) 2584 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x1); 2585 frv_output_const_unspec (stream, &unspec); 2586 break; 2587 2588 default: 2589 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x); 2590 } 2591 } 2592 2593 fputs (")", stream); 2594 } 2595 2596 2597 /* Return 2 for likely branches and 0 for non-likely branches */ 2598 2599 #define FRV_JUMP_LIKELY 2 2600 #define FRV_JUMP_NOT_LIKELY 0 2601 2602 static int 2603 frv_print_operand_jump_hint (rtx_insn *insn) 2604 { 2605 rtx note; 2606 rtx labelref; 2607 int ret; 2608 enum { UNKNOWN, BACKWARD, FORWARD } jump_type = UNKNOWN; 2609 2610 gcc_assert (JUMP_P (insn)); 2611 2612 /* Assume any non-conditional jump is likely. */ 2613 if (! any_condjump_p (insn)) 2614 ret = FRV_JUMP_LIKELY; 2615 2616 else 2617 { 2618 labelref = condjump_label (insn); 2619 if (labelref) 2620 { 2621 rtx label = XEXP (labelref, 0); 2622 jump_type = (insn_current_address > INSN_ADDRESSES (INSN_UID (label)) 2623 ? BACKWARD 2624 : FORWARD); 2625 } 2626 2627 note = find_reg_note (insn, REG_BR_PROB, 0); 2628 if (!note) 2629 ret = ((jump_type == BACKWARD) ? FRV_JUMP_LIKELY : FRV_JUMP_NOT_LIKELY); 2630 2631 else 2632 { 2633 ret = ((profile_probability::from_reg_br_prob_note (XINT (note, 0)) 2634 >= profile_probability::even ()) 2635 ? FRV_JUMP_LIKELY 2636 : FRV_JUMP_NOT_LIKELY); 2637 } 2638 } 2639 2640 #if 0 2641 if (TARGET_DEBUG) 2642 { 2643 char *direction; 2644 2645 switch (jump_type) 2646 { 2647 default: 2648 case UNKNOWN: direction = "unknown jump direction"; break; 2649 case BACKWARD: direction = "jump backward"; break; 2650 case FORWARD: direction = "jump forward"; break; 2651 } 2652 2653 fprintf (stderr, 2654 "%s: uid %ld, %s, probability = %d, max prob. 
= %d, hint = %d\n", 2655 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)), 2656 (long)INSN_UID (insn), direction, prob, 2657 REG_BR_PROB_BASE, ret); 2658 } 2659 #endif 2660 2661 return ret; 2662 } 2663 2664 2665 /* Return the comparison operator to use for CODE given that the ICC 2666 register is OP0. */ 2667 2668 static const char * 2669 comparison_string (enum rtx_code code, rtx op0) 2670 { 2671 bool is_nz_p = GET_MODE (op0) == CC_NZmode; 2672 switch (code) 2673 { 2674 default: output_operand_lossage ("bad condition code"); return ""; 2675 case EQ: return "eq"; 2676 case NE: return "ne"; 2677 case LT: return is_nz_p ? "n" : "lt"; 2678 case LE: return "le"; 2679 case GT: return "gt"; 2680 case GE: return is_nz_p ? "p" : "ge"; 2681 case LTU: return is_nz_p ? "no" : "c"; 2682 case LEU: return is_nz_p ? "eq" : "ls"; 2683 case GTU: return is_nz_p ? "ne" : "hi"; 2684 case GEU: return is_nz_p ? "ra" : "nc"; 2685 } 2686 } 2687 2688 /* Print an operand to an assembler instruction. 2689 2690 `%' followed by a letter and a digit says to output an operand in an 2691 alternate fashion. Four letters have standard, built-in meanings 2692 described below. The hook `TARGET_PRINT_OPERAND' can define 2693 additional letters with nonstandard meanings. 2694 2695 `%cDIGIT' can be used to substitute an operand that is a constant value 2696 without the syntax that normally indicates an immediate operand. 2697 2698 `%nDIGIT' is like `%cDIGIT' except that the value of the constant is negated 2699 before printing. 2700 2701 `%aDIGIT' can be used to substitute an operand as if it were a memory 2702 reference, with the actual operand treated as the address. This may be 2703 useful when outputting a "load address" instruction, because often the 2704 assembler syntax for such an instruction requires you to write the operand 2705 as if it were a memory reference. 2706 2707 `%lDIGIT' is used to substitute a `label_ref' into a jump instruction. 2708 2709 `%=' outputs a number which is unique to each instruction in the entire 2710 compilation. This is useful for making local labels to be referred to more 2711 than once in a single template that generates multiple assembler 2712 instructions. 2713 2714 `%' followed by a punctuation character specifies a substitution that 2715 does not use an operand. Only one case is standard: `%%' outputs a 2716 `%' into the assembler code. Other nonstandard cases can be defined 2717 in the `TARGET_PRINT_OPERAND' hook. You must also define which 2718 punctuation characters are valid with the 2719 `TARGET_PRINT_OPERAND_PUNCT_VALID_P' hook. */ 2720 2721 static void 2722 frv_print_operand (FILE * file, rtx x, int code) 2723 { 2724 struct frv_unspec unspec; 2725 HOST_WIDE_INT value; 2726 int offset; 2727 2728 if (code != 0 && !ISALPHA (code)) 2729 value = 0; 2730 2731 else if (GET_CODE (x) == CONST_INT) 2732 value = INTVAL (x); 2733 2734 else if (GET_CODE (x) == CONST_DOUBLE) 2735 { 2736 if (GET_MODE (x) == SFmode) 2737 { 2738 long l; 2739 2740 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l); 2741 value = l; 2742 } 2743 2744 else if (GET_MODE (x) == VOIDmode) 2745 value = CONST_DOUBLE_LOW (x); 2746 2747 else 2748 fatal_insn ("bad insn in frv_print_operand, bad const_double", x); 2749 } 2750 2751 else 2752 value = 0; 2753 2754 switch (code) 2755 { 2756 2757 case '.': 2758 /* Output r0. 
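(gr0 always reads as zero; the 'z' modifier below prints it for a constant-zero operand.)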
*/ 2759 fputs (reg_names[GPR_R0], file); 2760 break; 2761 2762 case '#': 2763 fprintf (file, "%d", frv_print_operand_jump_hint (current_output_insn)); 2764 break; 2765 2766 case '@': 2767 /* Output small data area base register (gr16). */ 2768 fputs (reg_names[SDA_BASE_REG], file); 2769 break; 2770 2771 case '~': 2772 /* Output pic register (gr17). */ 2773 fputs (reg_names[PIC_REGNO], file); 2774 break; 2775 2776 case '*': 2777 /* Output the temporary integer CCR register. */ 2778 fputs (reg_names[ICR_TEMP], file); 2779 break; 2780 2781 case '&': 2782 /* Output the temporary integer CC register. */ 2783 fputs (reg_names[ICC_TEMP], file); 2784 break; 2785 2786 /* case 'a': print an address. */ 2787 2788 case 'C': 2789 /* Print appropriate test for integer branch false operation. */ 2790 fputs (comparison_string (reverse_condition (GET_CODE (x)), 2791 XEXP (x, 0)), file); 2792 break; 2793 2794 case 'c': 2795 /* Print appropriate test for integer branch true operation. */ 2796 fputs (comparison_string (GET_CODE (x), XEXP (x, 0)), file); 2797 break; 2798 2799 case 'e': 2800 /* Print 1 for a NE and 0 for an EQ to give the final argument 2801 for a conditional instruction. */ 2802 if (GET_CODE (x) == NE) 2803 fputs ("1", file); 2804 2805 else if (GET_CODE (x) == EQ) 2806 fputs ("0", file); 2807 2808 else 2809 fatal_insn ("bad insn to frv_print_operand, 'e' modifier:", x); 2810 break; 2811 2812 case 'F': 2813 /* Print appropriate test for floating point branch false operation. */ 2814 switch (GET_CODE (x)) 2815 { 2816 default: 2817 fatal_insn ("bad insn to frv_print_operand, 'F' modifier:", x); 2818 2819 case EQ: fputs ("ne", file); break; 2820 case NE: fputs ("eq", file); break; 2821 case LT: fputs ("uge", file); break; 2822 case LE: fputs ("ug", file); break; 2823 case GT: fputs ("ule", file); break; 2824 case GE: fputs ("ul", file); break; 2825 } 2826 break; 2827 2828 case 'f': 2829 /* Print appropriate test for floating point branch true operation. */ 2830 switch (GET_CODE (x)) 2831 { 2832 default: 2833 fatal_insn ("bad insn to frv_print_operand, 'f' modifier:", x); 2834 2835 case EQ: fputs ("eq", file); break; 2836 case NE: fputs ("ne", file); break; 2837 case LT: fputs ("lt", file); break; 2838 case LE: fputs ("le", file); break; 2839 case GT: fputs ("gt", file); break; 2840 case GE: fputs ("ge", file); break; 2841 } 2842 break; 2843 2844 case 'g': 2845 /* Print appropriate GOT function. */ 2846 if (GET_CODE (x) != CONST_INT) 2847 fatal_insn ("bad insn to frv_print_operand, 'g' modifier:", x); 2848 fputs (unspec_got_name (INTVAL (x)), file); 2849 break; 2850 2851 case 'I': 2852 /* Print 'i' if the operand is a constant, or is a memory reference that 2853 adds a constant. */ 2854 if (GET_CODE (x) == MEM) 2855 x = ((GET_CODE (XEXP (x, 0)) == PLUS) 2856 ? XEXP (XEXP (x, 0), 1) 2857 : XEXP (x, 0)); 2858 else if (GET_CODE (x) == PLUS) 2859 x = XEXP (x, 1); 2860 2861 switch (GET_CODE (x)) 2862 { 2863 default: 2864 break; 2865 2866 case CONST_INT: 2867 case SYMBOL_REF: 2868 case CONST: 2869 fputs ("i", file); 2870 break; 2871 } 2872 break; 2873 2874 case 'i': 2875 /* For jump instructions, print 'i' if the operand is a constant or 2876 is an expression that adds a constant. 
*/ 2877 if (GET_CODE (x) == CONST_INT) 2878 fputs ("i", file); 2879 2880 else 2881 { 2882 if (GET_CODE (x) == CONST_INT 2883 || (GET_CODE (x) == PLUS 2884 && (GET_CODE (XEXP (x, 1)) == CONST_INT 2885 || GET_CODE (XEXP (x, 0)) == CONST_INT))) 2886 fputs ("i", file); 2887 } 2888 break; 2889 2890 case 'L': 2891 /* Print the lower register of a double word register pair */ 2892 if (GET_CODE (x) == REG) 2893 fputs (reg_names[ REGNO (x)+1 ], file); 2894 else 2895 fatal_insn ("bad insn to frv_print_operand, 'L' modifier:", x); 2896 break; 2897 2898 /* case 'l': print a LABEL_REF. */ 2899 2900 case 'M': 2901 case 'N': 2902 /* Print a memory reference for ld/st/jmp, %N prints a memory reference 2903 for the second word of double memory operations. */ 2904 offset = (code == 'M') ? 0 : UNITS_PER_WORD; 2905 switch (GET_CODE (x)) 2906 { 2907 default: 2908 fatal_insn ("bad insn to frv_print_operand, 'M/N' modifier:", x); 2909 2910 case MEM: 2911 frv_print_operand_memory_reference (file, XEXP (x, 0), offset); 2912 break; 2913 2914 case REG: 2915 case SUBREG: 2916 case CONST_INT: 2917 case PLUS: 2918 case SYMBOL_REF: 2919 frv_print_operand_memory_reference (file, x, offset); 2920 break; 2921 } 2922 break; 2923 2924 case 'O': 2925 /* Print the opcode of a command. */ 2926 switch (GET_CODE (x)) 2927 { 2928 default: 2929 fatal_insn ("bad insn to frv_print_operand, 'O' modifier:", x); 2930 2931 case PLUS: fputs ("add", file); break; 2932 case MINUS: fputs ("sub", file); break; 2933 case AND: fputs ("and", file); break; 2934 case IOR: fputs ("or", file); break; 2935 case XOR: fputs ("xor", file); break; 2936 case ASHIFT: fputs ("sll", file); break; 2937 case ASHIFTRT: fputs ("sra", file); break; 2938 case LSHIFTRT: fputs ("srl", file); break; 2939 } 2940 break; 2941 2942 /* case 'n': negate and print a constant int. */ 2943 2944 case 'P': 2945 /* Print PIC label using operand as the number. */ 2946 if (GET_CODE (x) != CONST_INT) 2947 fatal_insn ("bad insn to frv_print_operand, P modifier:", x); 2948 2949 fprintf (file, ".LCF%ld", (long)INTVAL (x)); 2950 break; 2951 2952 case 'U': 2953 /* Print 'u' if the operand is a update load/store. */ 2954 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == PRE_MODIFY) 2955 fputs ("u", file); 2956 break; 2957 2958 case 'z': 2959 /* If value is 0, print gr0, otherwise it must be a register. */ 2960 if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0) 2961 fputs (reg_names[GPR_R0], file); 2962 2963 else if (GET_CODE (x) == REG) 2964 fputs (reg_names [REGNO (x)], file); 2965 2966 else 2967 fatal_insn ("bad insn in frv_print_operand, z case", x); 2968 break; 2969 2970 case 'x': 2971 /* Print constant in hex. */ 2972 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE) 2973 { 2974 fprintf (file, "%s0x%.4lx", IMMEDIATE_PREFIX, (long) value); 2975 break; 2976 } 2977 2978 /* Fall through. 
*/ 2979 2980 case '\0': 2981 if (GET_CODE (x) == REG) 2982 fputs (reg_names [REGNO (x)], file); 2983 2984 else if (GET_CODE (x) == CONST_INT 2985 || GET_CODE (x) == CONST_DOUBLE) 2986 fprintf (file, "%s%ld", IMMEDIATE_PREFIX, (long) value); 2987 2988 else if (frv_const_unspec_p (x, &unspec)) 2989 frv_output_const_unspec (file, &unspec); 2990 2991 else if (GET_CODE (x) == MEM) 2992 frv_print_operand_address (file, GET_MODE (x), XEXP (x, 0)); 2993 2994 else if (CONSTANT_ADDRESS_P (x)) 2995 frv_print_operand_address (file, VOIDmode, x); 2996 2997 else 2998 fatal_insn ("bad insn in frv_print_operand, 0 case", x); 2999 3000 break; 3001 3002 default: 3003 fatal_insn ("frv_print_operand: unknown code", x); 3004 break; 3005 } 3006 3007 return; 3008 } 3009 3010 static bool 3011 frv_print_operand_punct_valid_p (unsigned char code) 3012 { 3013 return (code == '.' || code == '#' || code == '@' || code == '~' 3014 || code == '*' || code == '&'); 3015 } 3016 3017 3018 /* A C statement (sans semicolon) for initializing the variable CUM for the 3019 state at the beginning of the argument list. The variable has type 3020 `CUMULATIVE_ARGS'. The value of FNTYPE is the tree node for the data type 3021 of the function which will receive the args, or 0 if the args are to a 3022 compiler support library function. The value of INDIRECT is nonzero when 3023 processing an indirect call, for example a call through a function pointer. 3024 The value of INDIRECT is zero for a call to an explicitly named function, a 3025 library function call, or when `INIT_CUMULATIVE_ARGS' is used to find 3026 arguments for the function being compiled. 3027 3028 When processing a call to a compiler support library function, LIBNAME 3029 identifies which one. It is a `symbol_ref' rtx which contains the name of 3030 the function, as a string. LIBNAME is 0 when an ordinary C function call is 3031 being processed. Thus, each time this macro is called, either LIBNAME or 3032 FNTYPE is nonzero, but never both of them at once. */ 3033 3034 void 3035 frv_init_cumulative_args (CUMULATIVE_ARGS *cum, 3036 tree fntype, 3037 rtx libname, 3038 tree fndecl, 3039 int incoming) 3040 { 3041 *cum = FIRST_ARG_REGNUM; 3042 3043 if (TARGET_DEBUG_ARG) 3044 { 3045 fprintf (stderr, "\ninit_cumulative_args:"); 3046 if (!fndecl && fntype) 3047 fputs (" indirect", stderr); 3048 3049 if (incoming) 3050 fputs (" incoming", stderr); 3051 3052 if (fntype) 3053 { 3054 tree ret_type = TREE_TYPE (fntype); 3055 fprintf (stderr, " return=%s,", 3056 get_tree_code_name (TREE_CODE (ret_type))); 3057 } 3058 3059 if (libname && GET_CODE (libname) == SYMBOL_REF) 3060 fprintf (stderr, " libname=%s", XSTR (libname, 0)); 3061 3062 if (cfun->returns_struct) 3063 fprintf (stderr, " return-struct"); 3064 3065 putc ('\n', stderr); 3066 } 3067 } 3068 3069 3070 /* Return true if we should pass an argument on the stack rather than 3071 in registers. */ 3072 3073 static bool 3074 frv_must_pass_in_stack (machine_mode mode, const_tree type) 3075 { 3076 if (mode == BLKmode) 3077 return true; 3078 if (type == NULL) 3079 return false; 3080 return AGGREGATE_TYPE_P (type); 3081 } 3082 3083 /* If defined, a C expression that gives the alignment boundary, in bits, of an 3084 argument with the specified mode and type. If it is not defined, 3085 `PARM_BOUNDARY' is used for all arguments. 
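On FRV every argument is aligned to a word boundary, so this hook simply returns BITS_PER_WORD regardless of mode or type.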
*/ 3086 3087 static unsigned int 3088 frv_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED, 3089 const_tree type ATTRIBUTE_UNUSED) 3090 { 3091 return BITS_PER_WORD; 3092 } 3093 3094 static rtx 3095 frv_function_arg_1 (cumulative_args_t cum_v, machine_mode mode, 3096 const_tree type ATTRIBUTE_UNUSED, bool named, 3097 bool incoming ATTRIBUTE_UNUSED) 3098 { 3099 const CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); 3100 3101 machine_mode xmode = (mode == BLKmode) ? SImode : mode; 3102 int arg_num = *cum; 3103 rtx ret; 3104 const char *debstr; 3105 3106 /* Return a marker for use in the call instruction. */ 3107 if (xmode == VOIDmode) 3108 { 3109 ret = const0_rtx; 3110 debstr = "<0>"; 3111 } 3112 3113 else if (arg_num <= LAST_ARG_REGNUM) 3114 { 3115 ret = gen_rtx_REG (xmode, arg_num); 3116 debstr = reg_names[arg_num]; 3117 } 3118 3119 else 3120 { 3121 ret = NULL_RTX; 3122 debstr = "memory"; 3123 } 3124 3125 if (TARGET_DEBUG_ARG) 3126 fprintf (stderr, 3127 "function_arg: words = %2d, mode = %4s, named = %d, size = %3d, arg = %s\n", 3128 arg_num, GET_MODE_NAME (mode), named, GET_MODE_SIZE (mode), debstr); 3129 3130 return ret; 3131 } 3132 3133 static rtx 3134 frv_function_arg (cumulative_args_t cum, machine_mode mode, 3135 const_tree type, bool named) 3136 { 3137 return frv_function_arg_1 (cum, mode, type, named, false); 3138 } 3139 3140 static rtx 3141 frv_function_incoming_arg (cumulative_args_t cum, machine_mode mode, 3142 const_tree type, bool named) 3143 { 3144 return frv_function_arg_1 (cum, mode, type, named, true); 3145 } 3146 3147 3148 /* A C statement (sans semicolon) to update the summarizer variable CUM to 3149 advance past an argument in the argument list. The values MODE, TYPE and 3150 NAMED describe that argument. Once this is done, the variable CUM is 3151 suitable for analyzing the *following* argument with `FUNCTION_ARG', etc. 3152 3153 This macro need not do anything if the argument in question was passed on 3154 the stack. The compiler knows how to track the amount of stack space used 3155 for arguments without any special help. */ 3156 3157 static void 3158 frv_function_arg_advance (cumulative_args_t cum_v, 3159 machine_mode mode, 3160 const_tree type ATTRIBUTE_UNUSED, 3161 bool named) 3162 { 3163 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); 3164 3165 machine_mode xmode = (mode == BLKmode) ? SImode : mode; 3166 int bytes = GET_MODE_SIZE (xmode); 3167 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD; 3168 int arg_num = *cum; 3169 3170 *cum = arg_num + words; 3171 3172 if (TARGET_DEBUG_ARG) 3173 fprintf (stderr, 3174 "function_adv: words = %2d, mode = %4s, named = %d, size = %3d\n", 3175 arg_num, GET_MODE_NAME (mode), named, words * UNITS_PER_WORD); 3176 } 3177 3178 3179 /* A C expression for the number of words, at the beginning of an argument, 3180 must be put in registers. The value must be zero for arguments that are 3181 passed entirely in registers or that are entirely pushed on the stack. 3182 3183 On some machines, certain arguments must be passed partially in registers 3184 and partially in memory. On these machines, typically the first N words of 3185 arguments are passed in registers, and the rest on the stack. If a 3186 multi-word argument (a `double' or a structure) crosses that boundary, its 3187 first few words must be passed in registers and the rest must be pushed. 3188 This macro tells the compiler when this occurs, and how many of the words 3189 should go in registers. 
3190 3191 `FUNCTION_ARG' for these arguments should return the first register to be 3192 used by the caller for this argument; likewise `FUNCTION_INCOMING_ARG', for 3193 the called function. */ 3194 3195 static int 3196 frv_arg_partial_bytes (cumulative_args_t cum, machine_mode mode, 3197 tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED) 3198 { 3199 3200 machine_mode xmode = (mode == BLKmode) ? SImode : mode; 3201 int bytes = GET_MODE_SIZE (xmode); 3202 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD; 3203 int arg_num = *get_cumulative_args (cum); 3204 int ret; 3205 3206 ret = ((arg_num <= LAST_ARG_REGNUM && arg_num + words > LAST_ARG_REGNUM+1) 3207 ? LAST_ARG_REGNUM - arg_num + 1 3208 : 0); 3209 ret *= UNITS_PER_WORD; 3210 3211 if (TARGET_DEBUG_ARG && ret) 3212 fprintf (stderr, "frv_arg_partial_bytes: %d\n", ret); 3213 3214 return ret; 3215 } 3216 3217 3218 /* Implements TARGET_FUNCTION_VALUE. */ 3219 3220 static rtx 3221 frv_function_value (const_tree valtype, 3222 const_tree fn_decl_or_type ATTRIBUTE_UNUSED, 3223 bool outgoing ATTRIBUTE_UNUSED) 3224 { 3225 return gen_rtx_REG (TYPE_MODE (valtype), RETURN_VALUE_REGNUM); 3226 } 3227 3228 3229 /* Implements TARGET_LIBCALL_VALUE. */ 3230 3231 static rtx 3232 frv_libcall_value (machine_mode mode, 3233 const_rtx fun ATTRIBUTE_UNUSED) 3234 { 3235 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM); 3236 } 3237 3238 3239 /* Implements FUNCTION_VALUE_REGNO_P. */ 3240 3241 bool 3242 frv_function_value_regno_p (const unsigned int regno) 3243 { 3244 return (regno == RETURN_VALUE_REGNUM); 3245 } 3246 3247 /* Return true if a register is ok to use as a base or index register. */ 3248 3249 static FRV_INLINE int 3250 frv_regno_ok_for_base_p (int regno, int strict_p) 3251 { 3252 if (GPR_P (regno)) 3253 return TRUE; 3254 3255 if (strict_p) 3256 return (reg_renumber[regno] >= 0 && GPR_P (reg_renumber[regno])); 3257 3258 if (regno == ARG_POINTER_REGNUM) 3259 return TRUE; 3260 3261 return (regno >= FIRST_PSEUDO_REGISTER); 3262 } 3263 3264 3265 /* A C compound statement with a conditional `goto LABEL;' executed if X (an 3266 RTX) is a legitimate memory address on the target machine for a memory 3267 operand of mode MODE. 3268 3269 It usually pays to define several simpler macros to serve as subroutines for 3270 this one. Otherwise it may be too complicated to understand. 3271 3272 This macro must exist in two variants: a strict variant and a non-strict 3273 one. The strict variant is used in the reload pass. It must be defined so 3274 that any pseudo-register that has not been allocated a hard register is 3275 considered a memory reference. In contexts where some kind of register is 3276 required, a pseudo-register with no hard register must be rejected. 3277 3278 The non-strict variant is used in other passes. It must be defined to 3279 accept all pseudo-registers in every context where some kind of register is 3280 required. 3281 3282 Compiler source files that want to use the strict variant of this macro 3283 define the macro `REG_OK_STRICT'. You should use an `#ifdef REG_OK_STRICT' 3284 conditional to define the strict variant in that case and the non-strict 3285 variant otherwise. 3286 3287 Normally, constant addresses which are the sum of a `symbol_ref' and an 3288 integer are stored inside a `const' RTX to mark them as constant. 3289 Therefore, there is no need to recognize such sums specifically as 3290 legitimate addresses. Normally you would simply recognize any `const' as 3291 legitimate. 
3292 3293 Usually `TARGET_PRINT_OPERAND_ADDRESS' is not prepared to handle 3294 constant sums that are not marked with `const'. It assumes that a 3295 naked `plus' indicates indexing. If so, then you *must* reject such 3296 naked constant sums as illegitimate addresses, so that none of them 3297 will be given to `TARGET_PRINT_OPERAND_ADDRESS'. */ 3298 3299 int 3300 frv_legitimate_address_p_1 (machine_mode mode, 3301 rtx x, 3302 int strict_p, 3303 int condexec_p, 3304 int allow_double_reg_p) 3305 { 3306 rtx x0, x1; 3307 int ret = 0; 3308 HOST_WIDE_INT value; 3309 unsigned regno0; 3310 3311 if (FRV_SYMBOL_REF_TLS_P (x)) 3312 return 0; 3313 3314 switch (GET_CODE (x)) 3315 { 3316 default: 3317 break; 3318 3319 case SUBREG: 3320 x = SUBREG_REG (x); 3321 if (GET_CODE (x) != REG) 3322 break; 3323 3324 /* Fall through. */ 3325 3326 case REG: 3327 ret = frv_regno_ok_for_base_p (REGNO (x), strict_p); 3328 break; 3329 3330 case PRE_MODIFY: 3331 x0 = XEXP (x, 0); 3332 x1 = XEXP (x, 1); 3333 if (GET_CODE (x0) != REG 3334 || ! frv_regno_ok_for_base_p (REGNO (x0), strict_p) 3335 || GET_CODE (x1) != PLUS 3336 || ! rtx_equal_p (x0, XEXP (x1, 0)) 3337 || GET_CODE (XEXP (x1, 1)) != REG 3338 || ! frv_regno_ok_for_base_p (REGNO (XEXP (x1, 1)), strict_p)) 3339 break; 3340 3341 ret = 1; 3342 break; 3343 3344 case CONST_INT: 3345 /* 12-bit immediate */ 3346 if (condexec_p) 3347 ret = FALSE; 3348 else 3349 { 3350 ret = IN_RANGE (INTVAL (x), -2048, 2047); 3351 3352 /* If we can't use load/store double operations, make sure we can 3353 address the second word. */ 3354 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD) 3355 ret = IN_RANGE (INTVAL (x) + GET_MODE_SIZE (mode) - 1, 3356 -2048, 2047); 3357 } 3358 break; 3359 3360 case PLUS: 3361 x0 = XEXP (x, 0); 3362 x1 = XEXP (x, 1); 3363 3364 if (GET_CODE (x0) == SUBREG) 3365 x0 = SUBREG_REG (x0); 3366 3367 if (GET_CODE (x0) != REG) 3368 break; 3369 3370 regno0 = REGNO (x0); 3371 if (!frv_regno_ok_for_base_p (regno0, strict_p)) 3372 break; 3373 3374 switch (GET_CODE (x1)) 3375 { 3376 default: 3377 break; 3378 3379 case SUBREG: 3380 x1 = SUBREG_REG (x1); 3381 if (GET_CODE (x1) != REG) 3382 break; 3383 3384 /* Fall through. */ 3385 3386 case REG: 3387 /* Do not allow reg+reg addressing for modes > 1 word if we 3388 can't depend on having move double instructions. */ 3389 if (!allow_double_reg_p && GET_MODE_SIZE (mode) > UNITS_PER_WORD) 3390 ret = FALSE; 3391 else 3392 ret = frv_regno_ok_for_base_p (REGNO (x1), strict_p); 3393 break; 3394 3395 case CONST_INT: 3396 /* 12-bit immediate */ 3397 if (condexec_p) 3398 ret = FALSE; 3399 else 3400 { 3401 value = INTVAL (x1); 3402 ret = IN_RANGE (value, -2048, 2047); 3403 3404 /* If we can't use load/store double operations, make sure we can 3405 address the second word. */ 3406 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD) 3407 ret = IN_RANGE (value + GET_MODE_SIZE (mode) - 1, -2048, 2047); 3408 } 3409 break; 3410 3411 case CONST: 3412 if (!condexec_p && got12_operand (x1, VOIDmode)) 3413 ret = TRUE; 3414 break; 3415 3416 } 3417 break; 3418 } 3419 3420 if (TARGET_DEBUG_ADDR) 3421 { 3422 fprintf (stderr, "\n========== legitimate_address_p, mode = %s, result = %d, addresses are %sstrict%s\n", 3423 GET_MODE_NAME (mode), ret, (strict_p) ? "" : "not ", 3424 (condexec_p) ? 
", inside conditional code" : ""); 3425 debug_rtx (x); 3426 } 3427 3428 return ret; 3429 } 3430 3431 bool 3432 frv_legitimate_address_p (machine_mode mode, rtx x, bool strict_p) 3433 { 3434 return frv_legitimate_address_p_1 (mode, x, strict_p, FALSE, FALSE); 3435 } 3436 3437 /* Given an ADDR, generate code to inline the PLT. */ 3438 static rtx 3439 gen_inlined_tls_plt (rtx addr) 3440 { 3441 rtx retval, dest; 3442 rtx picreg = get_hard_reg_initial_val (Pmode, FDPIC_REG); 3443 3444 3445 dest = gen_reg_rtx (DImode); 3446 3447 if (flag_pic == 1) 3448 { 3449 /* 3450 -fpic version: 3451 3452 lddi.p @(gr15, #gottlsdesc12(ADDR)), gr8 3453 calll #gettlsoff(ADDR)@(gr8, gr0) 3454 */ 3455 emit_insn (gen_tls_lddi (dest, addr, picreg)); 3456 } 3457 else 3458 { 3459 /* 3460 -fPIC version: 3461 3462 sethi.p #gottlsdeschi(ADDR), gr8 3463 setlo #gottlsdesclo(ADDR), gr8 3464 ldd #tlsdesc(ADDR)@(gr15, gr8), gr8 3465 calll #gettlsoff(ADDR)@(gr8, gr0) 3466 */ 3467 rtx reguse = gen_reg_rtx (Pmode); 3468 emit_insn (gen_tlsoff_hilo (reguse, addr, GEN_INT (R_FRV_GOTTLSDESCHI))); 3469 emit_insn (gen_tls_tlsdesc_ldd (dest, picreg, reguse, addr)); 3470 } 3471 3472 retval = gen_reg_rtx (Pmode); 3473 emit_insn (gen_tls_indirect_call (retval, addr, dest, picreg)); 3474 return retval; 3475 } 3476 3477 /* Emit a TLSMOFF or TLSMOFF12 offset, depending on -mTLS. Returns 3478 the destination address. */ 3479 static rtx 3480 gen_tlsmoff (rtx addr, rtx reg) 3481 { 3482 rtx dest = gen_reg_rtx (Pmode); 3483 3484 if (TARGET_BIG_TLS) 3485 { 3486 /* sethi.p #tlsmoffhi(x), grA 3487 setlo #tlsmofflo(x), grA 3488 */ 3489 dest = gen_reg_rtx (Pmode); 3490 emit_insn (gen_tlsoff_hilo (dest, addr, 3491 GEN_INT (R_FRV_TLSMOFFHI))); 3492 dest = gen_rtx_PLUS (Pmode, dest, reg); 3493 } 3494 else 3495 { 3496 /* addi grB, #tlsmoff12(x), grC 3497 -or- 3498 ld/st @(grB, #tlsmoff12(x)), grC 3499 */ 3500 dest = gen_reg_rtx (Pmode); 3501 emit_insn (gen_symGOTOFF2reg_i (dest, addr, reg, 3502 GEN_INT (R_FRV_TLSMOFF12))); 3503 } 3504 return dest; 3505 } 3506 3507 /* Generate code for a TLS address. */ 3508 static rtx 3509 frv_legitimize_tls_address (rtx addr, enum tls_model model) 3510 { 3511 rtx dest, tp = gen_rtx_REG (Pmode, 29); 3512 rtx picreg = get_hard_reg_initial_val (Pmode, 15); 3513 3514 switch (model) 3515 { 3516 case TLS_MODEL_INITIAL_EXEC: 3517 if (flag_pic == 1) 3518 { 3519 /* -fpic version. 3520 ldi @(gr15, #gottlsoff12(x)), gr5 3521 */ 3522 dest = gen_reg_rtx (Pmode); 3523 emit_insn (gen_tls_load_gottlsoff12 (dest, addr, picreg)); 3524 dest = gen_rtx_PLUS (Pmode, tp, dest); 3525 } 3526 else 3527 { 3528 /* -fPIC or anything else. 
3529 3530 sethi.p #gottlsoffhi(x), gr14 3531 setlo #gottlsofflo(x), gr14 3532 ld #tlsoff(x)@(gr15, gr14), gr9 3533 */ 3534 rtx tmp = gen_reg_rtx (Pmode); 3535 dest = gen_reg_rtx (Pmode); 3536 emit_insn (gen_tlsoff_hilo (tmp, addr, 3537 GEN_INT (R_FRV_GOTTLSOFF_HI))); 3538 3539 emit_insn (gen_tls_tlsoff_ld (dest, picreg, tmp, addr)); 3540 dest = gen_rtx_PLUS (Pmode, tp, dest); 3541 } 3542 break; 3543 case TLS_MODEL_LOCAL_DYNAMIC: 3544 { 3545 rtx reg, retval; 3546 3547 if (TARGET_INLINE_PLT) 3548 retval = gen_inlined_tls_plt (GEN_INT (0)); 3549 else 3550 { 3551 /* call #gettlsoff(0) */ 3552 retval = gen_reg_rtx (Pmode); 3553 emit_insn (gen_call_gettlsoff (retval, GEN_INT (0), picreg)); 3554 } 3555 3556 reg = gen_reg_rtx (Pmode); 3557 emit_insn (gen_rtx_SET (reg, gen_rtx_PLUS (Pmode, retval, tp))); 3558 3559 dest = gen_tlsmoff (addr, reg); 3560 3561 /* 3562 dest = gen_reg_rtx (Pmode); 3563 emit_insn (gen_tlsoff_hilo (dest, addr, 3564 GEN_INT (R_FRV_TLSMOFFHI))); 3565 dest = gen_rtx_PLUS (Pmode, dest, reg); 3566 */ 3567 break; 3568 } 3569 case TLS_MODEL_LOCAL_EXEC: 3570 dest = gen_tlsmoff (addr, gen_rtx_REG (Pmode, 29)); 3571 break; 3572 case TLS_MODEL_GLOBAL_DYNAMIC: 3573 { 3574 rtx retval; 3575 3576 if (TARGET_INLINE_PLT) 3577 retval = gen_inlined_tls_plt (addr); 3578 else 3579 { 3580 /* call #gettlsoff(x) */ 3581 retval = gen_reg_rtx (Pmode); 3582 emit_insn (gen_call_gettlsoff (retval, addr, picreg)); 3583 } 3584 dest = gen_rtx_PLUS (Pmode, retval, tp); 3585 break; 3586 } 3587 default: 3588 gcc_unreachable (); 3589 } 3590 3591 return dest; 3592 } 3593 3594 rtx 3595 frv_legitimize_address (rtx x, 3596 rtx oldx ATTRIBUTE_UNUSED, 3597 machine_mode mode ATTRIBUTE_UNUSED) 3598 { 3599 if (GET_CODE (x) == SYMBOL_REF) 3600 { 3601 enum tls_model model = SYMBOL_REF_TLS_MODEL (x); 3602 if (model != 0) 3603 return frv_legitimize_tls_address (x, model); 3604 } 3605 3606 return x; 3607 } 3608 3609 /* Test whether a local function descriptor is canonical, i.e., 3610 whether we can use FUNCDESC_GOTOFF to compute the address of the 3611 function. */ 3612 3613 static bool 3614 frv_local_funcdesc_p (rtx fnx) 3615 { 3616 tree fn; 3617 enum symbol_visibility vis; 3618 bool ret; 3619 3620 if (! SYMBOL_REF_LOCAL_P (fnx)) 3621 return FALSE; 3622 3623 fn = SYMBOL_REF_DECL (fnx); 3624 3625 if (! fn) 3626 return FALSE; 3627 3628 vis = DECL_VISIBILITY (fn); 3629 3630 if (vis == VISIBILITY_PROTECTED) 3631 /* Private function descriptors for protected functions are not 3632 canonical. Temporarily change the visibility to global. */ 3633 vis = VISIBILITY_DEFAULT; 3634 else if (flag_shlib) 3635 /* If we're already compiling for a shared library (that, unlike 3636 executables, can't assume that the existence of a definition 3637 implies local binding), we can skip the re-testing. */ 3638 return TRUE; 3639 3640 ret = default_binds_local_p_1 (fn, flag_pic); 3641 3642 DECL_VISIBILITY (fn) = vis; 3643 3644 return ret; 3645 } 3646 3647 /* Load the _gp symbol into DEST. SRC is supposed to be the FDPIC 3648 register. 
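The symbol is loaded through the GOT using a got12 relocation.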
*/ 3649 3650 rtx 3651 frv_gen_GPsym2reg (rtx dest, rtx src) 3652 { 3653 tree gp = get_identifier ("_gp"); 3654 rtx gp_sym = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (gp)); 3655 3656 return gen_symGOT2reg (dest, gp_sym, src, GEN_INT (R_FRV_GOT12)); 3657 } 3658 3659 static const char * 3660 unspec_got_name (int i) 3661 { 3662 switch (i) 3663 { 3664 case R_FRV_GOT12: return "got12"; 3665 case R_FRV_GOTHI: return "gothi"; 3666 case R_FRV_GOTLO: return "gotlo"; 3667 case R_FRV_FUNCDESC: return "funcdesc"; 3668 case R_FRV_FUNCDESC_GOT12: return "gotfuncdesc12"; 3669 case R_FRV_FUNCDESC_GOTHI: return "gotfuncdeschi"; 3670 case R_FRV_FUNCDESC_GOTLO: return "gotfuncdesclo"; 3671 case R_FRV_FUNCDESC_VALUE: return "funcdescvalue"; 3672 case R_FRV_FUNCDESC_GOTOFF12: return "gotofffuncdesc12"; 3673 case R_FRV_FUNCDESC_GOTOFFHI: return "gotofffuncdeschi"; 3674 case R_FRV_FUNCDESC_GOTOFFLO: return "gotofffuncdesclo"; 3675 case R_FRV_GOTOFF12: return "gotoff12"; 3676 case R_FRV_GOTOFFHI: return "gotoffhi"; 3677 case R_FRV_GOTOFFLO: return "gotofflo"; 3678 case R_FRV_GPREL12: return "gprel12"; 3679 case R_FRV_GPRELHI: return "gprelhi"; 3680 case R_FRV_GPRELLO: return "gprello"; 3681 case R_FRV_GOTTLSOFF_HI: return "gottlsoffhi"; 3682 case R_FRV_GOTTLSOFF_LO: return "gottlsofflo"; 3683 case R_FRV_TLSMOFFHI: return "tlsmoffhi"; 3684 case R_FRV_TLSMOFFLO: return "tlsmofflo"; 3685 case R_FRV_TLSMOFF12: return "tlsmoff12"; 3686 case R_FRV_TLSDESCHI: return "tlsdeschi"; 3687 case R_FRV_TLSDESCLO: return "tlsdesclo"; 3688 case R_FRV_GOTTLSDESCHI: return "gottlsdeschi"; 3689 case R_FRV_GOTTLSDESCLO: return "gottlsdesclo"; 3690 default: gcc_unreachable (); 3691 } 3692 } 3693 3694 /* Write the assembler syntax for UNSPEC to STREAM. Note that any offset 3695 is added inside the relocation operator. */ 3696 3697 static void 3698 frv_output_const_unspec (FILE *stream, const struct frv_unspec *unspec) 3699 { 3700 fprintf (stream, "#%s(", unspec_got_name (unspec->reloc)); 3701 output_addr_const (stream, plus_constant (Pmode, unspec->symbol, 3702 unspec->offset)); 3703 fputs (")", stream); 3704 } 3705 3706 /* Implement FIND_BASE_TERM. See whether ORIG_X represents #gprel12(foo) 3707 or #gotoff12(foo) for some small data symbol foo. If so, return foo, 3708 otherwise return ORIG_X. */ 3709 3710 rtx 3711 frv_find_base_term (rtx x) 3712 { 3713 struct frv_unspec unspec; 3714 3715 if (frv_const_unspec_p (x, &unspec) 3716 && frv_small_data_reloc_p (unspec.symbol, unspec.reloc)) 3717 return plus_constant (Pmode, unspec.symbol, unspec.offset); 3718 3719 return x; 3720 } 3721 3722 /* Return 1 if operand is a valid FRV address. CONDEXEC_P is true if 3723 the operand is used by a predicated instruction. */ 3724 3725 int 3726 frv_legitimate_memory_operand (rtx op, machine_mode mode, int condexec_p) 3727 { 3728 return ((GET_MODE (op) == mode || mode == VOIDmode) 3729 && GET_CODE (op) == MEM 3730 && frv_legitimate_address_p_1 (mode, XEXP (op, 0), 3731 reload_completed, condexec_p, FALSE)); 3732 } 3733 3734 void 3735 frv_expand_fdpic_call (rtx *operands, bool ret_value, bool sibcall) 3736 { 3737 rtx lr = gen_rtx_REG (Pmode, LR_REGNO); 3738 rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REG); 3739 rtx c, rvrtx=0; 3740 rtx addr; 3741 3742 if (ret_value) 3743 { 3744 rvrtx = operands[0]; 3745 operands ++; 3746 } 3747 3748 addr = XEXP (operands[0], 0); 3749 3750 /* Inline PLTs if we're optimizing for speed. 
We'd like to inline 3751 any calls that would involve a PLT, but can't tell, since we 3752 don't know whether an extern function is going to be provided by 3753 a separate translation unit or imported from a separate module. 3754 When compiling for shared libraries, if the function has default 3755 visibility, we assume it's overridable, so we inline the PLT, but 3756 for executables, we don't really have a way to make a good 3757 decision: a function is as likely to be imported from a shared 3758 library as it is to be defined in the executable itself. We 3759 assume executables will get global functions defined locally, 3760 whereas shared libraries will have them potentially overridden, 3761 so we only inline PLTs when compiling for shared libraries. 3762 3763 In order to mark a function as local to a shared library, any 3764 non-default visibility attribute suffices. Unfortunately, 3765 there's no simple way to tag a function declaration as ``in a 3766 different module'', which we could then use to trigger PLT 3767 inlining on executables. There's -minline-plt, but it affects 3768 all external functions, so one would have to also mark function 3769 declarations available in the same module with non-default 3770 visibility, which is advantageous in itself. */ 3771 if (GET_CODE (addr) == SYMBOL_REF 3772 && ((!SYMBOL_REF_LOCAL_P (addr) && TARGET_INLINE_PLT) 3773 || sibcall)) 3774 { 3775 rtx x, dest; 3776 dest = gen_reg_rtx (SImode); 3777 if (flag_pic != 1) 3778 x = gen_symGOTOFF2reg_hilo (dest, addr, OUR_FDPIC_REG, 3779 GEN_INT (R_FRV_FUNCDESC_GOTOFF12)); 3780 else 3781 x = gen_symGOTOFF2reg (dest, addr, OUR_FDPIC_REG, 3782 GEN_INT (R_FRV_FUNCDESC_GOTOFF12)); 3783 emit_insn (x); 3784 crtl->uses_pic_offset_table = TRUE; 3785 addr = dest; 3786 } 3787 else if (GET_CODE (addr) == SYMBOL_REF) 3788 { 3789 /* These are always either local, or handled through a local 3790 PLT. */ 3791 if (ret_value) 3792 c = gen_call_value_fdpicsi (rvrtx, addr, operands[1], 3793 operands[2], picreg, lr); 3794 else 3795 c = gen_call_fdpicsi (addr, operands[1], operands[2], picreg, lr); 3796 emit_call_insn (c); 3797 return; 3798 } 3799 else if (! ldd_address_operand (addr, Pmode)) 3800 addr = force_reg (Pmode, addr); 3801 3802 picreg = gen_reg_rtx (DImode); 3803 emit_insn (gen_movdi_ldd (picreg, addr)); 3804 3805 if (sibcall && ret_value) 3806 c = gen_sibcall_value_fdpicdi (rvrtx, picreg, const0_rtx); 3807 else if (sibcall) 3808 c = gen_sibcall_fdpicdi (picreg, const0_rtx); 3809 else if (ret_value) 3810 c = gen_call_value_fdpicdi (rvrtx, picreg, const0_rtx, lr); 3811 else 3812 c = gen_call_fdpicdi (picreg, const0_rtx, lr); 3813 emit_call_insn (c); 3814 } 3815 3816 /* Look for a SYMBOL_REF of a function in an rtx. We always want to 3817 process these separately from any offsets, such that we add any 3818 offsets to the function descriptor (the actual pointer), not to the 3819 function address. 
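The walk below recurses over the rtx according to its format string ('e' for subexpressions, 'E'/'V' for vectors) and returns true as soon as it finds a SYMBOL_REF with SYMBOL_REF_FUNCTION_P set.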
*/ 3820 3821 static bool 3822 frv_function_symbol_referenced_p (rtx x) 3823 { 3824 const char *format; 3825 int length; 3826 int j; 3827 3828 if (GET_CODE (x) == SYMBOL_REF) 3829 return SYMBOL_REF_FUNCTION_P (x); 3830 3831 length = GET_RTX_LENGTH (GET_CODE (x)); 3832 format = GET_RTX_FORMAT (GET_CODE (x)); 3833 3834 for (j = 0; j < length; ++j) 3835 { 3836 switch (format[j]) 3837 { 3838 case 'e': 3839 if (frv_function_symbol_referenced_p (XEXP (x, j))) 3840 return TRUE; 3841 break; 3842 3843 case 'V': 3844 case 'E': 3845 if (XVEC (x, j) != 0) 3846 { 3847 int k; 3848 for (k = 0; k < XVECLEN (x, j); ++k) 3849 if (frv_function_symbol_referenced_p (XVECEXP (x, j, k))) 3850 return TRUE; 3851 } 3852 break; 3853 3854 default: 3855 /* Nothing to do. */ 3856 break; 3857 } 3858 } 3859 3860 return FALSE; 3861 } 3862 3863 /* Return true if the memory operand is one that can be conditionally 3864 executed. */ 3865 3866 int 3867 condexec_memory_operand (rtx op, machine_mode mode) 3868 { 3869 machine_mode op_mode = GET_MODE (op); 3870 rtx addr; 3871 3872 if (mode != VOIDmode && op_mode != mode) 3873 return FALSE; 3874 3875 switch (op_mode) 3876 { 3877 default: 3878 return FALSE; 3879 3880 case E_QImode: 3881 case E_HImode: 3882 case E_SImode: 3883 case E_SFmode: 3884 break; 3885 } 3886 3887 if (GET_CODE (op) != MEM) 3888 return FALSE; 3889 3890 addr = XEXP (op, 0); 3891 return frv_legitimate_address_p_1 (mode, addr, reload_completed, TRUE, FALSE); 3892 } 3893 3894 /* Return true if the bare return instruction can be used outside of the 3895 epilog code. For frv, we only do it if there was no stack allocation. */ 3896 3897 int 3898 direct_return_p (void) 3899 { 3900 frv_stack_t *info; 3901 3902 if (!reload_completed) 3903 return FALSE; 3904 3905 info = frv_stack_info (); 3906 return (info->total_size == 0); 3907 } 3908 3909 3910 void 3911 frv_emit_move (machine_mode mode, rtx dest, rtx src) 3912 { 3913 if (GET_CODE (src) == SYMBOL_REF) 3914 { 3915 enum tls_model model = SYMBOL_REF_TLS_MODEL (src); 3916 if (model != 0) 3917 src = frv_legitimize_tls_address (src, model); 3918 } 3919 3920 switch (mode) 3921 { 3922 case E_SImode: 3923 if (frv_emit_movsi (dest, src)) 3924 return; 3925 break; 3926 3927 case E_QImode: 3928 case E_HImode: 3929 case E_DImode: 3930 case E_SFmode: 3931 case E_DFmode: 3932 if (!reload_in_progress 3933 && !reload_completed 3934 && !register_operand (dest, mode) 3935 && !reg_or_0_operand (src, mode)) 3936 src = copy_to_mode_reg (mode, src); 3937 break; 3938 3939 default: 3940 gcc_unreachable (); 3941 } 3942 3943 emit_insn (gen_rtx_SET (dest, src)); 3944 } 3945 3946 /* Emit code to handle a MOVSI, adding in the small data register or pic 3947 register if needed to load up addresses. Return TRUE if the appropriate 3948 instructions are emitted. */ 3949 3950 int 3951 frv_emit_movsi (rtx dest, rtx src) 3952 { 3953 int base_regno = -1; 3954 int unspec = 0; 3955 rtx sym = src; 3956 struct frv_unspec old_unspec; 3957 3958 if (!reload_in_progress 3959 && !reload_completed 3960 && !register_operand (dest, SImode) 3961 && (!reg_or_0_operand (src, SImode) 3962 /* Virtual registers will almost always be replaced by an 3963 add instruction, so expose this to CSE by copying to 3964 an intermediate register. 
*/ 3965 || (GET_CODE (src) == REG 3966 && IN_RANGE (REGNO (src), 3967 FIRST_VIRTUAL_REGISTER, 3968 LAST_VIRTUAL_POINTER_REGISTER)))) 3969 { 3970 emit_insn (gen_rtx_SET (dest, copy_to_mode_reg (SImode, src))); 3971 return TRUE; 3972 } 3973 3974 /* Explicitly add in the PIC or small data register if needed. */ 3975 switch (GET_CODE (src)) 3976 { 3977 default: 3978 break; 3979 3980 case LABEL_REF: 3981 handle_label: 3982 if (TARGET_FDPIC) 3983 { 3984 /* Using GPREL12, we use a single GOT entry for all symbols 3985 in read-only sections, but trade sequences such as: 3986 3987 sethi #gothi(label), gr# 3988 setlo #gotlo(label), gr# 3989 ld @(gr15,gr#), gr# 3990 3991 for 3992 3993 ld @(gr15,#got12(_gp)), gr# 3994 sethi #gprelhi(label), gr## 3995 setlo #gprello(label), gr## 3996 add gr#, gr##, gr## 3997 3998 We may often be able to share gr# for multiple 3999 computations of GPREL addresses, and we may often fold 4000 the final add into the pair of registers of a load or 4001 store instruction, so it's often profitable. Even when 4002 optimizing for size, we're trading a GOT entry for an 4003 additional instruction, which trades GOT space 4004 (read-write) for code size (read-only, shareable), as 4005 long as the symbol is not used in more than two different 4006 locations. 4007 4008 With -fpie/-fpic, we'd be trading a single load for a 4009 sequence of 4 instructions, because the offset of the 4010 label can't be assumed to be addressable with 12 bits, so 4011 we don't do this. */ 4012 if (TARGET_GPREL_RO) 4013 unspec = R_FRV_GPREL12; 4014 else 4015 unspec = R_FRV_GOT12; 4016 } 4017 else if (flag_pic) 4018 base_regno = PIC_REGNO; 4019 4020 break; 4021 4022 case CONST: 4023 if (frv_const_unspec_p (src, &old_unspec)) 4024 break; 4025 4026 if (TARGET_FDPIC && frv_function_symbol_referenced_p (XEXP (src, 0))) 4027 { 4028 handle_whatever: 4029 src = force_reg (GET_MODE (XEXP (src, 0)), XEXP (src, 0)); 4030 emit_move_insn (dest, src); 4031 return TRUE; 4032 } 4033 else 4034 { 4035 sym = XEXP (sym, 0); 4036 if (GET_CODE (sym) == PLUS 4037 && GET_CODE (XEXP (sym, 0)) == SYMBOL_REF 4038 && GET_CODE (XEXP (sym, 1)) == CONST_INT) 4039 sym = XEXP (sym, 0); 4040 if (GET_CODE (sym) == SYMBOL_REF) 4041 goto handle_sym; 4042 else if (GET_CODE (sym) == LABEL_REF) 4043 goto handle_label; 4044 else 4045 goto handle_whatever; 4046 } 4047 break; 4048 4049 case SYMBOL_REF: 4050 handle_sym: 4051 if (TARGET_FDPIC) 4052 { 4053 enum tls_model model = SYMBOL_REF_TLS_MODEL (sym); 4054 4055 if (model != 0) 4056 { 4057 src = frv_legitimize_tls_address (src, model); 4058 emit_move_insn (dest, src); 4059 return TRUE; 4060 } 4061 4062 if (SYMBOL_REF_FUNCTION_P (sym)) 4063 { 4064 if (frv_local_funcdesc_p (sym)) 4065 unspec = R_FRV_FUNCDESC_GOTOFF12; 4066 else 4067 unspec = R_FRV_FUNCDESC_GOT12; 4068 } 4069 else 4070 { 4071 if (CONSTANT_POOL_ADDRESS_P (sym)) 4072 switch (GET_CODE (get_pool_constant (sym))) 4073 { 4074 case CONST: 4075 case SYMBOL_REF: 4076 case LABEL_REF: 4077 if (flag_pic) 4078 { 4079 unspec = R_FRV_GOTOFF12; 4080 break; 4081 } 4082 /* Fall through. */ 4083 default: 4084 if (TARGET_GPREL_RO) 4085 unspec = R_FRV_GPREL12; 4086 else 4087 unspec = R_FRV_GOT12; 4088 break; 4089 } 4090 else if (SYMBOL_REF_LOCAL_P (sym) 4091 && !SYMBOL_REF_EXTERNAL_P (sym) 4092 && SYMBOL_REF_DECL (sym) 4093 && (!DECL_P (SYMBOL_REF_DECL (sym)) 4094 || !DECL_COMMON (SYMBOL_REF_DECL (sym)))) 4095 { 4096 tree decl = SYMBOL_REF_DECL (sym); 4097 tree init = TREE_CODE (decl) == VAR_DECL 4098 ? 
DECL_INITIAL (decl) 4099 : TREE_CODE (decl) == CONSTRUCTOR 4100 ? decl : 0; 4101 int reloc = 0; 4102 bool named_section, readonly; 4103 4104 if (init && init != error_mark_node) 4105 reloc = compute_reloc_for_constant (init); 4106 4107 named_section = TREE_CODE (decl) == VAR_DECL 4108 && lookup_attribute ("section", DECL_ATTRIBUTES (decl)); 4109 readonly = decl_readonly_section (decl, reloc); 4110 4111 if (named_section) 4112 unspec = R_FRV_GOT12; 4113 else if (!readonly) 4114 unspec = R_FRV_GOTOFF12; 4115 else if (readonly && TARGET_GPREL_RO) 4116 unspec = R_FRV_GPREL12; 4117 else 4118 unspec = R_FRV_GOT12; 4119 } 4120 else 4121 unspec = R_FRV_GOT12; 4122 } 4123 } 4124 4125 else if (SYMBOL_REF_SMALL_P (sym)) 4126 base_regno = SDA_BASE_REG; 4127 4128 else if (flag_pic) 4129 base_regno = PIC_REGNO; 4130 4131 break; 4132 } 4133 4134 if (base_regno >= 0) 4135 { 4136 if (GET_CODE (sym) == SYMBOL_REF && SYMBOL_REF_SMALL_P (sym)) 4137 emit_insn (gen_symGOTOFF2reg (dest, src, 4138 gen_rtx_REG (Pmode, base_regno), 4139 GEN_INT (R_FRV_GPREL12))); 4140 else 4141 emit_insn (gen_symGOTOFF2reg_hilo (dest, src, 4142 gen_rtx_REG (Pmode, base_regno), 4143 GEN_INT (R_FRV_GPREL12))); 4144 if (base_regno == PIC_REGNO) 4145 crtl->uses_pic_offset_table = TRUE; 4146 return TRUE; 4147 } 4148 4149 if (unspec) 4150 { 4151 rtx x; 4152 4153 /* Since OUR_FDPIC_REG is a pseudo register, we can't safely introduce 4154 new uses of it once reload has begun. */ 4155 gcc_assert (!reload_in_progress && !reload_completed); 4156 4157 switch (unspec) 4158 { 4159 case R_FRV_GOTOFF12: 4160 if (!frv_small_data_reloc_p (sym, unspec)) 4161 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG, 4162 GEN_INT (unspec)); 4163 else 4164 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec)); 4165 break; 4166 case R_FRV_GPREL12: 4167 if (!frv_small_data_reloc_p (sym, unspec)) 4168 x = gen_symGPREL2reg_hilo (dest, src, OUR_FDPIC_REG, 4169 GEN_INT (unspec)); 4170 else 4171 x = gen_symGPREL2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec)); 4172 break; 4173 case R_FRV_FUNCDESC_GOTOFF12: 4174 if (flag_pic != 1) 4175 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG, 4176 GEN_INT (unspec)); 4177 else 4178 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec)); 4179 break; 4180 default: 4181 if (flag_pic != 1) 4182 x = gen_symGOT2reg_hilo (dest, src, OUR_FDPIC_REG, 4183 GEN_INT (unspec)); 4184 else 4185 x = gen_symGOT2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec)); 4186 break; 4187 } 4188 emit_insn (x); 4189 crtl->uses_pic_offset_table = TRUE; 4190 return TRUE; 4191 } 4192 4193 4194 return FALSE; 4195 } 4196 4197 4198 /* Return a string to output a single word move. 
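operands[0] is the destination and operands[1] the source; returning "#" tells the output pass that the insn must be split into multiple instructions rather than emitted directly.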
*/ 4199 4200 const char * 4201 output_move_single (rtx operands[], rtx insn) 4202 { 4203 rtx dest = operands[0]; 4204 rtx src = operands[1]; 4205 4206 if (GET_CODE (dest) == REG) 4207 { 4208 int dest_regno = REGNO (dest); 4209 machine_mode mode = GET_MODE (dest); 4210 4211 if (GPR_P (dest_regno)) 4212 { 4213 if (GET_CODE (src) == REG) 4214 { 4215 /* gpr <- some sort of register */ 4216 int src_regno = REGNO (src); 4217 4218 if (GPR_P (src_regno)) 4219 return "mov %1, %0"; 4220 4221 else if (FPR_P (src_regno)) 4222 return "movfg %1, %0"; 4223 4224 else if (SPR_P (src_regno)) 4225 return "movsg %1, %0"; 4226 } 4227 4228 else if (GET_CODE (src) == MEM) 4229 { 4230 /* gpr <- memory */ 4231 switch (mode) 4232 { 4233 default: 4234 break; 4235 4236 case E_QImode: 4237 return "ldsb%I1%U1 %M1,%0"; 4238 4239 case E_HImode: 4240 return "ldsh%I1%U1 %M1,%0"; 4241 4242 case E_SImode: 4243 case E_SFmode: 4244 return "ld%I1%U1 %M1, %0"; 4245 } 4246 } 4247 4248 else if (GET_CODE (src) == CONST_INT 4249 || GET_CODE (src) == CONST_DOUBLE) 4250 { 4251 /* gpr <- integer/floating constant */ 4252 HOST_WIDE_INT value; 4253 4254 if (GET_CODE (src) == CONST_INT) 4255 value = INTVAL (src); 4256 4257 else if (mode == SFmode) 4258 { 4259 long l; 4260 4261 REAL_VALUE_TO_TARGET_SINGLE 4262 (*CONST_DOUBLE_REAL_VALUE (src), l); 4263 value = l; 4264 } 4265 4266 else 4267 value = CONST_DOUBLE_LOW (src); 4268 4269 if (IN_RANGE (value, -32768, 32767)) 4270 return "setlos %1, %0"; 4271 4272 return "#"; 4273 } 4274 4275 else if (GET_CODE (src) == SYMBOL_REF 4276 || GET_CODE (src) == LABEL_REF 4277 || GET_CODE (src) == CONST) 4278 { 4279 return "#"; 4280 } 4281 } 4282 4283 else if (FPR_P (dest_regno)) 4284 { 4285 if (GET_CODE (src) == REG) 4286 { 4287 /* fpr <- some sort of register */ 4288 int src_regno = REGNO (src); 4289 4290 if (GPR_P (src_regno)) 4291 return "movgf %1, %0"; 4292 4293 else if (FPR_P (src_regno)) 4294 { 4295 if (TARGET_HARD_FLOAT) 4296 return "fmovs %1, %0"; 4297 else 4298 return "mor %1, %1, %0"; 4299 } 4300 } 4301 4302 else if (GET_CODE (src) == MEM) 4303 { 4304 /* fpr <- memory */ 4305 switch (mode) 4306 { 4307 default: 4308 break; 4309 4310 case E_QImode: 4311 return "ldbf%I1%U1 %M1,%0"; 4312 4313 case E_HImode: 4314 return "ldhf%I1%U1 %M1,%0"; 4315 4316 case E_SImode: 4317 case E_SFmode: 4318 return "ldf%I1%U1 %M1, %0"; 4319 } 4320 } 4321 4322 else if (ZERO_P (src)) 4323 return "movgf %., %0"; 4324 } 4325 4326 else if (SPR_P (dest_regno)) 4327 { 4328 if (GET_CODE (src) == REG) 4329 { 4330 /* spr <- some sort of register */ 4331 int src_regno = REGNO (src); 4332 4333 if (GPR_P (src_regno)) 4334 return "movgs %1, %0"; 4335 } 4336 else if (ZERO_P (src)) 4337 return "movgs %., %0"; 4338 } 4339 } 4340 4341 else if (GET_CODE (dest) == MEM) 4342 { 4343 if (GET_CODE (src) == REG) 4344 { 4345 int src_regno = REGNO (src); 4346 machine_mode mode = GET_MODE (dest); 4347 4348 if (GPR_P (src_regno)) 4349 { 4350 switch (mode) 4351 { 4352 default: 4353 break; 4354 4355 case E_QImode: 4356 return "stb%I0%U0 %1, %M0"; 4357 4358 case E_HImode: 4359 return "sth%I0%U0 %1, %M0"; 4360 4361 case E_SImode: 4362 case E_SFmode: 4363 return "st%I0%U0 %1, %M0"; 4364 } 4365 } 4366 4367 else if (FPR_P (src_regno)) 4368 { 4369 switch (mode) 4370 { 4371 default: 4372 break; 4373 4374 case E_QImode: 4375 return "stbf%I0%U0 %1, %M0"; 4376 4377 case E_HImode: 4378 return "sthf%I0%U0 %1, %M0"; 4379 4380 case E_SImode: 4381 case E_SFmode: 4382 return "stf%I0%U0 %1, %M0"; 4383 } 4384 } 4385 } 4386 4387 else if (ZERO_P (src)) 4388 { 4389 
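/* memory <- zero; the stored value comes from the hard zero register (gr0, printed by the %. operand modifier). */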
switch (GET_MODE (dest)) 4390 { 4391 default: 4392 break; 4393 4394 case E_QImode: 4395 return "stb%I0%U0 %., %M0"; 4396 4397 case E_HImode: 4398 return "sth%I0%U0 %., %M0"; 4399 4400 case E_SImode: 4401 case E_SFmode: 4402 return "st%I0%U0 %., %M0"; 4403 } 4404 } 4405 } 4406 4407 fatal_insn ("bad output_move_single operand", insn); 4408 return ""; 4409 } 4410 4411 4412 /* Return a string to output a double word move. */ 4413 4414 const char * 4415 output_move_double (rtx operands[], rtx insn) 4416 { 4417 rtx dest = operands[0]; 4418 rtx src = operands[1]; 4419 machine_mode mode = GET_MODE (dest); 4420 4421 if (GET_CODE (dest) == REG) 4422 { 4423 int dest_regno = REGNO (dest); 4424 4425 if (GPR_P (dest_regno)) 4426 { 4427 if (GET_CODE (src) == REG) 4428 { 4429 /* gpr <- some sort of register */ 4430 int src_regno = REGNO (src); 4431 4432 if (GPR_P (src_regno)) 4433 return "#"; 4434 4435 else if (FPR_P (src_regno)) 4436 { 4437 if (((dest_regno - GPR_FIRST) & 1) == 0 4438 && ((src_regno - FPR_FIRST) & 1) == 0) 4439 return "movfgd %1, %0"; 4440 4441 return "#"; 4442 } 4443 } 4444 4445 else if (GET_CODE (src) == MEM) 4446 { 4447 /* gpr <- memory */ 4448 if (dbl_memory_one_insn_operand (src, mode)) 4449 return "ldd%I1%U1 %M1, %0"; 4450 4451 return "#"; 4452 } 4453 4454 else if (GET_CODE (src) == CONST_INT 4455 || GET_CODE (src) == CONST_DOUBLE) 4456 return "#"; 4457 } 4458 4459 else if (FPR_P (dest_regno)) 4460 { 4461 if (GET_CODE (src) == REG) 4462 { 4463 /* fpr <- some sort of register */ 4464 int src_regno = REGNO (src); 4465 4466 if (GPR_P (src_regno)) 4467 { 4468 if (((dest_regno - FPR_FIRST) & 1) == 0 4469 && ((src_regno - GPR_FIRST) & 1) == 0) 4470 return "movgfd %1, %0"; 4471 4472 return "#"; 4473 } 4474 4475 else if (FPR_P (src_regno)) 4476 { 4477 if (TARGET_DOUBLE 4478 && ((dest_regno - FPR_FIRST) & 1) == 0 4479 && ((src_regno - FPR_FIRST) & 1) == 0) 4480 return "fmovd %1, %0"; 4481 4482 return "#"; 4483 } 4484 } 4485 4486 else if (GET_CODE (src) == MEM) 4487 { 4488 /* fpr <- memory */ 4489 if (dbl_memory_one_insn_operand (src, mode)) 4490 return "lddf%I1%U1 %M1, %0"; 4491 4492 return "#"; 4493 } 4494 4495 else if (ZERO_P (src)) 4496 return "#"; 4497 } 4498 } 4499 4500 else if (GET_CODE (dest) == MEM) 4501 { 4502 if (GET_CODE (src) == REG) 4503 { 4504 int src_regno = REGNO (src); 4505 4506 if (GPR_P (src_regno)) 4507 { 4508 if (((src_regno - GPR_FIRST) & 1) == 0 4509 && dbl_memory_one_insn_operand (dest, mode)) 4510 return "std%I0%U0 %1, %M0"; 4511 4512 return "#"; 4513 } 4514 4515 if (FPR_P (src_regno)) 4516 { 4517 if (((src_regno - FPR_FIRST) & 1) == 0 4518 && dbl_memory_one_insn_operand (dest, mode)) 4519 return "stdf%I0%U0 %1, %M0"; 4520 4521 return "#"; 4522 } 4523 } 4524 4525 else if (ZERO_P (src)) 4526 { 4527 if (dbl_memory_one_insn_operand (dest, mode)) 4528 return "std%I0%U0 %., %M0"; 4529 4530 return "#"; 4531 } 4532 } 4533 4534 fatal_insn ("bad output_move_double operand", insn); 4535 return ""; 4536 } 4537 4538 4539 /* Return a string to output a single word conditional move. 
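Each template below is the conditionally executed form (cmov, cld*, cst*, cfmovs, ...) of the corresponding unconditional move.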
4540 Operand0 -- EQ/NE of ccr register and 0 4541 Operand1 -- CCR register 4542 Operand2 -- destination 4543 Operand3 -- source */ 4544 4545 const char * 4546 output_condmove_single (rtx operands[], rtx insn) 4547 { 4548 rtx dest = operands[2]; 4549 rtx src = operands[3]; 4550 4551 if (GET_CODE (dest) == REG) 4552 { 4553 int dest_regno = REGNO (dest); 4554 machine_mode mode = GET_MODE (dest); 4555 4556 if (GPR_P (dest_regno)) 4557 { 4558 if (GET_CODE (src) == REG) 4559 { 4560 /* gpr <- some sort of register */ 4561 int src_regno = REGNO (src); 4562 4563 if (GPR_P (src_regno)) 4564 return "cmov %z3, %2, %1, %e0"; 4565 4566 else if (FPR_P (src_regno)) 4567 return "cmovfg %3, %2, %1, %e0"; 4568 } 4569 4570 else if (GET_CODE (src) == MEM) 4571 { 4572 /* gpr <- memory */ 4573 switch (mode) 4574 { 4575 default: 4576 break; 4577 4578 case E_QImode: 4579 return "cldsb%I3%U3 %M3, %2, %1, %e0"; 4580 4581 case E_HImode: 4582 return "cldsh%I3%U3 %M3, %2, %1, %e0"; 4583 4584 case E_SImode: 4585 case E_SFmode: 4586 return "cld%I3%U3 %M3, %2, %1, %e0"; 4587 } 4588 } 4589 4590 else if (ZERO_P (src)) 4591 return "cmov %., %2, %1, %e0"; 4592 } 4593 4594 else if (FPR_P (dest_regno)) 4595 { 4596 if (GET_CODE (src) == REG) 4597 { 4598 /* fpr <- some sort of register */ 4599 int src_regno = REGNO (src); 4600 4601 if (GPR_P (src_regno)) 4602 return "cmovgf %3, %2, %1, %e0"; 4603 4604 else if (FPR_P (src_regno)) 4605 { 4606 if (TARGET_HARD_FLOAT) 4607 return "cfmovs %3,%2,%1,%e0"; 4608 else 4609 return "cmor %3, %3, %2, %1, %e0"; 4610 } 4611 } 4612 4613 else if (GET_CODE (src) == MEM) 4614 { 4615 /* fpr <- memory */ 4616 if (mode == SImode || mode == SFmode) 4617 return "cldf%I3%U3 %M3, %2, %1, %e0"; 4618 } 4619 4620 else if (ZERO_P (src)) 4621 return "cmovgf %., %2, %1, %e0"; 4622 } 4623 } 4624 4625 else if (GET_CODE (dest) == MEM) 4626 { 4627 if (GET_CODE (src) == REG) 4628 { 4629 int src_regno = REGNO (src); 4630 machine_mode mode = GET_MODE (dest); 4631 4632 if (GPR_P (src_regno)) 4633 { 4634 switch (mode) 4635 { 4636 default: 4637 break; 4638 4639 case E_QImode: 4640 return "cstb%I2%U2 %3, %M2, %1, %e0"; 4641 4642 case E_HImode: 4643 return "csth%I2%U2 %3, %M2, %1, %e0"; 4644 4645 case E_SImode: 4646 case E_SFmode: 4647 return "cst%I2%U2 %3, %M2, %1, %e0"; 4648 } 4649 } 4650 4651 else if (FPR_P (src_regno) && (mode == SImode || mode == SFmode)) 4652 return "cstf%I2%U2 %3, %M2, %1, %e0"; 4653 } 4654 4655 else if (ZERO_P (src)) 4656 { 4657 machine_mode mode = GET_MODE (dest); 4658 switch (mode) 4659 { 4660 default: 4661 break; 4662 4663 case E_QImode: 4664 return "cstb%I2%U2 %., %M2, %1, %e0"; 4665 4666 case E_HImode: 4667 return "csth%I2%U2 %., %M2, %1, %e0"; 4668 4669 case E_SImode: 4670 case E_SFmode: 4671 return "cst%I2%U2 %., %M2, %1, %e0"; 4672 } 4673 } 4674 } 4675 4676 fatal_insn ("bad output_condmove_single operand", insn); 4677 return ""; 4678 } 4679 4680 4681 /* Emit the appropriate code to do a comparison, returning the register the 4682 comparison was done it. */ 4683 4684 static rtx 4685 frv_emit_comparison (enum rtx_code test, rtx op0, rtx op1) 4686 { 4687 machine_mode cc_mode; 4688 rtx cc_reg; 4689 4690 /* Floating point doesn't have comparison against a constant. */ 4691 if (GET_MODE (op0) == CC_FPmode && GET_CODE (op1) != REG) 4692 op1 = force_reg (GET_MODE (op0), op1); 4693 4694 /* Possibly disable using anything but a fixed register in order to work 4695 around cse moving comparisons past function calls. 
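When TARGET_ALLOC_CC is set a fresh pseudo in the selected CC mode is used; otherwise the comparison is forced into the first ICC or FCC hard register.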
*/ 4696 cc_mode = SELECT_CC_MODE (test, op0, op1); 4697 cc_reg = ((TARGET_ALLOC_CC) 4698 ? gen_reg_rtx (cc_mode) 4699 : gen_rtx_REG (cc_mode, 4700 (cc_mode == CC_FPmode) ? FCC_FIRST : ICC_FIRST)); 4701 4702 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_COMPARE (cc_mode, op0, op1))); 4703 4704 return cc_reg; 4705 } 4706 4707 4708 /* Emit code for a conditional branch. 4709 XXX: I originally wanted to add a clobber of a CCR register to use in 4710 conditional execution, but that confuses the rest of the compiler. */ 4711 4712 int 4713 frv_emit_cond_branch (rtx operands[]) 4714 { 4715 rtx test_rtx; 4716 rtx label_ref; 4717 rtx if_else; 4718 enum rtx_code test = GET_CODE (operands[0]); 4719 rtx cc_reg = frv_emit_comparison (test, operands[1], operands[2]); 4720 machine_mode cc_mode = GET_MODE (cc_reg); 4721 4722 /* Branches generate: 4723 (set (pc) 4724 (if_then_else (<test>, <cc_reg>, (const_int 0)) 4725 (label_ref <branch_label>) 4726 (pc))) */ 4727 label_ref = gen_rtx_LABEL_REF (VOIDmode, operands[3]); 4728 test_rtx = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx); 4729 if_else = gen_rtx_IF_THEN_ELSE (cc_mode, test_rtx, label_ref, pc_rtx); 4730 emit_jump_insn (gen_rtx_SET (pc_rtx, if_else)); 4731 return TRUE; 4732 } 4733 4734 4735 /* Emit code to set a gpr to 1/0 based on a comparison. */ 4736 4737 int 4738 frv_emit_scc (rtx operands[]) 4739 { 4740 rtx set; 4741 rtx test_rtx; 4742 rtx clobber; 4743 rtx cr_reg; 4744 enum rtx_code test = GET_CODE (operands[1]); 4745 rtx cc_reg = frv_emit_comparison (test, operands[2], operands[3]); 4746 4747 /* SCC instructions generate: 4748 (parallel [(set <target> (<test>, <cc_reg>, (const_int 0)) 4749 (clobber (<ccr_reg>))]) */ 4750 test_rtx = gen_rtx_fmt_ee (test, SImode, cc_reg, const0_rtx); 4751 set = gen_rtx_SET (operands[0], test_rtx); 4752 4753 cr_reg = ((TARGET_ALLOC_CC) 4754 ? gen_reg_rtx (CC_CCRmode) 4755 : gen_rtx_REG (CC_CCRmode, 4756 ((GET_MODE (cc_reg) == CC_FPmode) 4757 ? FCR_FIRST 4758 : ICR_FIRST))); 4759 4760 clobber = gen_rtx_CLOBBER (VOIDmode, cr_reg); 4761 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber))); 4762 return TRUE; 4763 } 4764 4765 4766 /* Split a SCC instruction into component parts, returning a SEQUENCE to hold 4767 the separate insns. */ 4768 4769 rtx 4770 frv_split_scc (rtx dest, rtx test, rtx cc_reg, rtx cr_reg, HOST_WIDE_INT value) 4771 { 4772 rtx ret; 4773 4774 start_sequence (); 4775 4776 /* Set the appropriate CCR bit. */ 4777 emit_insn (gen_rtx_SET (cr_reg, 4778 gen_rtx_fmt_ee (GET_CODE (test), 4779 GET_MODE (cr_reg), 4780 cc_reg, 4781 const0_rtx))); 4782 4783 /* Move the value into the destination. */ 4784 emit_move_insn (dest, GEN_INT (value)); 4785 4786 /* Move 0 into the destination if the test failed */ 4787 emit_insn (gen_rtx_COND_EXEC (VOIDmode, 4788 gen_rtx_EQ (GET_MODE (cr_reg), 4789 cr_reg, 4790 const0_rtx), 4791 gen_rtx_SET (dest, const0_rtx))); 4792 4793 /* Finish up, return sequence. */ 4794 ret = get_insns (); 4795 end_sequence (); 4796 return ret; 4797 } 4798 4799 4800 /* Emit the code for a conditional move, return TRUE if we could do the 4801 move. 
*/ 4802 4803 int 4804 frv_emit_cond_move (rtx dest, rtx test_rtx, rtx src1, rtx src2) 4805 { 4806 rtx set; 4807 rtx clobber_cc; 4808 rtx test2; 4809 rtx cr_reg; 4810 rtx if_rtx; 4811 enum rtx_code test = GET_CODE (test_rtx); 4812 rtx cc_reg = frv_emit_comparison (test, 4813 XEXP (test_rtx, 0), XEXP (test_rtx, 1)); 4814 machine_mode cc_mode = GET_MODE (cc_reg); 4815 4816 /* Conditional move instructions generate: 4817 (parallel [(set <target> 4818 (if_then_else (<test> <cc_reg> (const_int 0)) 4819 <src1> 4820 <src2>)) 4821 (clobber (<ccr_reg>))]) */ 4822 4823 /* Handle various cases of conditional move involving two constants. */ 4824 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT) 4825 { 4826 HOST_WIDE_INT value1 = INTVAL (src1); 4827 HOST_WIDE_INT value2 = INTVAL (src2); 4828 4829 /* Having 0 as one of the constants can be done by loading the other 4830 constant, and optionally moving in gr0. */ 4831 if (value1 == 0 || value2 == 0) 4832 ; 4833 4834 /* If the first value is within an addi range and also the difference 4835 between the two fits in an addi's range, load up the difference, then 4836 conditionally move in 0, and then unconditionally add the first 4837 value. */ 4838 else if (IN_RANGE (value1, -2048, 2047) 4839 && IN_RANGE (value2 - value1, -2048, 2047)) 4840 ; 4841 4842 /* If neither condition holds, just force the constant into a 4843 register. */ 4844 else 4845 { 4846 src1 = force_reg (GET_MODE (dest), src1); 4847 src2 = force_reg (GET_MODE (dest), src2); 4848 } 4849 } 4850 4851 /* If one value is a register, insure the other value is either 0 or a 4852 register. */ 4853 else 4854 { 4855 if (GET_CODE (src1) == CONST_INT && INTVAL (src1) != 0) 4856 src1 = force_reg (GET_MODE (dest), src1); 4857 4858 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0) 4859 src2 = force_reg (GET_MODE (dest), src2); 4860 } 4861 4862 test2 = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx); 4863 if_rtx = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), test2, src1, src2); 4864 4865 set = gen_rtx_SET (dest, if_rtx); 4866 4867 cr_reg = ((TARGET_ALLOC_CC) 4868 ? gen_reg_rtx (CC_CCRmode) 4869 : gen_rtx_REG (CC_CCRmode, 4870 (cc_mode == CC_FPmode) ? FCR_FIRST : ICR_FIRST)); 4871 4872 clobber_cc = gen_rtx_CLOBBER (VOIDmode, cr_reg); 4873 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber_cc))); 4874 return TRUE; 4875 } 4876 4877 4878 /* Split a conditional move into constituent parts, returning a SEQUENCE 4879 containing all of the insns. */ 4880 4881 rtx 4882 frv_split_cond_move (rtx operands[]) 4883 { 4884 rtx dest = operands[0]; 4885 rtx test = operands[1]; 4886 rtx cc_reg = operands[2]; 4887 rtx src1 = operands[3]; 4888 rtx src2 = operands[4]; 4889 rtx cr_reg = operands[5]; 4890 rtx ret; 4891 machine_mode cr_mode = GET_MODE (cr_reg); 4892 4893 start_sequence (); 4894 4895 /* Set the appropriate CCR bit. */ 4896 emit_insn (gen_rtx_SET (cr_reg, 4897 gen_rtx_fmt_ee (GET_CODE (test), 4898 GET_MODE (cr_reg), 4899 cc_reg, 4900 const0_rtx))); 4901 4902 /* Handle various cases of conditional move involving two constants. */ 4903 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT) 4904 { 4905 HOST_WIDE_INT value1 = INTVAL (src1); 4906 HOST_WIDE_INT value2 = INTVAL (src2); 4907 4908 /* Having 0 as one of the constants can be done by loading the other 4909 constant, and optionally moving in gr0. 
*/ 4910 if (value1 == 0) 4911 { 4912 emit_move_insn (dest, src2); 4913 emit_insn (gen_rtx_COND_EXEC (VOIDmode, 4914 gen_rtx_NE (cr_mode, cr_reg, 4915 const0_rtx), 4916 gen_rtx_SET (dest, src1))); 4917 } 4918 4919 else if (value2 == 0) 4920 { 4921 emit_move_insn (dest, src1); 4922 emit_insn (gen_rtx_COND_EXEC (VOIDmode, 4923 gen_rtx_EQ (cr_mode, cr_reg, 4924 const0_rtx), 4925 gen_rtx_SET (dest, src2))); 4926 } 4927 4928 /* If the first value is within an addi range and also the difference 4929 between the two fits in an addi's range, load up the difference, then 4930 conditionally move in 0, and then unconditionally add the first 4931 value. */ 4932 else if (IN_RANGE (value1, -2048, 2047) 4933 && IN_RANGE (value2 - value1, -2048, 2047)) 4934 { 4935 rtx dest_si = ((GET_MODE (dest) == SImode) 4936 ? dest 4937 : gen_rtx_SUBREG (SImode, dest, 0)); 4938 4939 emit_move_insn (dest_si, GEN_INT (value2 - value1)); 4940 emit_insn (gen_rtx_COND_EXEC (VOIDmode, 4941 gen_rtx_NE (cr_mode, cr_reg, 4942 const0_rtx), 4943 gen_rtx_SET (dest_si, const0_rtx))); 4944 emit_insn (gen_addsi3 (dest_si, dest_si, src1)); 4945 } 4946 4947 else 4948 gcc_unreachable (); 4949 } 4950 else 4951 { 4952 /* Emit the conditional move for the test being true if needed. */ 4953 if (! rtx_equal_p (dest, src1)) 4954 emit_insn (gen_rtx_COND_EXEC (VOIDmode, 4955 gen_rtx_NE (cr_mode, cr_reg, const0_rtx), 4956 gen_rtx_SET (dest, src1))); 4957 4958 /* Emit the conditional move for the test being false if needed. */ 4959 if (! rtx_equal_p (dest, src2)) 4960 emit_insn (gen_rtx_COND_EXEC (VOIDmode, 4961 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx), 4962 gen_rtx_SET (dest, src2))); 4963 } 4964 4965 /* Finish up, return sequence. */ 4966 ret = get_insns (); 4967 end_sequence (); 4968 return ret; 4969 } 4970 4971 4972 /* Split (set DEST SOURCE), where DEST is a double register and SOURCE is a 4973 memory location that is not known to be dword-aligned. */ 4974 void 4975 frv_split_double_load (rtx dest, rtx source) 4976 { 4977 int regno = REGNO (dest); 4978 rtx dest1 = gen_highpart (SImode, dest); 4979 rtx dest2 = gen_lowpart (SImode, dest); 4980 rtx address = XEXP (source, 0); 4981 4982 /* If the address is pre-modified, load the lower-numbered register 4983 first, then load the other register using an integer offset from 4984 the modified base register. This order should always be safe, 4985 since the pre-modification cannot affect the same registers as the 4986 load does. 4987 4988 The situation for other loads is more complicated. Loading one 4989 of the registers could affect the value of ADDRESS, so we must 4990 be careful which order we do them in. */ 4991 if (GET_CODE (address) == PRE_MODIFY 4992 || ! refers_to_regno_p (regno, address)) 4993 { 4994 /* It is safe to load the lower-numbered register first. */ 4995 emit_move_insn (dest1, change_address (source, SImode, NULL)); 4996 emit_move_insn (dest2, frv_index_memory (source, SImode, 1)); 4997 } 4998 else 4999 { 5000 /* ADDRESS is not pre-modified and the address depends on the 5001 lower-numbered register. Load the higher-numbered register 5002 first. */ 5003 emit_move_insn (dest2, frv_index_memory (source, SImode, 1)); 5004 emit_move_insn (dest1, change_address (source, SImode, NULL)); 5005 } 5006 } 5007 5008 /* Split (set DEST SOURCE), where DEST refers to a dword memory location 5009 and SOURCE is either a double register or the constant zero. 
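The access is split into two SImode stores: the high word (or zero) goes to the original address and the low word (or zero) to the next word.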
*/ 5010 void 5011 frv_split_double_store (rtx dest, rtx source) 5012 { 5013 rtx dest1 = change_address (dest, SImode, NULL); 5014 rtx dest2 = frv_index_memory (dest, SImode, 1); 5015 if (ZERO_P (source)) 5016 { 5017 emit_move_insn (dest1, CONST0_RTX (SImode)); 5018 emit_move_insn (dest2, CONST0_RTX (SImode)); 5019 } 5020 else 5021 { 5022 emit_move_insn (dest1, gen_highpart (SImode, source)); 5023 emit_move_insn (dest2, gen_lowpart (SImode, source)); 5024 } 5025 } 5026 5027 5028 /* Split a min/max operation returning a SEQUENCE containing all of the 5029 insns. */ 5030 5031 rtx 5032 frv_split_minmax (rtx operands[]) 5033 { 5034 rtx dest = operands[0]; 5035 rtx minmax = operands[1]; 5036 rtx src1 = operands[2]; 5037 rtx src2 = operands[3]; 5038 rtx cc_reg = operands[4]; 5039 rtx cr_reg = operands[5]; 5040 rtx ret; 5041 enum rtx_code test_code; 5042 machine_mode cr_mode = GET_MODE (cr_reg); 5043 5044 start_sequence (); 5045 5046 /* Figure out which test to use. */ 5047 switch (GET_CODE (minmax)) 5048 { 5049 default: 5050 gcc_unreachable (); 5051 5052 case SMIN: test_code = LT; break; 5053 case SMAX: test_code = GT; break; 5054 case UMIN: test_code = LTU; break; 5055 case UMAX: test_code = GTU; break; 5056 } 5057 5058 /* Issue the compare instruction. */ 5059 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_COMPARE (GET_MODE (cc_reg), 5060 src1, src2))); 5061 5062 /* Set the appropriate CCR bit. */ 5063 emit_insn (gen_rtx_SET (cr_reg, gen_rtx_fmt_ee (test_code, 5064 GET_MODE (cr_reg), 5065 cc_reg, 5066 const0_rtx))); 5067 5068 /* If are taking the min/max of a nonzero constant, load that first, and 5069 then do a conditional move of the other value. */ 5070 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0) 5071 { 5072 gcc_assert (!rtx_equal_p (dest, src1)); 5073 5074 emit_move_insn (dest, src2); 5075 emit_insn (gen_rtx_COND_EXEC (VOIDmode, 5076 gen_rtx_NE (cr_mode, cr_reg, const0_rtx), 5077 gen_rtx_SET (dest, src1))); 5078 } 5079 5080 /* Otherwise, do each half of the move. */ 5081 else 5082 { 5083 /* Emit the conditional move for the test being true if needed. */ 5084 if (! rtx_equal_p (dest, src1)) 5085 emit_insn (gen_rtx_COND_EXEC (VOIDmode, 5086 gen_rtx_NE (cr_mode, cr_reg, const0_rtx), 5087 gen_rtx_SET (dest, src1))); 5088 5089 /* Emit the conditional move for the test being false if needed. */ 5090 if (! rtx_equal_p (dest, src2)) 5091 emit_insn (gen_rtx_COND_EXEC (VOIDmode, 5092 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx), 5093 gen_rtx_SET (dest, src2))); 5094 } 5095 5096 /* Finish up, return sequence. */ 5097 ret = get_insns (); 5098 end_sequence (); 5099 return ret; 5100 } 5101 5102 5103 /* Split an integer abs operation returning a SEQUENCE containing all of the 5104 insns. */ 5105 5106 rtx 5107 frv_split_abs (rtx operands[]) 5108 { 5109 rtx dest = operands[0]; 5110 rtx src = operands[1]; 5111 rtx cc_reg = operands[2]; 5112 rtx cr_reg = operands[3]; 5113 rtx ret; 5114 5115 start_sequence (); 5116 5117 /* Issue the compare < 0 instruction. */ 5118 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_COMPARE (CCmode, src, const0_rtx))); 5119 5120 /* Set the appropriate CCR bit. */ 5121 emit_insn (gen_rtx_SET (cr_reg, gen_rtx_fmt_ee (LT, CC_CCRmode, 5122 cc_reg, const0_rtx))); 5123 5124 /* Emit the conditional negate if the value is negative. */ 5125 emit_insn (gen_rtx_COND_EXEC (VOIDmode, 5126 gen_rtx_NE (CC_CCRmode, cr_reg, const0_rtx), 5127 gen_negsi2 (dest, src))); 5128 5129 /* Emit the conditional move for the test being false if needed. */ 5130 if (! 
rtx_equal_p (dest, src)) 5131 emit_insn (gen_rtx_COND_EXEC (VOIDmode, 5132 gen_rtx_EQ (CC_CCRmode, cr_reg, const0_rtx), 5133 gen_rtx_SET (dest, src))); 5134 5135 /* Finish up, return sequence. */ 5136 ret = get_insns (); 5137 end_sequence (); 5138 return ret; 5139 } 5140 5141 5142 /* Initialize machine-specific if-conversion data. 5143 On the FR-V, we don't have any extra fields per se, but it is useful hook to 5144 initialize the static storage. */ 5145 void 5146 frv_ifcvt_machdep_init (void *ce_info ATTRIBUTE_UNUSED) 5147 { 5148 frv_ifcvt.added_insns_list = NULL_RTX; 5149 frv_ifcvt.cur_scratch_regs = 0; 5150 frv_ifcvt.num_nested_cond_exec = 0; 5151 frv_ifcvt.cr_reg = NULL_RTX; 5152 frv_ifcvt.nested_cc_reg = NULL_RTX; 5153 frv_ifcvt.extra_int_cr = NULL_RTX; 5154 frv_ifcvt.extra_fp_cr = NULL_RTX; 5155 frv_ifcvt.last_nested_if_cr = NULL_RTX; 5156 } 5157 5158 5159 /* Internal function to add a potential insn to the list of insns to be inserted 5160 if the conditional execution conversion is successful. */ 5161 5162 static void 5163 frv_ifcvt_add_insn (rtx pattern, rtx insn, int before_p) 5164 { 5165 rtx link = alloc_EXPR_LIST (VOIDmode, pattern, insn); 5166 5167 link->jump = before_p; /* Mark to add this before or after insn. */ 5168 frv_ifcvt.added_insns_list = alloc_EXPR_LIST (VOIDmode, link, 5169 frv_ifcvt.added_insns_list); 5170 5171 if (TARGET_DEBUG_COND_EXEC) 5172 { 5173 fprintf (stderr, 5174 "\n:::::::::: frv_ifcvt_add_insn: add the following %s insn %d:\n", 5175 (before_p) ? "before" : "after", 5176 (int)INSN_UID (insn)); 5177 5178 debug_rtx (pattern); 5179 } 5180 } 5181 5182 5183 /* A C expression to modify the code described by the conditional if 5184 information CE_INFO, possibly updating the tests in TRUE_EXPR, and 5185 FALSE_EXPR for converting if-then and if-then-else code to conditional 5186 instructions. Set either TRUE_EXPR or FALSE_EXPR to a null pointer if the 5187 tests cannot be converted. */ 5188 5189 void 5190 frv_ifcvt_modify_tests (ce_if_block *ce_info, rtx *p_true, rtx *p_false) 5191 { 5192 basic_block test_bb = ce_info->test_bb; /* test basic block */ 5193 basic_block then_bb = ce_info->then_bb; /* THEN */ 5194 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */ 5195 basic_block join_bb = ce_info->join_bb; /* join block or NULL */ 5196 rtx true_expr = *p_true; 5197 rtx cr; 5198 rtx cc; 5199 rtx nested_cc; 5200 machine_mode mode = GET_MODE (true_expr); 5201 int j; 5202 basic_block *bb; 5203 int num_bb; 5204 frv_tmp_reg_t *tmp_reg = &frv_ifcvt.tmp_reg; 5205 rtx check_insn; 5206 rtx sub_cond_exec_reg; 5207 enum rtx_code code; 5208 enum rtx_code code_true; 5209 enum rtx_code code_false; 5210 enum reg_class cc_class; 5211 enum reg_class cr_class; 5212 int cc_first; 5213 int cc_last; 5214 reg_set_iterator rsi; 5215 5216 /* Make sure we are only dealing with hard registers. Also honor the 5217 -mno-cond-exec switch, and -mno-nested-cond-exec switches if 5218 applicable. */ 5219 if (!reload_completed || !TARGET_COND_EXEC 5220 || (!TARGET_NESTED_CE && ce_info->pass > 1)) 5221 goto fail; 5222 5223 /* Figure out which registers we can allocate for our own purposes. Only 5224 consider registers that are not preserved across function calls and are 5225 not fixed. However, allow the ICC/ICR temporary registers to be allocated 5226 if we did not need to use them in reloading other registers. 
*/ 5227 memset (&tmp_reg->regs, 0, sizeof (tmp_reg->regs)); 5228 COPY_HARD_REG_SET (tmp_reg->regs, call_used_reg_set); 5229 AND_COMPL_HARD_REG_SET (tmp_reg->regs, fixed_reg_set); 5230 SET_HARD_REG_BIT (tmp_reg->regs, ICC_TEMP); 5231 SET_HARD_REG_BIT (tmp_reg->regs, ICR_TEMP); 5232 5233 /* If this is a nested IF, we need to discover whether the CC registers that 5234 are set/used inside of the block are used anywhere else. If not, we can 5235 change them to be the CC register that is paired with the CR register that 5236 controls the outermost IF block. */ 5237 if (ce_info->pass > 1) 5238 { 5239 CLEAR_HARD_REG_SET (frv_ifcvt.nested_cc_ok_rewrite); 5240 for (j = CC_FIRST; j <= CC_LAST; j++) 5241 if (TEST_HARD_REG_BIT (tmp_reg->regs, j)) 5242 { 5243 if (REGNO_REG_SET_P (df_get_live_in (then_bb), j)) 5244 continue; 5245 5246 if (else_bb 5247 && REGNO_REG_SET_P (df_get_live_in (else_bb), j)) 5248 continue; 5249 5250 if (join_bb 5251 && REGNO_REG_SET_P (df_get_live_in (join_bb), j)) 5252 continue; 5253 5254 SET_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j); 5255 } 5256 } 5257 5258 for (j = 0; j < frv_ifcvt.cur_scratch_regs; j++) 5259 frv_ifcvt.scratch_regs[j] = NULL_RTX; 5260 5261 frv_ifcvt.added_insns_list = NULL_RTX; 5262 frv_ifcvt.cur_scratch_regs = 0; 5263 5264 bb = (basic_block *) alloca ((2 + ce_info->num_multiple_test_blocks) 5265 * sizeof (basic_block)); 5266 5267 if (join_bb) 5268 { 5269 unsigned int regno; 5270 5271 /* Remove anything live at the beginning of the join block from being 5272 available for allocation. */ 5273 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (join_bb), 0, regno, rsi) 5274 { 5275 if (regno < FIRST_PSEUDO_REGISTER) 5276 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno); 5277 } 5278 } 5279 5280 /* Add in all of the blocks in multiple &&/|| blocks to be scanned. */ 5281 num_bb = 0; 5282 if (ce_info->num_multiple_test_blocks) 5283 { 5284 basic_block multiple_test_bb = ce_info->last_test_bb; 5285 5286 while (multiple_test_bb != test_bb) 5287 { 5288 bb[num_bb++] = multiple_test_bb; 5289 multiple_test_bb = EDGE_PRED (multiple_test_bb, 0)->src; 5290 } 5291 } 5292 5293 /* Add in the THEN and ELSE blocks to be scanned. */ 5294 bb[num_bb++] = then_bb; 5295 if (else_bb) 5296 bb[num_bb++] = else_bb; 5297 5298 sub_cond_exec_reg = NULL_RTX; 5299 frv_ifcvt.num_nested_cond_exec = 0; 5300 5301 /* Scan all of the blocks for registers that must not be allocated. */ 5302 for (j = 0; j < num_bb; j++) 5303 { 5304 rtx_insn *last_insn = BB_END (bb[j]); 5305 rtx_insn *insn = BB_HEAD (bb[j]); 5306 unsigned int regno; 5307 5308 if (dump_file) 5309 fprintf (dump_file, "Scanning %s block %d, start %d, end %d\n", 5310 (bb[j] == else_bb) ? "else" : ((bb[j] == then_bb) ? "then" : "test"), 5311 (int) bb[j]->index, 5312 (int) INSN_UID (BB_HEAD (bb[j])), 5313 (int) INSN_UID (BB_END (bb[j]))); 5314 5315 /* Anything live at the beginning of the block is obviously unavailable 5316 for allocation. */ 5317 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (bb[j]), 0, regno, rsi) 5318 { 5319 if (regno < FIRST_PSEUDO_REGISTER) 5320 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno); 5321 } 5322 5323 /* Loop through the insns in the block. */ 5324 for (;;) 5325 { 5326 /* Mark any new registers that are created as being unavailable for 5327 allocation. Also see if the CC register used in nested IFs can be 5328 reallocated. 
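Registers mentioned by an insn are also dropped from the nested-CC rewrite set unless that insn merely sets a CC register from a COMPARE or sets a CR register from a comparison or IF_THEN_ELSE.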
*/ 5329 if (INSN_P (insn)) 5330 { 5331 rtx pattern; 5332 rtx set; 5333 int skip_nested_if = FALSE; 5334 HARD_REG_SET mentioned_regs; 5335 5336 CLEAR_HARD_REG_SET (mentioned_regs); 5337 find_all_hard_regs (PATTERN (insn), &mentioned_regs); 5338 AND_COMPL_HARD_REG_SET (tmp_reg->regs, mentioned_regs); 5339 5340 pattern = PATTERN (insn); 5341 if (GET_CODE (pattern) == COND_EXEC) 5342 { 5343 rtx reg = XEXP (COND_EXEC_TEST (pattern), 0); 5344 5345 if (reg != sub_cond_exec_reg) 5346 { 5347 sub_cond_exec_reg = reg; 5348 frv_ifcvt.num_nested_cond_exec++; 5349 } 5350 } 5351 5352 set = single_set_pattern (pattern); 5353 if (set) 5354 { 5355 rtx dest = SET_DEST (set); 5356 rtx src = SET_SRC (set); 5357 5358 if (GET_CODE (dest) == REG) 5359 { 5360 int regno = REGNO (dest); 5361 enum rtx_code src_code = GET_CODE (src); 5362 5363 if (CC_P (regno) && src_code == COMPARE) 5364 skip_nested_if = TRUE; 5365 5366 else if (CR_P (regno) 5367 && (src_code == IF_THEN_ELSE 5368 || COMPARISON_P (src))) 5369 skip_nested_if = TRUE; 5370 } 5371 } 5372 5373 if (! skip_nested_if) 5374 AND_COMPL_HARD_REG_SET (frv_ifcvt.nested_cc_ok_rewrite, 5375 mentioned_regs); 5376 } 5377 5378 if (insn == last_insn) 5379 break; 5380 5381 insn = NEXT_INSN (insn); 5382 } 5383 } 5384 5385 /* If this is a nested if, rewrite the CC registers that are available to 5386 include the ones that can be rewritten, to increase the chance of being 5387 able to allocate a paired CC/CR register combination. */ 5388 if (ce_info->pass > 1) 5389 { 5390 for (j = CC_FIRST; j <= CC_LAST; j++) 5391 if (TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j)) 5392 SET_HARD_REG_BIT (tmp_reg->regs, j); 5393 else 5394 CLEAR_HARD_REG_BIT (tmp_reg->regs, j); 5395 } 5396 5397 if (dump_file) 5398 { 5399 int num_gprs = 0; 5400 fprintf (dump_file, "Available GPRs: "); 5401 5402 for (j = GPR_FIRST; j <= GPR_LAST; j++) 5403 if (TEST_HARD_REG_BIT (tmp_reg->regs, j)) 5404 { 5405 fprintf (dump_file, " %d [%s]", j, reg_names[j]); 5406 if (++num_gprs > GPR_TEMP_NUM+2) 5407 break; 5408 } 5409 5410 fprintf (dump_file, "%s\nAvailable CRs: ", 5411 (num_gprs > GPR_TEMP_NUM+2) ? " ..." : ""); 5412 5413 for (j = CR_FIRST; j <= CR_LAST; j++) 5414 if (TEST_HARD_REG_BIT (tmp_reg->regs, j)) 5415 fprintf (dump_file, " %d [%s]", j, reg_names[j]); 5416 5417 fputs ("\n", dump_file); 5418 5419 if (ce_info->pass > 1) 5420 { 5421 fprintf (dump_file, "Modifiable CCs: "); 5422 for (j = CC_FIRST; j <= CC_LAST; j++) 5423 if (TEST_HARD_REG_BIT (tmp_reg->regs, j)) 5424 fprintf (dump_file, " %d [%s]", j, reg_names[j]); 5425 5426 fprintf (dump_file, "\n%d nested COND_EXEC statements\n", 5427 frv_ifcvt.num_nested_cond_exec); 5428 } 5429 } 5430 5431 /* Allocate the appropriate temporary condition code register. Try to 5432 allocate the ICR/FCR register that corresponds to the ICC/FCC register so 5433 that conditional cmp's can be done. */ 5434 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode) 5435 { 5436 cr_class = ICR_REGS; 5437 cc_class = ICC_REGS; 5438 cc_first = ICC_FIRST; 5439 cc_last = ICC_LAST; 5440 } 5441 else if (mode == CC_FPmode) 5442 { 5443 cr_class = FCR_REGS; 5444 cc_class = FCC_REGS; 5445 cc_first = FCC_FIRST; 5446 cc_last = FCC_LAST; 5447 } 5448 else 5449 { 5450 cc_first = cc_last = 0; 5451 cr_class = cc_class = NO_REGS; 5452 } 5453 5454 cc = XEXP (true_expr, 0); 5455 nested_cc = cr = NULL_RTX; 5456 if (cc_class != NO_REGS) 5457 { 5458 /* For nested IFs and &&/||, see if we can find a CC and CR register pair 5459 so we can execute a csubcc/caddcc/cfcmps instruction. 
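CCn pairs with CRn (cr_regno = cc_regno - CC_FIRST + CR_FIRST), so we look for an index at which both registers are still available.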
*/ 5460 int cc_regno; 5461 5462 for (cc_regno = cc_first; cc_regno <= cc_last; cc_regno++) 5463 { 5464 int cr_regno = cc_regno - CC_FIRST + CR_FIRST; 5465 5466 if (TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cc_regno) 5467 && TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cr_regno)) 5468 { 5469 frv_ifcvt.tmp_reg.next_reg[ (int)cr_class ] = cr_regno; 5470 cr = frv_alloc_temp_reg (tmp_reg, cr_class, CC_CCRmode, TRUE, 5471 TRUE); 5472 5473 frv_ifcvt.tmp_reg.next_reg[ (int)cc_class ] = cc_regno; 5474 nested_cc = frv_alloc_temp_reg (tmp_reg, cc_class, CCmode, 5475 TRUE, TRUE); 5476 break; 5477 } 5478 } 5479 } 5480 5481 if (! cr) 5482 { 5483 if (dump_file) 5484 fprintf (dump_file, "Could not allocate a CR temporary register\n"); 5485 5486 goto fail; 5487 } 5488 5489 if (dump_file) 5490 fprintf (dump_file, 5491 "Will use %s for conditional execution, %s for nested comparisons\n", 5492 reg_names[ REGNO (cr)], 5493 (nested_cc) ? reg_names[ REGNO (nested_cc) ] : "<none>"); 5494 5495 /* Set the CCR bit. Note for integer tests, we reverse the condition so that 5496 in an IF-THEN-ELSE sequence, we are testing the TRUE case against the CCR 5497 bit being true. We don't do this for floating point, because of NaNs. */ 5498 code = GET_CODE (true_expr); 5499 if (GET_MODE (cc) != CC_FPmode) 5500 { 5501 code = reverse_condition (code); 5502 code_true = EQ; 5503 code_false = NE; 5504 } 5505 else 5506 { 5507 code_true = NE; 5508 code_false = EQ; 5509 } 5510 5511 check_insn = gen_rtx_SET (cr, gen_rtx_fmt_ee (code, CC_CCRmode, 5512 cc, const0_rtx)); 5513 5514 /* Record the check insn to be inserted later. */ 5515 frv_ifcvt_add_insn (check_insn, BB_END (test_bb), TRUE); 5516 5517 /* Update the tests. */ 5518 frv_ifcvt.cr_reg = cr; 5519 frv_ifcvt.nested_cc_reg = nested_cc; 5520 *p_true = gen_rtx_fmt_ee (code_true, CC_CCRmode, cr, const0_rtx); 5521 *p_false = gen_rtx_fmt_ee (code_false, CC_CCRmode, cr, const0_rtx); 5522 return; 5523 5524 /* Fail, don't do this conditional execution. */ 5525 fail: 5526 *p_true = NULL_RTX; 5527 *p_false = NULL_RTX; 5528 if (dump_file) 5529 fprintf (dump_file, "Disabling this conditional execution.\n"); 5530 5531 return; 5532 } 5533 5534 5535 /* A C expression to modify the code described by the conditional if 5536 information CE_INFO, for the basic block BB, possibly updating the tests in 5537 TRUE_EXPR, and FALSE_EXPR for converting the && and || parts of if-then or 5538 if-then-else code to conditional instructions. Set either TRUE_EXPR or 5539 FALSE_EXPR to a null pointer if the tests cannot be converted. */ 5540 5541 /* p_true and p_false are given expressions of the form: 5542 5543 (and (eq:CC_CCR (reg:CC_CCR) 5544 (const_int 0)) 5545 (eq:CC (reg:CC) 5546 (const_int 0))) */ 5547 5548 void 5549 frv_ifcvt_modify_multiple_tests (ce_if_block *ce_info, 5550 basic_block bb, 5551 rtx *p_true, 5552 rtx *p_false) 5553 { 5554 rtx old_true = XEXP (*p_true, 0); 5555 rtx old_false = XEXP (*p_false, 0); 5556 rtx true_expr = XEXP (*p_true, 1); 5557 rtx false_expr = XEXP (*p_false, 1); 5558 rtx test_expr; 5559 rtx old_test; 5560 rtx cr = XEXP (old_true, 0); 5561 rtx check_insn; 5562 rtx new_cr = NULL_RTX; 5563 rtx *p_new_cr = (rtx *)0; 5564 rtx if_else; 5565 rtx compare; 5566 rtx cc; 5567 enum reg_class cr_class; 5568 machine_mode mode = GET_MODE (true_expr); 5569 rtx (*logical_func)(rtx, rtx, rtx); 5570 5571 if (TARGET_DEBUG_COND_EXEC) 5572 { 5573 fprintf (stderr, 5574 "\n:::::::::: frv_ifcvt_modify_multiple_tests, before modification for %s\ntrue insn:\n", 5575 ce_info->and_and_p ? 
"&&" : "||"); 5576 5577 debug_rtx (*p_true); 5578 5579 fputs ("\nfalse insn:\n", stderr); 5580 debug_rtx (*p_false); 5581 } 5582 5583 if (!TARGET_MULTI_CE) 5584 goto fail; 5585 5586 if (GET_CODE (cr) != REG) 5587 goto fail; 5588 5589 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode) 5590 { 5591 cr_class = ICR_REGS; 5592 p_new_cr = &frv_ifcvt.extra_int_cr; 5593 } 5594 else if (mode == CC_FPmode) 5595 { 5596 cr_class = FCR_REGS; 5597 p_new_cr = &frv_ifcvt.extra_fp_cr; 5598 } 5599 else 5600 goto fail; 5601 5602 /* Allocate a temp CR, reusing a previously allocated temp CR if we have 3 or 5603 more &&/|| tests. */ 5604 new_cr = *p_new_cr; 5605 if (! new_cr) 5606 { 5607 new_cr = *p_new_cr = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, cr_class, 5608 CC_CCRmode, TRUE, TRUE); 5609 if (! new_cr) 5610 goto fail; 5611 } 5612 5613 if (ce_info->and_and_p) 5614 { 5615 old_test = old_false; 5616 test_expr = true_expr; 5617 logical_func = (GET_CODE (old_true) == EQ) ? gen_andcr : gen_andncr; 5618 *p_true = gen_rtx_NE (CC_CCRmode, cr, const0_rtx); 5619 *p_false = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx); 5620 } 5621 else 5622 { 5623 old_test = old_false; 5624 test_expr = false_expr; 5625 logical_func = (GET_CODE (old_false) == EQ) ? gen_orcr : gen_orncr; 5626 *p_true = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx); 5627 *p_false = gen_rtx_NE (CC_CCRmode, cr, const0_rtx); 5628 } 5629 5630 /* First add the andcr/andncr/orcr/orncr, which will be added after the 5631 conditional check instruction, due to frv_ifcvt_add_insn being a LIFO 5632 stack. */ 5633 frv_ifcvt_add_insn ((*logical_func) (cr, cr, new_cr), BB_END (bb), TRUE); 5634 5635 /* Now add the conditional check insn. */ 5636 cc = XEXP (test_expr, 0); 5637 compare = gen_rtx_fmt_ee (GET_CODE (test_expr), CC_CCRmode, cc, const0_rtx); 5638 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, old_test, compare, const0_rtx); 5639 5640 check_insn = gen_rtx_SET (new_cr, if_else); 5641 5642 /* Add the new check insn to the list of check insns that need to be 5643 inserted. */ 5644 frv_ifcvt_add_insn (check_insn, BB_END (bb), TRUE); 5645 5646 if (TARGET_DEBUG_COND_EXEC) 5647 { 5648 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, after modification\ntrue insn:\n", 5649 stderr); 5650 5651 debug_rtx (*p_true); 5652 5653 fputs ("\nfalse insn:\n", stderr); 5654 debug_rtx (*p_false); 5655 } 5656 5657 return; 5658 5659 fail: 5660 *p_true = *p_false = NULL_RTX; 5661 5662 /* If we allocated a CR register, release it. */ 5663 if (new_cr) 5664 { 5665 CLEAR_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, REGNO (new_cr)); 5666 *p_new_cr = NULL_RTX; 5667 } 5668 5669 if (TARGET_DEBUG_COND_EXEC) 5670 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, failed.\n", stderr); 5671 5672 return; 5673 } 5674 5675 5676 /* Return a register which will be loaded with a value if an IF block is 5677 converted to conditional execution. This is used to rewrite instructions 5678 that use constants to ones that just use registers. */ 5679 5680 static rtx 5681 frv_ifcvt_load_value (rtx value, rtx insn ATTRIBUTE_UNUSED) 5682 { 5683 int num_alloc = frv_ifcvt.cur_scratch_regs; 5684 int i; 5685 rtx reg; 5686 5687 /* We know gr0 == 0, so replace any errant uses. */ 5688 if (value == const0_rtx) 5689 return gen_rtx_REG (SImode, GPR_FIRST); 5690 5691 /* First search all registers currently loaded to see if we have an 5692 applicable constant. 
*/ 5693 if (CONSTANT_P (value) 5694 || (GET_CODE (value) == REG && REGNO (value) == LR_REGNO)) 5695 { 5696 for (i = 0; i < num_alloc; i++) 5697 { 5698 if (rtx_equal_p (SET_SRC (frv_ifcvt.scratch_regs[i]), value)) 5699 return SET_DEST (frv_ifcvt.scratch_regs[i]); 5700 } 5701 } 5702 5703 /* Have we exhausted the number of registers available? */ 5704 if (num_alloc >= GPR_TEMP_NUM) 5705 { 5706 if (dump_file) 5707 fprintf (dump_file, "Too many temporary registers allocated\n"); 5708 5709 return NULL_RTX; 5710 } 5711 5712 /* Allocate the new register. */ 5713 reg = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, GPR_REGS, SImode, TRUE, TRUE); 5714 if (! reg) 5715 { 5716 if (dump_file) 5717 fputs ("Could not find a scratch register\n", dump_file); 5718 5719 return NULL_RTX; 5720 } 5721 5722 frv_ifcvt.cur_scratch_regs++; 5723 frv_ifcvt.scratch_regs[num_alloc] = gen_rtx_SET (reg, value); 5724 5725 if (dump_file) 5726 { 5727 if (GET_CODE (value) == CONST_INT) 5728 fprintf (dump_file, "Register %s will hold %ld\n", 5729 reg_names[ REGNO (reg)], (long)INTVAL (value)); 5730 5731 else if (GET_CODE (value) == REG && REGNO (value) == LR_REGNO) 5732 fprintf (dump_file, "Register %s will hold LR\n", 5733 reg_names[ REGNO (reg)]); 5734 5735 else 5736 fprintf (dump_file, "Register %s will hold a saved value\n", 5737 reg_names[ REGNO (reg)]); 5738 } 5739 5740 return reg; 5741 } 5742 5743 5744 /* Update a MEM used in conditional code that might contain an offset to put 5745 the offset into a scratch register, so that the conditional load/store 5746 operations can be used. This function returns the original pointer if the 5747 MEM is valid to use in conditional code, NULL if we can't load up the offset 5748 into a temporary register, or the new MEM if we were successful. */ 5749 5750 static rtx 5751 frv_ifcvt_rewrite_mem (rtx mem, machine_mode mode, rtx insn) 5752 { 5753 rtx addr = XEXP (mem, 0); 5754 5755 if (!frv_legitimate_address_p_1 (mode, addr, reload_completed, TRUE, FALSE)) 5756 { 5757 if (GET_CODE (addr) == PLUS) 5758 { 5759 rtx addr_op0 = XEXP (addr, 0); 5760 rtx addr_op1 = XEXP (addr, 1); 5761 5762 if (GET_CODE (addr_op0) == REG && CONSTANT_P (addr_op1)) 5763 { 5764 rtx reg = frv_ifcvt_load_value (addr_op1, insn); 5765 if (!reg) 5766 return NULL_RTX; 5767 5768 addr = gen_rtx_PLUS (Pmode, addr_op0, reg); 5769 } 5770 5771 else 5772 return NULL_RTX; 5773 } 5774 5775 else if (CONSTANT_P (addr)) 5776 addr = frv_ifcvt_load_value (addr, insn); 5777 5778 else 5779 return NULL_RTX; 5780 5781 if (addr == NULL_RTX) 5782 return NULL_RTX; 5783 5784 else if (XEXP (mem, 0) != addr) 5785 return change_address (mem, mode, addr); 5786 } 5787 5788 return mem; 5789 } 5790 5791 5792 /* Given a PATTERN, return a SET expression if this PATTERN has only a single 5793 SET, possibly conditionally executed. It may also have CLOBBERs, USEs. 
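Unlike single_set, this operates on a bare pattern rather than an insn, and it looks through any COND_EXEC wrapper first.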
*/ 5794 5795 static rtx 5796 single_set_pattern (rtx pattern) 5797 { 5798 rtx set; 5799 int i; 5800 5801 if (GET_CODE (pattern) == COND_EXEC) 5802 pattern = COND_EXEC_CODE (pattern); 5803 5804 if (GET_CODE (pattern) == SET) 5805 return pattern; 5806 5807 else if (GET_CODE (pattern) == PARALLEL) 5808 { 5809 for (i = 0, set = 0; i < XVECLEN (pattern, 0); i++) 5810 { 5811 rtx sub = XVECEXP (pattern, 0, i); 5812 5813 switch (GET_CODE (sub)) 5814 { 5815 case USE: 5816 case CLOBBER: 5817 break; 5818 5819 case SET: 5820 if (set) 5821 return 0; 5822 else 5823 set = sub; 5824 break; 5825 5826 default: 5827 return 0; 5828 } 5829 } 5830 return set; 5831 } 5832 5833 return 0; 5834 } 5835 5836 5837 /* A C expression to modify the code described by the conditional if 5838 information CE_INFO with the new PATTERN in INSN. If PATTERN is a null 5839 pointer after the IFCVT_MODIFY_INSN macro executes, it is assumed that that 5840 insn cannot be converted to be executed conditionally. */ 5841 5842 rtx 5843 frv_ifcvt_modify_insn (ce_if_block *ce_info, 5844 rtx pattern, 5845 rtx insn) 5846 { 5847 rtx orig_ce_pattern = pattern; 5848 rtx set; 5849 rtx op0; 5850 rtx op1; 5851 rtx test; 5852 5853 gcc_assert (GET_CODE (pattern) == COND_EXEC); 5854 5855 test = COND_EXEC_TEST (pattern); 5856 if (GET_CODE (test) == AND) 5857 { 5858 rtx cr = frv_ifcvt.cr_reg; 5859 rtx test_reg; 5860 5861 op0 = XEXP (test, 0); 5862 if (! rtx_equal_p (cr, XEXP (op0, 0))) 5863 goto fail; 5864 5865 op1 = XEXP (test, 1); 5866 test_reg = XEXP (op1, 0); 5867 if (GET_CODE (test_reg) != REG) 5868 goto fail; 5869 5870 /* Is this the first nested if block in this sequence? If so, generate 5871 an andcr or andncr. */ 5872 if (! frv_ifcvt.last_nested_if_cr) 5873 { 5874 rtx and_op; 5875 5876 frv_ifcvt.last_nested_if_cr = test_reg; 5877 if (GET_CODE (op0) == NE) 5878 and_op = gen_andcr (test_reg, cr, test_reg); 5879 else 5880 and_op = gen_andncr (test_reg, cr, test_reg); 5881 5882 frv_ifcvt_add_insn (and_op, insn, TRUE); 5883 } 5884 5885 /* If this isn't the first statement in the nested if sequence, see if we 5886 are dealing with the same register. */ 5887 else if (! rtx_equal_p (test_reg, frv_ifcvt.last_nested_if_cr)) 5888 goto fail; 5889 5890 COND_EXEC_TEST (pattern) = test = op1; 5891 } 5892 5893 /* If this isn't a nested if, reset state variables. */ 5894 else 5895 { 5896 frv_ifcvt.last_nested_if_cr = NULL_RTX; 5897 } 5898 5899 set = single_set_pattern (pattern); 5900 if (set) 5901 { 5902 rtx dest = SET_DEST (set); 5903 rtx src = SET_SRC (set); 5904 machine_mode mode = GET_MODE (dest); 5905 5906 /* Check for normal binary operators. */ 5907 if (mode == SImode && ARITHMETIC_P (src)) 5908 { 5909 op0 = XEXP (src, 0); 5910 op1 = XEXP (src, 1); 5911 5912 if (integer_register_operand (op0, SImode) && CONSTANT_P (op1)) 5913 { 5914 op1 = frv_ifcvt_load_value (op1, insn); 5915 if (op1) 5916 COND_EXEC_CODE (pattern) 5917 = gen_rtx_SET (dest, gen_rtx_fmt_ee (GET_CODE (src), 5918 GET_MODE (src), 5919 op0, op1)); 5920 else 5921 goto fail; 5922 } 5923 } 5924 5925 /* For multiply by a constant, we need to handle the sign extending 5926 correctly. Add a USE of the value after the multiply to prevent flow 5927 from cratering because only one register out of the two were used. 
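(The DImode product occupies a register pair, and without the USE the data-flow pass may only see one half of that pair being read.)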
*/ 5928 else if (mode == DImode && GET_CODE (src) == MULT) 5929 { 5930 op0 = XEXP (src, 0); 5931 op1 = XEXP (src, 1); 5932 if (GET_CODE (op0) == SIGN_EXTEND && GET_CODE (op1) == CONST_INT) 5933 { 5934 op1 = frv_ifcvt_load_value (op1, insn); 5935 if (op1) 5936 { 5937 op1 = gen_rtx_SIGN_EXTEND (DImode, op1); 5938 COND_EXEC_CODE (pattern) 5939 = gen_rtx_SET (dest, gen_rtx_MULT (DImode, op0, op1)); 5940 } 5941 else 5942 goto fail; 5943 } 5944 5945 frv_ifcvt_add_insn (gen_use (dest), insn, FALSE); 5946 } 5947 5948 /* If we are just loading a constant created for a nested conditional 5949 execution statement, just load the constant without any conditional 5950 execution, since we know that the constant will not interfere with any 5951 other registers. */ 5952 else if (frv_ifcvt.scratch_insns_bitmap 5953 && bitmap_bit_p (frv_ifcvt.scratch_insns_bitmap, 5954 INSN_UID (insn)) 5955 && REG_P (SET_DEST (set)) 5956 /* We must not unconditionally set a scratch reg chosen 5957 for a nested if-converted block if its incoming 5958 value from the TEST block (or the result of the THEN 5959 branch) could/should propagate to the JOIN block. 5960 It suffices to test whether the register is live at 5961 the JOIN point: if it's live there, we can infer 5962 that we set it in the former JOIN block of the 5963 nested if-converted block (otherwise it wouldn't 5964 have been available as a scratch register), and it 5965 is either propagated through or set in the other 5966 conditional block. It's probably not worth trying 5967 to catch the latter case, and it could actually 5968 limit scheduling of the combined block quite 5969 severely. */ 5970 && ce_info->join_bb 5971 && ! (REGNO_REG_SET_P (df_get_live_in (ce_info->join_bb), 5972 REGNO (SET_DEST (set)))) 5973 /* Similarly, we must not unconditionally set a reg 5974 used as scratch in the THEN branch if the same reg 5975 is live in the ELSE branch. */ 5976 && (! ce_info->else_bb 5977 || BLOCK_FOR_INSN (insn) == ce_info->else_bb 5978 || ! (REGNO_REG_SET_P (df_get_live_in (ce_info->else_bb), 5979 REGNO (SET_DEST (set)))))) 5980 pattern = set; 5981 5982 else if (mode == QImode || mode == HImode || mode == SImode 5983 || mode == SFmode) 5984 { 5985 int changed_p = FALSE; 5986 5987 /* Check for just loading up a constant */ 5988 if (CONSTANT_P (src) && integer_register_operand (dest, mode)) 5989 { 5990 src = frv_ifcvt_load_value (src, insn); 5991 if (!src) 5992 goto fail; 5993 5994 changed_p = TRUE; 5995 } 5996 5997 /* See if we need to fix up stores */ 5998 if (GET_CODE (dest) == MEM) 5999 { 6000 rtx new_mem = frv_ifcvt_rewrite_mem (dest, mode, insn); 6001 6002 if (!new_mem) 6003 goto fail; 6004 6005 else if (new_mem != dest) 6006 { 6007 changed_p = TRUE; 6008 dest = new_mem; 6009 } 6010 } 6011 6012 /* See if we need to fix up loads */ 6013 if (GET_CODE (src) == MEM) 6014 { 6015 rtx new_mem = frv_ifcvt_rewrite_mem (src, mode, insn); 6016 6017 if (!new_mem) 6018 goto fail; 6019 6020 else if (new_mem != src) 6021 { 6022 changed_p = TRUE; 6023 src = new_mem; 6024 } 6025 } 6026 6027 /* If either src or destination changed, redo SET. */ 6028 if (changed_p) 6029 COND_EXEC_CODE (pattern) = gen_rtx_SET (dest, src); 6030 } 6031 6032 /* Rewrite a nested set cccr in terms of IF_THEN_ELSE. Also deal with 6033 rewriting the CC register to be the same as the paired CC/CR register 6034 for nested ifs. 
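For example, a nested compare of the form (set (reg:CC_CCR crN) (ne:CC_CCR ...)) becomes, roughly, (set (reg:CC_CCR crN) (if_then_else:CC_CCR <nested test> (ne:CC_CCR ...) (const_int 0))).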
*/ 6035 else if (mode == CC_CCRmode && COMPARISON_P (src)) 6036 { 6037 int regno = REGNO (XEXP (src, 0)); 6038 rtx if_else; 6039 6040 if (ce_info->pass > 1 6041 && regno != (int)REGNO (frv_ifcvt.nested_cc_reg) 6042 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, regno)) 6043 { 6044 src = gen_rtx_fmt_ee (GET_CODE (src), 6045 CC_CCRmode, 6046 frv_ifcvt.nested_cc_reg, 6047 XEXP (src, 1)); 6048 } 6049 6050 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, test, src, const0_rtx); 6051 pattern = gen_rtx_SET (dest, if_else); 6052 } 6053 6054 /* Remap a nested compare instruction to use the paired CC/CR reg. */ 6055 else if (ce_info->pass > 1 6056 && GET_CODE (dest) == REG 6057 && CC_P (REGNO (dest)) 6058 && REGNO (dest) != REGNO (frv_ifcvt.nested_cc_reg) 6059 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, 6060 REGNO (dest)) 6061 && GET_CODE (src) == COMPARE) 6062 { 6063 PUT_MODE (frv_ifcvt.nested_cc_reg, GET_MODE (dest)); 6064 COND_EXEC_CODE (pattern) 6065 = gen_rtx_SET (frv_ifcvt.nested_cc_reg, copy_rtx (src)); 6066 } 6067 } 6068 6069 if (TARGET_DEBUG_COND_EXEC) 6070 { 6071 rtx orig_pattern = PATTERN (insn); 6072 6073 PATTERN (insn) = pattern; 6074 fprintf (stderr, 6075 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn after modification:\n", 6076 ce_info->pass); 6077 6078 debug_rtx (insn); 6079 PATTERN (insn) = orig_pattern; 6080 } 6081 6082 return pattern; 6083 6084 fail: 6085 if (TARGET_DEBUG_COND_EXEC) 6086 { 6087 rtx orig_pattern = PATTERN (insn); 6088 6089 PATTERN (insn) = orig_ce_pattern; 6090 fprintf (stderr, 6091 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn could not be modified:\n", 6092 ce_info->pass); 6093 6094 debug_rtx (insn); 6095 PATTERN (insn) = orig_pattern; 6096 } 6097 6098 return NULL_RTX; 6099 } 6100 6101 6102 /* A C expression to perform any final machine dependent modifications in 6103 converting code to conditional execution in the code described by the 6104 conditional if information CE_INFO. */ 6105 6106 void 6107 frv_ifcvt_modify_final (ce_if_block *ce_info ATTRIBUTE_UNUSED) 6108 { 6109 rtx existing_insn; 6110 rtx check_insn; 6111 rtx p = frv_ifcvt.added_insns_list; 6112 int i; 6113 6114 /* Loop inserting the check insns. The last check insn is the first test, 6115 and is the appropriate place to insert constants. */ 6116 gcc_assert (p); 6117 6118 do 6119 { 6120 rtx check_and_insert_insns = XEXP (p, 0); 6121 rtx old_p = p; 6122 6123 check_insn = XEXP (check_and_insert_insns, 0); 6124 existing_insn = XEXP (check_and_insert_insns, 1); 6125 p = XEXP (p, 1); 6126 6127 /* The jump bit is used to say that the new insn is to be inserted BEFORE 6128 the existing insn, otherwise it is to be inserted AFTER. */ 6129 if (check_and_insert_insns->jump) 6130 { 6131 emit_insn_before (check_insn, existing_insn); 6132 check_and_insert_insns->jump = 0; 6133 } 6134 else 6135 emit_insn_after (check_insn, existing_insn); 6136 6137 free_EXPR_LIST_node (check_and_insert_insns); 6138 free_EXPR_LIST_node (old_p); 6139 } 6140 while (p != NULL_RTX); 6141 6142 /* Load up any constants needed into temp gprs */ 6143 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++) 6144 { 6145 rtx insn = emit_insn_before (frv_ifcvt.scratch_regs[i], existing_insn); 6146 if (! 
frv_ifcvt.scratch_insns_bitmap) 6147 frv_ifcvt.scratch_insns_bitmap = BITMAP_ALLOC (NULL); 6148 bitmap_set_bit (frv_ifcvt.scratch_insns_bitmap, INSN_UID (insn)); 6149 frv_ifcvt.scratch_regs[i] = NULL_RTX; 6150 } 6151 6152 frv_ifcvt.added_insns_list = NULL_RTX; 6153 frv_ifcvt.cur_scratch_regs = 0; 6154 } 6155 6156 6157 /* A C expression to cancel any machine dependent modifications in converting 6158 code to conditional execution in the code described by the conditional if 6159 information CE_INFO. */ 6160 6161 void 6162 frv_ifcvt_modify_cancel (ce_if_block *ce_info ATTRIBUTE_UNUSED) 6163 { 6164 int i; 6165 rtx p = frv_ifcvt.added_insns_list; 6166 6167 /* Loop freeing up the EXPR_LIST's allocated. */ 6168 while (p != NULL_RTX) 6169 { 6170 rtx check_and_jump = XEXP (p, 0); 6171 rtx old_p = p; 6172 6173 p = XEXP (p, 1); 6174 free_EXPR_LIST_node (check_and_jump); 6175 free_EXPR_LIST_node (old_p); 6176 } 6177 6178 /* Release any temporary gprs allocated. */ 6179 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++) 6180 frv_ifcvt.scratch_regs[i] = NULL_RTX; 6181 6182 frv_ifcvt.added_insns_list = NULL_RTX; 6183 frv_ifcvt.cur_scratch_regs = 0; 6184 return; 6185 } 6186 6187 /* A C expression for the size in bytes of the trampoline, as an integer. 6188 The template is: 6189 6190 setlo #0, <jmp_reg> 6191 setlo #0, <static_chain> 6192 sethi #0, <jmp_reg> 6193 sethi #0, <static_chain> 6194 jmpl @(gr0,<jmp_reg>) */ 6195 6196 int 6197 frv_trampoline_size (void) 6198 { 6199 if (TARGET_FDPIC) 6200 /* Allocate room for the function descriptor and the lddi 6201 instruction. */ 6202 return 8 + 6 * 4; 6203 return 5 /* instructions */ * 4 /* instruction size. */; 6204 } 6205 6206 6207 /* A C statement to initialize the variable parts of a trampoline. ADDR is an 6208 RTX for the address of the trampoline; FNADDR is an RTX for the address of 6209 the nested function; STATIC_CHAIN is an RTX for the static chain value that 6210 should be passed to the function when it is called. 6211 6212 The template is: 6213 6214 setlo #0, <jmp_reg> 6215 setlo #0, <static_chain> 6216 sethi #0, <jmp_reg> 6217 sethi #0, <static_chain> 6218 jmpl @(gr0,<jmp_reg>) */ 6219 6220 static void 6221 frv_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain) 6222 { 6223 rtx addr = XEXP (m_tramp, 0); 6224 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0); 6225 rtx sc_reg = force_reg (Pmode, static_chain); 6226 6227 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"), 6228 LCT_NORMAL, VOIDmode, 6229 addr, Pmode, 6230 GEN_INT (frv_trampoline_size ()), SImode, 6231 fnaddr, Pmode, 6232 sc_reg, Pmode); 6233 } 6234 6235 6236 /* Many machines have some registers that cannot be copied directly to or from 6237 memory or even from other types of registers. An example is the `MQ' 6238 register, which on most machines, can only be copied to or from general 6239 registers, but not memory. Some machines allow copying all registers to and 6240 from memory, but require a scratch register for stores to some memory 6241 locations (e.g., those with symbolic address on the RT, and those with 6242 certain symbolic address on the SPARC when compiling PIC). In some cases, 6243 both an intermediate and a scratch register are required. 6244 6245 You should define these macros to indicate to the reload phase that it may 6246 need to allocate at least one register for a reload in addition to the 6247 register to contain the data. 
Specifically, if copying X to a register 6248 RCLASS in MODE requires an intermediate register, you should define 6249 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of 6250 whose registers can be used as intermediate registers or scratch registers. 6251 6252 If copying a register RCLASS in MODE to X requires an intermediate or scratch 6253 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the 6254 largest register class required. If the requirements for input and output 6255 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used 6256 instead of defining both macros identically. 6257 6258 The values returned by these macros are often `GENERAL_REGS'. Return 6259 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied 6260 to or from a register of RCLASS in MODE without requiring a scratch register. 6261 Do not define this macro if it would always return `NO_REGS'. 6262 6263 If a scratch register is required (either with or without an intermediate 6264 register), you should define patterns for `reload_inM' or `reload_outM', as 6265 required.. These patterns, which will normally be implemented with a 6266 `define_expand', should be similar to the `movM' patterns, except that 6267 operand 2 is the scratch register. 6268 6269 Define constraints for the reload register and scratch register that contain 6270 a single register class. If the original reload register (whose class is 6271 RCLASS) can meet the constraint given in the pattern, the value returned by 6272 these macros is used for the class of the scratch register. Otherwise, two 6273 additional reload registers are required. Their classes are obtained from 6274 the constraints in the insn pattern. 6275 6276 X might be a pseudo-register or a `subreg' of a pseudo-register, which could 6277 either be in a hard register or in memory. Use `true_regnum' to find out; 6278 it will return -1 if the pseudo is in memory and the hard register number if 6279 it is in a register. 6280 6281 These macros should not be used in the case where a particular class of 6282 registers can only be copied to memory and not to another class of 6283 registers. In that case, secondary reload registers are not needed and 6284 would not be helpful. Instead, a stack location must be used to perform the 6285 copy and the `movM' pattern should use memory as an intermediate storage. 6286 This case often occurs between floating-point and general registers. */ 6287 6288 enum reg_class 6289 frv_secondary_reload_class (enum reg_class rclass, 6290 machine_mode mode ATTRIBUTE_UNUSED, 6291 rtx x) 6292 { 6293 enum reg_class ret; 6294 6295 switch (rclass) 6296 { 6297 default: 6298 ret = NO_REGS; 6299 break; 6300 6301 /* Accumulators/Accumulator guard registers need to go through floating 6302 point registers. */ 6303 case QUAD_REGS: 6304 case GPR_REGS: 6305 ret = NO_REGS; 6306 if (x && GET_CODE (x) == REG) 6307 { 6308 int regno = REGNO (x); 6309 6310 if (ACC_P (regno) || ACCG_P (regno)) 6311 ret = FPR_REGS; 6312 } 6313 break; 6314 6315 /* Nonzero constants should be loaded into an FPR through a GPR. */ 6316 case QUAD_FPR_REGS: 6317 if (x && CONSTANT_P (x) && !ZERO_P (x)) 6318 ret = GPR_REGS; 6319 else 6320 ret = NO_REGS; 6321 break; 6322 6323 /* All of these types need gpr registers. 
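Copies between one of these registers and memory (or a constant) are staged through a GPR.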
*/ 6324 case ICC_REGS: 6325 case FCC_REGS: 6326 case CC_REGS: 6327 case ICR_REGS: 6328 case FCR_REGS: 6329 case CR_REGS: 6330 case LCR_REG: 6331 case LR_REG: 6332 ret = GPR_REGS; 6333 break; 6334 6335 /* The accumulators need fpr registers. */ 6336 case QUAD_ACC_REGS: 6337 case ACCG_REGS: 6338 ret = FPR_REGS; 6339 break; 6340 } 6341 6342 return ret; 6343 } 6344 6345 /* This hook exists to catch the case where secondary_reload_class() is 6346 called from init_reg_autoinc() in regclass.c - before the reload optabs 6347 have been initialised. */ 6348 6349 static reg_class_t 6350 frv_secondary_reload (bool in_p, rtx x, reg_class_t reload_class_i, 6351 machine_mode reload_mode, 6352 secondary_reload_info * sri) 6353 { 6354 enum reg_class rclass = NO_REGS; 6355 enum reg_class reload_class = (enum reg_class) reload_class_i; 6356 6357 if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing) 6358 { 6359 sri->icode = sri->prev_sri->t_icode; 6360 return NO_REGS; 6361 } 6362 6363 rclass = frv_secondary_reload_class (reload_class, reload_mode, x); 6364 6365 if (rclass != NO_REGS) 6366 { 6367 enum insn_code icode 6368 = direct_optab_handler (in_p ? reload_in_optab : reload_out_optab, 6369 reload_mode); 6370 if (icode == 0) 6371 { 6372 /* This happens when then the reload_[in|out]_optabs have 6373 not been initialised. */ 6374 sri->t_icode = CODE_FOR_nothing; 6375 return rclass; 6376 } 6377 } 6378 6379 /* Fall back to the default secondary reload handler. */ 6380 return default_secondary_reload (in_p, x, reload_class, reload_mode, sri); 6381 6382 } 6383 6384 /* Worker function for TARGET_CLASS_LIKELY_SPILLED_P. */ 6385 6386 static bool 6387 frv_class_likely_spilled_p (reg_class_t rclass) 6388 { 6389 switch (rclass) 6390 { 6391 default: 6392 break; 6393 6394 case GR8_REGS: 6395 case GR9_REGS: 6396 case GR89_REGS: 6397 case FDPIC_FPTR_REGS: 6398 case FDPIC_REGS: 6399 case ICC_REGS: 6400 case FCC_REGS: 6401 case CC_REGS: 6402 case ICR_REGS: 6403 case FCR_REGS: 6404 case CR_REGS: 6405 case LCR_REG: 6406 case LR_REG: 6407 case SPR_REGS: 6408 case QUAD_ACC_REGS: 6409 case ACCG_REGS: 6410 return true; 6411 } 6412 6413 return false; 6414 } 6415 6416 6417 /* An expression for the alignment of a structure field FIELD if the 6418 alignment computed in the usual way is COMPUTED. GCC uses this 6419 value instead of the value in `BIGGEST_ALIGNMENT' or 6420 `BIGGEST_FIELD_ALIGNMENT', if defined, for structure fields only. */ 6421 6422 /* The definition type of the bit field data is either char, short, long or 6423 long long. The maximum bit size is the number of bits of its own type. 6424 6425 The bit field data is assigned to a storage unit that has an adequate size 6426 for bit field data retention and is located at the smallest address. 6427 6428 Consecutive bit field data are packed at consecutive bits having the same 6429 storage unit, with regard to the type, beginning with the MSB and continuing 6430 toward the LSB. 6431 6432 If a field to be assigned lies over a bit field type boundary, its 6433 assignment is completed by aligning it with a boundary suitable for the 6434 type. 6435 6436 When a bit field having a bit length of 0 is declared, it is forcibly 6437 assigned to the next storage unit. 
6438 6439 e.g) 6440 struct { 6441 int a:2; 6442 int b:6; 6443 char c:4; 6444 int d:10; 6445 int :0; 6446 int f:2; 6447 } x; 6448 6449 +0 +1 +2 +3 6450 &x 00000000 00000000 00000000 00000000 6451 MLM----L 6452 a b 6453 &x+4 00000000 00000000 00000000 00000000 6454 M--L 6455 c 6456 &x+8 00000000 00000000 00000000 00000000 6457 M----------L 6458 d 6459 &x+12 00000000 00000000 00000000 00000000 6460 ML 6461 f 6462 */ 6463 6464 int 6465 frv_adjust_field_align (tree field, int computed) 6466 { 6467 /* Make sure that the bitfield is not wider than the type. */ 6468 if (field 6469 && DECL_BIT_FIELD (field) 6470 && !DECL_ARTIFICIAL (field)) 6471 { 6472 tree parent = DECL_CONTEXT (field); 6473 tree prev = NULL_TREE; 6474 tree cur; 6475 6476 for (cur = TYPE_FIELDS (parent); cur && cur != field; cur = DECL_CHAIN (cur)) 6477 { 6478 if (TREE_CODE (cur) != FIELD_DECL) 6479 continue; 6480 6481 prev = cur; 6482 } 6483 6484 gcc_assert (cur); 6485 6486 /* If this isn't a :0 field and if the previous element is a bitfield 6487 also, see if the type is different, if so, we will need to align the 6488 bit-field to the next boundary. */ 6489 if (prev 6490 && ! DECL_PACKED (field) 6491 && ! integer_zerop (DECL_SIZE (field)) 6492 && DECL_BIT_FIELD_TYPE (field) != DECL_BIT_FIELD_TYPE (prev)) 6493 { 6494 int prev_align = TYPE_ALIGN (TREE_TYPE (prev)); 6495 int cur_align = TYPE_ALIGN (TREE_TYPE (field)); 6496 computed = (prev_align > cur_align) ? prev_align : cur_align; 6497 } 6498 } 6499 6500 return computed; 6501 } 6502 6503 6504 /* Implement TARGET_HARD_REGNO_MODE_OK. */ 6505 6506 static bool 6507 frv_hard_regno_mode_ok (unsigned int regno, machine_mode mode) 6508 { 6509 int base; 6510 int mask; 6511 6512 switch (mode) 6513 { 6514 case E_CCmode: 6515 case E_CC_UNSmode: 6516 case E_CC_NZmode: 6517 return ICC_P (regno) || GPR_P (regno); 6518 6519 case E_CC_CCRmode: 6520 return CR_P (regno) || GPR_P (regno); 6521 6522 case E_CC_FPmode: 6523 return FCC_P (regno) || GPR_P (regno); 6524 6525 default: 6526 break; 6527 } 6528 6529 /* Set BASE to the first register in REGNO's class. Set MASK to the 6530 bits that must be clear in (REGNO - BASE) for the register to be 6531 well-aligned. */ 6532 if (INTEGRAL_MODE_P (mode) || FLOAT_MODE_P (mode) || VECTOR_MODE_P (mode)) 6533 { 6534 if (ACCG_P (regno)) 6535 { 6536 /* ACCGs store one byte. Two-byte quantities must start in 6537 even-numbered registers, four-byte ones in registers whose 6538 numbers are divisible by four, and so on. */ 6539 base = ACCG_FIRST; 6540 mask = GET_MODE_SIZE (mode) - 1; 6541 } 6542 else 6543 { 6544 /* The other registers store one word. */ 6545 if (GPR_P (regno) || regno == AP_FIRST) 6546 base = GPR_FIRST; 6547 6548 else if (FPR_P (regno)) 6549 base = FPR_FIRST; 6550 6551 else if (ACC_P (regno)) 6552 base = ACC_FIRST; 6553 6554 else if (SPR_P (regno)) 6555 return mode == SImode; 6556 6557 /* Fill in the table. */ 6558 else 6559 return false; 6560 6561 /* Anything smaller than an SI is OK in any word-sized register. */ 6562 if (GET_MODE_SIZE (mode) < 4) 6563 return true; 6564 6565 mask = (GET_MODE_SIZE (mode) / 4) - 1; 6566 } 6567 return (((regno - base) & mask) == 0); 6568 } 6569 6570 return false; 6571 } 6572 6573 /* Implement TARGET_MODES_TIEABLE_P. */ 6574 6575 static bool 6576 frv_modes_tieable_p (machine_mode mode1, machine_mode mode2) 6577 { 6578 return mode1 == mode2; 6579 } 6580 6581 6582 /* Implement TARGET_HARD_REGNO_NREGS. 
6583 6584 On the FRV, make the CC_FP mode take 3 words in the integer registers, so 6585 that we can build the appropriate instructions to properly reload the 6586 values. Also, make the byte-sized accumulator guards use one guard 6587 for each byte. */ 6588 6589 static unsigned int 6590 frv_hard_regno_nregs (unsigned int regno, machine_mode mode) 6591 { 6592 if (ACCG_P (regno)) 6593 return GET_MODE_SIZE (mode); 6594 else 6595 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD; 6596 } 6597 6598 6599 /* Implement CLASS_MAX_NREGS. */ 6600 6601 int 6602 frv_class_max_nregs (enum reg_class rclass, machine_mode mode) 6603 { 6604 if (rclass == ACCG_REGS) 6605 /* An N-byte value requires N accumulator guards. */ 6606 return GET_MODE_SIZE (mode); 6607 else 6608 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD; 6609 } 6610 6611 6612 /* A C expression that is nonzero if X is a legitimate constant for an 6613 immediate operand on the target machine. You can assume that X satisfies 6614 `CONSTANT_P', so you need not check this. In fact, `1' is a suitable 6615 definition for this macro on machines where anything `CONSTANT_P' is valid. */ 6616 6617 static bool 6618 frv_legitimate_constant_p (machine_mode mode, rtx x) 6619 { 6620 /* frv_cannot_force_const_mem always returns true for FDPIC. This 6621 means that the move expanders will be expected to deal with most 6622 kinds of constant, regardless of what we return here. 6623 6624 However, among its other duties, frv_legitimate_constant_p decides whether 6625 a constant can be entered into reg_equiv_constant[]. If we return true, 6626 reload can create new instances of the constant whenever it likes. 6627 6628 The idea is therefore to accept as many constants as possible (to give 6629 reload more freedom) while rejecting constants that can only be created 6630 at certain times. In particular, anything with a symbolic component will 6631 require use of the pseudo FDPIC register, which is only available before 6632 reload. */ 6633 if (TARGET_FDPIC) 6634 return LEGITIMATE_PIC_OPERAND_P (x); 6635 6636 /* All of the integer constants are ok. */ 6637 if (GET_CODE (x) != CONST_DOUBLE) 6638 return TRUE; 6639 6640 /* double integer constants are ok. */ 6641 if (GET_MODE (x) == VOIDmode || mode == DImode) 6642 return TRUE; 6643 6644 /* 0 is always ok. */ 6645 if (x == CONST0_RTX (mode)) 6646 return TRUE; 6647 6648 /* If floating point is just emulated, allow any constant, since it will be 6649 constructed in the GPRs. */ 6650 if (!TARGET_HAS_FPRS) 6651 return TRUE; 6652 6653 if (mode == DFmode && !TARGET_DOUBLE) 6654 return TRUE; 6655 6656 /* Otherwise store the constant away and do a load. */ 6657 return FALSE; 6658 } 6659 6660 /* Implement SELECT_CC_MODE. Choose CC_FP for floating-point comparisons, 6661 CC_NZ for comparisons against zero in which a single Z or N flag test 6662 is enough, CC_UNS for other unsigned comparisons, and CC for other 6663 signed comparisons. */ 6664 6665 machine_mode 6666 frv_select_cc_mode (enum rtx_code code, rtx x, rtx y) 6667 { 6668 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT) 6669 return CC_FPmode; 6670 6671 switch (code) 6672 { 6673 case EQ: 6674 case NE: 6675 case LT: 6676 case GE: 6677 return y == const0_rtx ? CC_NZmode : CCmode; 6678 6679 case GTU: 6680 case GEU: 6681 case LTU: 6682 case LEU: 6683 return y == const0_rtx ? CC_NZmode : CC_UNSmode; 6684 6685 default: 6686 return CCmode; 6687 } 6688 } 6689 6690 6691 /* Worker function for TARGET_REGISTER_MOVE_COST. 
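Only three cost levels are distinguished below (LOW_COST, MEDIUM_COST and HIGH_COST); any from/to pairing that is not handled explicitly falls back to HIGH_COST.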
*/ 6692 6693 #define HIGH_COST 40 6694 #define MEDIUM_COST 3 6695 #define LOW_COST 1 6696 6697 static int 6698 frv_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED, 6699 reg_class_t from, reg_class_t to) 6700 { 6701 switch (from) 6702 { 6703 default: 6704 break; 6705 6706 case QUAD_REGS: 6707 case GPR_REGS: 6708 case GR8_REGS: 6709 case GR9_REGS: 6710 case GR89_REGS: 6711 case FDPIC_REGS: 6712 case FDPIC_FPTR_REGS: 6713 case FDPIC_CALL_REGS: 6714 6715 switch (to) 6716 { 6717 default: 6718 break; 6719 6720 case QUAD_REGS: 6721 case GPR_REGS: 6722 case GR8_REGS: 6723 case GR9_REGS: 6724 case GR89_REGS: 6725 case FDPIC_REGS: 6726 case FDPIC_FPTR_REGS: 6727 case FDPIC_CALL_REGS: 6728 6729 return LOW_COST; 6730 6731 case FPR_REGS: 6732 return LOW_COST; 6733 6734 case LCR_REG: 6735 case LR_REG: 6736 case SPR_REGS: 6737 return LOW_COST; 6738 } 6739 6740 case QUAD_FPR_REGS: 6741 switch (to) 6742 { 6743 default: 6744 break; 6745 6746 case QUAD_REGS: 6747 case GPR_REGS: 6748 case GR8_REGS: 6749 case GR9_REGS: 6750 case GR89_REGS: 6751 case FDPIC_REGS: 6752 case FDPIC_FPTR_REGS: 6753 case FDPIC_CALL_REGS: 6754 6755 case QUAD_ACC_REGS: 6756 case ACCG_REGS: 6757 return MEDIUM_COST; 6758 6759 case QUAD_FPR_REGS: 6760 return LOW_COST; 6761 } 6762 6763 case LCR_REG: 6764 case LR_REG: 6765 case SPR_REGS: 6766 switch (to) 6767 { 6768 default: 6769 break; 6770 6771 case QUAD_REGS: 6772 case GPR_REGS: 6773 case GR8_REGS: 6774 case GR9_REGS: 6775 case GR89_REGS: 6776 case FDPIC_REGS: 6777 case FDPIC_FPTR_REGS: 6778 case FDPIC_CALL_REGS: 6779 6780 return MEDIUM_COST; 6781 } 6782 6783 case QUAD_ACC_REGS: 6784 case ACCG_REGS: 6785 switch (to) 6786 { 6787 default: 6788 break; 6789 6790 case QUAD_FPR_REGS: 6791 return MEDIUM_COST; 6792 6793 } 6794 } 6795 6796 return HIGH_COST; 6797 } 6798 6799 /* Worker function for TARGET_MEMORY_MOVE_COST. */ 6800 6801 static int 6802 frv_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED, 6803 reg_class_t rclass ATTRIBUTE_UNUSED, 6804 bool in ATTRIBUTE_UNUSED) 6805 { 6806 return 4; 6807 } 6808 6809 6810 /* Implementation of TARGET_ASM_INTEGER. In the FRV case we need to 6811 use ".picptr" to generate safe relocations for PIC code. We also 6812 need a fixup entry for aligned (non-debugging) code. 
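The fixup entry is simply a .picptr word, placed in FIXUP_SECTION_ASM_OP's section, that records the address of the data word emitted here.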
*/ 6813 6814 static bool 6815 frv_assemble_integer (rtx value, unsigned int size, int aligned_p) 6816 { 6817 if ((flag_pic || TARGET_FDPIC) && size == UNITS_PER_WORD) 6818 { 6819 if (GET_CODE (value) == CONST 6820 || GET_CODE (value) == SYMBOL_REF 6821 || GET_CODE (value) == LABEL_REF) 6822 { 6823 if (TARGET_FDPIC && GET_CODE (value) == SYMBOL_REF 6824 && SYMBOL_REF_FUNCTION_P (value)) 6825 { 6826 fputs ("\t.picptr\tfuncdesc(", asm_out_file); 6827 output_addr_const (asm_out_file, value); 6828 fputs (")\n", asm_out_file); 6829 return true; 6830 } 6831 else if (TARGET_FDPIC && GET_CODE (value) == CONST 6832 && frv_function_symbol_referenced_p (value)) 6833 return false; 6834 if (aligned_p && !TARGET_FDPIC) 6835 { 6836 static int label_num = 0; 6837 char buf[256]; 6838 const char *p; 6839 6840 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", label_num++); 6841 p = (* targetm.strip_name_encoding) (buf); 6842 6843 fprintf (asm_out_file, "%s:\n", p); 6844 fprintf (asm_out_file, "%s\n", FIXUP_SECTION_ASM_OP); 6845 fprintf (asm_out_file, "\t.picptr\t%s\n", p); 6846 fprintf (asm_out_file, "\t.previous\n"); 6847 } 6848 assemble_integer_with_op ("\t.picptr\t", value); 6849 return true; 6850 } 6851 if (!aligned_p) 6852 { 6853 /* We've set the unaligned SI op to NULL, so we always have to 6854 handle the unaligned case here. */ 6855 assemble_integer_with_op ("\t.4byte\t", value); 6856 return true; 6857 } 6858 } 6859 return default_assemble_integer (value, size, aligned_p); 6860 } 6861 6862 /* Function to set up the backend function structure. */ 6863 6864 static struct machine_function * 6865 frv_init_machine_status (void) 6866 { 6867 return ggc_cleared_alloc<machine_function> (); 6868 } 6869 6870 /* Implement TARGET_SCHED_ISSUE_RATE. */ 6871 6872 int 6873 frv_issue_rate (void) 6874 { 6875 if (!TARGET_PACK) 6876 return 1; 6877 6878 switch (frv_cpu_type) 6879 { 6880 default: 6881 case FRV_CPU_FR300: 6882 case FRV_CPU_SIMPLE: 6883 return 1; 6884 6885 case FRV_CPU_FR400: 6886 case FRV_CPU_FR405: 6887 case FRV_CPU_FR450: 6888 return 2; 6889 6890 case FRV_CPU_GENERIC: 6891 case FRV_CPU_FR500: 6892 case FRV_CPU_TOMCAT: 6893 return 4; 6894 6895 case FRV_CPU_FR550: 6896 return 8; 6897 } 6898 } 6899 6900 /* Return the value of INSN's acc_group attribute. */ 6901 6902 int 6903 frv_acc_group (rtx insn) 6904 { 6905 /* This distinction only applies to the FR550 packing constraints. */ 6906 if (frv_cpu_type == FRV_CPU_FR550) 6907 { 6908 subrtx_iterator::array_type array; 6909 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST) 6910 if (REG_P (*iter)) 6911 { 6912 unsigned int regno = REGNO (*iter); 6913 /* If REGNO refers to an accumulator, return ACC_GROUP_ODD if 6914 the bit 2 of the register number is set and ACC_GROUP_EVEN if 6915 it is clear. */ 6916 if (ACC_P (regno)) 6917 return (regno - ACC_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN; 6918 if (ACCG_P (regno)) 6919 return (regno - ACCG_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN; 6920 } 6921 } 6922 return ACC_GROUP_NONE; 6923 } 6924 6925 /* Return the index of the DFA unit in FRV_UNIT_NAMES[] that instruction 6926 INSN will try to claim first. Since this value depends only on the 6927 type attribute, we can cache the results in FRV_TYPE_TO_UNIT[]. */ 6928 6929 static unsigned int 6930 frv_insn_unit (rtx_insn *insn) 6931 { 6932 enum attr_type type; 6933 6934 type = get_attr_type (insn); 6935 if (frv_type_to_unit[type] == ARRAY_SIZE (frv_unit_codes)) 6936 { 6937 /* We haven't seen this type of instruction before. 
*/ 6938 state_t state; 6939 unsigned int unit; 6940 6941 /* Issue the instruction on its own to see which unit it prefers. */ 6942 state = alloca (state_size ()); 6943 state_reset (state); 6944 state_transition (state, insn); 6945 6946 /* Find out which unit was taken. */ 6947 for (unit = 0; unit < ARRAY_SIZE (frv_unit_codes); unit++) 6948 if (cpu_unit_reservation_p (state, frv_unit_codes[unit])) 6949 break; 6950 6951 gcc_assert (unit != ARRAY_SIZE (frv_unit_codes)); 6952 6953 frv_type_to_unit[type] = unit; 6954 } 6955 return frv_type_to_unit[type]; 6956 } 6957 6958 /* Return true if INSN issues to a branch unit. */ 6959 6960 static bool 6961 frv_issues_to_branch_unit_p (rtx_insn *insn) 6962 { 6963 return frv_unit_groups[frv_insn_unit (insn)] == GROUP_B; 6964 } 6965 6966 /* The instructions in the packet, partitioned into groups. */ 6967 struct frv_packet_group { 6968 /* How many instructions in the packet belong to this group. */ 6969 unsigned int num_insns; 6970 6971 /* A list of the instructions that belong to this group, in the order 6972 they appear in the rtl stream. */ 6973 rtx_insn *insns[ARRAY_SIZE (frv_unit_codes)]; 6974 6975 /* The contents of INSNS after they have been sorted into the correct 6976 assembly-language order. Element X issues to unit X. The list may 6977 contain extra nops. */ 6978 rtx_insn *sorted[ARRAY_SIZE (frv_unit_codes)]; 6979 6980 /* The member of frv_nops[] to use in sorted[]. */ 6981 rtx_insn *nop; 6982 }; 6983 6984 /* The current state of the packing pass, implemented by frv_pack_insns. */ 6985 static struct { 6986 /* The state of the pipeline DFA. */ 6987 state_t dfa_state; 6988 6989 /* Which hardware registers are set within the current packet, 6990 and the conditions under which they are set. */ 6991 regstate_t regstate[FIRST_PSEUDO_REGISTER]; 6992 6993 /* The memory locations that have been modified so far in this 6994 packet. MEM is the memref and COND is the regstate_t condition 6995 under which it is set. */ 6996 struct { 6997 rtx mem; 6998 regstate_t cond; 6999 } mems[2]; 7000 7001 /* The number of valid entries in MEMS. The value is larger than 7002 ARRAY_SIZE (mems) if there were too many mems to record. */ 7003 unsigned int num_mems; 7004 7005 /* The maximum number of instructions that can be packed together. */ 7006 unsigned int issue_rate; 7007 7008 /* The instructions in the packet, partitioned into groups. */ 7009 struct frv_packet_group groups[NUM_GROUPS]; 7010 7011 /* The instructions that make up the current packet. */ 7012 rtx_insn *insns[ARRAY_SIZE (frv_unit_codes)]; 7013 unsigned int num_insns; 7014 } frv_packet; 7015 7016 /* Return the regstate_t flags for the given COND_EXEC condition. 7017 Abort if the condition isn't in the right form. */ 7018 7019 static int 7020 frv_cond_flags (rtx cond) 7021 { 7022 gcc_assert ((GET_CODE (cond) == EQ || GET_CODE (cond) == NE) 7023 && GET_CODE (XEXP (cond, 0)) == REG 7024 && CR_P (REGNO (XEXP (cond, 0))) 7025 && XEXP (cond, 1) == const0_rtx); 7026 return ((REGNO (XEXP (cond, 0)) - CR_FIRST) 7027 | (GET_CODE (cond) == NE 7028 ? REGSTATE_IF_TRUE 7029 : REGSTATE_IF_FALSE)); 7030 } 7031 7032 7033 /* Return true if something accessed under condition COND2 can 7034 conflict with something written under condition COND1. */ 7035 7036 static bool 7037 frv_regstate_conflict_p (regstate_t cond1, regstate_t cond2) 7038 { 7039 /* If either reference was unconditional, we have a conflict. 
*/ 7040 if ((cond1 & REGSTATE_IF_EITHER) == 0 7041 || (cond2 & REGSTATE_IF_EITHER) == 0) 7042 return true; 7043 7044 /* The references might conflict if they were controlled by 7045 different CRs. */ 7046 if ((cond1 & REGSTATE_CC_MASK) != (cond2 & REGSTATE_CC_MASK)) 7047 return true; 7048 7049 /* They definitely conflict if they are controlled by the 7050 same condition. */ 7051 if ((cond1 & cond2 & REGSTATE_IF_EITHER) != 0) 7052 return true; 7053 7054 return false; 7055 } 7056 7057 7058 /* Return true if an instruction with pattern PAT depends on an 7059 instruction in the current packet. COND describes the condition 7060 under which PAT might be set or used. */ 7061 7062 static bool 7063 frv_registers_conflict_p_1 (rtx pat, regstate_t cond) 7064 { 7065 subrtx_var_iterator::array_type array; 7066 FOR_EACH_SUBRTX_VAR (iter, array, pat, NONCONST) 7067 { 7068 rtx x = *iter; 7069 if (GET_CODE (x) == REG) 7070 { 7071 unsigned int regno; 7072 FOR_EACH_REGNO (regno, x) 7073 if ((frv_packet.regstate[regno] & REGSTATE_MODIFIED) != 0) 7074 if (frv_regstate_conflict_p (frv_packet.regstate[regno], cond)) 7075 return true; 7076 } 7077 else if (GET_CODE (x) == MEM) 7078 { 7079 /* If we ran out of memory slots, assume a conflict. */ 7080 if (frv_packet.num_mems > ARRAY_SIZE (frv_packet.mems)) 7081 return 1; 7082 7083 /* Check for output or true dependencies with earlier MEMs. */ 7084 for (unsigned int i = 0; i < frv_packet.num_mems; i++) 7085 if (frv_regstate_conflict_p (frv_packet.mems[i].cond, cond)) 7086 { 7087 if (true_dependence (frv_packet.mems[i].mem, VOIDmode, x)) 7088 return true; 7089 7090 if (output_dependence (frv_packet.mems[i].mem, x)) 7091 return true; 7092 } 7093 } 7094 7095 /* The return values of calls aren't significant: they describe 7096 the effect of the call as a whole, not of the insn itself. */ 7097 else if (GET_CODE (x) == SET && GET_CODE (SET_SRC (x)) == CALL) 7098 iter.substitute (SET_SRC (x)); 7099 } 7100 return false; 7101 } 7102 7103 7104 /* Return true if something in X might depend on an instruction 7105 in the current packet. */ 7106 7107 static bool 7108 frv_registers_conflict_p (rtx x) 7109 { 7110 regstate_t flags; 7111 7112 flags = 0; 7113 if (GET_CODE (x) == COND_EXEC) 7114 { 7115 if (frv_registers_conflict_p_1 (XEXP (x, 0), flags)) 7116 return true; 7117 7118 flags |= frv_cond_flags (XEXP (x, 0)); 7119 x = XEXP (x, 1); 7120 } 7121 return frv_registers_conflict_p_1 (x, flags); 7122 } 7123 7124 7125 /* A note_stores callback. DATA points to the regstate_t condition 7126 under which X is modified. Update FRV_PACKET accordingly. */ 7127 7128 static void 7129 frv_registers_update_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data) 7130 { 7131 unsigned int regno; 7132 7133 if (GET_CODE (x) == REG) 7134 FOR_EACH_REGNO (regno, x) 7135 frv_packet.regstate[regno] |= *(regstate_t *) data; 7136 7137 if (GET_CODE (x) == MEM) 7138 { 7139 if (frv_packet.num_mems < ARRAY_SIZE (frv_packet.mems)) 7140 { 7141 frv_packet.mems[frv_packet.num_mems].mem = x; 7142 frv_packet.mems[frv_packet.num_mems].cond = *(regstate_t *) data; 7143 } 7144 frv_packet.num_mems++; 7145 } 7146 } 7147 7148 7149 /* Update the register state information for an instruction whose 7150 body is X. 
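Every register stored by X is marked REGSTATE_MODIFIED, together with the condition (if any) under which the store happens.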
*/ 7151 7152 static void 7153 frv_registers_update (rtx x) 7154 { 7155 regstate_t flags; 7156 7157 flags = REGSTATE_MODIFIED; 7158 if (GET_CODE (x) == COND_EXEC) 7159 { 7160 flags |= frv_cond_flags (XEXP (x, 0)); 7161 x = XEXP (x, 1); 7162 } 7163 note_stores (x, frv_registers_update_1, &flags); 7164 } 7165 7166 7167 /* Initialize frv_packet for the start of a new packet. */ 7168 7169 static void 7170 frv_start_packet (void) 7171 { 7172 enum frv_insn_group group; 7173 7174 memset (frv_packet.regstate, 0, sizeof (frv_packet.regstate)); 7175 frv_packet.num_mems = 0; 7176 frv_packet.num_insns = 0; 7177 for (group = GROUP_I; group < NUM_GROUPS; 7178 group = (enum frv_insn_group) (group + 1)) 7179 frv_packet.groups[group].num_insns = 0; 7180 } 7181 7182 7183 /* Likewise for the start of a new basic block. */ 7184 7185 static void 7186 frv_start_packet_block (void) 7187 { 7188 state_reset (frv_packet.dfa_state); 7189 frv_start_packet (); 7190 } 7191 7192 7193 /* Finish the current packet, if any, and start a new one. Call 7194 HANDLE_PACKET with FRV_PACKET describing the completed packet. */ 7195 7196 static void 7197 frv_finish_packet (void (*handle_packet) (void)) 7198 { 7199 if (frv_packet.num_insns > 0) 7200 { 7201 handle_packet (); 7202 state_transition (frv_packet.dfa_state, 0); 7203 frv_start_packet (); 7204 } 7205 } 7206 7207 7208 /* Return true if INSN can be added to the current packet. Update 7209 the DFA state on success. */ 7210 7211 static bool 7212 frv_pack_insn_p (rtx_insn *insn) 7213 { 7214 /* See if the packet is already as long as it can be. */ 7215 if (frv_packet.num_insns == frv_packet.issue_rate) 7216 return false; 7217 7218 /* If the scheduler thought that an instruction should start a packet, 7219 it's usually a good idea to believe it. It knows much more about 7220 the latencies than we do. 7221 7222 There are some exceptions though: 7223 7224 - Conditional instructions are scheduled on the assumption that 7225 they will be executed. This is usually a good thing, since it 7226 tends to avoid unnecessary stalls in the conditional code. 7227 But we want to pack conditional instructions as tightly as 7228 possible, in order to optimize the case where they aren't 7229 executed. 7230 7231 - The scheduler will always put branches on their own, even 7232 if there's no real dependency. 7233 7234 - There's no point putting a call in its own packet unless 7235 we have to. */ 7236 if (frv_packet.num_insns > 0 7237 && NONJUMP_INSN_P (insn) 7238 && GET_MODE (insn) == TImode 7239 && GET_CODE (PATTERN (insn)) != COND_EXEC) 7240 return false; 7241 7242 /* Check for register conflicts. Don't do this for setlo since any 7243 conflict will be with the partnering sethi, with which it can 7244 be packed. */ 7245 if (get_attr_type (insn) != TYPE_SETLO) 7246 if (frv_registers_conflict_p (PATTERN (insn))) 7247 return false; 7248 7249 return state_transition (frv_packet.dfa_state, insn) < 0; 7250 } 7251 7252 7253 /* Add instruction INSN to the current packet. */ 7254 7255 static void 7256 frv_add_insn_to_packet (rtx_insn *insn) 7257 { 7258 struct frv_packet_group *packet_group; 7259 7260 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]]; 7261 packet_group->insns[packet_group->num_insns++] = insn; 7262 frv_packet.insns[frv_packet.num_insns++] = insn; 7263 7264 frv_registers_update (PATTERN (insn)); 7265 } 7266 7267 7268 /* Insert INSN (a member of frv_nops[]) into the current packet. 
If the 7269 packet ends in a branch or call, insert the nop before it, otherwise 7270 add to the end. */ 7271 7272 static void 7273 frv_insert_nop_in_packet (rtx_insn *insn) 7274 { 7275 struct frv_packet_group *packet_group; 7276 rtx_insn *last; 7277 7278 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]]; 7279 last = frv_packet.insns[frv_packet.num_insns - 1]; 7280 if (! NONJUMP_INSN_P (last)) 7281 { 7282 insn = emit_insn_before (PATTERN (insn), last); 7283 frv_packet.insns[frv_packet.num_insns - 1] = insn; 7284 frv_packet.insns[frv_packet.num_insns++] = last; 7285 } 7286 else 7287 { 7288 insn = emit_insn_after (PATTERN (insn), last); 7289 frv_packet.insns[frv_packet.num_insns++] = insn; 7290 } 7291 packet_group->insns[packet_group->num_insns++] = insn; 7292 } 7293 7294 7295 /* If packing is enabled, divide the instructions into packets and 7296 return true. Call HANDLE_PACKET for each complete packet. */ 7297 7298 static bool 7299 frv_for_each_packet (void (*handle_packet) (void)) 7300 { 7301 rtx_insn *insn, *next_insn; 7302 7303 frv_packet.issue_rate = frv_issue_rate (); 7304 7305 /* Early exit if we don't want to pack insns. */ 7306 if (!optimize 7307 || !flag_schedule_insns_after_reload 7308 || !TARGET_VLIW_BRANCH 7309 || frv_packet.issue_rate == 1) 7310 return false; 7311 7312 /* Set up the initial packing state. */ 7313 dfa_start (); 7314 frv_packet.dfa_state = alloca (state_size ()); 7315 7316 frv_start_packet_block (); 7317 for (insn = get_insns (); insn != 0; insn = next_insn) 7318 { 7319 enum rtx_code code; 7320 bool eh_insn_p; 7321 7322 code = GET_CODE (insn); 7323 next_insn = NEXT_INSN (insn); 7324 7325 if (code == CODE_LABEL) 7326 { 7327 frv_finish_packet (handle_packet); 7328 frv_start_packet_block (); 7329 } 7330 7331 if (INSN_P (insn)) 7332 switch (GET_CODE (PATTERN (insn))) 7333 { 7334 case USE: 7335 case CLOBBER: 7336 break; 7337 7338 default: 7339 /* Calls mustn't be packed on a TOMCAT. */ 7340 if (CALL_P (insn) && frv_cpu_type == FRV_CPU_TOMCAT) 7341 frv_finish_packet (handle_packet); 7342 7343 /* Since the last instruction in a packet determines the EH 7344 region, any exception-throwing instruction must come at 7345 the end of reordered packet. Insns that issue to a 7346 branch unit are bound to come last; for others it's 7347 too hard to predict. */ 7348 eh_insn_p = (find_reg_note (insn, REG_EH_REGION, NULL) != NULL); 7349 if (eh_insn_p && !frv_issues_to_branch_unit_p (insn)) 7350 frv_finish_packet (handle_packet); 7351 7352 /* Finish the current packet if we can't add INSN to it. 7353 Simulate cycles until INSN is ready to issue. */ 7354 if (!frv_pack_insn_p (insn)) 7355 { 7356 frv_finish_packet (handle_packet); 7357 while (!frv_pack_insn_p (insn)) 7358 state_transition (frv_packet.dfa_state, 0); 7359 } 7360 7361 /* Add the instruction to the packet. */ 7362 frv_add_insn_to_packet (insn); 7363 7364 /* Calls and jumps end a packet, as do insns that throw 7365 an exception. */ 7366 if (code == CALL_INSN || code == JUMP_INSN || eh_insn_p) 7367 frv_finish_packet (handle_packet); 7368 break; 7369 } 7370 } 7371 frv_finish_packet (handle_packet); 7372 dfa_finish (); 7373 return true; 7374 } 7375 7376 /* Subroutine of frv_sort_insn_group. We are trying to sort 7377 frv_packet.groups[GROUP].sorted[0...NUM_INSNS-1] into assembly 7378 language order. We have already picked a new position for 7379 frv_packet.groups[GROUP].sorted[X] if bit X of ISSUED is set. 
7380 These instructions will occupy elements [0, LOWER_SLOT) and 7381 [UPPER_SLOT, NUM_INSNS) of the final (sorted) array. STATE is 7382 the DFA state after issuing these instructions. 7383 7384 Try filling elements [LOWER_SLOT, UPPER_SLOT) with every permutation 7385 of the unused instructions. Return true if one such permutation gives 7386 a valid ordering, leaving the successful permutation in sorted[]. 7387 Do not modify sorted[] until a valid permutation is found. */ 7388 7389 static bool 7390 frv_sort_insn_group_1 (enum frv_insn_group group, 7391 unsigned int lower_slot, unsigned int upper_slot, 7392 unsigned int issued, unsigned int num_insns, 7393 state_t state) 7394 { 7395 struct frv_packet_group *packet_group; 7396 unsigned int i; 7397 state_t test_state; 7398 size_t dfa_size; 7399 rtx_insn *insn; 7400 7401 /* Early success if we've filled all the slots. */ 7402 if (lower_slot == upper_slot) 7403 return true; 7404 7405 packet_group = &frv_packet.groups[group]; 7406 dfa_size = state_size (); 7407 test_state = alloca (dfa_size); 7408 7409 /* Try issuing each unused instruction. */ 7410 for (i = num_insns - 1; i + 1 != 0; i--) 7411 if (~issued & (1 << i)) 7412 { 7413 insn = packet_group->sorted[i]; 7414 memcpy (test_state, state, dfa_size); 7415 if (state_transition (test_state, insn) < 0 7416 && cpu_unit_reservation_p (test_state, 7417 NTH_UNIT (group, upper_slot - 1)) 7418 && frv_sort_insn_group_1 (group, lower_slot, upper_slot - 1, 7419 issued | (1 << i), num_insns, 7420 test_state)) 7421 { 7422 packet_group->sorted[upper_slot - 1] = insn; 7423 return true; 7424 } 7425 } 7426 7427 return false; 7428 } 7429 7430 /* Compare two instructions by their frv_insn_unit. */ 7431 7432 static int 7433 frv_compare_insns (const void *first, const void *second) 7434 { 7435 rtx_insn * const *insn1 = (rtx_insn * const *) first; 7436 rtx_insn * const *insn2 = (rtx_insn * const *) second; 7437 return frv_insn_unit (*insn1) - frv_insn_unit (*insn2); 7438 } 7439 7440 /* Copy frv_packet.groups[GROUP].insns[] to frv_packet.groups[GROUP].sorted[] 7441 and sort it into assembly language order. See frv.md for a description of 7442 the algorithm. */ 7443 7444 static void 7445 frv_sort_insn_group (enum frv_insn_group group) 7446 { 7447 struct frv_packet_group *packet_group; 7448 unsigned int first, i, nop, max_unit, num_slots; 7449 state_t state, test_state; 7450 size_t dfa_size; 7451 7452 packet_group = &frv_packet.groups[group]; 7453 7454 /* Assume no nop is needed. */ 7455 packet_group->nop = 0; 7456 7457 if (packet_group->num_insns == 0) 7458 return; 7459 7460 /* Copy insns[] to sorted[]. */ 7461 memcpy (packet_group->sorted, packet_group->insns, 7462 sizeof (rtx) * packet_group->num_insns); 7463 7464 /* Sort sorted[] by the unit that each insn tries to take first. */ 7465 if (packet_group->num_insns > 1) 7466 qsort (packet_group->sorted, packet_group->num_insns, 7467 sizeof (rtx), frv_compare_insns); 7468 7469 /* That's always enough for branch and control insns. */ 7470 if (group == GROUP_B || group == GROUP_C) 7471 return; 7472 7473 dfa_size = state_size (); 7474 state = alloca (dfa_size); 7475 test_state = alloca (dfa_size); 7476 7477 /* Find the highest FIRST such that sorted[0...FIRST-1] can issue 7478 consecutively and such that the DFA takes unit X when sorted[X] 7479 is added. Set STATE to the new DFA state. 
*/ 7480 state_reset (test_state); 7481 for (first = 0; first < packet_group->num_insns; first++) 7482 { 7483 memcpy (state, test_state, dfa_size); 7484 if (state_transition (test_state, packet_group->sorted[first]) >= 0 7485 || !cpu_unit_reservation_p (test_state, NTH_UNIT (group, first))) 7486 break; 7487 } 7488 7489 /* If all the instructions issued in ascending order, we're done. */ 7490 if (first == packet_group->num_insns) 7491 return; 7492 7493 /* Add nops to the end of sorted[] and try each permutation until 7494 we find one that works. */ 7495 for (nop = 0; nop < frv_num_nops; nop++) 7496 { 7497 max_unit = frv_insn_unit (frv_nops[nop]); 7498 if (frv_unit_groups[max_unit] == group) 7499 { 7500 packet_group->nop = frv_nops[nop]; 7501 num_slots = UNIT_NUMBER (max_unit) + 1; 7502 for (i = packet_group->num_insns; i < num_slots; i++) 7503 packet_group->sorted[i] = frv_nops[nop]; 7504 if (frv_sort_insn_group_1 (group, first, num_slots, 7505 (1 << first) - 1, num_slots, state)) 7506 return; 7507 } 7508 } 7509 gcc_unreachable (); 7510 } 7511 7512 /* Sort the current packet into assembly-language order. Set packing 7513 flags as appropriate. */ 7514 7515 static void 7516 frv_reorder_packet (void) 7517 { 7518 unsigned int cursor[NUM_GROUPS]; 7519 rtx_insn *insns[ARRAY_SIZE (frv_unit_groups)]; 7520 unsigned int unit, to, from; 7521 enum frv_insn_group group; 7522 struct frv_packet_group *packet_group; 7523 7524 /* First sort each group individually. */ 7525 for (group = GROUP_I; group < NUM_GROUPS; 7526 group = (enum frv_insn_group) (group + 1)) 7527 { 7528 cursor[group] = 0; 7529 frv_sort_insn_group (group); 7530 } 7531 7532 /* Go through the unit template and try add an instruction from 7533 that unit's group. */ 7534 to = 0; 7535 for (unit = 0; unit < ARRAY_SIZE (frv_unit_groups); unit++) 7536 { 7537 group = frv_unit_groups[unit]; 7538 packet_group = &frv_packet.groups[group]; 7539 if (cursor[group] < packet_group->num_insns) 7540 { 7541 /* frv_reorg should have added nops for us. */ 7542 gcc_assert (packet_group->sorted[cursor[group]] 7543 != packet_group->nop); 7544 insns[to++] = packet_group->sorted[cursor[group]++]; 7545 } 7546 } 7547 7548 gcc_assert (to == frv_packet.num_insns); 7549 7550 /* Clear the last instruction's packing flag, thus marking the end of 7551 a packet. Reorder the other instructions relative to it. */ 7552 CLEAR_PACKING_FLAG (insns[to - 1]); 7553 for (from = 0; from < to - 1; from++) 7554 { 7555 remove_insn (insns[from]); 7556 add_insn_before (insns[from], insns[to - 1], NULL); 7557 SET_PACKING_FLAG (insns[from]); 7558 } 7559 } 7560 7561 7562 /* Divide instructions into packets. Reorder the contents of each 7563 packet so that they are in the correct assembly-language order. 7564 7565 Since this pass can change the raw meaning of the rtl stream, it must 7566 only be called at the last minute, just before the instructions are 7567 written out. */ 7568 7569 static void 7570 frv_pack_insns (void) 7571 { 7572 if (frv_for_each_packet (frv_reorder_packet)) 7573 frv_insn_packing_flag = 0; 7574 else 7575 frv_insn_packing_flag = -1; 7576 } 7577 7578 /* See whether we need to add nops to group GROUP in order to 7579 make a valid packet. */ 7580 7581 static void 7582 frv_fill_unused_units (enum frv_insn_group group) 7583 { 7584 unsigned int non_nops, nops, i; 7585 struct frv_packet_group *packet_group; 7586 7587 packet_group = &frv_packet.groups[group]; 7588 7589 /* Sort the instructions into assembly-language order. 7590 Use nops to fill slots that are otherwise unused. 
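If, for example, the group's only instruction can issue to the second unit of its group but not the first, one nop has to be inserted ahead of it so that the first slot is not left empty.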
*/ 7591 frv_sort_insn_group (group); 7592 7593 /* See how many nops are needed before the final useful instruction. */ 7594 i = nops = 0; 7595 for (non_nops = 0; non_nops < packet_group->num_insns; non_nops++) 7596 while (packet_group->sorted[i++] == packet_group->nop) 7597 nops++; 7598 7599 /* Insert that many nops into the instruction stream. */ 7600 while (nops-- > 0) 7601 frv_insert_nop_in_packet (packet_group->nop); 7602 } 7603 7604 /* Return true if accesses IO1 and IO2 refer to the same doubleword. */ 7605 7606 static bool 7607 frv_same_doubleword_p (const struct frv_io *io1, const struct frv_io *io2) 7608 { 7609 if (io1->const_address != 0 && io2->const_address != 0) 7610 return io1->const_address == io2->const_address; 7611 7612 if (io1->var_address != 0 && io2->var_address != 0) 7613 return rtx_equal_p (io1->var_address, io2->var_address); 7614 7615 return false; 7616 } 7617 7618 /* Return true if operations IO1 and IO2 are guaranteed to complete 7619 in order. */ 7620 7621 static bool 7622 frv_io_fixed_order_p (const struct frv_io *io1, const struct frv_io *io2) 7623 { 7624 /* The order of writes is always preserved. */ 7625 if (io1->type == FRV_IO_WRITE && io2->type == FRV_IO_WRITE) 7626 return true; 7627 7628 /* The order of reads isn't preserved. */ 7629 if (io1->type != FRV_IO_WRITE && io2->type != FRV_IO_WRITE) 7630 return false; 7631 7632 /* One operation is a write and the other is (or could be) a read. 7633 The order is only guaranteed if the accesses are to the same 7634 doubleword. */ 7635 return frv_same_doubleword_p (io1, io2); 7636 } 7637 7638 /* Generalize I/O operation X so that it covers both X and Y. */ 7639 7640 static void 7641 frv_io_union (struct frv_io *x, const struct frv_io *y) 7642 { 7643 if (x->type != y->type) 7644 x->type = FRV_IO_UNKNOWN; 7645 if (!frv_same_doubleword_p (x, y)) 7646 { 7647 x->const_address = 0; 7648 x->var_address = 0; 7649 } 7650 } 7651 7652 /* Fill IO with information about the load or store associated with 7653 membar instruction INSN. */ 7654 7655 static void 7656 frv_extract_membar (struct frv_io *io, rtx_insn *insn) 7657 { 7658 extract_insn (insn); 7659 io->type = (enum frv_io_type) INTVAL (recog_data.operand[2]); 7660 io->const_address = INTVAL (recog_data.operand[1]); 7661 io->var_address = XEXP (recog_data.operand[0], 0); 7662 } 7663 7664 /* A note_stores callback for which DATA points to an rtx. Nullify *DATA 7665 if X is a register and *DATA depends on X. */ 7666 7667 static void 7668 frv_io_check_address (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data) 7669 { 7670 rtx *other = (rtx *) data; 7671 7672 if (REG_P (x) && *other != 0 && reg_overlap_mentioned_p (x, *other)) 7673 *other = 0; 7674 } 7675 7676 /* A note_stores callback for which DATA points to a HARD_REG_SET. 7677 Remove every modified register from the set. */ 7678 7679 static void 7680 frv_io_handle_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data) 7681 { 7682 HARD_REG_SET *set = (HARD_REG_SET *) data; 7683 unsigned int regno; 7684 7685 if (REG_P (x)) 7686 FOR_EACH_REGNO (regno, x) 7687 CLEAR_HARD_REG_BIT (*set, regno); 7688 } 7689 7690 /* A note_uses callback that adds all registers in *X to hard register 7691 set *DATA. */ 7692 7693 static void 7694 frv_io_handle_use (rtx *x, void *data) 7695 { 7696 find_all_hard_regs (*x, (HARD_REG_SET *) data); 7697 } 7698 7699 /* Go through block BB looking for membars to remove. 
There are two 7700 cases where intra-block analysis is enough: 7701 7702 - a membar is redundant if it occurs between two consecutive I/O 7703 operations and if those operations are guaranteed to complete 7704 in order. 7705 7706 - a membar for a __builtin_read is redundant if the result is 7707 used before the next I/O operation is issued. 7708 7709 If the last membar in the block could not be removed, and there 7710 are guaranteed to be no I/O operations between that membar and 7711 the end of the block, store the membar in *LAST_MEMBAR, otherwise 7712 store null. 7713 7714 Describe the block's first I/O operation in *NEXT_IO. Describe 7715 an unknown operation if the block doesn't do any I/O. */ 7716 7717 static void 7718 frv_optimize_membar_local (basic_block bb, struct frv_io *next_io, 7719 rtx_insn **last_membar) 7720 { 7721 HARD_REG_SET used_regs; 7722 rtx set; 7723 rtx_insn *insn, *next_membar; 7724 bool next_is_end_p; 7725 7726 /* NEXT_IO is the next I/O operation to be performed after the current 7727 instruction. It starts off as being an unknown operation. */ 7728 memset (next_io, 0, sizeof (*next_io)); 7729 7730 /* NEXT_IS_END_P is true if NEXT_IO describes the end of the block. */ 7731 next_is_end_p = true; 7732 7733 /* If the current instruction is a __builtin_read or __builtin_write, 7734 NEXT_MEMBAR is the membar instruction associated with it. NEXT_MEMBAR 7735 is null if the membar has already been deleted. 7736 7737 Note that the initialization here should only be needed to 7738 suppress warnings. */ 7739 next_membar = 0; 7740 7741 /* USED_REGS is the set of registers that are used before the 7742 next I/O instruction. */ 7743 CLEAR_HARD_REG_SET (used_regs); 7744 7745 for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn)) 7746 if (CALL_P (insn)) 7747 { 7748 /* We can't predict what a call will do to volatile memory. */ 7749 memset (next_io, 0, sizeof (struct frv_io)); 7750 next_is_end_p = false; 7751 CLEAR_HARD_REG_SET (used_regs); 7752 } 7753 else if (INSN_P (insn)) 7754 switch (recog_memoized (insn)) 7755 { 7756 case CODE_FOR_optional_membar_qi: 7757 case CODE_FOR_optional_membar_hi: 7758 case CODE_FOR_optional_membar_si: 7759 case CODE_FOR_optional_membar_di: 7760 next_membar = insn; 7761 if (next_is_end_p) 7762 { 7763 /* Local information isn't enough to decide whether this 7764 membar is needed. Stash it away for later. */ 7765 *last_membar = insn; 7766 frv_extract_membar (next_io, insn); 7767 next_is_end_p = false; 7768 } 7769 else 7770 { 7771 /* Check whether the I/O operation before INSN could be 7772 reordered with one described by NEXT_IO. If it can't, 7773 INSN will not be needed. */ 7774 struct frv_io prev_io; 7775 7776 frv_extract_membar (&prev_io, insn); 7777 if (frv_io_fixed_order_p (&prev_io, next_io)) 7778 { 7779 if (dump_file) 7780 fprintf (dump_file, 7781 ";; [Local] Removing membar %d since order" 7782 " of accesses is guaranteed\n", 7783 INSN_UID (next_membar)); 7784 7785 insn = NEXT_INSN (insn); 7786 delete_insn (next_membar); 7787 next_membar = 0; 7788 } 7789 *next_io = prev_io; 7790 } 7791 break; 7792 7793 default: 7794 /* Invalidate NEXT_IO's address if it depends on something that 7795 is clobbered by INSN. */ 7796 if (next_io->var_address) 7797 note_stores (PATTERN (insn), frv_io_check_address, 7798 &next_io->var_address); 7799 7800 /* If the next membar is associated with a __builtin_read, 7801 see if INSN reads from that address. 
If it does, and if 7802 the destination register is used before the next I/O access, 7803 there is no need for the membar. */ 7804 set = PATTERN (insn); 7805 if (next_io->type == FRV_IO_READ 7806 && next_io->var_address != 0 7807 && next_membar != 0 7808 && GET_CODE (set) == SET 7809 && GET_CODE (SET_DEST (set)) == REG 7810 && TEST_HARD_REG_BIT (used_regs, REGNO (SET_DEST (set)))) 7811 { 7812 rtx src; 7813 7814 src = SET_SRC (set); 7815 if (GET_CODE (src) == ZERO_EXTEND) 7816 src = XEXP (src, 0); 7817 7818 if (GET_CODE (src) == MEM 7819 && rtx_equal_p (XEXP (src, 0), next_io->var_address)) 7820 { 7821 if (dump_file) 7822 fprintf (dump_file, 7823 ";; [Local] Removing membar %d since the target" 7824 " of %d is used before the I/O operation\n", 7825 INSN_UID (next_membar), INSN_UID (insn)); 7826 7827 if (next_membar == *last_membar) 7828 *last_membar = 0; 7829 7830 delete_insn (next_membar); 7831 next_membar = 0; 7832 } 7833 } 7834 7835 /* If INSN has volatile references, forget about any registers 7836 that are used after it. Otherwise forget about uses that 7837 are (or might be) defined by INSN. */ 7838 if (volatile_refs_p (PATTERN (insn))) 7839 CLEAR_HARD_REG_SET (used_regs); 7840 else 7841 note_stores (PATTERN (insn), frv_io_handle_set, &used_regs); 7842 7843 note_uses (&PATTERN (insn), frv_io_handle_use, &used_regs); 7844 break; 7845 } 7846 } 7847 7848 /* See if MEMBAR, the last membar instruction in BB, can be removed. 7849 FIRST_IO[X] describes the first operation performed by basic block X. */ 7850 7851 static void 7852 frv_optimize_membar_global (basic_block bb, struct frv_io *first_io, 7853 rtx_insn *membar) 7854 { 7855 struct frv_io this_io, next_io; 7856 edge succ; 7857 edge_iterator ei; 7858 7859 /* We need to keep the membar if there is an edge to the exit block. */ 7860 FOR_EACH_EDGE (succ, ei, bb->succs) 7861 /* for (succ = bb->succ; succ != 0; succ = succ->succ_next) */ 7862 if (succ->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)) 7863 return; 7864 7865 /* Work out the union of all successor blocks. */ 7866 ei = ei_start (bb->succs); 7867 ei_cond (ei, &succ); 7868 /* next_io = first_io[bb->succ->dest->index]; */ 7869 next_io = first_io[succ->dest->index]; 7870 ei = ei_start (bb->succs); 7871 if (ei_cond (ei, &succ)) 7872 { 7873 for (ei_next (&ei); ei_cond (ei, &succ); ei_next (&ei)) 7874 /*for (succ = bb->succ->succ_next; succ != 0; succ = succ->succ_next)*/ 7875 frv_io_union (&next_io, &first_io[succ->dest->index]); 7876 } 7877 else 7878 gcc_unreachable (); 7879 7880 frv_extract_membar (&this_io, membar); 7881 if (frv_io_fixed_order_p (&this_io, &next_io)) 7882 { 7883 if (dump_file) 7884 fprintf (dump_file, 7885 ";; [Global] Removing membar %d since order of accesses" 7886 " is guaranteed\n", INSN_UID (membar)); 7887 7888 delete_insn (membar); 7889 } 7890 } 7891 7892 /* Remove redundant membars from the current function. 
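   As an illustrative sketch (the function and variable names below are
   invented for this example), consider a user routine that performs two
   consecutive I/O writes:

     void
     poke_twice (volatile void *dev)
     {
       __builtin_write32 (dev, 1);
       __builtin_write32 (dev, 2);
     }

   Each __builtin_write32 is followed by an optional membar.  Since the
   order of writes is always preserved, the membar after the first store
   is redundant and the local pass can delete it; whether the final
   membar in the block is still needed is left to the global pass.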
*/ 7893 7894 static void 7895 frv_optimize_membar (void) 7896 { 7897 basic_block bb; 7898 struct frv_io *first_io; 7899 rtx_insn **last_membar; 7900 7901 compute_bb_for_insn (); 7902 first_io = XCNEWVEC (struct frv_io, last_basic_block_for_fn (cfun)); 7903 last_membar = XCNEWVEC (rtx_insn *, last_basic_block_for_fn (cfun)); 7904 7905 FOR_EACH_BB_FN (bb, cfun) 7906 frv_optimize_membar_local (bb, &first_io[bb->index], 7907 &last_membar[bb->index]); 7908 7909 FOR_EACH_BB_FN (bb, cfun) 7910 if (last_membar[bb->index] != 0) 7911 frv_optimize_membar_global (bb, first_io, last_membar[bb->index]); 7912 7913 free (first_io); 7914 free (last_membar); 7915 } 7916 7917 /* Used by frv_reorg to keep track of the current packet's address. */ 7918 static unsigned int frv_packet_address; 7919 7920 /* If the current packet falls through to a label, try to pad the packet 7921 with nops in order to fit the label's alignment requirements. */ 7922 7923 static void 7924 frv_align_label (void) 7925 { 7926 unsigned int alignment, target, nop; 7927 rtx_insn *x, *last, *barrier, *label; 7928 7929 /* Walk forward to the start of the next packet. Set ALIGNMENT to the 7930 maximum alignment of that packet, LABEL to the last label between 7931 the packets, and BARRIER to the last barrier. */ 7932 last = frv_packet.insns[frv_packet.num_insns - 1]; 7933 label = barrier = 0; 7934 alignment = 4; 7935 for (x = NEXT_INSN (last); x != 0 && !INSN_P (x); x = NEXT_INSN (x)) 7936 { 7937 if (LABEL_P (x)) 7938 { 7939 unsigned int subalign = 1 << label_to_alignment (x); 7940 alignment = MAX (alignment, subalign); 7941 label = x; 7942 } 7943 if (BARRIER_P (x)) 7944 barrier = x; 7945 } 7946 7947 /* If -malign-labels, and the packet falls through to an unaligned 7948 label, try introducing a nop to align that label to 8 bytes. */ 7949 if (TARGET_ALIGN_LABELS 7950 && label != 0 7951 && barrier == 0 7952 && frv_packet.num_insns < frv_packet.issue_rate) 7953 alignment = MAX (alignment, 8); 7954 7955 /* Advance the address to the end of the current packet. */ 7956 frv_packet_address += frv_packet.num_insns * 4; 7957 7958 /* Work out the target address, after alignment. */ 7959 target = (frv_packet_address + alignment - 1) & -alignment; 7960 7961 /* If the packet falls through to the label, try to find an efficient 7962 padding sequence. */ 7963 if (barrier == 0) 7964 { 7965 /* First try adding nops to the current packet. */ 7966 for (nop = 0; nop < frv_num_nops; nop++) 7967 while (frv_packet_address < target && frv_pack_insn_p (frv_nops[nop])) 7968 { 7969 frv_insert_nop_in_packet (frv_nops[nop]); 7970 frv_packet_address += 4; 7971 } 7972 7973 /* If we still haven't reached the target, add some new packets that 7974 contain only nops. If there are two types of nop, insert an 7975 alternating sequence of frv_nops[0] and frv_nops[1], which will 7976 lead to packets like: 7977 7978 nop.p 7979 mnop.p/fnop.p 7980 nop.p 7981 mnop/fnop 7982 7983 etc. Just emit frv_nops[0] if that's the only nop we have. */ 7984 last = frv_packet.insns[frv_packet.num_insns - 1]; 7985 nop = 0; 7986 while (frv_packet_address < target) 7987 { 7988 last = emit_insn_after (PATTERN (frv_nops[nop]), last); 7989 frv_packet_address += 4; 7990 if (frv_num_nops > 1) 7991 nop ^= 1; 7992 } 7993 } 7994 7995 frv_packet_address = target; 7996 } 7997 7998 /* Subroutine of frv_reorg, called after each packet has been constructed 7999 in frv_packet. 
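   Roughly, frv_fill_unused_units pads the integer and
   floating-point/media groups of the packet with nop, fnop or mnop so
   that the packet meets the processor's issue requirements, and
   frv_align_label may then add further nops so that a following label
   gets the requested alignment.  A purely illustrative picture of a
   padded packet (the exact mnemonics and unit assignments are only an
   example):

     add.p    gr4,gr5,gr6
     nop.p
     fnop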
*/ 8000 8001 static void 8002 frv_reorg_packet (void) 8003 { 8004 frv_fill_unused_units (GROUP_I); 8005 frv_fill_unused_units (GROUP_FM); 8006 frv_align_label (); 8007 } 8008 8009 /* Add an instruction with pattern NOP to frv_nops[]. */ 8010 8011 static void 8012 frv_register_nop (rtx nop) 8013 { 8014 rtx_insn *nop_insn = make_insn_raw (nop); 8015 SET_NEXT_INSN (nop_insn) = 0; 8016 SET_PREV_INSN (nop_insn) = 0; 8017 frv_nops[frv_num_nops++] = nop_insn; 8018 } 8019 8020 /* Implement TARGET_MACHINE_DEPENDENT_REORG. Divide the instructions 8021 into packets and check whether we need to insert nops in order to 8022 fulfill the processor's issue requirements. Also, if the user has 8023 requested a certain alignment for a label, try to meet that alignment 8024 by inserting nops in the previous packet. */ 8025 8026 static void 8027 frv_reorg (void) 8028 { 8029 if (optimize > 0 && TARGET_OPTIMIZE_MEMBAR && cfun->machine->has_membar_p) 8030 frv_optimize_membar (); 8031 8032 frv_num_nops = 0; 8033 frv_register_nop (gen_nop ()); 8034 if (TARGET_MEDIA) 8035 frv_register_nop (gen_mnop ()); 8036 if (TARGET_HARD_FLOAT) 8037 frv_register_nop (gen_fnop ()); 8038 8039 /* Estimate the length of each branch. Although this may change after 8040 we've inserted nops, it will only do so in big functions. */ 8041 shorten_branches (get_insns ()); 8042 8043 frv_packet_address = 0; 8044 frv_for_each_packet (frv_reorg_packet); 8045 } 8046 8047 #define def_builtin(name, type, code) \ 8048 add_builtin_function ((name), (type), (code), BUILT_IN_MD, NULL, NULL) 8049 8050 struct builtin_description 8051 { 8052 enum insn_code icode; 8053 const char *name; 8054 enum frv_builtins code; 8055 enum rtx_code comparison; 8056 unsigned int flag; 8057 }; 8058 8059 /* Media intrinsics that take a single, constant argument. */ 8060 8061 static struct builtin_description bdesc_set[] = 8062 { 8063 { CODE_FOR_mhdsets, "__MHDSETS", FRV_BUILTIN_MHDSETS, UNKNOWN, 0 } 8064 }; 8065 8066 /* Media intrinsics that take just one argument. */ 8067 8068 static struct builtin_description bdesc_1arg[] = 8069 { 8070 { CODE_FOR_mnot, "__MNOT", FRV_BUILTIN_MNOT, UNKNOWN, 0 }, 8071 { CODE_FOR_munpackh, "__MUNPACKH", FRV_BUILTIN_MUNPACKH, UNKNOWN, 0 }, 8072 { CODE_FOR_mbtoh, "__MBTOH", FRV_BUILTIN_MBTOH, UNKNOWN, 0 }, 8073 { CODE_FOR_mhtob, "__MHTOB", FRV_BUILTIN_MHTOB, UNKNOWN, 0}, 8074 { CODE_FOR_mabshs, "__MABSHS", FRV_BUILTIN_MABSHS, UNKNOWN, 0 }, 8075 { CODE_FOR_scutss, "__SCUTSS", FRV_BUILTIN_SCUTSS, UNKNOWN, 0 } 8076 }; 8077 8078 /* Media intrinsics that take two arguments. 
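   Each entry maps one of these builtins onto the insn pattern that
   implements it; frv_expand_binop_builtin performs the expansion.  A
   hedged usage sketch (the wrapper function is invented; the argument
   and return types follow the declarations in frv_init_builtins):

     unsigned long
     media_and (unsigned long a, unsigned long b)
     {
       return __MAND (a, b);
     }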
*/ 8079 8080 static struct builtin_description bdesc_2arg[] = 8081 { 8082 { CODE_FOR_mand, "__MAND", FRV_BUILTIN_MAND, UNKNOWN, 0}, 8083 { CODE_FOR_mor, "__MOR", FRV_BUILTIN_MOR, UNKNOWN, 0}, 8084 { CODE_FOR_mxor, "__MXOR", FRV_BUILTIN_MXOR, UNKNOWN, 0}, 8085 { CODE_FOR_maveh, "__MAVEH", FRV_BUILTIN_MAVEH, UNKNOWN, 0}, 8086 { CODE_FOR_msaths, "__MSATHS", FRV_BUILTIN_MSATHS, UNKNOWN, 0}, 8087 { CODE_FOR_msathu, "__MSATHU", FRV_BUILTIN_MSATHU, UNKNOWN, 0}, 8088 { CODE_FOR_maddhss, "__MADDHSS", FRV_BUILTIN_MADDHSS, UNKNOWN, 0}, 8089 { CODE_FOR_maddhus, "__MADDHUS", FRV_BUILTIN_MADDHUS, UNKNOWN, 0}, 8090 { CODE_FOR_msubhss, "__MSUBHSS", FRV_BUILTIN_MSUBHSS, UNKNOWN, 0}, 8091 { CODE_FOR_msubhus, "__MSUBHUS", FRV_BUILTIN_MSUBHUS, UNKNOWN, 0}, 8092 { CODE_FOR_mqaddhss, "__MQADDHSS", FRV_BUILTIN_MQADDHSS, UNKNOWN, 0}, 8093 { CODE_FOR_mqaddhus, "__MQADDHUS", FRV_BUILTIN_MQADDHUS, UNKNOWN, 0}, 8094 { CODE_FOR_mqsubhss, "__MQSUBHSS", FRV_BUILTIN_MQSUBHSS, UNKNOWN, 0}, 8095 { CODE_FOR_mqsubhus, "__MQSUBHUS", FRV_BUILTIN_MQSUBHUS, UNKNOWN, 0}, 8096 { CODE_FOR_mpackh, "__MPACKH", FRV_BUILTIN_MPACKH, UNKNOWN, 0}, 8097 { CODE_FOR_mcop1, "__Mcop1", FRV_BUILTIN_MCOP1, UNKNOWN, 0}, 8098 { CODE_FOR_mcop2, "__Mcop2", FRV_BUILTIN_MCOP2, UNKNOWN, 0}, 8099 { CODE_FOR_mwcut, "__MWCUT", FRV_BUILTIN_MWCUT, UNKNOWN, 0}, 8100 { CODE_FOR_mqsaths, "__MQSATHS", FRV_BUILTIN_MQSATHS, UNKNOWN, 0}, 8101 { CODE_FOR_mqlclrhs, "__MQLCLRHS", FRV_BUILTIN_MQLCLRHS, UNKNOWN, 0}, 8102 { CODE_FOR_mqlmths, "__MQLMTHS", FRV_BUILTIN_MQLMTHS, UNKNOWN, 0}, 8103 { CODE_FOR_smul, "__SMUL", FRV_BUILTIN_SMUL, UNKNOWN, 0}, 8104 { CODE_FOR_umul, "__UMUL", FRV_BUILTIN_UMUL, UNKNOWN, 0}, 8105 { CODE_FOR_addss, "__ADDSS", FRV_BUILTIN_ADDSS, UNKNOWN, 0}, 8106 { CODE_FOR_subss, "__SUBSS", FRV_BUILTIN_SUBSS, UNKNOWN, 0}, 8107 { CODE_FOR_slass, "__SLASS", FRV_BUILTIN_SLASS, UNKNOWN, 0}, 8108 { CODE_FOR_scan, "__SCAN", FRV_BUILTIN_SCAN, UNKNOWN, 0} 8109 }; 8110 8111 /* Integer intrinsics that take two arguments and have no return value. */ 8112 8113 static struct builtin_description bdesc_int_void2arg[] = 8114 { 8115 { CODE_FOR_smass, "__SMASS", FRV_BUILTIN_SMASS, UNKNOWN, 0}, 8116 { CODE_FOR_smsss, "__SMSSS", FRV_BUILTIN_SMSSS, UNKNOWN, 0}, 8117 { CODE_FOR_smu, "__SMU", FRV_BUILTIN_SMU, UNKNOWN, 0} 8118 }; 8119 8120 static struct builtin_description bdesc_prefetches[] = 8121 { 8122 { CODE_FOR_frv_prefetch0, "__data_prefetch0", FRV_BUILTIN_PREFETCH0, UNKNOWN, 8123 0}, 8124 { CODE_FOR_frv_prefetch, "__data_prefetch", FRV_BUILTIN_PREFETCH, UNKNOWN, 0} 8125 }; 8126 8127 /* Media intrinsics that take two arguments, the first being an ACC number. */ 8128 8129 static struct builtin_description bdesc_cut[] = 8130 { 8131 { CODE_FOR_mcut, "__MCUT", FRV_BUILTIN_MCUT, UNKNOWN, 0}, 8132 { CODE_FOR_mcutss, "__MCUTSS", FRV_BUILTIN_MCUTSS, UNKNOWN, 0}, 8133 { CODE_FOR_mdcutssi, "__MDCUTSSI", FRV_BUILTIN_MDCUTSSI, UNKNOWN, 0} 8134 }; 8135 8136 /* Two-argument media intrinsics with an immediate second argument. 
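   For these intrinsics the second argument must be a compile-time
   constant; frv_expand_binopimm_builtin rejects anything else through
   frv_check_constant_argument.  A hedged usage sketch (the wrapper
   function name is invented):

     unsigned long
     rotate_left_8 (unsigned long x)
     {
       return __MROTLI (x, 8);
     }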
*/ 8137 8138 static struct builtin_description bdesc_2argimm[] = 8139 { 8140 { CODE_FOR_mrotli, "__MROTLI", FRV_BUILTIN_MROTLI, UNKNOWN, 0}, 8141 { CODE_FOR_mrotri, "__MROTRI", FRV_BUILTIN_MROTRI, UNKNOWN, 0}, 8142 { CODE_FOR_msllhi, "__MSLLHI", FRV_BUILTIN_MSLLHI, UNKNOWN, 0}, 8143 { CODE_FOR_msrlhi, "__MSRLHI", FRV_BUILTIN_MSRLHI, UNKNOWN, 0}, 8144 { CODE_FOR_msrahi, "__MSRAHI", FRV_BUILTIN_MSRAHI, UNKNOWN, 0}, 8145 { CODE_FOR_mexpdhw, "__MEXPDHW", FRV_BUILTIN_MEXPDHW, UNKNOWN, 0}, 8146 { CODE_FOR_mexpdhd, "__MEXPDHD", FRV_BUILTIN_MEXPDHD, UNKNOWN, 0}, 8147 { CODE_FOR_mdrotli, "__MDROTLI", FRV_BUILTIN_MDROTLI, UNKNOWN, 0}, 8148 { CODE_FOR_mcplhi, "__MCPLHI", FRV_BUILTIN_MCPLHI, UNKNOWN, 0}, 8149 { CODE_FOR_mcpli, "__MCPLI", FRV_BUILTIN_MCPLI, UNKNOWN, 0}, 8150 { CODE_FOR_mhsetlos, "__MHSETLOS", FRV_BUILTIN_MHSETLOS, UNKNOWN, 0}, 8151 { CODE_FOR_mhsetloh, "__MHSETLOH", FRV_BUILTIN_MHSETLOH, UNKNOWN, 0}, 8152 { CODE_FOR_mhsethis, "__MHSETHIS", FRV_BUILTIN_MHSETHIS, UNKNOWN, 0}, 8153 { CODE_FOR_mhsethih, "__MHSETHIH", FRV_BUILTIN_MHSETHIH, UNKNOWN, 0}, 8154 { CODE_FOR_mhdseth, "__MHDSETH", FRV_BUILTIN_MHDSETH, UNKNOWN, 0}, 8155 { CODE_FOR_mqsllhi, "__MQSLLHI", FRV_BUILTIN_MQSLLHI, UNKNOWN, 0}, 8156 { CODE_FOR_mqsrahi, "__MQSRAHI", FRV_BUILTIN_MQSRAHI, UNKNOWN, 0} 8157 }; 8158 8159 /* Media intrinsics that take two arguments and return void, the first argument 8160 being a pointer to 4 words in memory. */ 8161 8162 static struct builtin_description bdesc_void2arg[] = 8163 { 8164 { CODE_FOR_mdunpackh, "__MDUNPACKH", FRV_BUILTIN_MDUNPACKH, UNKNOWN, 0}, 8165 { CODE_FOR_mbtohe, "__MBTOHE", FRV_BUILTIN_MBTOHE, UNKNOWN, 0}, 8166 }; 8167 8168 /* Media intrinsics that take three arguments, the first being a const_int that 8169 denotes an accumulator, and that return void. 
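   The accumulator argument must be a constant; frv_int_to_acc turns it
   into the ACC register and the matching ACCG register is passed as an
   implicit fourth operand by frv_expand_voidtriop_builtin.  A hedged
   usage sketch (the wrapper function name is invented):

     void
     mac_into_acc0 (long x, long y)
     {
       __MMACHS (0, x, y);
     }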
*/ 8170 8171 static struct builtin_description bdesc_void3arg[] = 8172 { 8173 { CODE_FOR_mcpxrs, "__MCPXRS", FRV_BUILTIN_MCPXRS, UNKNOWN, 0}, 8174 { CODE_FOR_mcpxru, "__MCPXRU", FRV_BUILTIN_MCPXRU, UNKNOWN, 0}, 8175 { CODE_FOR_mcpxis, "__MCPXIS", FRV_BUILTIN_MCPXIS, UNKNOWN, 0}, 8176 { CODE_FOR_mcpxiu, "__MCPXIU", FRV_BUILTIN_MCPXIU, UNKNOWN, 0}, 8177 { CODE_FOR_mmulhs, "__MMULHS", FRV_BUILTIN_MMULHS, UNKNOWN, 0}, 8178 { CODE_FOR_mmulhu, "__MMULHU", FRV_BUILTIN_MMULHU, UNKNOWN, 0}, 8179 { CODE_FOR_mmulxhs, "__MMULXHS", FRV_BUILTIN_MMULXHS, UNKNOWN, 0}, 8180 { CODE_FOR_mmulxhu, "__MMULXHU", FRV_BUILTIN_MMULXHU, UNKNOWN, 0}, 8181 { CODE_FOR_mmachs, "__MMACHS", FRV_BUILTIN_MMACHS, UNKNOWN, 0}, 8182 { CODE_FOR_mmachu, "__MMACHU", FRV_BUILTIN_MMACHU, UNKNOWN, 0}, 8183 { CODE_FOR_mmrdhs, "__MMRDHS", FRV_BUILTIN_MMRDHS, UNKNOWN, 0}, 8184 { CODE_FOR_mmrdhu, "__MMRDHU", FRV_BUILTIN_MMRDHU, UNKNOWN, 0}, 8185 { CODE_FOR_mqcpxrs, "__MQCPXRS", FRV_BUILTIN_MQCPXRS, UNKNOWN, 0}, 8186 { CODE_FOR_mqcpxru, "__MQCPXRU", FRV_BUILTIN_MQCPXRU, UNKNOWN, 0}, 8187 { CODE_FOR_mqcpxis, "__MQCPXIS", FRV_BUILTIN_MQCPXIS, UNKNOWN, 0}, 8188 { CODE_FOR_mqcpxiu, "__MQCPXIU", FRV_BUILTIN_MQCPXIU, UNKNOWN, 0}, 8189 { CODE_FOR_mqmulhs, "__MQMULHS", FRV_BUILTIN_MQMULHS, UNKNOWN, 0}, 8190 { CODE_FOR_mqmulhu, "__MQMULHU", FRV_BUILTIN_MQMULHU, UNKNOWN, 0}, 8191 { CODE_FOR_mqmulxhs, "__MQMULXHS", FRV_BUILTIN_MQMULXHS, UNKNOWN, 0}, 8192 { CODE_FOR_mqmulxhu, "__MQMULXHU", FRV_BUILTIN_MQMULXHU, UNKNOWN, 0}, 8193 { CODE_FOR_mqmachs, "__MQMACHS", FRV_BUILTIN_MQMACHS, UNKNOWN, 0}, 8194 { CODE_FOR_mqmachu, "__MQMACHU", FRV_BUILTIN_MQMACHU, UNKNOWN, 0}, 8195 { CODE_FOR_mqxmachs, "__MQXMACHS", FRV_BUILTIN_MQXMACHS, UNKNOWN, 0}, 8196 { CODE_FOR_mqxmacxhs, "__MQXMACXHS", FRV_BUILTIN_MQXMACXHS, UNKNOWN, 0}, 8197 { CODE_FOR_mqmacxhs, "__MQMACXHS", FRV_BUILTIN_MQMACXHS, UNKNOWN, 0} 8198 }; 8199 8200 /* Media intrinsics that take two accumulator numbers as argument and 8201 return void. */ 8202 8203 static struct builtin_description bdesc_voidacc[] = 8204 { 8205 { CODE_FOR_maddaccs, "__MADDACCS", FRV_BUILTIN_MADDACCS, UNKNOWN, 0}, 8206 { CODE_FOR_msubaccs, "__MSUBACCS", FRV_BUILTIN_MSUBACCS, UNKNOWN, 0}, 8207 { CODE_FOR_masaccs, "__MASACCS", FRV_BUILTIN_MASACCS, UNKNOWN, 0}, 8208 { CODE_FOR_mdaddaccs, "__MDADDACCS", FRV_BUILTIN_MDADDACCS, UNKNOWN, 0}, 8209 { CODE_FOR_mdsubaccs, "__MDSUBACCS", FRV_BUILTIN_MDSUBACCS, UNKNOWN, 0}, 8210 { CODE_FOR_mdasaccs, "__MDASACCS", FRV_BUILTIN_MDASACCS, UNKNOWN, 0} 8211 }; 8212 8213 /* Intrinsics that load a value and then issue a MEMBAR. The load is 8214 a normal move and the ICODE is for the membar. */ 8215 8216 static struct builtin_description bdesc_loads[] = 8217 { 8218 { CODE_FOR_optional_membar_qi, "__builtin_read8", 8219 FRV_BUILTIN_READ8, UNKNOWN, 0}, 8220 { CODE_FOR_optional_membar_hi, "__builtin_read16", 8221 FRV_BUILTIN_READ16, UNKNOWN, 0}, 8222 { CODE_FOR_optional_membar_si, "__builtin_read32", 8223 FRV_BUILTIN_READ32, UNKNOWN, 0}, 8224 { CODE_FOR_optional_membar_di, "__builtin_read64", 8225 FRV_BUILTIN_READ64, UNKNOWN, 0} 8226 }; 8227 8228 /* Likewise stores. 
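   As with the loads above, the store itself is expanded as a normal
   volatile move and the ICODE only supplies the optional membar that
   follows it.  A hedged usage sketch (the wrapper function name is
   invented):

     void
     poke32 (volatile void *port, unsigned long value)
     {
       __builtin_write32 (port, value);
     }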
*/ 8229 8230 static struct builtin_description bdesc_stores[] = 8231 { 8232 { CODE_FOR_optional_membar_qi, "__builtin_write8", 8233 FRV_BUILTIN_WRITE8, UNKNOWN, 0}, 8234 { CODE_FOR_optional_membar_hi, "__builtin_write16", 8235 FRV_BUILTIN_WRITE16, UNKNOWN, 0}, 8236 { CODE_FOR_optional_membar_si, "__builtin_write32", 8237 FRV_BUILTIN_WRITE32, UNKNOWN, 0}, 8238 { CODE_FOR_optional_membar_di, "__builtin_write64", 8239 FRV_BUILTIN_WRITE64, UNKNOWN, 0}, 8240 }; 8241 8242 /* Initialize media builtins. */ 8243 8244 static void 8245 frv_init_builtins (void) 8246 { 8247 tree accumulator = integer_type_node; 8248 tree integer = integer_type_node; 8249 tree voidt = void_type_node; 8250 tree uhalf = short_unsigned_type_node; 8251 tree sword1 = long_integer_type_node; 8252 tree uword1 = long_unsigned_type_node; 8253 tree sword2 = long_long_integer_type_node; 8254 tree uword2 = long_long_unsigned_type_node; 8255 tree uword4 = build_pointer_type (uword1); 8256 tree vptr = build_pointer_type (build_type_variant (void_type_node, 0, 1)); 8257 tree ubyte = unsigned_char_type_node; 8258 tree iacc = integer_type_node; 8259 8260 #define UNARY(RET, T1) \ 8261 build_function_type_list (RET, T1, NULL_TREE) 8262 8263 #define BINARY(RET, T1, T2) \ 8264 build_function_type_list (RET, T1, T2, NULL_TREE) 8265 8266 #define TRINARY(RET, T1, T2, T3) \ 8267 build_function_type_list (RET, T1, T2, T3, NULL_TREE) 8268 8269 #define QUAD(RET, T1, T2, T3, T4) \ 8270 build_function_type_list (RET, T1, T2, T3, T4, NULL_TREE) 8271 8272 tree void_ftype_void = build_function_type_list (voidt, NULL_TREE); 8273 8274 tree void_ftype_acc = UNARY (voidt, accumulator); 8275 tree void_ftype_uw4_uw1 = BINARY (voidt, uword4, uword1); 8276 tree void_ftype_uw4_uw2 = BINARY (voidt, uword4, uword2); 8277 tree void_ftype_acc_uw1 = BINARY (voidt, accumulator, uword1); 8278 tree void_ftype_acc_acc = BINARY (voidt, accumulator, accumulator); 8279 tree void_ftype_acc_uw1_uw1 = TRINARY (voidt, accumulator, uword1, uword1); 8280 tree void_ftype_acc_sw1_sw1 = TRINARY (voidt, accumulator, sword1, sword1); 8281 tree void_ftype_acc_uw2_uw2 = TRINARY (voidt, accumulator, uword2, uword2); 8282 tree void_ftype_acc_sw2_sw2 = TRINARY (voidt, accumulator, sword2, sword2); 8283 8284 tree uw1_ftype_uw1 = UNARY (uword1, uword1); 8285 tree uw1_ftype_sw1 = UNARY (uword1, sword1); 8286 tree uw1_ftype_uw2 = UNARY (uword1, uword2); 8287 tree uw1_ftype_acc = UNARY (uword1, accumulator); 8288 tree uw1_ftype_uh_uh = BINARY (uword1, uhalf, uhalf); 8289 tree uw1_ftype_uw1_uw1 = BINARY (uword1, uword1, uword1); 8290 tree uw1_ftype_uw1_int = BINARY (uword1, uword1, integer); 8291 tree uw1_ftype_acc_uw1 = BINARY (uword1, accumulator, uword1); 8292 tree uw1_ftype_acc_sw1 = BINARY (uword1, accumulator, sword1); 8293 tree uw1_ftype_uw2_uw1 = BINARY (uword1, uword2, uword1); 8294 tree uw1_ftype_uw2_int = BINARY (uword1, uword2, integer); 8295 8296 tree sw1_ftype_int = UNARY (sword1, integer); 8297 tree sw1_ftype_sw1_sw1 = BINARY (sword1, sword1, sword1); 8298 tree sw1_ftype_sw1_int = BINARY (sword1, sword1, integer); 8299 8300 tree uw2_ftype_uw1 = UNARY (uword2, uword1); 8301 tree uw2_ftype_uw1_int = BINARY (uword2, uword1, integer); 8302 tree uw2_ftype_uw2_uw2 = BINARY (uword2, uword2, uword2); 8303 tree uw2_ftype_uw2_int = BINARY (uword2, uword2, integer); 8304 tree uw2_ftype_acc_int = BINARY (uword2, accumulator, integer); 8305 tree uw2_ftype_uh_uh_uh_uh = QUAD (uword2, uhalf, uhalf, uhalf, uhalf); 8306 8307 tree sw2_ftype_sw2_sw2 = BINARY (sword2, sword2, sword2); 8308 tree 
sw2_ftype_sw2_int = BINARY (sword2, sword2, integer); 8309 tree uw2_ftype_uw1_uw1 = BINARY (uword2, uword1, uword1); 8310 tree sw2_ftype_sw1_sw1 = BINARY (sword2, sword1, sword1); 8311 tree void_ftype_sw1_sw1 = BINARY (voidt, sword1, sword1); 8312 tree void_ftype_iacc_sw2 = BINARY (voidt, iacc, sword2); 8313 tree void_ftype_iacc_sw1 = BINARY (voidt, iacc, sword1); 8314 tree sw1_ftype_sw1 = UNARY (sword1, sword1); 8315 tree sw2_ftype_iacc = UNARY (sword2, iacc); 8316 tree sw1_ftype_iacc = UNARY (sword1, iacc); 8317 tree void_ftype_ptr = UNARY (voidt, const_ptr_type_node); 8318 tree uw1_ftype_vptr = UNARY (uword1, vptr); 8319 tree uw2_ftype_vptr = UNARY (uword2, vptr); 8320 tree void_ftype_vptr_ub = BINARY (voidt, vptr, ubyte); 8321 tree void_ftype_vptr_uh = BINARY (voidt, vptr, uhalf); 8322 tree void_ftype_vptr_uw1 = BINARY (voidt, vptr, uword1); 8323 tree void_ftype_vptr_uw2 = BINARY (voidt, vptr, uword2); 8324 8325 def_builtin ("__MAND", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAND); 8326 def_builtin ("__MOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MOR); 8327 def_builtin ("__MXOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MXOR); 8328 def_builtin ("__MNOT", uw1_ftype_uw1, FRV_BUILTIN_MNOT); 8329 def_builtin ("__MROTLI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTLI); 8330 def_builtin ("__MROTRI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTRI); 8331 def_builtin ("__MWCUT", uw1_ftype_uw2_uw1, FRV_BUILTIN_MWCUT); 8332 def_builtin ("__MAVEH", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAVEH); 8333 def_builtin ("__MSLLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSLLHI); 8334 def_builtin ("__MSRLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSRLHI); 8335 def_builtin ("__MSRAHI", sw1_ftype_sw1_int, FRV_BUILTIN_MSRAHI); 8336 def_builtin ("__MSATHS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSATHS); 8337 def_builtin ("__MSATHU", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSATHU); 8338 def_builtin ("__MADDHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MADDHSS); 8339 def_builtin ("__MADDHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MADDHUS); 8340 def_builtin ("__MSUBHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSUBHSS); 8341 def_builtin ("__MSUBHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSUBHUS); 8342 def_builtin ("__MMULHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULHS); 8343 def_builtin ("__MMULHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULHU); 8344 def_builtin ("__MMULXHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULXHS); 8345 def_builtin ("__MMULXHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULXHU); 8346 def_builtin ("__MMACHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMACHS); 8347 def_builtin ("__MMACHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMACHU); 8348 def_builtin ("__MMRDHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMRDHS); 8349 def_builtin ("__MMRDHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMRDHU); 8350 def_builtin ("__MQADDHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQADDHSS); 8351 def_builtin ("__MQADDHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQADDHUS); 8352 def_builtin ("__MQSUBHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSUBHSS); 8353 def_builtin ("__MQSUBHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQSUBHUS); 8354 def_builtin ("__MQMULHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULHS); 8355 def_builtin ("__MQMULHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULHU); 8356 def_builtin ("__MQMULXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULXHS); 8357 def_builtin ("__MQMULXHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULXHU); 8358 def_builtin ("__MQMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACHS); 8359 def_builtin ("__MQMACHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMACHU); 8360 def_builtin ("__MCPXRS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXRS); 8361 def_builtin 
("__MCPXRU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXRU); 8362 def_builtin ("__MCPXIS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXIS); 8363 def_builtin ("__MCPXIU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXIU); 8364 def_builtin ("__MQCPXRS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXRS); 8365 def_builtin ("__MQCPXRU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXRU); 8366 def_builtin ("__MQCPXIS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXIS); 8367 def_builtin ("__MQCPXIU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXIU); 8368 def_builtin ("__MCUT", uw1_ftype_acc_uw1, FRV_BUILTIN_MCUT); 8369 def_builtin ("__MCUTSS", uw1_ftype_acc_sw1, FRV_BUILTIN_MCUTSS); 8370 def_builtin ("__MEXPDHW", uw1_ftype_uw1_int, FRV_BUILTIN_MEXPDHW); 8371 def_builtin ("__MEXPDHD", uw2_ftype_uw1_int, FRV_BUILTIN_MEXPDHD); 8372 def_builtin ("__MPACKH", uw1_ftype_uh_uh, FRV_BUILTIN_MPACKH); 8373 def_builtin ("__MUNPACKH", uw2_ftype_uw1, FRV_BUILTIN_MUNPACKH); 8374 def_builtin ("__MDPACKH", uw2_ftype_uh_uh_uh_uh, FRV_BUILTIN_MDPACKH); 8375 def_builtin ("__MDUNPACKH", void_ftype_uw4_uw2, FRV_BUILTIN_MDUNPACKH); 8376 def_builtin ("__MBTOH", uw2_ftype_uw1, FRV_BUILTIN_MBTOH); 8377 def_builtin ("__MHTOB", uw1_ftype_uw2, FRV_BUILTIN_MHTOB); 8378 def_builtin ("__MBTOHE", void_ftype_uw4_uw1, FRV_BUILTIN_MBTOHE); 8379 def_builtin ("__MCLRACC", void_ftype_acc, FRV_BUILTIN_MCLRACC); 8380 def_builtin ("__MCLRACCA", void_ftype_void, FRV_BUILTIN_MCLRACCA); 8381 def_builtin ("__MRDACC", uw1_ftype_acc, FRV_BUILTIN_MRDACC); 8382 def_builtin ("__MRDACCG", uw1_ftype_acc, FRV_BUILTIN_MRDACCG); 8383 def_builtin ("__MWTACC", void_ftype_acc_uw1, FRV_BUILTIN_MWTACC); 8384 def_builtin ("__MWTACCG", void_ftype_acc_uw1, FRV_BUILTIN_MWTACCG); 8385 def_builtin ("__Mcop1", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP1); 8386 def_builtin ("__Mcop2", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP2); 8387 def_builtin ("__MTRAP", void_ftype_void, FRV_BUILTIN_MTRAP); 8388 def_builtin ("__MQXMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACHS); 8389 def_builtin ("__MQXMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACXHS); 8390 def_builtin ("__MQMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACXHS); 8391 def_builtin ("__MADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MADDACCS); 8392 def_builtin ("__MSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MSUBACCS); 8393 def_builtin ("__MASACCS", void_ftype_acc_acc, FRV_BUILTIN_MASACCS); 8394 def_builtin ("__MDADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MDADDACCS); 8395 def_builtin ("__MDSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MDSUBACCS); 8396 def_builtin ("__MDASACCS", void_ftype_acc_acc, FRV_BUILTIN_MDASACCS); 8397 def_builtin ("__MABSHS", uw1_ftype_sw1, FRV_BUILTIN_MABSHS); 8398 def_builtin ("__MDROTLI", uw2_ftype_uw2_int, FRV_BUILTIN_MDROTLI); 8399 def_builtin ("__MCPLHI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLHI); 8400 def_builtin ("__MCPLI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLI); 8401 def_builtin ("__MDCUTSSI", uw2_ftype_acc_int, FRV_BUILTIN_MDCUTSSI); 8402 def_builtin ("__MQSATHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSATHS); 8403 def_builtin ("__MHSETLOS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETLOS); 8404 def_builtin ("__MHSETHIS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETHIS); 8405 def_builtin ("__MHDSETS", sw1_ftype_int, FRV_BUILTIN_MHDSETS); 8406 def_builtin ("__MHSETLOH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETLOH); 8407 def_builtin ("__MHSETHIH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETHIH); 8408 def_builtin ("__MHDSETH", uw1_ftype_uw1_int, FRV_BUILTIN_MHDSETH); 8409 def_builtin ("__MQLCLRHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLCLRHS); 8410 def_builtin 
("__MQLMTHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLMTHS); 8411 def_builtin ("__MQSLLHI", uw2_ftype_uw2_int, FRV_BUILTIN_MQSLLHI); 8412 def_builtin ("__MQSRAHI", sw2_ftype_sw2_int, FRV_BUILTIN_MQSRAHI); 8413 def_builtin ("__SMUL", sw2_ftype_sw1_sw1, FRV_BUILTIN_SMUL); 8414 def_builtin ("__UMUL", uw2_ftype_uw1_uw1, FRV_BUILTIN_UMUL); 8415 def_builtin ("__SMASS", void_ftype_sw1_sw1, FRV_BUILTIN_SMASS); 8416 def_builtin ("__SMSSS", void_ftype_sw1_sw1, FRV_BUILTIN_SMSSS); 8417 def_builtin ("__SMU", void_ftype_sw1_sw1, FRV_BUILTIN_SMU); 8418 def_builtin ("__ADDSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_ADDSS); 8419 def_builtin ("__SUBSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SUBSS); 8420 def_builtin ("__SLASS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SLASS); 8421 def_builtin ("__SCAN", sw1_ftype_sw1_sw1, FRV_BUILTIN_SCAN); 8422 def_builtin ("__SCUTSS", sw1_ftype_sw1, FRV_BUILTIN_SCUTSS); 8423 def_builtin ("__IACCreadll", sw2_ftype_iacc, FRV_BUILTIN_IACCreadll); 8424 def_builtin ("__IACCreadl", sw1_ftype_iacc, FRV_BUILTIN_IACCreadl); 8425 def_builtin ("__IACCsetll", void_ftype_iacc_sw2, FRV_BUILTIN_IACCsetll); 8426 def_builtin ("__IACCsetl", void_ftype_iacc_sw1, FRV_BUILTIN_IACCsetl); 8427 def_builtin ("__data_prefetch0", void_ftype_ptr, FRV_BUILTIN_PREFETCH0); 8428 def_builtin ("__data_prefetch", void_ftype_ptr, FRV_BUILTIN_PREFETCH); 8429 def_builtin ("__builtin_read8", uw1_ftype_vptr, FRV_BUILTIN_READ8); 8430 def_builtin ("__builtin_read16", uw1_ftype_vptr, FRV_BUILTIN_READ16); 8431 def_builtin ("__builtin_read32", uw1_ftype_vptr, FRV_BUILTIN_READ32); 8432 def_builtin ("__builtin_read64", uw2_ftype_vptr, FRV_BUILTIN_READ64); 8433 8434 def_builtin ("__builtin_write8", void_ftype_vptr_ub, FRV_BUILTIN_WRITE8); 8435 def_builtin ("__builtin_write16", void_ftype_vptr_uh, FRV_BUILTIN_WRITE16); 8436 def_builtin ("__builtin_write32", void_ftype_vptr_uw1, FRV_BUILTIN_WRITE32); 8437 def_builtin ("__builtin_write64", void_ftype_vptr_uw2, FRV_BUILTIN_WRITE64); 8438 8439 #undef UNARY 8440 #undef BINARY 8441 #undef TRINARY 8442 #undef QUAD 8443 } 8444 8445 /* Set the names for various arithmetic operations according to the 8446 FRV ABI. 
*/ 8447 static void 8448 frv_init_libfuncs (void) 8449 { 8450 set_optab_libfunc (smod_optab, SImode, "__modi"); 8451 set_optab_libfunc (umod_optab, SImode, "__umodi"); 8452 8453 set_optab_libfunc (add_optab, DImode, "__addll"); 8454 set_optab_libfunc (sub_optab, DImode, "__subll"); 8455 set_optab_libfunc (smul_optab, DImode, "__mulll"); 8456 set_optab_libfunc (sdiv_optab, DImode, "__divll"); 8457 set_optab_libfunc (smod_optab, DImode, "__modll"); 8458 set_optab_libfunc (umod_optab, DImode, "__umodll"); 8459 set_optab_libfunc (and_optab, DImode, "__andll"); 8460 set_optab_libfunc (ior_optab, DImode, "__orll"); 8461 set_optab_libfunc (xor_optab, DImode, "__xorll"); 8462 set_optab_libfunc (one_cmpl_optab, DImode, "__notll"); 8463 8464 set_optab_libfunc (add_optab, SFmode, "__addf"); 8465 set_optab_libfunc (sub_optab, SFmode, "__subf"); 8466 set_optab_libfunc (smul_optab, SFmode, "__mulf"); 8467 set_optab_libfunc (sdiv_optab, SFmode, "__divf"); 8468 8469 set_optab_libfunc (add_optab, DFmode, "__addd"); 8470 set_optab_libfunc (sub_optab, DFmode, "__subd"); 8471 set_optab_libfunc (smul_optab, DFmode, "__muld"); 8472 set_optab_libfunc (sdiv_optab, DFmode, "__divd"); 8473 8474 set_conv_libfunc (sext_optab, DFmode, SFmode, "__ftod"); 8475 set_conv_libfunc (trunc_optab, SFmode, DFmode, "__dtof"); 8476 8477 set_conv_libfunc (sfix_optab, SImode, SFmode, "__ftoi"); 8478 set_conv_libfunc (sfix_optab, DImode, SFmode, "__ftoll"); 8479 set_conv_libfunc (sfix_optab, SImode, DFmode, "__dtoi"); 8480 set_conv_libfunc (sfix_optab, DImode, DFmode, "__dtoll"); 8481 8482 set_conv_libfunc (ufix_optab, SImode, SFmode, "__ftoui"); 8483 set_conv_libfunc (ufix_optab, DImode, SFmode, "__ftoull"); 8484 set_conv_libfunc (ufix_optab, SImode, DFmode, "__dtoui"); 8485 set_conv_libfunc (ufix_optab, DImode, DFmode, "__dtoull"); 8486 8487 set_conv_libfunc (sfloat_optab, SFmode, SImode, "__itof"); 8488 set_conv_libfunc (sfloat_optab, SFmode, DImode, "__lltof"); 8489 set_conv_libfunc (sfloat_optab, DFmode, SImode, "__itod"); 8490 set_conv_libfunc (sfloat_optab, DFmode, DImode, "__lltod"); 8491 } 8492 8493 /* Convert an integer constant to an accumulator register. ICODE is the 8494 code of the target instruction, OPNUM is the number of the 8495 accumulator operand and OPVAL is the constant integer. Try both 8496 ACC and ACCG registers; only report an error if neither fit the 8497 instruction. */ 8498 8499 static rtx 8500 frv_int_to_acc (enum insn_code icode, int opnum, rtx opval) 8501 { 8502 rtx reg; 8503 int i; 8504 8505 /* ACCs and ACCGs are implicit global registers if media intrinsics 8506 are being used. We set up this lazily to avoid creating lots of 8507 unnecessary call_insn rtl in non-media code. */ 8508 for (i = 0; i <= ACC_MASK; i++) 8509 if ((i & ACC_MASK) == i) 8510 global_regs[i + ACC_FIRST] = global_regs[i + ACCG_FIRST] = 1; 8511 8512 if (GET_CODE (opval) != CONST_INT) 8513 { 8514 error ("accumulator is not a constant integer"); 8515 return NULL_RTX; 8516 } 8517 if ((INTVAL (opval) & ~ACC_MASK) != 0) 8518 { 8519 error ("accumulator number is out of bounds"); 8520 return NULL_RTX; 8521 } 8522 8523 reg = gen_rtx_REG (insn_data[icode].operand[opnum].mode, 8524 ACC_FIRST + INTVAL (opval)); 8525 if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode)) 8526 SET_REGNO (reg, ACCG_FIRST + INTVAL (opval)); 8527 8528 if (! 
(*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode)) 8529 { 8530 error ("inappropriate accumulator for %qs", insn_data[icode].name); 8531 return NULL_RTX; 8532 } 8533 return reg; 8534 } 8535 8536 /* If an ACC rtx has mode MODE, return the mode that the matching ACCG 8537 should have. */ 8538 8539 static machine_mode 8540 frv_matching_accg_mode (machine_mode mode) 8541 { 8542 switch (mode) 8543 { 8544 case E_V4SImode: 8545 return V4QImode; 8546 8547 case E_DImode: 8548 return HImode; 8549 8550 case E_SImode: 8551 return QImode; 8552 8553 default: 8554 gcc_unreachable (); 8555 } 8556 } 8557 8558 /* Given that a __builtin_read or __builtin_write function is accessing 8559 address ADDRESS, return the value that should be used as operand 1 8560 of the membar. */ 8561 8562 static rtx 8563 frv_io_address_cookie (rtx address) 8564 { 8565 return (GET_CODE (address) == CONST_INT 8566 ? GEN_INT (INTVAL (address) / 8 * 8) 8567 : const0_rtx); 8568 } 8569 8570 /* Return the accumulator guard that should be paired with accumulator 8571 register ACC. The mode of the returned register is in the same 8572 class as ACC, but is four times smaller. */ 8573 8574 rtx 8575 frv_matching_accg_for_acc (rtx acc) 8576 { 8577 return gen_rtx_REG (frv_matching_accg_mode (GET_MODE (acc)), 8578 REGNO (acc) - ACC_FIRST + ACCG_FIRST); 8579 } 8580 8581 /* Read the requested argument from the call EXP given by INDEX. 8582 Return the value as an rtx. */ 8583 8584 static rtx 8585 frv_read_argument (tree exp, unsigned int index) 8586 { 8587 return expand_normal (CALL_EXPR_ARG (exp, index)); 8588 } 8589 8590 /* Like frv_read_argument, but interpret the argument as the number 8591 of an IACC register and return a (reg:MODE ...) rtx for it. */ 8592 8593 static rtx 8594 frv_read_iacc_argument (machine_mode mode, tree call, 8595 unsigned int index) 8596 { 8597 int i, regno; 8598 rtx op; 8599 8600 op = frv_read_argument (call, index); 8601 if (GET_CODE (op) != CONST_INT 8602 || INTVAL (op) < 0 8603 || INTVAL (op) > IACC_LAST - IACC_FIRST 8604 || ((INTVAL (op) * 4) & (GET_MODE_SIZE (mode) - 1)) != 0) 8605 { 8606 error ("invalid IACC argument"); 8607 op = const0_rtx; 8608 } 8609 8610 /* IACCs are implicit global registers. We set up this lazily to 8611 avoid creating lots of unnecessary call_insn rtl when IACCs aren't 8612 being used. */ 8613 regno = INTVAL (op) + IACC_FIRST; 8614 for (i = 0; i < hard_regno_nregs (regno, mode); i++) 8615 global_regs[regno + i] = 1; 8616 8617 return gen_rtx_REG (mode, regno); 8618 } 8619 8620 /* Return true if OPVAL can be used for operand OPNUM of instruction ICODE. 8621 The instruction should require a constant operand of some sort. The 8622 function prints an error if OPVAL is not valid. */ 8623 8624 static int 8625 frv_check_constant_argument (enum insn_code icode, int opnum, rtx opval) 8626 { 8627 if (GET_CODE (opval) != CONST_INT) 8628 { 8629 error ("%qs expects a constant argument", insn_data[icode].name); 8630 return FALSE; 8631 } 8632 if (! (*insn_data[icode].operand[opnum].predicate) (opval, VOIDmode)) 8633 { 8634 error ("constant argument out of range for %qs", insn_data[icode].name); 8635 return FALSE; 8636 } 8637 return TRUE; 8638 } 8639 8640 /* Return a legitimate rtx for instruction ICODE's return value. Use TARGET 8641 if it's not null, has the right mode, and satisfies operand 0's 8642 predicate. */ 8643 8644 static rtx 8645 frv_legitimize_target (enum insn_code icode, rtx target) 8646 { 8647 machine_mode mode = insn_data[icode].operand[0].mode; 8648 8649 if (! 
target 8650 || GET_MODE (target) != mode 8651 || ! (*insn_data[icode].operand[0].predicate) (target, mode)) 8652 return gen_reg_rtx (mode); 8653 else 8654 return target; 8655 } 8656 8657 /* Given that ARG is being passed as operand OPNUM to instruction ICODE, 8658 check whether ARG satisfies the operand's constraints. If it doesn't, 8659 copy ARG to a temporary register and return that. Otherwise return ARG 8660 itself. */ 8661 8662 static rtx 8663 frv_legitimize_argument (enum insn_code icode, int opnum, rtx arg) 8664 { 8665 machine_mode mode = insn_data[icode].operand[opnum].mode; 8666 8667 if ((*insn_data[icode].operand[opnum].predicate) (arg, mode)) 8668 return arg; 8669 else 8670 return copy_to_mode_reg (mode, arg); 8671 } 8672 8673 /* Return a volatile memory reference of mode MODE whose address is ARG. */ 8674 8675 static rtx 8676 frv_volatile_memref (machine_mode mode, rtx arg) 8677 { 8678 rtx mem; 8679 8680 mem = gen_rtx_MEM (mode, memory_address (mode, arg)); 8681 MEM_VOLATILE_P (mem) = 1; 8682 return mem; 8683 } 8684 8685 /* Expand builtins that take a single, constant argument. At the moment, 8686 only MHDSETS falls into this category. */ 8687 8688 static rtx 8689 frv_expand_set_builtin (enum insn_code icode, tree call, rtx target) 8690 { 8691 rtx pat; 8692 rtx op0 = frv_read_argument (call, 0); 8693 8694 if (! frv_check_constant_argument (icode, 1, op0)) 8695 return NULL_RTX; 8696 8697 target = frv_legitimize_target (icode, target); 8698 pat = GEN_FCN (icode) (target, op0); 8699 if (! pat) 8700 return NULL_RTX; 8701 8702 emit_insn (pat); 8703 return target; 8704 } 8705 8706 /* Expand builtins that take one operand. */ 8707 8708 static rtx 8709 frv_expand_unop_builtin (enum insn_code icode, tree call, rtx target) 8710 { 8711 rtx pat; 8712 rtx op0 = frv_read_argument (call, 0); 8713 8714 target = frv_legitimize_target (icode, target); 8715 op0 = frv_legitimize_argument (icode, 1, op0); 8716 pat = GEN_FCN (icode) (target, op0); 8717 if (! pat) 8718 return NULL_RTX; 8719 8720 emit_insn (pat); 8721 return target; 8722 } 8723 8724 /* Expand builtins that take two operands. */ 8725 8726 static rtx 8727 frv_expand_binop_builtin (enum insn_code icode, tree call, rtx target) 8728 { 8729 rtx pat; 8730 rtx op0 = frv_read_argument (call, 0); 8731 rtx op1 = frv_read_argument (call, 1); 8732 8733 target = frv_legitimize_target (icode, target); 8734 op0 = frv_legitimize_argument (icode, 1, op0); 8735 op1 = frv_legitimize_argument (icode, 2, op1); 8736 pat = GEN_FCN (icode) (target, op0, op1); 8737 if (! pat) 8738 return NULL_RTX; 8739 8740 emit_insn (pat); 8741 return target; 8742 } 8743 8744 /* Expand cut-style builtins, which take two operands and an implicit ACCG 8745 one. */ 8746 8747 static rtx 8748 frv_expand_cut_builtin (enum insn_code icode, tree call, rtx target) 8749 { 8750 rtx pat; 8751 rtx op0 = frv_read_argument (call, 0); 8752 rtx op1 = frv_read_argument (call, 1); 8753 rtx op2; 8754 8755 target = frv_legitimize_target (icode, target); 8756 op0 = frv_int_to_acc (icode, 1, op0); 8757 if (! op0) 8758 return NULL_RTX; 8759 8760 if (icode == CODE_FOR_mdcutssi || GET_CODE (op1) == CONST_INT) 8761 { 8762 if (! frv_check_constant_argument (icode, 2, op1)) 8763 return NULL_RTX; 8764 } 8765 else 8766 op1 = frv_legitimize_argument (icode, 2, op1); 8767 8768 op2 = frv_matching_accg_for_acc (op0); 8769 pat = GEN_FCN (icode) (target, op0, op1, op2); 8770 if (! 
pat) 8771 return NULL_RTX; 8772 8773 emit_insn (pat); 8774 return target; 8775 } 8776 8777 /* Expand builtins that take two operands and the second is immediate. */ 8778 8779 static rtx 8780 frv_expand_binopimm_builtin (enum insn_code icode, tree call, rtx target) 8781 { 8782 rtx pat; 8783 rtx op0 = frv_read_argument (call, 0); 8784 rtx op1 = frv_read_argument (call, 1); 8785 8786 if (! frv_check_constant_argument (icode, 2, op1)) 8787 return NULL_RTX; 8788 8789 target = frv_legitimize_target (icode, target); 8790 op0 = frv_legitimize_argument (icode, 1, op0); 8791 pat = GEN_FCN (icode) (target, op0, op1); 8792 if (! pat) 8793 return NULL_RTX; 8794 8795 emit_insn (pat); 8796 return target; 8797 } 8798 8799 /* Expand builtins that take two operands, the first operand being a pointer to 8800 ints and return void. */ 8801 8802 static rtx 8803 frv_expand_voidbinop_builtin (enum insn_code icode, tree call) 8804 { 8805 rtx pat; 8806 rtx op0 = frv_read_argument (call, 0); 8807 rtx op1 = frv_read_argument (call, 1); 8808 machine_mode mode0 = insn_data[icode].operand[0].mode; 8809 rtx addr; 8810 8811 if (GET_CODE (op0) != MEM) 8812 { 8813 rtx reg = op0; 8814 8815 if (! offsettable_address_p (0, mode0, op0)) 8816 { 8817 reg = gen_reg_rtx (Pmode); 8818 emit_insn (gen_rtx_SET (reg, op0)); 8819 } 8820 8821 op0 = gen_rtx_MEM (SImode, reg); 8822 } 8823 8824 addr = XEXP (op0, 0); 8825 if (! offsettable_address_p (0, mode0, addr)) 8826 addr = copy_to_mode_reg (Pmode, op0); 8827 8828 op0 = change_address (op0, V4SImode, addr); 8829 op1 = frv_legitimize_argument (icode, 1, op1); 8830 pat = GEN_FCN (icode) (op0, op1); 8831 if (! pat) 8832 return 0; 8833 8834 emit_insn (pat); 8835 return 0; 8836 } 8837 8838 /* Expand builtins that take two long operands and return void. */ 8839 8840 static rtx 8841 frv_expand_int_void2arg (enum insn_code icode, tree call) 8842 { 8843 rtx pat; 8844 rtx op0 = frv_read_argument (call, 0); 8845 rtx op1 = frv_read_argument (call, 1); 8846 8847 op0 = frv_legitimize_argument (icode, 1, op0); 8848 op1 = frv_legitimize_argument (icode, 1, op1); 8849 pat = GEN_FCN (icode) (op0, op1); 8850 if (! pat) 8851 return NULL_RTX; 8852 8853 emit_insn (pat); 8854 return NULL_RTX; 8855 } 8856 8857 /* Expand prefetch builtins. These take a single address as argument. */ 8858 8859 static rtx 8860 frv_expand_prefetches (enum insn_code icode, tree call) 8861 { 8862 rtx pat; 8863 rtx op0 = frv_read_argument (call, 0); 8864 8865 pat = GEN_FCN (icode) (force_reg (Pmode, op0)); 8866 if (! pat) 8867 return 0; 8868 8869 emit_insn (pat); 8870 return 0; 8871 } 8872 8873 /* Expand builtins that take three operands and return void. The first 8874 argument must be a constant that describes a pair or quad accumulators. A 8875 fourth argument is created that is the accumulator guard register that 8876 corresponds to the accumulator. */ 8877 8878 static rtx 8879 frv_expand_voidtriop_builtin (enum insn_code icode, tree call) 8880 { 8881 rtx pat; 8882 rtx op0 = frv_read_argument (call, 0); 8883 rtx op1 = frv_read_argument (call, 1); 8884 rtx op2 = frv_read_argument (call, 2); 8885 rtx op3; 8886 8887 op0 = frv_int_to_acc (icode, 0, op0); 8888 if (! op0) 8889 return NULL_RTX; 8890 8891 op1 = frv_legitimize_argument (icode, 1, op1); 8892 op2 = frv_legitimize_argument (icode, 2, op2); 8893 op3 = frv_matching_accg_for_acc (op0); 8894 pat = GEN_FCN (icode) (op0, op1, op2, op3); 8895 if (! 
pat) 8896 return NULL_RTX; 8897 8898 emit_insn (pat); 8899 return NULL_RTX; 8900 } 8901 8902 /* Expand builtins that perform accumulator-to-accumulator operations. 8903 These builtins take two accumulator numbers as argument and return 8904 void. */ 8905 8906 static rtx 8907 frv_expand_voidaccop_builtin (enum insn_code icode, tree call) 8908 { 8909 rtx pat; 8910 rtx op0 = frv_read_argument (call, 0); 8911 rtx op1 = frv_read_argument (call, 1); 8912 rtx op2; 8913 rtx op3; 8914 8915 op0 = frv_int_to_acc (icode, 0, op0); 8916 if (! op0) 8917 return NULL_RTX; 8918 8919 op1 = frv_int_to_acc (icode, 1, op1); 8920 if (! op1) 8921 return NULL_RTX; 8922 8923 op2 = frv_matching_accg_for_acc (op0); 8924 op3 = frv_matching_accg_for_acc (op1); 8925 pat = GEN_FCN (icode) (op0, op1, op2, op3); 8926 if (! pat) 8927 return NULL_RTX; 8928 8929 emit_insn (pat); 8930 return NULL_RTX; 8931 } 8932 8933 /* Expand a __builtin_read* function. ICODE is the instruction code for the 8934 membar and TARGET_MODE is the mode that the loaded value should have. */ 8935 8936 static rtx 8937 frv_expand_load_builtin (enum insn_code icode, machine_mode target_mode, 8938 tree call, rtx target) 8939 { 8940 rtx op0 = frv_read_argument (call, 0); 8941 rtx cookie = frv_io_address_cookie (op0); 8942 8943 if (target == 0 || !REG_P (target)) 8944 target = gen_reg_rtx (target_mode); 8945 op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0); 8946 convert_move (target, op0, 1); 8947 emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_READ))); 8948 cfun->machine->has_membar_p = 1; 8949 return target; 8950 } 8951 8952 /* Likewise __builtin_write* functions. */ 8953 8954 static rtx 8955 frv_expand_store_builtin (enum insn_code icode, tree call) 8956 { 8957 rtx op0 = frv_read_argument (call, 0); 8958 rtx op1 = frv_read_argument (call, 1); 8959 rtx cookie = frv_io_address_cookie (op0); 8960 8961 op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0); 8962 convert_move (op0, force_reg (insn_data[icode].operand[0].mode, op1), 1); 8963 emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_WRITE))); 8964 cfun->machine->has_membar_p = 1; 8965 return NULL_RTX; 8966 } 8967 8968 /* Expand the MDPACKH builtin. It takes four unsigned short arguments and 8969 each argument forms one word of the two double-word input registers. 8970 CALL is the tree for the call and TARGET, if nonnull, suggests a good place 8971 to put the return value. */ 8972 8973 static rtx 8974 frv_expand_mdpackh_builtin (tree call, rtx target) 8975 { 8976 enum insn_code icode = CODE_FOR_mdpackh; 8977 rtx pat, op0, op1; 8978 rtx arg1 = frv_read_argument (call, 0); 8979 rtx arg2 = frv_read_argument (call, 1); 8980 rtx arg3 = frv_read_argument (call, 2); 8981 rtx arg4 = frv_read_argument (call, 3); 8982 8983 target = frv_legitimize_target (icode, target); 8984 op0 = gen_reg_rtx (DImode); 8985 op1 = gen_reg_rtx (DImode); 8986 8987 /* The high half of each word is not explicitly initialized, so indicate 8988 that the input operands are not live before this point. */ 8989 emit_clobber (op0); 8990 emit_clobber (op1); 8991 8992 /* Move each argument into the low half of its associated input word. 
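   On this big-endian target the low halfword of each word of a DImode
   register lies at subreg byte offsets 2 and 6, so after these moves
   op0 holds { undef, arg1, undef, arg2 } and op1 holds
   { undef, arg3, undef, arg4 } when viewed as four halfwords.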
*/ 8993 emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 2), arg1); 8994 emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 6), arg2); 8995 emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 2), arg3); 8996 emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 6), arg4); 8997 8998 pat = GEN_FCN (icode) (target, op0, op1); 8999 if (! pat) 9000 return NULL_RTX; 9001 9002 emit_insn (pat); 9003 return target; 9004 } 9005 9006 /* Expand the MCLRACC builtin. This builtin takes a single accumulator 9007 number as argument. */ 9008 9009 static rtx 9010 frv_expand_mclracc_builtin (tree call) 9011 { 9012 enum insn_code icode = CODE_FOR_mclracc; 9013 rtx pat; 9014 rtx op0 = frv_read_argument (call, 0); 9015 9016 op0 = frv_int_to_acc (icode, 0, op0); 9017 if (! op0) 9018 return NULL_RTX; 9019 9020 pat = GEN_FCN (icode) (op0); 9021 if (pat) 9022 emit_insn (pat); 9023 9024 return NULL_RTX; 9025 } 9026 9027 /* Expand builtins that take no arguments. */ 9028 9029 static rtx 9030 frv_expand_noargs_builtin (enum insn_code icode) 9031 { 9032 rtx pat = GEN_FCN (icode) (const0_rtx); 9033 if (pat) 9034 emit_insn (pat); 9035 9036 return NULL_RTX; 9037 } 9038 9039 /* Expand MRDACC and MRDACCG. These builtins take a single accumulator 9040 number or accumulator guard number as argument and return an SI integer. */ 9041 9042 static rtx 9043 frv_expand_mrdacc_builtin (enum insn_code icode, tree call) 9044 { 9045 rtx pat; 9046 rtx target = gen_reg_rtx (SImode); 9047 rtx op0 = frv_read_argument (call, 0); 9048 9049 op0 = frv_int_to_acc (icode, 1, op0); 9050 if (! op0) 9051 return NULL_RTX; 9052 9053 pat = GEN_FCN (icode) (target, op0); 9054 if (! pat) 9055 return NULL_RTX; 9056 9057 emit_insn (pat); 9058 return target; 9059 } 9060 9061 /* Expand MWTACC and MWTACCG. These builtins take an accumulator or 9062 accumulator guard as their first argument and an SImode value as their 9063 second. */ 9064 9065 static rtx 9066 frv_expand_mwtacc_builtin (enum insn_code icode, tree call) 9067 { 9068 rtx pat; 9069 rtx op0 = frv_read_argument (call, 0); 9070 rtx op1 = frv_read_argument (call, 1); 9071 9072 op0 = frv_int_to_acc (icode, 0, op0); 9073 if (! op0) 9074 return NULL_RTX; 9075 9076 op1 = frv_legitimize_argument (icode, 1, op1); 9077 pat = GEN_FCN (icode) (op0, op1); 9078 if (pat) 9079 emit_insn (pat); 9080 9081 return NULL_RTX; 9082 } 9083 9084 /* Emit a move from SRC to DEST in SImode chunks. This can be used 9085 to move DImode values into and out of IACC0. */ 9086 9087 static void 9088 frv_split_iacc_move (rtx dest, rtx src) 9089 { 9090 machine_mode inner; 9091 int i; 9092 9093 inner = GET_MODE (dest); 9094 for (i = 0; i < GET_MODE_SIZE (inner); i += GET_MODE_SIZE (SImode)) 9095 emit_move_insn (simplify_gen_subreg (SImode, dest, inner, i), 9096 simplify_gen_subreg (SImode, src, inner, i)); 9097 } 9098 9099 /* Expand builtins. 
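   This is the target's builtin expander: EXP is the CALL_EXPR for one
   of the functions registered in frv_init_builtins and TARGET, if
   nonnull, suggests where to put the result.  Availability is checked
   first, then the few builtins that need special handling, and finally
   the description tables above are searched.  A hedged sketch of a
   call that is handled through the bdesc_2arg table (the wrapper
   function is invented):

     unsigned long
     average (unsigned long a, unsigned long b)
     {
       return __MAVEH (a, b);
     }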
*/ 9100 9101 static rtx 9102 frv_expand_builtin (tree exp, 9103 rtx target, 9104 rtx subtarget ATTRIBUTE_UNUSED, 9105 machine_mode mode ATTRIBUTE_UNUSED, 9106 int ignore ATTRIBUTE_UNUSED) 9107 { 9108 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0); 9109 unsigned fcode = (unsigned)DECL_FUNCTION_CODE (fndecl); 9110 unsigned i; 9111 struct builtin_description *d; 9112 9113 if (fcode < FRV_BUILTIN_FIRST_NONMEDIA && !TARGET_MEDIA) 9114 { 9115 error ("media functions are not available unless -mmedia is used"); 9116 return NULL_RTX; 9117 } 9118 9119 switch (fcode) 9120 { 9121 case FRV_BUILTIN_MCOP1: 9122 case FRV_BUILTIN_MCOP2: 9123 case FRV_BUILTIN_MDUNPACKH: 9124 case FRV_BUILTIN_MBTOHE: 9125 if (! TARGET_MEDIA_REV1) 9126 { 9127 error ("this media function is only available on the fr500"); 9128 return NULL_RTX; 9129 } 9130 break; 9131 9132 case FRV_BUILTIN_MQXMACHS: 9133 case FRV_BUILTIN_MQXMACXHS: 9134 case FRV_BUILTIN_MQMACXHS: 9135 case FRV_BUILTIN_MADDACCS: 9136 case FRV_BUILTIN_MSUBACCS: 9137 case FRV_BUILTIN_MASACCS: 9138 case FRV_BUILTIN_MDADDACCS: 9139 case FRV_BUILTIN_MDSUBACCS: 9140 case FRV_BUILTIN_MDASACCS: 9141 case FRV_BUILTIN_MABSHS: 9142 case FRV_BUILTIN_MDROTLI: 9143 case FRV_BUILTIN_MCPLHI: 9144 case FRV_BUILTIN_MCPLI: 9145 case FRV_BUILTIN_MDCUTSSI: 9146 case FRV_BUILTIN_MQSATHS: 9147 case FRV_BUILTIN_MHSETLOS: 9148 case FRV_BUILTIN_MHSETLOH: 9149 case FRV_BUILTIN_MHSETHIS: 9150 case FRV_BUILTIN_MHSETHIH: 9151 case FRV_BUILTIN_MHDSETS: 9152 case FRV_BUILTIN_MHDSETH: 9153 if (! TARGET_MEDIA_REV2) 9154 { 9155 error ("this media function is only available on the fr400" 9156 " and fr550"); 9157 return NULL_RTX; 9158 } 9159 break; 9160 9161 case FRV_BUILTIN_SMASS: 9162 case FRV_BUILTIN_SMSSS: 9163 case FRV_BUILTIN_SMU: 9164 case FRV_BUILTIN_ADDSS: 9165 case FRV_BUILTIN_SUBSS: 9166 case FRV_BUILTIN_SLASS: 9167 case FRV_BUILTIN_SCUTSS: 9168 case FRV_BUILTIN_IACCreadll: 9169 case FRV_BUILTIN_IACCreadl: 9170 case FRV_BUILTIN_IACCsetll: 9171 case FRV_BUILTIN_IACCsetl: 9172 if (!TARGET_FR405_BUILTINS) 9173 { 9174 error ("this builtin function is only available" 9175 " on the fr405 and fr450"); 9176 return NULL_RTX; 9177 } 9178 break; 9179 9180 case FRV_BUILTIN_PREFETCH: 9181 if (!TARGET_FR500_FR550_BUILTINS) 9182 { 9183 error ("this builtin function is only available on the fr500" 9184 " and fr550"); 9185 return NULL_RTX; 9186 } 9187 break; 9188 9189 case FRV_BUILTIN_MQLCLRHS: 9190 case FRV_BUILTIN_MQLMTHS: 9191 case FRV_BUILTIN_MQSLLHI: 9192 case FRV_BUILTIN_MQSRAHI: 9193 if (!TARGET_MEDIA_FR450) 9194 { 9195 error ("this builtin function is only available on the fr450"); 9196 return NULL_RTX; 9197 } 9198 break; 9199 9200 default: 9201 break; 9202 } 9203 9204 /* Expand unique builtins. 
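   These are the builtins that need bespoke expanders rather than one of
   the table-driven helpers: __MCLRACC takes a constant accumulator
   number, __MCLRACCA takes no arguments, __MDPACKH builds its two
   DImode inputs by hand, and the IACC builtins move DImode values in
   SImode halves.  A hedged usage sketch (the wrapper function is
   invented):

     void
     reset_dsp_state (void)
     {
       __MCLRACC (2);
       __IACCsetll (0, 0x0123456789abLL);
     }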
*/ 9205 9206 switch (fcode) 9207 { 9208 case FRV_BUILTIN_MTRAP: 9209 return frv_expand_noargs_builtin (CODE_FOR_mtrap); 9210 9211 case FRV_BUILTIN_MCLRACC: 9212 return frv_expand_mclracc_builtin (exp); 9213 9214 case FRV_BUILTIN_MCLRACCA: 9215 if (TARGET_ACC_8) 9216 return frv_expand_noargs_builtin (CODE_FOR_mclracca8); 9217 else 9218 return frv_expand_noargs_builtin (CODE_FOR_mclracca4); 9219 9220 case FRV_BUILTIN_MRDACC: 9221 return frv_expand_mrdacc_builtin (CODE_FOR_mrdacc, exp); 9222 9223 case FRV_BUILTIN_MRDACCG: 9224 return frv_expand_mrdacc_builtin (CODE_FOR_mrdaccg, exp); 9225 9226 case FRV_BUILTIN_MWTACC: 9227 return frv_expand_mwtacc_builtin (CODE_FOR_mwtacc, exp); 9228 9229 case FRV_BUILTIN_MWTACCG: 9230 return frv_expand_mwtacc_builtin (CODE_FOR_mwtaccg, exp); 9231 9232 case FRV_BUILTIN_MDPACKH: 9233 return frv_expand_mdpackh_builtin (exp, target); 9234 9235 case FRV_BUILTIN_IACCreadll: 9236 { 9237 rtx src = frv_read_iacc_argument (DImode, exp, 0); 9238 if (target == 0 || !REG_P (target)) 9239 target = gen_reg_rtx (DImode); 9240 frv_split_iacc_move (target, src); 9241 return target; 9242 } 9243 9244 case FRV_BUILTIN_IACCreadl: 9245 return frv_read_iacc_argument (SImode, exp, 0); 9246 9247 case FRV_BUILTIN_IACCsetll: 9248 { 9249 rtx dest = frv_read_iacc_argument (DImode, exp, 0); 9250 rtx src = frv_read_argument (exp, 1); 9251 frv_split_iacc_move (dest, force_reg (DImode, src)); 9252 return 0; 9253 } 9254 9255 case FRV_BUILTIN_IACCsetl: 9256 { 9257 rtx dest = frv_read_iacc_argument (SImode, exp, 0); 9258 rtx src = frv_read_argument (exp, 1); 9259 emit_move_insn (dest, force_reg (SImode, src)); 9260 return 0; 9261 } 9262 9263 default: 9264 break; 9265 } 9266 9267 /* Expand groups of builtins. */ 9268 9269 for (i = 0, d = bdesc_set; i < ARRAY_SIZE (bdesc_set); i++, d++) 9270 if (d->code == fcode) 9271 return frv_expand_set_builtin (d->icode, exp, target); 9272 9273 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++) 9274 if (d->code == fcode) 9275 return frv_expand_unop_builtin (d->icode, exp, target); 9276 9277 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++) 9278 if (d->code == fcode) 9279 return frv_expand_binop_builtin (d->icode, exp, target); 9280 9281 for (i = 0, d = bdesc_cut; i < ARRAY_SIZE (bdesc_cut); i++, d++) 9282 if (d->code == fcode) 9283 return frv_expand_cut_builtin (d->icode, exp, target); 9284 9285 for (i = 0, d = bdesc_2argimm; i < ARRAY_SIZE (bdesc_2argimm); i++, d++) 9286 if (d->code == fcode) 9287 return frv_expand_binopimm_builtin (d->icode, exp, target); 9288 9289 for (i = 0, d = bdesc_void2arg; i < ARRAY_SIZE (bdesc_void2arg); i++, d++) 9290 if (d->code == fcode) 9291 return frv_expand_voidbinop_builtin (d->icode, exp); 9292 9293 for (i = 0, d = bdesc_void3arg; i < ARRAY_SIZE (bdesc_void3arg); i++, d++) 9294 if (d->code == fcode) 9295 return frv_expand_voidtriop_builtin (d->icode, exp); 9296 9297 for (i = 0, d = bdesc_voidacc; i < ARRAY_SIZE (bdesc_voidacc); i++, d++) 9298 if (d->code == fcode) 9299 return frv_expand_voidaccop_builtin (d->icode, exp); 9300 9301 for (i = 0, d = bdesc_int_void2arg; 9302 i < ARRAY_SIZE (bdesc_int_void2arg); i++, d++) 9303 if (d->code == fcode) 9304 return frv_expand_int_void2arg (d->icode, exp); 9305 9306 for (i = 0, d = bdesc_prefetches; 9307 i < ARRAY_SIZE (bdesc_prefetches); i++, d++) 9308 if (d->code == fcode) 9309 return frv_expand_prefetches (d->icode, exp); 9310 9311 for (i = 0, d = bdesc_loads; i < ARRAY_SIZE (bdesc_loads); i++, d++) 9312 if (d->code == fcode) 9313 return 
frv_expand_load_builtin (d->icode, TYPE_MODE (TREE_TYPE (exp)), 9314 exp, target); 9315 9316 for (i = 0, d = bdesc_stores; i < ARRAY_SIZE (bdesc_stores); i++, d++) 9317 if (d->code == fcode) 9318 return frv_expand_store_builtin (d->icode, exp); 9319 9320 return 0; 9321 } 9322 9323 static bool 9324 frv_in_small_data_p (const_tree decl) 9325 { 9326 HOST_WIDE_INT size; 9327 const char *section_name; 9328 9329 /* Don't apply the -G flag to internal compiler structures. We 9330 should leave such structures in the main data section, partly 9331 for efficiency and partly because the size of some of them 9332 (such as C++ typeinfos) is not known until later. */ 9333 if (TREE_CODE (decl) != VAR_DECL || DECL_ARTIFICIAL (decl)) 9334 return false; 9335 9336 /* If we already know which section the decl should be in, see if 9337 it's a small data section. */ 9338 section_name = DECL_SECTION_NAME (decl); 9339 if (section_name) 9340 { 9341 if (frv_string_begins_with (section_name, ".sdata")) 9342 return true; 9343 if (frv_string_begins_with (section_name, ".sbss")) 9344 return true; 9345 return false; 9346 } 9347 9348 size = int_size_in_bytes (TREE_TYPE (decl)); 9349 if (size > 0 && size <= g_switch_value) 9350 return true; 9351 9352 return false; 9353 } 9354 9355 static bool 9356 frv_rtx_costs (rtx x, 9357 machine_mode mode, 9358 int outer_code, 9359 int opno ATTRIBUTE_UNUSED, 9360 int *total, 9361 bool speed ATTRIBUTE_UNUSED) 9362 { 9363 int code = GET_CODE (x); 9364 9365 if (outer_code == MEM) 9366 { 9367 /* Don't differentiate between memory addresses. All the ones 9368 we accept have equal cost. */ 9369 *total = COSTS_N_INSNS (0); 9370 return true; 9371 } 9372 9373 switch (code) 9374 { 9375 case CONST_INT: 9376 /* Make 12-bit integers really cheap. */ 9377 if (IN_RANGE (INTVAL (x), -2048, 2047)) 9378 { 9379 *total = 0; 9380 return true; 9381 } 9382 /* Fall through. 
*/ 9383 9384 case CONST: 9385 case LABEL_REF: 9386 case SYMBOL_REF: 9387 case CONST_DOUBLE: 9388 *total = COSTS_N_INSNS (2); 9389 return true; 9390 9391 case PLUS: 9392 case MINUS: 9393 case AND: 9394 case IOR: 9395 case XOR: 9396 case ASHIFT: 9397 case ASHIFTRT: 9398 case LSHIFTRT: 9399 case NOT: 9400 case NEG: 9401 case COMPARE: 9402 if (mode == SImode) 9403 *total = COSTS_N_INSNS (1); 9404 else if (mode == DImode) 9405 *total = COSTS_N_INSNS (2); 9406 else 9407 *total = COSTS_N_INSNS (3); 9408 return true; 9409 9410 case MULT: 9411 if (mode == SImode) 9412 *total = COSTS_N_INSNS (2); 9413 else 9414 *total = COSTS_N_INSNS (6); /* guess */ 9415 return true; 9416 9417 case DIV: 9418 case UDIV: 9419 case MOD: 9420 case UMOD: 9421 *total = COSTS_N_INSNS (18); 9422 return true; 9423 9424 case MEM: 9425 *total = COSTS_N_INSNS (3); 9426 return true; 9427 9428 default: 9429 return false; 9430 } 9431 } 9432 9433 static void 9434 frv_asm_out_constructor (rtx symbol, int priority ATTRIBUTE_UNUSED) 9435 { 9436 switch_to_section (ctors_section); 9437 assemble_align (POINTER_SIZE); 9438 if (TARGET_FDPIC) 9439 { 9440 int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1); 9441 9442 gcc_assert (ok); 9443 return; 9444 } 9445 assemble_integer_with_op ("\t.picptr\t", symbol); 9446 } 9447 9448 static void 9449 frv_asm_out_destructor (rtx symbol, int priority ATTRIBUTE_UNUSED) 9450 { 9451 switch_to_section (dtors_section); 9452 assemble_align (POINTER_SIZE); 9453 if (TARGET_FDPIC) 9454 { 9455 int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1); 9456 9457 gcc_assert (ok); 9458 return; 9459 } 9460 assemble_integer_with_op ("\t.picptr\t", symbol); 9461 } 9462 9463 /* Worker function for TARGET_STRUCT_VALUE_RTX. */ 9464 9465 static rtx 9466 frv_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED, 9467 int incoming ATTRIBUTE_UNUSED) 9468 { 9469 return gen_rtx_REG (Pmode, FRV_STRUCT_VALUE_REGNUM); 9470 } 9471 9472 #define TLS_BIAS (2048 - 16) 9473 9474 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL. 9475 We need to emit DTP-relative relocations. */ 9476 9477 static void 9478 frv_output_dwarf_dtprel (FILE *file, int size, rtx x) 9479 { 9480 gcc_assert (size == 4); 9481 fputs ("\t.picptr\ttlsmoff(", file); 9482 /* We want the unbiased TLS offset, so add the bias to the 9483 expression, such that the implicit biasing cancels out. */ 9484 output_addr_const (file, plus_constant (Pmode, x, TLS_BIAS)); 9485 fputs (")", file); 9486 } 9487 9488 #include "gt-frv.h" 9489