/* Subroutines for insn-output.c for NEC V850 series
   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2006, 2007, 2008, 2009 Free Software Foundation, Inc.
   Contributed by Jeff Law (law@cygnus.com).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "expr.h"
#include "function.h"
#include "toplev.h"
#include "ggc.h"
#include "integrate.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "df.h"

#ifndef streq
#define streq(a,b) (strcmp (a, b) == 0)
#endif

/* Function prototypes for stupid compilers:  */
static bool v850_handle_option (size_t, const char *, int);
static void const_double_split (rtx, HOST_WIDE_INT *, HOST_WIDE_INT *);
static int const_costs_int (HOST_WIDE_INT, int);
static int const_costs (rtx, enum rtx_code);
static bool v850_rtx_costs (rtx, int, int, int *, bool);
static void substitute_ep_register (rtx, rtx, int, int, rtx *, rtx *);
static void v850_reorg (void);
static int ep_memory_offset (enum machine_mode, int);
static void v850_set_data_area (tree, v850_data_area);
static tree v850_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
static tree v850_handle_data_area_attribute (tree *, tree, tree, int, bool *);
static void v850_insert_attributes (tree, tree *);
static void v850_asm_init_sections (void);
static section *v850_select_section (tree, int, unsigned HOST_WIDE_INT);
static void v850_encode_data_area (tree, rtx);
static void v850_encode_section_info (tree, rtx, int);
static bool v850_return_in_memory (const_tree, const_tree);
static rtx v850_function_value (const_tree, const_tree, bool);
static void v850_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
					 tree, int *, int);
static bool v850_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
				    const_tree, bool);
static int v850_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
				   tree, bool);
static bool v850_can_eliminate (const int, const int);
static void v850_asm_trampoline_template (FILE *);
static void v850_trampoline_init (rtx, tree, rtx);

/* Information about the various small memory areas.  */
struct small_memory_info small_memory[ (int) SMALL_MEMORY_max ] =
{
  /* name	max	physical max */
  { "tda",	0,	256 },
  { "sda",	0,	65536 },
  { "zda",	0,	32768 },
};

/* Names of the various data areas used on the v850.  */
tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];

/* Track the current data area set by the data area pragma (which
   can be nested).  Tested by check_default_data_area.  */
data_area_stack_element * data_area_stack = NULL;

/* True if we don't need to check any more if the current
   function is an interrupt handler.  */
static int v850_interrupt_cache_p = FALSE;

/* Whether current function is an interrupt handler.  */
static int v850_interrupt_p = FALSE;

static GTY(()) section *rosdata_section;
static GTY(()) section *rozdata_section;
static GTY(()) section *tdata_section;
static GTY(()) section *zdata_section;
static GTY(()) section *zbss_section;

/* V850 specific attributes.  */

static const struct attribute_spec v850_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt_handler", 0, 0, true, false, false, v850_handle_interrupt_attribute },
  { "interrupt",         0, 0, true, false, false, v850_handle_interrupt_attribute },
  { "sda",               0, 0, true, false, false, v850_handle_data_area_attribute },
  { "tda",               0, 0, true, false, false, v850_handle_data_area_attribute },
  { "zda",               0, 0, true, false, false, v850_handle_data_area_attribute },
  { NULL,                0, 0, false, false, false, NULL }
};
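
/* Illustration only -- not part of the original file.  A minimal sketch of
   the kind of user-level declarations the attribute table above is meant to
   accept.  The declaration names are made up for the example and the block
   is kept under '#if 0' so it never reaches the compiler.  */
#if 0
/* Mark a function as an interrupt handler (either spelling is accepted).  */
void timer_isr (void) __attribute__ ((interrupt_handler));
void uart_isr (void)  __attribute__ ((interrupt));

/* Place globals in the small, tiny and zero data areas respectively.  */
int   counter   __attribute__ ((sda));
short scratch   __attribute__ ((tda));
char  flags_var __attribute__ ((zda));
#endif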

/* Initialize the GCC target structure.  */
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION  v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them.  */
#undef  TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef  TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

#undef  TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (MASK_DEFAULT | MASK_APP_REGS)
#undef  TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION v850_handle_option

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value

#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef  TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs

#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

struct gcc_target targetm = TARGET_INITIALIZER;

/* Set the maximum size of small memory area TYPE to the value given
   by VALUE.  Return true if VALUE was syntactically correct.  VALUE
   starts with the argument separator: either "-" or "=".  */

static bool
v850_handle_memory_option (enum small_memory_type type, const char *value)
{
  int i, size;

  if (*value != '-' && *value != '=')
    return false;

  value++;
  for (i = 0; value[i]; i++)
    if (!ISDIGIT (value[i]))
      return false;

  size = atoi (value);
  if (size > small_memory[type].physical_max)
    error ("value passed to %<-m%s%> is too large", small_memory[type].name);
  else
    small_memory[type].max = size;
  return true;
}
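
/* Illustration only -- not part of the original file.  A rough sketch of
   the behaviour of v850_handle_memory_option above, assuming the
   "-msda=<n>" style of option string; kept under '#if 0'.  */
#if 0
static void
v850_memory_option_example (void)
{
  /* "-msda=256" reaches the handler as (SMALL_MEMORY_SDA, "=256"):
     the leading separator is skipped, the digits are checked, and
     small_memory[SMALL_MEMORY_SDA].max becomes 256.  */
  gcc_assert (v850_handle_memory_option (SMALL_MEMORY_SDA, "=256"));

  /* A value without the '-' or '=' separator is rejected.  */
  gcc_assert (! v850_handle_memory_option (SMALL_MEMORY_SDA, "256"));

  /* A value larger than the physical maximum (65536 for sda) is
     diagnosed with error () and the old maximum is kept.  */
  v850_handle_memory_option (SMALL_MEMORY_SDA, "=100000");
}
#endif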

/* Implement TARGET_HANDLE_OPTION.  */

static bool
v850_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case OPT_mspace:
      target_flags |= MASK_EP | MASK_PROLOG_FUNCTION;
      return true;

    case OPT_mv850:
      target_flags &= ~(MASK_CPU ^ MASK_V850);
      return true;

    case OPT_mv850e:
    case OPT_mv850e1:
      target_flags &= ~(MASK_CPU ^ MASK_V850E);
      return true;

    case OPT_mtda:
      return v850_handle_memory_option (SMALL_MEMORY_TDA, arg);

    case OPT_msda:
      return v850_handle_memory_option (SMALL_MEMORY_SDA, arg);

    case OPT_mzda:
      return v850_handle_memory_option (SMALL_MEMORY_ZDA, arg);

    default:
      return true;
    }
}

static bool
v850_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
			enum machine_mode mode, const_tree type,
			bool named ATTRIBUTE_UNUSED)
{
  unsigned HOST_WIDE_INT size;

  if (type)
    size = int_size_in_bytes (type);
  else
    size = GET_MODE_SIZE (mode);

  return size > 8;
}

/* Return an RTX to represent where an argument with mode MODE
   and type TYPE will be passed to a function.  If the result
   is NULL_RTX, the argument will be pushed.  */

rtx
function_arg (CUMULATIVE_ARGS * cum,
	      enum machine_mode mode,
	      tree type,
	      int named)
{
  rtx result = NULL_RTX;
  int size, align;

  if (TARGET_GHS && !named)
    return NULL_RTX;

  if (mode == BLKmode)
    size = int_size_in_bytes (type);
  else
    size = GET_MODE_SIZE (mode);

  if (size < 1)
    {
      /* Once we have stopped using argument registers, do not start up
	 again.  */
      cum->nbytes = 4 * UNITS_PER_WORD;
      return NULL_RTX;
    }

  if (type)
    align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  else
    align = size;

  cum->nbytes = (cum->nbytes + align - 1) & ~(align - 1);

  if (cum->nbytes > 4 * UNITS_PER_WORD)
    return NULL_RTX;

  if (type == NULL_TREE
      && cum->nbytes + size > 4 * UNITS_PER_WORD)
    return NULL_RTX;

  switch (cum->nbytes / UNITS_PER_WORD)
    {
    case 0:
      result = gen_rtx_REG (mode, 6);
      break;
    case 1:
      result = gen_rtx_REG (mode, 7);
      break;
    case 2:
      result = gen_rtx_REG (mode, 8);
      break;
    case 3:
      result = gen_rtx_REG (mode, 9);
      break;
    default:
      result = NULL_RTX;
    }

  return result;
}

/* Return the number of bytes which must be put into registers
   for values which are passed partly in registers and partly in memory.
*/ 333 334 static int 335 v850_arg_partial_bytes (CUMULATIVE_ARGS * cum, enum machine_mode mode, 336 tree type, bool named) 337 { 338 int size, align; 339 340 if (TARGET_GHS && !named) 341 return 0; 342 343 if (mode == BLKmode) 344 size = int_size_in_bytes (type); 345 else 346 size = GET_MODE_SIZE (mode); 347 348 if (size < 1) 349 size = 1; 350 351 if (type) 352 align = TYPE_ALIGN (type) / BITS_PER_UNIT; 353 else 354 align = size; 355 356 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1); 357 358 if (cum->nbytes > 4 * UNITS_PER_WORD) 359 return 0; 360 361 if (cum->nbytes + size <= 4 * UNITS_PER_WORD) 362 return 0; 363 364 if (type == NULL_TREE 365 && cum->nbytes + size > 4 * UNITS_PER_WORD) 366 return 0; 367 368 return 4 * UNITS_PER_WORD - cum->nbytes; 369 } 370 371 372 /* Return the high and low words of a CONST_DOUBLE */ 373 374 static void 375 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low) 376 { 377 if (GET_CODE (x) == CONST_DOUBLE) 378 { 379 long t[2]; 380 REAL_VALUE_TYPE rv; 381 382 switch (GET_MODE (x)) 383 { 384 case DFmode: 385 REAL_VALUE_FROM_CONST_DOUBLE (rv, x); 386 REAL_VALUE_TO_TARGET_DOUBLE (rv, t); 387 *p_high = t[1]; /* since v850 is little endian */ 388 *p_low = t[0]; /* high is second word */ 389 return; 390 391 case SFmode: 392 REAL_VALUE_FROM_CONST_DOUBLE (rv, x); 393 REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high); 394 *p_low = 0; 395 return; 396 397 case VOIDmode: 398 case DImode: 399 *p_high = CONST_DOUBLE_HIGH (x); 400 *p_low = CONST_DOUBLE_LOW (x); 401 return; 402 403 default: 404 break; 405 } 406 } 407 408 fatal_insn ("const_double_split got a bad insn:", x); 409 } 410 411 412 /* Return the cost of the rtx R with code CODE. */ 413 414 static int 415 const_costs_int (HOST_WIDE_INT value, int zero_cost) 416 { 417 if (CONST_OK_FOR_I (value)) 418 return zero_cost; 419 else if (CONST_OK_FOR_J (value)) 420 return 1; 421 else if (CONST_OK_FOR_K (value)) 422 return 2; 423 else 424 return 4; 425 } 426 427 static int 428 const_costs (rtx r, enum rtx_code c) 429 { 430 HOST_WIDE_INT high, low; 431 432 switch (c) 433 { 434 case CONST_INT: 435 return const_costs_int (INTVAL (r), 0); 436 437 case CONST_DOUBLE: 438 const_double_split (r, &high, &low); 439 if (GET_MODE (r) == SFmode) 440 return const_costs_int (high, 1); 441 else 442 return const_costs_int (high, 1) + const_costs_int (low, 1); 443 444 case SYMBOL_REF: 445 case LABEL_REF: 446 case CONST: 447 return 2; 448 449 case HIGH: 450 return 1; 451 452 default: 453 return 4; 454 } 455 } 456 457 static bool 458 v850_rtx_costs (rtx x, 459 int codearg, 460 int outer_code ATTRIBUTE_UNUSED, 461 int * total, bool speed) 462 { 463 enum rtx_code code = (enum rtx_code) codearg; 464 465 switch (code) 466 { 467 case CONST_INT: 468 case CONST_DOUBLE: 469 case CONST: 470 case SYMBOL_REF: 471 case LABEL_REF: 472 *total = COSTS_N_INSNS (const_costs (x, code)); 473 return true; 474 475 case MOD: 476 case DIV: 477 case UMOD: 478 case UDIV: 479 if (TARGET_V850E && !speed) 480 *total = 6; 481 else 482 *total = 60; 483 return true; 484 485 case MULT: 486 if (TARGET_V850E 487 && ( GET_MODE (x) == SImode 488 || GET_MODE (x) == HImode 489 || GET_MODE (x) == QImode)) 490 { 491 if (GET_CODE (XEXP (x, 1)) == REG) 492 *total = 4; 493 else if (GET_CODE (XEXP (x, 1)) == CONST_INT) 494 { 495 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1)))) 496 *total = 6; 497 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1)))) 498 *total = 10; 499 } 500 } 501 else 502 *total = 20; 503 return true; 504 505 case ZERO_EXTRACT: 506 if (outer_code == COMPARE) 
507 *total = 0; 508 return false; 509 510 default: 511 return false; 512 } 513 } 514 515 /* Print operand X using operand code CODE to assembly language output file 516 FILE. */ 517 518 void 519 print_operand (FILE * file, rtx x, int code) 520 { 521 HOST_WIDE_INT high, low; 522 523 switch (code) 524 { 525 case 'c': 526 /* We use 'c' operands with symbols for .vtinherit */ 527 if (GET_CODE (x) == SYMBOL_REF) 528 { 529 output_addr_const(file, x); 530 break; 531 } 532 /* fall through */ 533 case 'b': 534 case 'B': 535 case 'C': 536 switch ((code == 'B' || code == 'C') 537 ? reverse_condition (GET_CODE (x)) : GET_CODE (x)) 538 { 539 case NE: 540 if (code == 'c' || code == 'C') 541 fprintf (file, "nz"); 542 else 543 fprintf (file, "ne"); 544 break; 545 case EQ: 546 if (code == 'c' || code == 'C') 547 fprintf (file, "z"); 548 else 549 fprintf (file, "e"); 550 break; 551 case GE: 552 fprintf (file, "ge"); 553 break; 554 case GT: 555 fprintf (file, "gt"); 556 break; 557 case LE: 558 fprintf (file, "le"); 559 break; 560 case LT: 561 fprintf (file, "lt"); 562 break; 563 case GEU: 564 fprintf (file, "nl"); 565 break; 566 case GTU: 567 fprintf (file, "h"); 568 break; 569 case LEU: 570 fprintf (file, "nh"); 571 break; 572 case LTU: 573 fprintf (file, "l"); 574 break; 575 default: 576 gcc_unreachable (); 577 } 578 break; 579 case 'F': /* high word of CONST_DOUBLE */ 580 switch (GET_CODE (x)) 581 { 582 case CONST_INT: 583 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1); 584 break; 585 586 case CONST_DOUBLE: 587 const_double_split (x, &high, &low); 588 fprintf (file, "%ld", (long) high); 589 break; 590 591 default: 592 gcc_unreachable (); 593 } 594 break; 595 case 'G': /* low word of CONST_DOUBLE */ 596 switch (GET_CODE (x)) 597 { 598 case CONST_INT: 599 fprintf (file, "%ld", (long) INTVAL (x)); 600 break; 601 602 case CONST_DOUBLE: 603 const_double_split (x, &high, &low); 604 fprintf (file, "%ld", (long) low); 605 break; 606 607 default: 608 gcc_unreachable (); 609 } 610 break; 611 case 'L': 612 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff)); 613 break; 614 case 'M': 615 fprintf (file, "%d", exact_log2 (INTVAL (x))); 616 break; 617 case 'O': 618 gcc_assert (special_symbolref_operand (x, VOIDmode)); 619 620 if (GET_CODE (x) == CONST) 621 x = XEXP (XEXP (x, 0), 0); 622 else 623 gcc_assert (GET_CODE (x) == SYMBOL_REF); 624 625 if (SYMBOL_REF_ZDA_P (x)) 626 fprintf (file, "zdaoff"); 627 else if (SYMBOL_REF_SDA_P (x)) 628 fprintf (file, "sdaoff"); 629 else if (SYMBOL_REF_TDA_P (x)) 630 fprintf (file, "tdaoff"); 631 else 632 gcc_unreachable (); 633 break; 634 case 'P': 635 gcc_assert (special_symbolref_operand (x, VOIDmode)); 636 output_addr_const (file, x); 637 break; 638 case 'Q': 639 gcc_assert (special_symbolref_operand (x, VOIDmode)); 640 641 if (GET_CODE (x) == CONST) 642 x = XEXP (XEXP (x, 0), 0); 643 else 644 gcc_assert (GET_CODE (x) == SYMBOL_REF); 645 646 if (SYMBOL_REF_ZDA_P (x)) 647 fprintf (file, "r0"); 648 else if (SYMBOL_REF_SDA_P (x)) 649 fprintf (file, "gp"); 650 else if (SYMBOL_REF_TDA_P (x)) 651 fprintf (file, "ep"); 652 else 653 gcc_unreachable (); 654 break; 655 case 'R': /* 2nd word of a double. 
*/ 656 switch (GET_CODE (x)) 657 { 658 case REG: 659 fprintf (file, reg_names[REGNO (x) + 1]); 660 break; 661 case MEM: 662 x = XEXP (adjust_address (x, SImode, 4), 0); 663 print_operand_address (file, x); 664 if (GET_CODE (x) == CONST_INT) 665 fprintf (file, "[r0]"); 666 break; 667 668 default: 669 break; 670 } 671 break; 672 case 'S': 673 { 674 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */ 675 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE)) 676 fputs ("s", file); 677 678 break; 679 } 680 case 'T': 681 { 682 /* Like an 'S' operand above, but for unsigned loads only. */ 683 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE)) 684 fputs ("s", file); 685 686 break; 687 } 688 case 'W': /* print the instruction suffix */ 689 switch (GET_MODE (x)) 690 { 691 default: 692 gcc_unreachable (); 693 694 case QImode: fputs (".b", file); break; 695 case HImode: fputs (".h", file); break; 696 case SImode: fputs (".w", file); break; 697 case SFmode: fputs (".w", file); break; 698 } 699 break; 700 case '.': /* register r0 */ 701 fputs (reg_names[0], file); 702 break; 703 case 'z': /* reg or zero */ 704 if (GET_CODE (x) == REG) 705 fputs (reg_names[REGNO (x)], file); 706 else 707 { 708 gcc_assert (x == const0_rtx); 709 fputs (reg_names[0], file); 710 } 711 break; 712 default: 713 switch (GET_CODE (x)) 714 { 715 case MEM: 716 if (GET_CODE (XEXP (x, 0)) == CONST_INT) 717 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0), 718 XEXP (x, 0))); 719 else 720 output_address (XEXP (x, 0)); 721 break; 722 723 case REG: 724 fputs (reg_names[REGNO (x)], file); 725 break; 726 case SUBREG: 727 fputs (reg_names[subreg_regno (x)], file); 728 break; 729 case CONST_INT: 730 case SYMBOL_REF: 731 case CONST: 732 case LABEL_REF: 733 case CODE_LABEL: 734 print_operand_address (file, x); 735 break; 736 default: 737 gcc_unreachable (); 738 } 739 break; 740 741 } 742 } 743 744 745 /* Output assembly language output for the address ADDR to FILE. 
*/ 746 747 void 748 print_operand_address (FILE * file, rtx addr) 749 { 750 switch (GET_CODE (addr)) 751 { 752 case REG: 753 fprintf (file, "0["); 754 print_operand (file, addr, 0); 755 fprintf (file, "]"); 756 break; 757 case LO_SUM: 758 if (GET_CODE (XEXP (addr, 0)) == REG) 759 { 760 /* reg,foo */ 761 fprintf (file, "lo("); 762 print_operand (file, XEXP (addr, 1), 0); 763 fprintf (file, ")["); 764 print_operand (file, XEXP (addr, 0), 0); 765 fprintf (file, "]"); 766 } 767 break; 768 case PLUS: 769 if (GET_CODE (XEXP (addr, 0)) == REG 770 || GET_CODE (XEXP (addr, 0)) == SUBREG) 771 { 772 /* reg,foo */ 773 print_operand (file, XEXP (addr, 1), 0); 774 fprintf (file, "["); 775 print_operand (file, XEXP (addr, 0), 0); 776 fprintf (file, "]"); 777 } 778 else 779 { 780 print_operand (file, XEXP (addr, 0), 0); 781 fprintf (file, "+"); 782 print_operand (file, XEXP (addr, 1), 0); 783 } 784 break; 785 case SYMBOL_REF: 786 { 787 const char *off_name = NULL; 788 const char *reg_name = NULL; 789 790 if (SYMBOL_REF_ZDA_P (addr)) 791 { 792 off_name = "zdaoff"; 793 reg_name = "r0"; 794 } 795 else if (SYMBOL_REF_SDA_P (addr)) 796 { 797 off_name = "sdaoff"; 798 reg_name = "gp"; 799 } 800 else if (SYMBOL_REF_TDA_P (addr)) 801 { 802 off_name = "tdaoff"; 803 reg_name = "ep"; 804 } 805 806 if (off_name) 807 fprintf (file, "%s(", off_name); 808 output_addr_const (file, addr); 809 if (reg_name) 810 fprintf (file, ")[%s]", reg_name); 811 } 812 break; 813 case CONST: 814 if (special_symbolref_operand (addr, VOIDmode)) 815 { 816 rtx x = XEXP (XEXP (addr, 0), 0); 817 const char *off_name; 818 const char *reg_name; 819 820 if (SYMBOL_REF_ZDA_P (x)) 821 { 822 off_name = "zdaoff"; 823 reg_name = "r0"; 824 } 825 else if (SYMBOL_REF_SDA_P (x)) 826 { 827 off_name = "sdaoff"; 828 reg_name = "gp"; 829 } 830 else if (SYMBOL_REF_TDA_P (x)) 831 { 832 off_name = "tdaoff"; 833 reg_name = "ep"; 834 } 835 else 836 gcc_unreachable (); 837 838 fprintf (file, "%s(", off_name); 839 output_addr_const (file, addr); 840 fprintf (file, ")[%s]", reg_name); 841 } 842 else 843 output_addr_const (file, addr); 844 break; 845 default: 846 output_addr_const (file, addr); 847 break; 848 } 849 } 850 851 /* When assemble_integer is used to emit the offsets for a switch 852 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))). 853 output_addr_const will normally barf at this, but it is OK to omit 854 the truncate and just emit the difference of the two labels. The 855 .hword directive will automatically handle the truncation for us. 856 857 Returns 1 if rtx was handled, 0 otherwise. */ 858 859 int 860 v850_output_addr_const_extra (FILE * file, rtx x) 861 { 862 if (GET_CODE (x) != TRUNCATE) 863 return 0; 864 865 x = XEXP (x, 0); 866 867 /* We must also handle the case where the switch table was passed a 868 constant value and so has been collapsed. In this case the first 869 label will have been deleted. In such a case it is OK to emit 870 nothing, since the table will not be used. 871 (cf gcc.c-torture/compile/990801-1.c). */ 872 if (GET_CODE (x) == MINUS 873 && GET_CODE (XEXP (x, 0)) == LABEL_REF 874 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL 875 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0))) 876 return 1; 877 878 output_addr_const (file, x); 879 return 1; 880 } 881 882 /* Return appropriate code to load up a 1, 2, or 4 integer/floating 883 point value. 
*/ 884 885 const char * 886 output_move_single (rtx * operands) 887 { 888 rtx dst = operands[0]; 889 rtx src = operands[1]; 890 891 if (REG_P (dst)) 892 { 893 if (REG_P (src)) 894 return "mov %1,%0"; 895 896 else if (GET_CODE (src) == CONST_INT) 897 { 898 HOST_WIDE_INT value = INTVAL (src); 899 900 if (CONST_OK_FOR_J (value)) /* Signed 5-bit immediate. */ 901 return "mov %1,%0"; 902 903 else if (CONST_OK_FOR_K (value)) /* Signed 16-bit immediate. */ 904 return "movea lo(%1),%.,%0"; 905 906 else if (CONST_OK_FOR_L (value)) /* Upper 16 bits were set. */ 907 return "movhi hi(%1),%.,%0"; 908 909 /* A random constant. */ 910 else if (TARGET_V850E) 911 return "mov %1,%0"; 912 else 913 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0"; 914 } 915 916 else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode) 917 { 918 HOST_WIDE_INT high, low; 919 920 const_double_split (src, &high, &low); 921 922 if (CONST_OK_FOR_J (high)) /* Signed 5-bit immediate. */ 923 return "mov %F1,%0"; 924 925 else if (CONST_OK_FOR_K (high)) /* Signed 16-bit immediate. */ 926 return "movea lo(%F1),%.,%0"; 927 928 else if (CONST_OK_FOR_L (high)) /* Upper 16 bits were set. */ 929 return "movhi hi(%F1),%.,%0"; 930 931 /* A random constant. */ 932 else if (TARGET_V850E) 933 return "mov %F1,%0"; 934 935 else 936 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0"; 937 } 938 939 else if (GET_CODE (src) == MEM) 940 return "%S1ld%W1 %1,%0"; 941 942 else if (special_symbolref_operand (src, VOIDmode)) 943 return "movea %O1(%P1),%Q1,%0"; 944 945 else if (GET_CODE (src) == LABEL_REF 946 || GET_CODE (src) == SYMBOL_REF 947 || GET_CODE (src) == CONST) 948 { 949 if (TARGET_V850E) 950 return "mov hilo(%1),%0"; 951 else 952 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0"; 953 } 954 955 else if (GET_CODE (src) == HIGH) 956 return "movhi hi(%1),%.,%0"; 957 958 else if (GET_CODE (src) == LO_SUM) 959 { 960 operands[2] = XEXP (src, 0); 961 operands[3] = XEXP (src, 1); 962 return "movea lo(%3),%2,%0"; 963 } 964 } 965 966 else if (GET_CODE (dst) == MEM) 967 { 968 if (REG_P (src)) 969 return "%S0st%W0 %1,%0"; 970 971 else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0) 972 return "%S0st%W0 %.,%0"; 973 974 else if (GET_CODE (src) == CONST_DOUBLE 975 && CONST0_RTX (GET_MODE (dst)) == src) 976 return "%S0st%W0 %.,%0"; 977 } 978 979 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src)); 980 return ""; 981 } 982 983 984 /* Return maximum offset supported for a short EP memory reference of mode 985 MODE and signedness UNSIGNEDP. */ 986 987 static int 988 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED) 989 { 990 int max_offset = 0; 991 992 switch (mode) 993 { 994 case QImode: 995 if (TARGET_SMALL_SLD) 996 max_offset = (1 << 4); 997 else if (TARGET_V850E 998 && ( ( unsignedp && ! TARGET_US_BIT_SET) 999 || (! unsignedp && TARGET_US_BIT_SET))) 1000 max_offset = (1 << 4); 1001 else 1002 max_offset = (1 << 7); 1003 break; 1004 1005 case HImode: 1006 if (TARGET_SMALL_SLD) 1007 max_offset = (1 << 5); 1008 else if (TARGET_V850E 1009 && ( ( unsignedp && ! TARGET_US_BIT_SET) 1010 || (! 
unsignedp && TARGET_US_BIT_SET))) 1011 max_offset = (1 << 5); 1012 else 1013 max_offset = (1 << 8); 1014 break; 1015 1016 case SImode: 1017 case SFmode: 1018 max_offset = (1 << 8); 1019 break; 1020 1021 default: 1022 break; 1023 } 1024 1025 return max_offset; 1026 } 1027 1028 /* Return true if OP is a valid short EP memory reference */ 1029 1030 int 1031 ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load) 1032 { 1033 rtx addr, op0, op1; 1034 int max_offset; 1035 int mask; 1036 1037 /* If we are not using the EP register on a per-function basis 1038 then do not allow this optimization at all. This is to 1039 prevent the use of the SLD/SST instructions which cannot be 1040 guaranteed to work properly due to a hardware bug. */ 1041 if (!TARGET_EP) 1042 return FALSE; 1043 1044 if (GET_CODE (op) != MEM) 1045 return FALSE; 1046 1047 max_offset = ep_memory_offset (mode, unsigned_load); 1048 1049 mask = GET_MODE_SIZE (mode) - 1; 1050 1051 addr = XEXP (op, 0); 1052 if (GET_CODE (addr) == CONST) 1053 addr = XEXP (addr, 0); 1054 1055 switch (GET_CODE (addr)) 1056 { 1057 default: 1058 break; 1059 1060 case SYMBOL_REF: 1061 return SYMBOL_REF_TDA_P (addr); 1062 1063 case REG: 1064 return REGNO (addr) == EP_REGNUM; 1065 1066 case PLUS: 1067 op0 = XEXP (addr, 0); 1068 op1 = XEXP (addr, 1); 1069 if (GET_CODE (op1) == CONST_INT 1070 && INTVAL (op1) < max_offset 1071 && INTVAL (op1) >= 0 1072 && (INTVAL (op1) & mask) == 0) 1073 { 1074 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM) 1075 return TRUE; 1076 1077 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0)) 1078 return TRUE; 1079 } 1080 break; 1081 } 1082 1083 return FALSE; 1084 } 1085 1086 /* Substitute memory references involving a pointer, to use the ep pointer, 1087 taking care to save and preserve the ep. */ 1088 1089 static void 1090 substitute_ep_register (rtx first_insn, 1091 rtx last_insn, 1092 int uses, 1093 int regno, 1094 rtx * p_r1, 1095 rtx * p_ep) 1096 { 1097 rtx reg = gen_rtx_REG (Pmode, regno); 1098 rtx insn; 1099 1100 if (!*p_r1) 1101 { 1102 df_set_regs_ever_live (1, true); 1103 *p_r1 = gen_rtx_REG (Pmode, 1); 1104 *p_ep = gen_rtx_REG (Pmode, 30); 1105 } 1106 1107 if (TARGET_DEBUG) 1108 fprintf (stderr, "\ 1109 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n", 1110 2 * (uses - 3), uses, reg_names[regno], 1111 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)), 1112 INSN_UID (first_insn), INSN_UID (last_insn)); 1113 1114 if (GET_CODE (first_insn) == NOTE) 1115 first_insn = next_nonnote_insn (first_insn); 1116 1117 last_insn = next_nonnote_insn (last_insn); 1118 for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn)) 1119 { 1120 if (GET_CODE (insn) == INSN) 1121 { 1122 rtx pattern = single_set (insn); 1123 1124 /* Replace the memory references. */ 1125 if (pattern) 1126 { 1127 rtx *p_mem; 1128 /* Memory operands are signed by default. 
*/ 1129 int unsignedp = FALSE; 1130 1131 if (GET_CODE (SET_DEST (pattern)) == MEM 1132 && GET_CODE (SET_SRC (pattern)) == MEM) 1133 p_mem = (rtx *)0; 1134 1135 else if (GET_CODE (SET_DEST (pattern)) == MEM) 1136 p_mem = &SET_DEST (pattern); 1137 1138 else if (GET_CODE (SET_SRC (pattern)) == MEM) 1139 p_mem = &SET_SRC (pattern); 1140 1141 else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND 1142 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM) 1143 p_mem = &XEXP (SET_SRC (pattern), 0); 1144 1145 else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND 1146 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM) 1147 { 1148 p_mem = &XEXP (SET_SRC (pattern), 0); 1149 unsignedp = TRUE; 1150 } 1151 else 1152 p_mem = (rtx *)0; 1153 1154 if (p_mem) 1155 { 1156 rtx addr = XEXP (*p_mem, 0); 1157 1158 if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno) 1159 *p_mem = change_address (*p_mem, VOIDmode, *p_ep); 1160 1161 else if (GET_CODE (addr) == PLUS 1162 && GET_CODE (XEXP (addr, 0)) == REG 1163 && REGNO (XEXP (addr, 0)) == (unsigned) regno 1164 && GET_CODE (XEXP (addr, 1)) == CONST_INT 1165 && ((INTVAL (XEXP (addr, 1))) 1166 < ep_memory_offset (GET_MODE (*p_mem), 1167 unsignedp)) 1168 && ((INTVAL (XEXP (addr, 1))) >= 0)) 1169 *p_mem = change_address (*p_mem, VOIDmode, 1170 gen_rtx_PLUS (Pmode, 1171 *p_ep, 1172 XEXP (addr, 1))); 1173 } 1174 } 1175 } 1176 } 1177 1178 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */ 1179 insn = prev_nonnote_insn (first_insn); 1180 if (insn && GET_CODE (insn) == INSN 1181 && GET_CODE (PATTERN (insn)) == SET 1182 && SET_DEST (PATTERN (insn)) == *p_ep 1183 && SET_SRC (PATTERN (insn)) == *p_r1) 1184 delete_insn (insn); 1185 else 1186 emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn); 1187 1188 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn); 1189 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn); 1190 } 1191 1192 1193 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement 1194 the -mep mode to copy heavily used pointers to ep to use the implicit 1195 addressing. */ 1196 1197 static void 1198 v850_reorg (void) 1199 { 1200 struct 1201 { 1202 int uses; 1203 rtx first_insn; 1204 rtx last_insn; 1205 } 1206 regs[FIRST_PSEUDO_REGISTER]; 1207 1208 int i; 1209 int use_ep = FALSE; 1210 rtx r1 = NULL_RTX; 1211 rtx ep = NULL_RTX; 1212 rtx insn; 1213 rtx pattern; 1214 1215 /* If not ep mode, just return now. 
*/ 1216 if (!TARGET_EP) 1217 return; 1218 1219 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 1220 { 1221 regs[i].uses = 0; 1222 regs[i].first_insn = NULL_RTX; 1223 regs[i].last_insn = NULL_RTX; 1224 } 1225 1226 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn)) 1227 { 1228 switch (GET_CODE (insn)) 1229 { 1230 /* End of basic block */ 1231 default: 1232 if (!use_ep) 1233 { 1234 int max_uses = -1; 1235 int max_regno = -1; 1236 1237 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 1238 { 1239 if (max_uses < regs[i].uses) 1240 { 1241 max_uses = regs[i].uses; 1242 max_regno = i; 1243 } 1244 } 1245 1246 if (max_uses > 3) 1247 substitute_ep_register (regs[max_regno].first_insn, 1248 regs[max_regno].last_insn, 1249 max_uses, max_regno, &r1, &ep); 1250 } 1251 1252 use_ep = FALSE; 1253 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 1254 { 1255 regs[i].uses = 0; 1256 regs[i].first_insn = NULL_RTX; 1257 regs[i].last_insn = NULL_RTX; 1258 } 1259 break; 1260 1261 case NOTE: 1262 break; 1263 1264 case INSN: 1265 pattern = single_set (insn); 1266 1267 /* See if there are any memory references we can shorten */ 1268 if (pattern) 1269 { 1270 rtx src = SET_SRC (pattern); 1271 rtx dest = SET_DEST (pattern); 1272 rtx mem; 1273 /* Memory operands are signed by default. */ 1274 int unsignedp = FALSE; 1275 1276 /* We might have (SUBREG (MEM)) here, so just get rid of the 1277 subregs to make this code simpler. */ 1278 if (GET_CODE (dest) == SUBREG 1279 && (GET_CODE (SUBREG_REG (dest)) == MEM 1280 || GET_CODE (SUBREG_REG (dest)) == REG)) 1281 alter_subreg (&dest); 1282 if (GET_CODE (src) == SUBREG 1283 && (GET_CODE (SUBREG_REG (src)) == MEM 1284 || GET_CODE (SUBREG_REG (src)) == REG)) 1285 alter_subreg (&src); 1286 1287 if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM) 1288 mem = NULL_RTX; 1289 1290 else if (GET_CODE (dest) == MEM) 1291 mem = dest; 1292 1293 else if (GET_CODE (src) == MEM) 1294 mem = src; 1295 1296 else if (GET_CODE (src) == SIGN_EXTEND 1297 && GET_CODE (XEXP (src, 0)) == MEM) 1298 mem = XEXP (src, 0); 1299 1300 else if (GET_CODE (src) == ZERO_EXTEND 1301 && GET_CODE (XEXP (src, 0)) == MEM) 1302 { 1303 mem = XEXP (src, 0); 1304 unsignedp = TRUE; 1305 } 1306 else 1307 mem = NULL_RTX; 1308 1309 if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp)) 1310 use_ep = TRUE; 1311 1312 else if (!use_ep && mem 1313 && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD) 1314 { 1315 rtx addr = XEXP (mem, 0); 1316 int regno = -1; 1317 int short_p; 1318 1319 if (GET_CODE (addr) == REG) 1320 { 1321 short_p = TRUE; 1322 regno = REGNO (addr); 1323 } 1324 1325 else if (GET_CODE (addr) == PLUS 1326 && GET_CODE (XEXP (addr, 0)) == REG 1327 && GET_CODE (XEXP (addr, 1)) == CONST_INT 1328 && ((INTVAL (XEXP (addr, 1))) 1329 < ep_memory_offset (GET_MODE (mem), unsignedp)) 1330 && ((INTVAL (XEXP (addr, 1))) >= 0)) 1331 { 1332 short_p = TRUE; 1333 regno = REGNO (XEXP (addr, 0)); 1334 } 1335 1336 else 1337 short_p = FALSE; 1338 1339 if (short_p) 1340 { 1341 regs[regno].uses++; 1342 regs[regno].last_insn = insn; 1343 if (!regs[regno].first_insn) 1344 regs[regno].first_insn = insn; 1345 } 1346 } 1347 1348 /* Loading up a register in the basic block zaps any savings 1349 for the register */ 1350 if (GET_CODE (dest) == REG) 1351 { 1352 enum machine_mode mode = GET_MODE (dest); 1353 int regno; 1354 int endregno; 1355 1356 regno = REGNO (dest); 1357 endregno = regno + HARD_REGNO_NREGS (regno, mode); 1358 1359 if (!use_ep) 1360 { 1361 /* See if we can use the pointer before this 1362 modification. 
*/ 1363 int max_uses = -1; 1364 int max_regno = -1; 1365 1366 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 1367 { 1368 if (max_uses < regs[i].uses) 1369 { 1370 max_uses = regs[i].uses; 1371 max_regno = i; 1372 } 1373 } 1374 1375 if (max_uses > 3 1376 && max_regno >= regno 1377 && max_regno < endregno) 1378 { 1379 substitute_ep_register (regs[max_regno].first_insn, 1380 regs[max_regno].last_insn, 1381 max_uses, max_regno, &r1, 1382 &ep); 1383 1384 /* Since we made a substitution, zap all remembered 1385 registers. */ 1386 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 1387 { 1388 regs[i].uses = 0; 1389 regs[i].first_insn = NULL_RTX; 1390 regs[i].last_insn = NULL_RTX; 1391 } 1392 } 1393 } 1394 1395 for (i = regno; i < endregno; i++) 1396 { 1397 regs[i].uses = 0; 1398 regs[i].first_insn = NULL_RTX; 1399 regs[i].last_insn = NULL_RTX; 1400 } 1401 } 1402 } 1403 } 1404 } 1405 } 1406 1407 1408 /* # of registers saved by the interrupt handler. */ 1409 #define INTERRUPT_FIXED_NUM 4 1410 1411 /* # of bytes for registers saved by the interrupt handler. */ 1412 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM) 1413 1414 /* # of registers saved in register parameter area. */ 1415 #define INTERRUPT_REGPARM_NUM 4 1416 /* # of words saved for other registers. */ 1417 #define INTERRUPT_ALL_SAVE_NUM \ 1418 (30 - INTERRUPT_FIXED_NUM + INTERRUPT_REGPARM_NUM) 1419 1420 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM) 1421 1422 int 1423 compute_register_save_size (long * p_reg_saved) 1424 { 1425 int size = 0; 1426 int i; 1427 int interrupt_handler = v850_interrupt_function_p (current_function_decl); 1428 int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM); 1429 long reg_saved = 0; 1430 1431 /* Count the return pointer if we need to save it. */ 1432 if (crtl->profile && !call_p) 1433 { 1434 df_set_regs_ever_live (LINK_POINTER_REGNUM, true); 1435 call_p = 1; 1436 } 1437 1438 /* Count space for the register saves. */ 1439 if (interrupt_handler) 1440 { 1441 for (i = 0; i <= 31; i++) 1442 switch (i) 1443 { 1444 default: 1445 if (df_regs_ever_live_p (i) || call_p) 1446 { 1447 size += 4; 1448 reg_saved |= 1L << i; 1449 } 1450 break; 1451 1452 /* We don't save/restore r0 or the stack pointer */ 1453 case 0: 1454 case STACK_POINTER_REGNUM: 1455 break; 1456 1457 /* For registers with fixed use, we save them, set them to the 1458 appropriate value, and then restore them. 1459 These registers are handled specially, so don't list them 1460 on the list of registers to save in the prologue. */ 1461 case 1: /* temp used to hold ep */ 1462 case 4: /* gp */ 1463 case 10: /* temp used to call interrupt save/restore */ 1464 case EP_REGNUM: /* ep */ 1465 size += 4; 1466 break; 1467 } 1468 } 1469 else 1470 { 1471 /* Find the first register that needs to be saved. */ 1472 for (i = 0; i <= 31; i++) 1473 if (df_regs_ever_live_p (i) && ((! call_used_regs[i]) 1474 || i == LINK_POINTER_REGNUM)) 1475 break; 1476 1477 /* If it is possible that an out-of-line helper function might be 1478 used to generate the prologue for the current function, then we 1479 need to cover the possibility that such a helper function will 1480 be used, despite the fact that there might be gaps in the list of 1481 registers that need to be saved. To detect this we note that the 1482 helper functions always push at least register r29 (provided 1483 that the function is not an interrupt handler). 
*/ 1484 1485 if (TARGET_PROLOG_FUNCTION 1486 && (i == 2 || ((i >= 20) && (i < 30)))) 1487 { 1488 if (i == 2) 1489 { 1490 size += 4; 1491 reg_saved |= 1L << i; 1492 1493 i = 20; 1494 } 1495 1496 /* Helper functions save all registers between the starting 1497 register and the last register, regardless of whether they 1498 are actually used by the function or not. */ 1499 for (; i <= 29; i++) 1500 { 1501 size += 4; 1502 reg_saved |= 1L << i; 1503 } 1504 1505 if (df_regs_ever_live_p (LINK_POINTER_REGNUM)) 1506 { 1507 size += 4; 1508 reg_saved |= 1L << LINK_POINTER_REGNUM; 1509 } 1510 } 1511 else 1512 { 1513 for (; i <= 31; i++) 1514 if (df_regs_ever_live_p (i) && ((! call_used_regs[i]) 1515 || i == LINK_POINTER_REGNUM)) 1516 { 1517 size += 4; 1518 reg_saved |= 1L << i; 1519 } 1520 } 1521 } 1522 1523 if (p_reg_saved) 1524 *p_reg_saved = reg_saved; 1525 1526 return size; 1527 } 1528 1529 int 1530 compute_frame_size (int size, long * p_reg_saved) 1531 { 1532 return (size 1533 + compute_register_save_size (p_reg_saved) 1534 + crtl->outgoing_args_size); 1535 } 1536 1537 1538 void 1539 expand_prologue (void) 1540 { 1541 unsigned int i; 1542 int offset; 1543 unsigned int size = get_frame_size (); 1544 unsigned int actual_fsize; 1545 unsigned int init_stack_alloc = 0; 1546 rtx save_regs[32]; 1547 rtx save_all; 1548 unsigned int num_save; 1549 unsigned int default_stack; 1550 int code; 1551 int interrupt_handler = v850_interrupt_function_p (current_function_decl); 1552 long reg_saved = 0; 1553 1554 actual_fsize = compute_frame_size (size, ®_saved); 1555 1556 /* Save/setup global registers for interrupt functions right now. */ 1557 if (interrupt_handler) 1558 { 1559 if (TARGET_V850E && ! TARGET_DISABLE_CALLT) 1560 emit_insn (gen_callt_save_interrupt ()); 1561 else 1562 emit_insn (gen_save_interrupt ()); 1563 1564 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE; 1565 1566 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0) 1567 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE; 1568 } 1569 1570 /* Save arg registers to the stack if necessary. */ 1571 else if (crtl->args.info.anonymous_args) 1572 { 1573 if (TARGET_PROLOG_FUNCTION && TARGET_V850E && !TARGET_DISABLE_CALLT) 1574 emit_insn (gen_save_r6_r9_v850e ()); 1575 else if (TARGET_PROLOG_FUNCTION && ! TARGET_LONG_CALLS) 1576 emit_insn (gen_save_r6_r9 ()); 1577 else 1578 { 1579 offset = 0; 1580 for (i = 6; i < 10; i++) 1581 { 1582 emit_move_insn (gen_rtx_MEM (SImode, 1583 plus_constant (stack_pointer_rtx, 1584 offset)), 1585 gen_rtx_REG (SImode, i)); 1586 offset += 4; 1587 } 1588 } 1589 } 1590 1591 /* Identify all of the saved registers. */ 1592 num_save = 0; 1593 default_stack = 0; 1594 for (i = 1; i < 31; i++) 1595 { 1596 if (((1L << i) & reg_saved) != 0) 1597 save_regs[num_save++] = gen_rtx_REG (Pmode, i); 1598 } 1599 1600 /* If the return pointer is saved, the helper functions also allocate 1601 16 bytes of stack for arguments to be saved in. */ 1602 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0) 1603 { 1604 save_regs[num_save++] = gen_rtx_REG (Pmode, LINK_POINTER_REGNUM); 1605 default_stack = 16; 1606 } 1607 1608 /* See if we have an insn that allocates stack space and saves the particular 1609 registers we want to. 
*/ 1610 save_all = NULL_RTX; 1611 if (TARGET_PROLOG_FUNCTION && num_save > 0 && actual_fsize >= default_stack) 1612 { 1613 int alloc_stack = (4 * num_save) + default_stack; 1614 int unalloc_stack = actual_fsize - alloc_stack; 1615 int save_func_len = 4; 1616 int save_normal_len; 1617 1618 if (unalloc_stack) 1619 save_func_len += CONST_OK_FOR_J (unalloc_stack) ? 2 : 4; 1620 1621 /* see if we would have used ep to save the stack */ 1622 if (TARGET_EP && num_save > 3 && (unsigned)actual_fsize < 255) 1623 save_normal_len = (3 * 2) + (2 * num_save); 1624 else 1625 save_normal_len = 4 * num_save; 1626 1627 save_normal_len += CONST_OK_FOR_J (actual_fsize) ? 2 : 4; 1628 1629 /* Don't bother checking if we don't actually save any space. 1630 This happens for instance if one register is saved and additional 1631 stack space is allocated. */ 1632 if (save_func_len < save_normal_len) 1633 { 1634 save_all = gen_rtx_PARALLEL 1635 (VOIDmode, 1636 rtvec_alloc (num_save + 1 1637 + (TARGET_V850 ? (TARGET_LONG_CALLS ? 2 : 1) : 0))); 1638 1639 XVECEXP (save_all, 0, 0) 1640 = gen_rtx_SET (VOIDmode, 1641 stack_pointer_rtx, 1642 plus_constant (stack_pointer_rtx, -alloc_stack)); 1643 1644 offset = - default_stack; 1645 for (i = 0; i < num_save; i++) 1646 { 1647 XVECEXP (save_all, 0, i+1) 1648 = gen_rtx_SET (VOIDmode, 1649 gen_rtx_MEM (Pmode, 1650 plus_constant (stack_pointer_rtx, 1651 offset)), 1652 save_regs[i]); 1653 offset -= 4; 1654 } 1655 1656 if (TARGET_V850) 1657 { 1658 XVECEXP (save_all, 0, num_save + 1) 1659 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10)); 1660 1661 if (TARGET_LONG_CALLS) 1662 XVECEXP (save_all, 0, num_save + 2) 1663 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11)); 1664 } 1665 1666 code = recog (save_all, NULL_RTX, NULL); 1667 if (code >= 0) 1668 { 1669 rtx insn = emit_insn (save_all); 1670 INSN_CODE (insn) = code; 1671 actual_fsize -= alloc_stack; 1672 1673 if (TARGET_DEBUG) 1674 fprintf (stderr, "\ 1675 Saved %d bytes via prologue function (%d vs. %d) for function %s\n", 1676 save_normal_len - save_func_len, 1677 save_normal_len, save_func_len, 1678 IDENTIFIER_POINTER (DECL_NAME (current_function_decl))); 1679 } 1680 else 1681 save_all = NULL_RTX; 1682 } 1683 } 1684 1685 /* If no prolog save function is available, store the registers the old 1686 fashioned way (one by one). */ 1687 if (!save_all) 1688 { 1689 /* Special case interrupt functions that save all registers for a call. */ 1690 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0) 1691 { 1692 if (TARGET_V850E && ! TARGET_DISABLE_CALLT) 1693 emit_insn (gen_callt_save_all_interrupt ()); 1694 else 1695 emit_insn (gen_save_all_interrupt ()); 1696 } 1697 else 1698 { 1699 /* If the stack is too big, allocate it in chunks so we can do the 1700 register saves. We use the register save size so we use the ep 1701 register. */ 1702 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize)) 1703 init_stack_alloc = compute_register_save_size (NULL); 1704 else 1705 init_stack_alloc = actual_fsize; 1706 1707 /* Save registers at the beginning of the stack frame. */ 1708 offset = init_stack_alloc - 4; 1709 1710 if (init_stack_alloc) 1711 emit_insn (gen_addsi3 (stack_pointer_rtx, 1712 stack_pointer_rtx, 1713 GEN_INT (- (signed) init_stack_alloc))); 1714 1715 /* Save the return pointer first. 
*/ 1716 if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM) 1717 { 1718 emit_move_insn (gen_rtx_MEM (SImode, 1719 plus_constant (stack_pointer_rtx, 1720 offset)), 1721 save_regs[--num_save]); 1722 offset -= 4; 1723 } 1724 1725 for (i = 0; i < num_save; i++) 1726 { 1727 emit_move_insn (gen_rtx_MEM (SImode, 1728 plus_constant (stack_pointer_rtx, 1729 offset)), 1730 save_regs[i]); 1731 offset -= 4; 1732 } 1733 } 1734 } 1735 1736 /* Allocate the rest of the stack that was not allocated above (either it is 1737 > 32K or we just called a function to save the registers and needed more 1738 stack. */ 1739 if (actual_fsize > init_stack_alloc) 1740 { 1741 int diff = actual_fsize - init_stack_alloc; 1742 if (CONST_OK_FOR_K (diff)) 1743 emit_insn (gen_addsi3 (stack_pointer_rtx, 1744 stack_pointer_rtx, 1745 GEN_INT (-diff))); 1746 else 1747 { 1748 rtx reg = gen_rtx_REG (Pmode, 12); 1749 emit_move_insn (reg, GEN_INT (-diff)); 1750 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg)); 1751 } 1752 } 1753 1754 /* If we need a frame pointer, set it up now. */ 1755 if (frame_pointer_needed) 1756 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx); 1757 } 1758 1759 1760 void 1761 expand_epilogue (void) 1762 { 1763 unsigned int i; 1764 int offset; 1765 unsigned int size = get_frame_size (); 1766 long reg_saved = 0; 1767 int actual_fsize = compute_frame_size (size, ®_saved); 1768 unsigned int init_stack_free = 0; 1769 rtx restore_regs[32]; 1770 rtx restore_all; 1771 unsigned int num_restore; 1772 unsigned int default_stack; 1773 int code; 1774 int interrupt_handler = v850_interrupt_function_p (current_function_decl); 1775 1776 /* Eliminate the initial stack stored by interrupt functions. */ 1777 if (interrupt_handler) 1778 { 1779 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE; 1780 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0) 1781 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE; 1782 } 1783 1784 /* Cut off any dynamic stack created. */ 1785 if (frame_pointer_needed) 1786 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx); 1787 1788 /* Identify all of the saved registers. */ 1789 num_restore = 0; 1790 default_stack = 0; 1791 for (i = 1; i < 31; i++) 1792 { 1793 if (((1L << i) & reg_saved) != 0) 1794 restore_regs[num_restore++] = gen_rtx_REG (Pmode, i); 1795 } 1796 1797 /* If the return pointer is saved, the helper functions also allocate 1798 16 bytes of stack for arguments to be saved in. */ 1799 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0) 1800 { 1801 restore_regs[num_restore++] = gen_rtx_REG (Pmode, LINK_POINTER_REGNUM); 1802 default_stack = 16; 1803 } 1804 1805 /* See if we have an insn that restores the particular registers we 1806 want to. */ 1807 restore_all = NULL_RTX; 1808 1809 if (TARGET_PROLOG_FUNCTION 1810 && num_restore > 0 1811 && actual_fsize >= (signed) default_stack 1812 && !interrupt_handler) 1813 { 1814 int alloc_stack = (4 * num_restore) + default_stack; 1815 int unalloc_stack = actual_fsize - alloc_stack; 1816 int restore_func_len = 4; 1817 int restore_normal_len; 1818 1819 if (unalloc_stack) 1820 restore_func_len += CONST_OK_FOR_J (unalloc_stack) ? 2 : 4; 1821 1822 /* See if we would have used ep to restore the registers. */ 1823 if (TARGET_EP && num_restore > 3 && (unsigned)actual_fsize < 255) 1824 restore_normal_len = (3 * 2) + (2 * num_restore); 1825 else 1826 restore_normal_len = 4 * num_restore; 1827 1828 restore_normal_len += (CONST_OK_FOR_J (actual_fsize) ? 
2 : 4) + 2; 1829 1830 /* Don't bother checking if we don't actually save any space. */ 1831 if (restore_func_len < restore_normal_len) 1832 { 1833 restore_all = gen_rtx_PARALLEL (VOIDmode, 1834 rtvec_alloc (num_restore + 2)); 1835 XVECEXP (restore_all, 0, 0) = gen_rtx_RETURN (VOIDmode); 1836 XVECEXP (restore_all, 0, 1) 1837 = gen_rtx_SET (VOIDmode, stack_pointer_rtx, 1838 gen_rtx_PLUS (Pmode, 1839 stack_pointer_rtx, 1840 GEN_INT (alloc_stack))); 1841 1842 offset = alloc_stack - 4; 1843 for (i = 0; i < num_restore; i++) 1844 { 1845 XVECEXP (restore_all, 0, i+2) 1846 = gen_rtx_SET (VOIDmode, 1847 restore_regs[i], 1848 gen_rtx_MEM (Pmode, 1849 plus_constant (stack_pointer_rtx, 1850 offset))); 1851 offset -= 4; 1852 } 1853 1854 code = recog (restore_all, NULL_RTX, NULL); 1855 1856 if (code >= 0) 1857 { 1858 rtx insn; 1859 1860 actual_fsize -= alloc_stack; 1861 if (actual_fsize) 1862 { 1863 if (CONST_OK_FOR_K (actual_fsize)) 1864 emit_insn (gen_addsi3 (stack_pointer_rtx, 1865 stack_pointer_rtx, 1866 GEN_INT (actual_fsize))); 1867 else 1868 { 1869 rtx reg = gen_rtx_REG (Pmode, 12); 1870 emit_move_insn (reg, GEN_INT (actual_fsize)); 1871 emit_insn (gen_addsi3 (stack_pointer_rtx, 1872 stack_pointer_rtx, 1873 reg)); 1874 } 1875 } 1876 1877 insn = emit_jump_insn (restore_all); 1878 INSN_CODE (insn) = code; 1879 1880 if (TARGET_DEBUG) 1881 fprintf (stderr, "\ 1882 Saved %d bytes via epilogue function (%d vs. %d) in function %s\n", 1883 restore_normal_len - restore_func_len, 1884 restore_normal_len, restore_func_len, 1885 IDENTIFIER_POINTER (DECL_NAME (current_function_decl))); 1886 } 1887 else 1888 restore_all = NULL_RTX; 1889 } 1890 } 1891 1892 /* If no epilogue save function is available, restore the registers the 1893 old fashioned way (one by one). */ 1894 if (!restore_all) 1895 { 1896 /* If the stack is large, we need to cut it down in 2 pieces. */ 1897 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize)) 1898 init_stack_free = 4 * num_restore; 1899 else 1900 init_stack_free = (signed) actual_fsize; 1901 1902 /* Deallocate the rest of the stack if it is > 32K. */ 1903 if ((unsigned int) actual_fsize > init_stack_free) 1904 { 1905 int diff; 1906 1907 diff = actual_fsize - ((interrupt_handler) ? 0 : init_stack_free); 1908 1909 if (CONST_OK_FOR_K (diff)) 1910 emit_insn (gen_addsi3 (stack_pointer_rtx, 1911 stack_pointer_rtx, 1912 GEN_INT (diff))); 1913 else 1914 { 1915 rtx reg = gen_rtx_REG (Pmode, 12); 1916 emit_move_insn (reg, GEN_INT (diff)); 1917 emit_insn (gen_addsi3 (stack_pointer_rtx, 1918 stack_pointer_rtx, 1919 reg)); 1920 } 1921 } 1922 1923 /* Special case interrupt functions that save all registers 1924 for a call. */ 1925 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0) 1926 { 1927 if (TARGET_V850E && ! TARGET_DISABLE_CALLT) 1928 emit_insn (gen_callt_restore_all_interrupt ()); 1929 else 1930 emit_insn (gen_restore_all_interrupt ()); 1931 } 1932 else 1933 { 1934 /* Restore registers from the beginning of the stack frame. */ 1935 offset = init_stack_free - 4; 1936 1937 /* Restore the return pointer first. 
*/ 1938 if (num_restore > 0 1939 && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM) 1940 { 1941 emit_move_insn (restore_regs[--num_restore], 1942 gen_rtx_MEM (SImode, 1943 plus_constant (stack_pointer_rtx, 1944 offset))); 1945 offset -= 4; 1946 } 1947 1948 for (i = 0; i < num_restore; i++) 1949 { 1950 emit_move_insn (restore_regs[i], 1951 gen_rtx_MEM (SImode, 1952 plus_constant (stack_pointer_rtx, 1953 offset))); 1954 1955 emit_use (restore_regs[i]); 1956 offset -= 4; 1957 } 1958 1959 /* Cut back the remainder of the stack. */ 1960 if (init_stack_free) 1961 emit_insn (gen_addsi3 (stack_pointer_rtx, 1962 stack_pointer_rtx, 1963 GEN_INT (init_stack_free))); 1964 } 1965 1966 /* And return or use reti for interrupt handlers. */ 1967 if (interrupt_handler) 1968 { 1969 if (TARGET_V850E && ! TARGET_DISABLE_CALLT) 1970 emit_insn (gen_callt_return_interrupt ()); 1971 else 1972 emit_jump_insn (gen_return_interrupt ()); 1973 } 1974 else if (actual_fsize) 1975 emit_jump_insn (gen_return_internal ()); 1976 else 1977 emit_jump_insn (gen_return_simple ()); 1978 } 1979 1980 v850_interrupt_cache_p = FALSE; 1981 v850_interrupt_p = FALSE; 1982 } 1983 1984 1985 /* Update the condition code from the insn. */ 1986 1987 void 1988 notice_update_cc (rtx body, rtx insn) 1989 { 1990 switch (get_attr_cc (insn)) 1991 { 1992 case CC_NONE: 1993 /* Insn does not affect CC at all. */ 1994 break; 1995 1996 case CC_NONE_0HIT: 1997 /* Insn does not change CC, but the 0'th operand has been changed. */ 1998 if (cc_status.value1 != 0 1999 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1)) 2000 cc_status.value1 = 0; 2001 break; 2002 2003 case CC_SET_ZN: 2004 /* Insn sets the Z,N flags of CC to recog_data.operand[0]. 2005 V,C is in an unusable state. */ 2006 CC_STATUS_INIT; 2007 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY; 2008 cc_status.value1 = recog_data.operand[0]; 2009 break; 2010 2011 case CC_SET_ZNV: 2012 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0]. 2013 C is in an unusable state. */ 2014 CC_STATUS_INIT; 2015 cc_status.flags |= CC_NO_CARRY; 2016 cc_status.value1 = recog_data.operand[0]; 2017 break; 2018 2019 case CC_COMPARE: 2020 /* The insn is a compare instruction. */ 2021 CC_STATUS_INIT; 2022 cc_status.value1 = SET_SRC (body); 2023 break; 2024 2025 case CC_CLOBBER: 2026 /* Insn doesn't leave CC in a usable state. */ 2027 CC_STATUS_INIT; 2028 break; 2029 } 2030 } 2031 2032 /* Retrieve the data area that has been chosen for the given decl. */ 2033 2034 v850_data_area 2035 v850_get_data_area (tree decl) 2036 { 2037 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE) 2038 return DATA_AREA_SDA; 2039 2040 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE) 2041 return DATA_AREA_TDA; 2042 2043 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE) 2044 return DATA_AREA_ZDA; 2045 2046 return DATA_AREA_NORMAL; 2047 } 2048 2049 /* Store the indicated data area in the decl's attributes. 
 */

static void
v850_set_data_area (tree decl, v850_data_area data_area)
{
  tree name;

  switch (data_area)
    {
    case DATA_AREA_SDA: name = get_identifier ("sda"); break;
    case DATA_AREA_TDA: name = get_identifier ("tda"); break;
    case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
    default:
      return;
    }

  DECL_ATTRIBUTES (decl) = tree_cons
    (name, NULL, DECL_ATTRIBUTES (decl));
}

/* Handle an "interrupt" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
v850_handle_interrupt_attribute (tree * node,
				 tree name,
				 tree args ATTRIBUTE_UNUSED,
				 int flags ATTRIBUTE_UNUSED,
				 bool * no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
	       name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Handle a "sda", "tda" or "zda" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
v850_handle_data_area_attribute (tree * node,
				 tree name,
				 tree args ATTRIBUTE_UNUSED,
				 int flags ATTRIBUTE_UNUSED,
				 bool * no_add_attrs)
{
  v850_data_area data_area;
  v850_data_area area;
  tree decl = *node;

  /* Implement data area attribute.  */
  if (is_attribute_p ("sda", name))
    data_area = DATA_AREA_SDA;
  else if (is_attribute_p ("tda", name))
    data_area = DATA_AREA_TDA;
  else if (is_attribute_p ("zda", name))
    data_area = DATA_AREA_ZDA;
  else
    gcc_unreachable ();

  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (current_function_decl != NULL_TREE)
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "data area attributes cannot be specified for "
		    "local variables");
	  *no_add_attrs = true;
	}

      /* Drop through.  */

    case FUNCTION_DECL:
      area = v850_get_data_area (decl);
      if (area != DATA_AREA_NORMAL && data_area != area)
	{
	  error ("data area of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
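
/* Illustration only -- not part of the original file.  Hypothetical
   declarations sketching the diagnostics the two handlers above are meant
   to issue; kept under '#if 0'.  */
#if 0
int ok_var __attribute__ ((zda));       /* Accepted.  */

int clash __attribute__ ((sda, zda));   /* Conflicting data areas on one
					   declaration should draw the
					   "conflicts with previous
					   declaration" error.  */
void
not_an_isr (void)
{
  int local __attribute__ ((tda));      /* Data area attributes are
					   rejected on local variables.  */
}

int not_a_func __attribute__ ((interrupt_handler));
					/* Warned about: the attribute
					   only applies to functions.  */
#endif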
  if (reload_completed || reload_in_progress)
    v850_interrupt_p = ret;

  return ret;
}


static void
v850_encode_data_area (tree decl, rtx symbol)
{
  int flags;

  /* Map explicit sections into the appropriate attribute.  */
  if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
    {
      if (DECL_SECTION_NAME (decl))
        {
          const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));

          if (streq (name, ".zdata") || streq (name, ".zbss"))
            v850_set_data_area (decl, DATA_AREA_ZDA);

          else if (streq (name, ".sdata") || streq (name, ".sbss"))
            v850_set_data_area (decl, DATA_AREA_SDA);

          else if (streq (name, ".tdata"))
            v850_set_data_area (decl, DATA_AREA_TDA);
        }

      /* If no attribute, support -m{zda,sda,tda}=n  */
      else
        {
          int size = int_size_in_bytes (TREE_TYPE (decl));
          if (size <= 0)
            ;

          else if (size <= small_memory [(int) SMALL_MEMORY_TDA].max)
            v850_set_data_area (decl, DATA_AREA_TDA);

          else if (size <= small_memory [(int) SMALL_MEMORY_SDA].max)
            v850_set_data_area (decl, DATA_AREA_SDA);

          else if (size <= small_memory [(int) SMALL_MEMORY_ZDA].max)
            v850_set_data_area (decl, DATA_AREA_ZDA);
        }

      if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
        return;
    }

  flags = SYMBOL_REF_FLAGS (symbol);
  switch (v850_get_data_area (decl))
    {
    case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
    case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
    case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
    default: gcc_unreachable ();
    }
  SYMBOL_REF_FLAGS (symbol) = flags;
}

static void
v850_encode_section_info (tree decl, rtx rtl, int first)
{
  default_encode_section_info (decl, rtl, first);

  if (TREE_CODE (decl) == VAR_DECL
      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
    v850_encode_data_area (decl, XEXP (rtl, 0));
}

/* Construct a JR instruction to a routine that will perform the equivalent of
   the RTL passed in as an argument.  This RTL is a function epilogue that
   pops registers off the stack and possibly releases some extra stack space
   as well.  The code has already verified that the RTL matches these
   requirements.  */

char *
construct_restore_jr (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  if (count <= 2)
    {
      error ("bogus JR construction: %d", count);
      return NULL;
    }

  /* Work out how many bytes to pop off the stack before retrieving
     registers.  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);

  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));

  /* Each pop will remove 4 bytes from the stack....  */
  stack_bytes -= (count - 2) * 4;

  /* Make sure that the amount we are popping is either 0 or 16 bytes.  */
  if (stack_bytes != 0 && stack_bytes != 16)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to pop.  */
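  /* As a purely illustrative example (not taken from real output): an
     epilogue PARALLEL that restores r29 and r31 and releases 16 bytes of
     stack gives mask == (1 << 29) | (1 << 31), and the code below then
     emits "jr __return_r29_r31" (or the equivalent movhi/movea/jmp
     sequence when -mlong-calls is in effect).  */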
  mask = 0;
  for (i = 2; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
                                               SImode));

      mask |= 1 << REGNO (SET_DEST (vector_element));
    }

  /* Scan for the first register to pop.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
        break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to pop.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      gcc_assert (stack_bytes == 16);

      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JR anyway.  We will
     be popping more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
        sprintf (name, "__return_%s", reg_names [first]);
      else
        sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);

      sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
               name, name);
    }
  else
    {
      if (first == last)
        sprintf (buff, "jr __return_%s", reg_names [first]);
      else
        sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
    }

  return buff;
}


/* Construct a JARL instruction to a routine that will perform the equivalent
   of the RTL passed as a parameter.  This RTL is a function prologue that
   saves some of the registers r20 - r31 onto the stack, and possibly acquires
   some stack space as well.  The code has already verified that the RTL
   matches these requirements.  */

char *
construct_save_jarl (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX */

  if (count <= 2)
    {
      error ("bogus JARL construction: %d", count);
      return NULL;
    }

  /* Paranoia.  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);

  /* Work out how many bytes to push onto the stack after storing the
     registers.  */
  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));

  /* Each push will put 4 bytes onto the stack....  */
  stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;

  /* Make sure that the amount of stack space being acquired is either
     0 or 16 bytes.  */
  if (stack_bytes != 0 && stack_bytes != -16)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
                                               SImode));

      mask |= 1 << REGNO (SET_SRC (vector_element));
    }

  /* Scan for the first register to push.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
        break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to push.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      gcc_assert (stack_bytes == -16);

      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JARL anyway.  We will
     be pushing more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
        sprintf (name, "__save_%s", reg_names [first]);
      else
        sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);

      sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
               name, name);
    }
  else
    {
      if (first == last)
        sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
      else
        sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
                 reg_names [last]);
    }

  return buff;
}

extern tree last_assemble_variable_decl;
extern int size_directive_output;

/* A version of asm_output_aligned_bss() that copes with the special
   data areas of the v850.  */
void
v850_output_aligned_bss (FILE * file,
                         tree decl,
                         const char * name,
                         unsigned HOST_WIDE_INT size,
                         int align)
{
  switch (v850_get_data_area (decl))
    {
    case DATA_AREA_ZDA:
      switch_to_section (zbss_section);
      break;

    case DATA_AREA_SDA:
      switch_to_section (sbss_section);
      break;

    case DATA_AREA_TDA:
      switch_to_section (tdata_section);
      break;

    default:
      switch_to_section (bss_section);
      break;
    }

  ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
#ifdef ASM_DECLARE_OBJECT_NAME
  last_assemble_variable_decl = decl;
  ASM_DECLARE_OBJECT_NAME (file, name, decl);
#else
  /* Standard thing is just output label for the object.  */
  ASM_OUTPUT_LABEL (file, name);
#endif /* ASM_DECLARE_OBJECT_NAME */
  ASM_OUTPUT_SKIP (file, size ?
size : 1); 2495 } 2496 2497 /* Called via the macro ASM_OUTPUT_DECL_COMMON */ 2498 void 2499 v850_output_common (FILE * file, 2500 tree decl, 2501 const char * name, 2502 int size, 2503 int align) 2504 { 2505 if (decl == NULL_TREE) 2506 { 2507 fprintf (file, "%s", COMMON_ASM_OP); 2508 } 2509 else 2510 { 2511 switch (v850_get_data_area (decl)) 2512 { 2513 case DATA_AREA_ZDA: 2514 fprintf (file, "%s", ZCOMMON_ASM_OP); 2515 break; 2516 2517 case DATA_AREA_SDA: 2518 fprintf (file, "%s", SCOMMON_ASM_OP); 2519 break; 2520 2521 case DATA_AREA_TDA: 2522 fprintf (file, "%s", TCOMMON_ASM_OP); 2523 break; 2524 2525 default: 2526 fprintf (file, "%s", COMMON_ASM_OP); 2527 break; 2528 } 2529 } 2530 2531 assemble_name (file, name); 2532 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT); 2533 } 2534 2535 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */ 2536 void 2537 v850_output_local (FILE * file, 2538 tree decl, 2539 const char * name, 2540 int size, 2541 int align) 2542 { 2543 fprintf (file, "%s", LOCAL_ASM_OP); 2544 assemble_name (file, name); 2545 fprintf (file, "\n"); 2546 2547 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align); 2548 } 2549 2550 /* Add data area to the given declaration if a ghs data area pragma is 2551 currently in effect (#pragma ghs startXXX/endXXX). */ 2552 static void 2553 v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED ) 2554 { 2555 if (data_area_stack 2556 && data_area_stack->data_area 2557 && current_function_decl == NULL_TREE 2558 && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL) 2559 && v850_get_data_area (decl) == DATA_AREA_NORMAL) 2560 v850_set_data_area (decl, data_area_stack->data_area); 2561 2562 /* Initialize the default names of the v850 specific sections, 2563 if this has not been done before. */ 2564 2565 if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL) 2566 { 2567 GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] 2568 = build_string (sizeof (".sdata")-1, ".sdata"); 2569 2570 GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA] 2571 = build_string (sizeof (".rosdata")-1, ".rosdata"); 2572 2573 GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA] 2574 = build_string (sizeof (".tdata")-1, ".tdata"); 2575 2576 GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA] 2577 = build_string (sizeof (".zdata")-1, ".zdata"); 2578 2579 GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA] 2580 = build_string (sizeof (".rozdata")-1, ".rozdata"); 2581 } 2582 2583 if (current_function_decl == NULL_TREE 2584 && (TREE_CODE (decl) == VAR_DECL 2585 || TREE_CODE (decl) == CONST_DECL 2586 || TREE_CODE (decl) == FUNCTION_DECL) 2587 && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl)) 2588 && !DECL_SECTION_NAME (decl)) 2589 { 2590 enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT; 2591 tree chosen_section; 2592 2593 if (TREE_CODE (decl) == FUNCTION_DECL) 2594 kind = GHS_SECTION_KIND_TEXT; 2595 else 2596 { 2597 /* First choose a section kind based on the data area of the decl. */ 2598 switch (v850_get_data_area (decl)) 2599 { 2600 default: 2601 gcc_unreachable (); 2602 2603 case DATA_AREA_SDA: 2604 kind = ((TREE_READONLY (decl)) 2605 ? GHS_SECTION_KIND_ROSDATA 2606 : GHS_SECTION_KIND_SDATA); 2607 break; 2608 2609 case DATA_AREA_TDA: 2610 kind = GHS_SECTION_KIND_TDATA; 2611 break; 2612 2613 case DATA_AREA_ZDA: 2614 kind = ((TREE_READONLY (decl)) 2615 ? 
GHS_SECTION_KIND_ROZDATA 2616 : GHS_SECTION_KIND_ZDATA); 2617 break; 2618 2619 case DATA_AREA_NORMAL: /* default data area */ 2620 if (TREE_READONLY (decl)) 2621 kind = GHS_SECTION_KIND_RODATA; 2622 else if (DECL_INITIAL (decl)) 2623 kind = GHS_SECTION_KIND_DATA; 2624 else 2625 kind = GHS_SECTION_KIND_BSS; 2626 } 2627 } 2628 2629 /* Now, if the section kind has been explicitly renamed, 2630 then attach a section attribute. */ 2631 chosen_section = GHS_current_section_names [(int) kind]; 2632 2633 /* Otherwise, if this kind of section needs an explicit section 2634 attribute, then also attach one. */ 2635 if (chosen_section == NULL) 2636 chosen_section = GHS_default_section_names [(int) kind]; 2637 2638 if (chosen_section) 2639 { 2640 /* Only set the section name if specified by a pragma, because 2641 otherwise it will force those variables to get allocated storage 2642 in this module, rather than by the linker. */ 2643 DECL_SECTION_NAME (decl) = chosen_section; 2644 } 2645 } 2646 } 2647 2648 /* Construct a DISPOSE instruction that is the equivalent of 2649 the given RTX. We have already verified that this should 2650 be possible. */ 2651 2652 char * 2653 construct_dispose_instruction (rtx op) 2654 { 2655 int count = XVECLEN (op, 0); 2656 int stack_bytes; 2657 unsigned long int mask; 2658 int i; 2659 static char buff[ 100 ]; /* XXX */ 2660 int use_callt = 0; 2661 2662 if (count <= 2) 2663 { 2664 error ("bogus DISPOSE construction: %d", count); 2665 return NULL; 2666 } 2667 2668 /* Work out how many bytes to pop off the 2669 stack before retrieving registers. */ 2670 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET); 2671 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS); 2672 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT); 2673 2674 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)); 2675 2676 /* Each pop will remove 4 bytes from the stack.... */ 2677 stack_bytes -= (count - 2) * 4; 2678 2679 /* Make sure that the amount we are popping 2680 will fit into the DISPOSE instruction. */ 2681 if (stack_bytes > 128) 2682 { 2683 error ("too much stack space to dispose of: %d", stack_bytes); 2684 return NULL; 2685 } 2686 2687 /* Now compute the bit mask of registers to push. */ 2688 mask = 0; 2689 2690 for (i = 2; i < count; i++) 2691 { 2692 rtx vector_element = XVECEXP (op, 0, i); 2693 2694 gcc_assert (GET_CODE (vector_element) == SET); 2695 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG); 2696 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element), 2697 SImode)); 2698 2699 if (REGNO (SET_DEST (vector_element)) == 2) 2700 use_callt = 1; 2701 else 2702 mask |= 1 << REGNO (SET_DEST (vector_element)); 2703 } 2704 2705 if (! TARGET_DISABLE_CALLT 2706 && (use_callt || stack_bytes == 0 || stack_bytes == 16)) 2707 { 2708 if (use_callt) 2709 { 2710 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29); 2711 return buff; 2712 } 2713 else 2714 { 2715 for (i = 20; i < 32; i++) 2716 if (mask & (1 << i)) 2717 break; 2718 2719 if (i == 31) 2720 sprintf (buff, "callt ctoff(__callt_return_r31c)"); 2721 else 2722 sprintf (buff, "callt ctoff(__callt_return_r%d_r%d%s)", 2723 i, (mask & (1 << 31)) ? 31 : 29, stack_bytes ? "c" : ""); 2724 } 2725 } 2726 else 2727 { 2728 static char regs [100]; /* XXX */ 2729 int done_one; 2730 2731 /* Generate the DISPOSE instruction. 
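     As a purely illustrative example, a mask covering r26 through r29
     together with 8 bytes of released stack would come out below as
     "dispose 2 {r26 - r29}, r31".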
     Note we could just issue the bit mask as a number as the assembler
     can cope with this, but for the sake of our readers we turn it into
     a textual description.  */
      regs[0] = 0;
      done_one = 0;

      for (i = 20; i < 32; i++)
        {
          if (mask & (1 << i))
            {
              int first;

              if (done_one)
                strcat (regs, ", ");
              else
                done_one = 1;

              first = i;
              strcat (regs, reg_names[ first ]);

              for (i++; i < 32; i++)
                if ((mask & (1 << i)) == 0)
                  break;

              if (i > first + 1)
                {
                  strcat (regs, " - ");
                  strcat (regs, reg_names[ i - 1 ] );
                }
            }
        }

      sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
    }

  return buff;
}

/* Construct a PREPARE instruction that is the equivalent of
   the given RTL.  We have already verified that this should
   be possible.  */

char *
construct_prepare_instruction (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  int i;
  static char buff[ 100 ]; /* XXX */
  int use_callt = 0;

  if (count <= 1)
    {
      error ("bogus PREPARE construction: %d", count);
      return NULL;
    }

  /* Work out how many bytes to push onto
     the stack after storing the registers.  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);

  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));

  /* Each push will put 4 bytes onto the stack.  */
  stack_bytes += (count - 1) * 4;

  /* Make sure that the amount we are pushing
     will fit into the PREPARE instruction.  */
  if (stack_bytes < -128)
    {
      error ("too much stack space to prepare: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 1; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
                                               SImode));

      if (REGNO (SET_SRC (vector_element)) == 2)
        use_callt = 1;
      else
        mask |= 1 << REGNO (SET_SRC (vector_element));
    }

  if ((! TARGET_DISABLE_CALLT)
      && (use_callt || stack_bytes == 0 || stack_bytes == -16))
    {
      if (use_callt)
        {
          sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
          return buff;
        }

      for (i = 20; i < 32; i++)
        if (mask & (1 << i))
          break;

      if (i == 31)
        sprintf (buff, "callt ctoff(__callt_save_r31c)");
      else
        sprintf (buff, "callt ctoff(__callt_save_r%d_r%d%s)",
                 i, (mask & (1 << 31)) ? 31 : 29, stack_bytes ? "c" : "");
    }
  else
    {
      static char regs [100]; /* XXX */
      int done_one;

      /* Generate the PREPARE instruction.  Note we could just issue the
         bit mask as a number as the assembler can cope with this, but for
         the sake of our readers we turn it into a textual description.
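         Again purely as an illustration: a residual adjustment of -32
         bytes with a mask covering r26 through r29 would come out below
         as "prepare {r26 - r29}, 8".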
*/ 2853 regs[0] = 0; 2854 done_one = 0; 2855 2856 for (i = 20; i < 32; i++) 2857 { 2858 if (mask & (1 << i)) 2859 { 2860 int first; 2861 2862 if (done_one) 2863 strcat (regs, ", "); 2864 else 2865 done_one = 1; 2866 2867 first = i; 2868 strcat (regs, reg_names[ first ]); 2869 2870 for (i++; i < 32; i++) 2871 if ((mask & (1 << i)) == 0) 2872 break; 2873 2874 if (i > first + 1) 2875 { 2876 strcat (regs, " - "); 2877 strcat (regs, reg_names[ i - 1 ] ); 2878 } 2879 } 2880 } 2881 2882 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4); 2883 } 2884 2885 return buff; 2886 } 2887 2888 /* Return an RTX indicating where the return address to the 2889 calling function can be found. */ 2890 2891 rtx 2892 v850_return_addr (int count) 2893 { 2894 if (count != 0) 2895 return const0_rtx; 2896 2897 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM); 2898 } 2899 2900 /* Implement TARGET_ASM_INIT_SECTIONS. */ 2901 2902 static void 2903 v850_asm_init_sections (void) 2904 { 2905 rosdata_section 2906 = get_unnamed_section (0, output_section_asm_op, 2907 "\t.section .rosdata,\"a\""); 2908 2909 rozdata_section 2910 = get_unnamed_section (0, output_section_asm_op, 2911 "\t.section .rozdata,\"a\""); 2912 2913 tdata_section 2914 = get_unnamed_section (SECTION_WRITE, output_section_asm_op, 2915 "\t.section .tdata,\"aw\""); 2916 2917 zdata_section 2918 = get_unnamed_section (SECTION_WRITE, output_section_asm_op, 2919 "\t.section .zdata,\"aw\""); 2920 2921 zbss_section 2922 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, 2923 output_section_asm_op, 2924 "\t.section .zbss,\"aw\""); 2925 } 2926 2927 static section * 2928 v850_select_section (tree exp, 2929 int reloc ATTRIBUTE_UNUSED, 2930 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED) 2931 { 2932 if (TREE_CODE (exp) == VAR_DECL) 2933 { 2934 int is_const; 2935 if (!TREE_READONLY (exp) 2936 || TREE_SIDE_EFFECTS (exp) 2937 || !DECL_INITIAL (exp) 2938 || (DECL_INITIAL (exp) != error_mark_node 2939 && !TREE_CONSTANT (DECL_INITIAL (exp)))) 2940 is_const = FALSE; 2941 else 2942 is_const = TRUE; 2943 2944 switch (v850_get_data_area (exp)) 2945 { 2946 case DATA_AREA_ZDA: 2947 return is_const ? rozdata_section : zdata_section; 2948 2949 case DATA_AREA_TDA: 2950 return tdata_section; 2951 2952 case DATA_AREA_SDA: 2953 return is_const ? rosdata_section : sdata_section; 2954 2955 default: 2956 return is_const ? readonly_data_section : data_section; 2957 } 2958 } 2959 return readonly_data_section; 2960 } 2961 2962 /* Worker function for TARGET_RETURN_IN_MEMORY. */ 2963 2964 static bool 2965 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED) 2966 { 2967 /* Return values > 8 bytes in length in memory. */ 2968 return int_size_in_bytes (type) > 8 || TYPE_MODE (type) == BLKmode; 2969 } 2970 2971 /* Worker function for TARGET_FUNCTION_VALUE. */ 2972 2973 rtx 2974 v850_function_value (const_tree valtype, 2975 const_tree fn_decl_or_type ATTRIBUTE_UNUSED, 2976 bool outgoing ATTRIBUTE_UNUSED) 2977 { 2978 return gen_rtx_REG (TYPE_MODE (valtype), 10); 2979 } 2980 2981 2982 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */ 2983 2984 static void 2985 v850_setup_incoming_varargs (CUMULATIVE_ARGS *ca, 2986 enum machine_mode mode ATTRIBUTE_UNUSED, 2987 tree type ATTRIBUTE_UNUSED, 2988 int *pretend_arg_size ATTRIBUTE_UNUSED, 2989 int second_time ATTRIBUTE_UNUSED) 2990 { 2991 ca->anonymous_args = (!TARGET_GHS ? 1 : 0); 2992 } 2993 2994 /* Worker function for TARGET_CAN_ELIMINATE. 
*/ 2995 2996 static bool 2997 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to) 2998 { 2999 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true); 3000 } 3001 3002 3003 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE. */ 3004 3005 static void 3006 v850_asm_trampoline_template (FILE *f) 3007 { 3008 fprintf (f, "\tjarl .+4,r12\n"); 3009 fprintf (f, "\tld.w 12[r12],r20\n"); 3010 fprintf (f, "\tld.w 16[r12],r12\n"); 3011 fprintf (f, "\tjmp [r12]\n"); 3012 fprintf (f, "\tnop\n"); 3013 fprintf (f, "\t.long 0\n"); 3014 fprintf (f, "\t.long 0\n"); 3015 } 3016 3017 /* Worker function for TARGET_TRAMPOLINE_INIT. */ 3018 3019 static void 3020 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value) 3021 { 3022 rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0); 3023 3024 emit_block_move (m_tramp, assemble_trampoline_template (), 3025 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL); 3026 3027 mem = adjust_address (m_tramp, SImode, 16); 3028 emit_move_insn (mem, chain_value); 3029 mem = adjust_address (m_tramp, SImode, 20); 3030 emit_move_insn (mem, fnaddr); 3031 } 3032 3033 #include "gt-v850.h" 3034