/* Xstormy16 target functions.
   Copyright (C) 1997-2020 Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "attribs.h"
#include "gimple.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "output.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "calls.h"
#include "explow.h"
#include "expr.h"
#include "langhooks.h"
#include "cfgrtl.h"
#include "gimplify.h"
#include "reload.h"
#include "builtins.h"

/* This file should be included last.  */
#include "target-def.h"

static rtx emit_addhi3_postreload (rtx, rtx, rtx);
static void xstormy16_asm_out_constructor (rtx, int);
static void xstormy16_asm_out_destructor (rtx, int);
static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
					   HOST_WIDE_INT, tree);

static void xstormy16_init_builtins (void);
static rtx xstormy16_expand_builtin (tree, rtx, rtx, machine_mode, int);
static int xstormy16_address_cost (rtx, machine_mode, addr_space_t, bool);
static bool xstormy16_return_in_memory (const_tree, const_tree);

static GTY(()) section *bss100_section;

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
xstormy16_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
		     int outer_code ATTRIBUTE_UNUSED,
		     int opno ATTRIBUTE_UNUSED, int *total,
		     bool speed ATTRIBUTE_UNUSED)
{
  int code = GET_CODE (x);

  switch (code)
    {
    case CONST_INT:
      if (INTVAL (x) < 16 && INTVAL (x) >= 0)
	*total = COSTS_N_INSNS (1) / 2;
      else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (2);
      return true;

    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      *total = COSTS_N_INSNS (2);
      return true;

    case MULT:
      *total = COSTS_N_INSNS (35 + 6);
      return true;
    case DIV:
      *total = COSTS_N_INSNS (51 - 6);
      return true;

    default:
      return false;
    }
}

static int
xstormy16_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
			addr_space_t as ATTRIBUTE_UNUSED,
			bool speed ATTRIBUTE_UNUSED)
{
  return (CONST_INT_P (x) ? 2
	  : GET_CODE (x) == PLUS ? 7
	  : 5);
}
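
/* Illustration of the cost model above: a constant in [0, 15] is
   costed at half an insn and one in [0, 255] at a full insn, while
   multiplies and divides are heavily penalized (41 and 45 insn
   equivalents respectively).  For addresses, a constant location is
   cheapest (2), a plain base register costs 5, and base-plus-offset
   costs 7, so passes that consult address costs will prefer the
   simpler forms where possible.  */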

/* Worker function for TARGET_MEMORY_MOVE_COST.  */

static int
xstormy16_memory_move_cost (machine_mode mode, reg_class_t rclass,
			    bool in)
{
  return (5 + memory_move_secondary_cost (mode, rclass, in));
}

/* Branches are handled as follows:

   1. HImode compare-and-branches.  The machine supports these
      natively, so the appropriate pattern is emitted directly.

   2. SImode EQ and NE.  These are emitted as pairs of HImode
      compare-and-branches.

   3. SImode LT, GE, LTU and GEU.  These are emitted as a sequence
      of a SImode subtract followed by a branch (not a compare-and-branch),
      like this:
      sub
      sbc
      blt

   4. SImode GT, LE, GTU, LEU.  These are emitted as a sequence like:
      sub
      sbc
      blt
      or
      bne.  */

/* Emit a branch of kind CODE to location LOC.  */

void
xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
{
  rtx condition_rtx, loc_ref, branch, cy_clobber;
  rtvec vec;
  machine_mode mode;

  mode = GET_MODE (op0);
  gcc_assert (mode == HImode || mode == SImode);

  if (mode == SImode
      && (code == GT || code == LE || code == GTU || code == LEU))
    {
      int unsigned_p = (code == GTU || code == LEU);
      int gt_p = (code == GT || code == GTU);
      rtx lab = NULL_RTX;

      if (gt_p)
	lab = gen_label_rtx ();
      xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1,
			      gt_p ? lab : loc);
      /* This should be generated as a comparison against the temporary
	 created by the previous insn, but reload can't handle that.  */
      xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
      if (gt_p)
	emit_label (lab);
      return;
    }
  else if (mode == SImode
	   && (code == NE || code == EQ)
	   && op1 != const0_rtx)
    {
      rtx op0_word, op1_word;
      rtx lab = NULL_RTX;
      int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
      int i;

      if (code == EQ)
	lab = gen_label_rtx ();

      for (i = 0; i < num_words - 1; i++)
	{
	  op0_word = simplify_gen_subreg (word_mode, op0, mode,
					  i * UNITS_PER_WORD);
	  op1_word = simplify_gen_subreg (word_mode, op1, mode,
					  i * UNITS_PER_WORD);
	  xstormy16_emit_cbranch (NE, op0_word, op1_word,
				  code == EQ ? lab : loc);
	}
      op0_word = simplify_gen_subreg (word_mode, op0, mode,
				      i * UNITS_PER_WORD);
      op1_word = simplify_gen_subreg (word_mode, op1, mode,
				      i * UNITS_PER_WORD);
      xstormy16_emit_cbranch (code, op0_word, op1_word, loc);

      if (code == EQ)
	emit_label (lab);
      return;
    }

  /* We can't allow reload to try to generate any reload after a branch,
     so when some register must match we must make the temporary
     ourselves.  */
  if (mode != HImode)
    {
      rtx tmp;
      tmp = gen_reg_rtx (mode);
      emit_move_insn (tmp, op0);
      op0 = tmp;
    }

  condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  branch = gen_rtx_SET (pc_rtx,
			gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
					      loc_ref, pc_rtx));

  cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

  if (mode == HImode)
    vec = gen_rtvec (2, branch, cy_clobber);
  else if (code == NE || code == EQ)
    vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
  else
    {
      rtx sub;
#if 0
      sub = gen_rtx_SET (op0, gen_rtx_MINUS (SImode, op0, op1));
#else
      sub = gen_rtx_CLOBBER (SImode, op0);
#endif
      vec = gen_rtvec (3, branch, sub, cy_clobber);
    }

  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
}
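
/* Example of the SImode GT/GTU recursion above: "branch to LOC if
   op0 > op1" is emitted as two simpler branches plus a local label,

	if (op0 < op1) goto lab;    (case 3 sequence: sub/sbc/blt)
	if (op0 != op1) goto loc;   (case 2 sequence: HImode bnz pair)
     lab:

   which together implement case 4 of the table above.  */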

/* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
   the arithmetic operation.  Most of the work is done by
   xstormy16_expand_arith.  */

void
xstormy16_split_cbranch (machine_mode mode, rtx label, rtx comparison,
			 rtx dest)
{
  rtx op0 = XEXP (comparison, 0);
  rtx op1 = XEXP (comparison, 1);
  rtx_insn *seq, *last_insn;
  rtx compare;

  start_sequence ();
  xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
  seq = get_insns ();
  end_sequence ();

  gcc_assert (INSN_P (seq));

  last_insn = seq;
  while (NEXT_INSN (last_insn) != NULL_RTX)
    last_insn = NEXT_INSN (last_insn);

  compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
  PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
  XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
  emit_insn (seq);
}


/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label.

   OP is the conditional expression, or NULL for branch-always.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed,
			     rtx_insn *insn)
{
  static char string[64];
  int need_longbranch = (op != NULL_RTX
			 ? get_attr_length (insn) == 8
			 : get_attr_length (insn) == 4);
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  const char *operands;
  enum rtx_code code;

  if (! op)
    {
      if (need_longbranch)
	ccode = "jmpf";
      else
	ccode = "br";
      sprintf (string, "%s %s", ccode, label);
      return string;
    }

  code = GET_CODE (op);

  if (! REG_P (XEXP (op, 0)))
    {
      code = swap_condition (code);
      operands = "%3,%2";
    }
  else
    operands = "%2,%3";

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:   ccode = "z";   break;
    case NE:   ccode = "nz";  break;
    case GE:   ccode = "ge";  break;
    case LT:   ccode = "lt";  break;
    case GT:   ccode = "gt";  break;
    case LE:   ccode = "le";  break;
    case GEU:  ccode = "nc";  break;
    case LTU:  ccode = "c";   break;
    case GTU:  ccode = "hi";  break;
    case LEU:  ccode = "ls";  break;

    default:
      gcc_unreachable ();
    }

  if (need_longbranch)
    templ = "b%s %s,.+8 | jmpf %s";
  else
    templ = "b%s %s,%s";
  sprintf (string, templ, ccode, operands, label);

  return string;
}
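
/* Example outputs from the function above: a short EQ branch prints as
   "bz %2,%3,LABEL", while the long form inverts the condition (via
   really_reversed) and hops over a far jump instead:
   "bnz %2,%3,.+8 | jmpf LABEL".  */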

/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, but suitable for the tail of a
   SImode branch.

   OP is the conditional expression (OP is never NULL_RTX).

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
xstormy16_output_cbranch_si (rtx op, const char *label, int reversed,
			     rtx_insn *insn)
{
  static char string[64];
  int need_longbranch = get_attr_length (insn) >= 8;
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  char prevop[16];
  enum rtx_code code;

  code = GET_CODE (op);

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:   ccode = "z";   break;
    case NE:   ccode = "nz";  break;
    case GE:   ccode = "ge";  break;
    case LT:   ccode = "lt";  break;
    case GEU:  ccode = "nc";  break;
    case LTU:  ccode = "c";   break;

      /* The missing codes above should never be generated.  */
    default:
      gcc_unreachable ();
    }

  switch (code)
    {
    case EQ: case NE:
      {
	int regnum;

	gcc_assert (REG_P (XEXP (op, 0)));

	regnum = REGNO (XEXP (op, 0));
	sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum + 1]);
      }
      break;

    case GE: case LT: case GEU: case LTU:
      strcpy (prevop, "sbc %2,%3");
      break;

    default:
      gcc_unreachable ();
    }

  if (need_longbranch)
    templ = "%s | b%s .+6 | jmpf %s";
  else
    templ = "%s | b%s %s";
  sprintf (string, templ, prevop, ccode, label);

  return string;
}

/* Many machines have some registers that cannot be copied directly to or from
   memory or even from other types of registers.  An example is the `MQ'
   register, which on most machines, can only be copied to or from general
   registers, but not memory.  Some machines allow copying all registers to and
   from memory, but require a scratch register for stores to some memory
   locations (e.g., those with symbolic address on the RT, and those with
   certain symbolic address on the SPARC when compiling PIC).  In some cases,
   both an intermediate and a scratch register are required.

   You should define these macros to indicate to the reload phase that it may
   need to allocate at least one register for a reload in addition to the
   register to contain the data.  Specifically, if copying X to a register
   RCLASS in MODE requires an intermediate register, you should define
   `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
   whose registers can be used as intermediate registers or scratch registers.

   If copying a register RCLASS in MODE to X requires an intermediate or
   scratch register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to
   return the largest register class required.  If the requirements for input
   and output reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should
   be used instead of defining both macros identically.

   The values returned by these macros are often `GENERAL_REGS'.  Return
   `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
   to or from a register of RCLASS in MODE without requiring a scratch
   register.  Do not define this macro if it would always return `NO_REGS'.

   If a scratch register is required (either with or without an intermediate
   register), you should define patterns for `reload_inM' or `reload_outM', as
   required.  These patterns, which will normally be implemented with a
   `define_expand', should be similar to the `movM' patterns, except that
   operand 2 is the scratch register.

   Define constraints for the reload register and scratch register that
   contain a single register class.  If the original reload register (whose
   class is RCLASS) can meet the constraint given in the pattern, the value
   returned by these macros is used for the class of the scratch register.
   Otherwise, two additional reload registers are required.  Their classes are
   obtained from the constraints in the insn pattern.

   X might be a pseudo-register or a `subreg' of a pseudo-register, which
   could either be in a hard register or in memory.  Use `true_regnum' to find
   out; it will return -1 if the pseudo is in memory and the hard register
   number if it is in a register.

   These macros should not be used in the case where a particular class of
   registers can only be copied to memory and not to another class of
   registers.  In that case, secondary reload registers are not needed and
   would not be helpful.  Instead, a stack location must be used to perform
   the copy and the `movM' pattern should use memory as an intermediate
   storage.  This case often occurs between floating-point and general
   registers.  */

enum reg_class
xstormy16_secondary_reload_class (enum reg_class rclass,
				  machine_mode mode ATTRIBUTE_UNUSED,
				  rtx x)
{
  /* This chip has the interesting property that only the first eight
     registers can be moved to/from memory.  */
  if ((MEM_P (x)
       || ((GET_CODE (x) == SUBREG || REG_P (x))
	   && (true_regnum (x) == -1
	       || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
      && ! reg_class_subset_p (rclass, EIGHT_REGS))
    return EIGHT_REGS;

  return NO_REGS;
}

/* Worker function for TARGET_PREFERRED_RELOAD_CLASS
   and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS.  */

static reg_class_t
xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
{
  /* Only the first eight registers can be moved to/from memory.
     So those prefer EIGHT_REGS.

     Similarly reloading an auto-increment address is going to
     require loads and stores, so we must use EIGHT_REGS for those
     too.  */
  if (rclass == GENERAL_REGS
      && (MEM_P (x)
	  || GET_CODE (x) == POST_INC
	  || GET_CODE (x) == PRE_DEC
	  || GET_CODE (x) == PRE_MODIFY))
    return EIGHT_REGS;

  return rclass;
}
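
/* Concretely for this port: reloading a MEM into (or out of) a class
   that is not a subset of EIGHT_REGS makes the hook above return
   EIGHT_REGS, so reload stages the value through one of the first
   eight registers, the only ones with memory move instructions.  */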

/* Predicate for symbols and addresses that reflect special 8-bit
   addressing.  */

int
xstormy16_below100_symbol (rtx x,
			   machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
    x = XEXP (x, 0);

  if (GET_CODE (x) == SYMBOL_REF)
    return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;

  if (CONST_INT_P (x))
    {
      HOST_WIDE_INT i = INTVAL (x);

      if ((i >= 0x0000 && i <= 0x00ff)
	  || (i >= 0x7f00 && i <= 0x7fff))
	return 1;
    }
  return 0;
}

/* Likewise, but only for non-volatile MEMs, for patterns where the
   MEM will get split into smaller sized accesses.  */

int
xstormy16_splittable_below100_operand (rtx x, machine_mode mode)
{
  if (MEM_P (x) && MEM_VOLATILE_P (x))
    return 0;
  return xstormy16_below100_operand (x, mode);
}

/* Expand an 8-bit IOR.  This either detects the one case we can
   actually do, or uses a 16-bit IOR.  */

void
xstormy16_expand_iorqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  if (xstormy16_onebit_set_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
	in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
	out = gen_reg_rtx (QImode);
      emit_insn (gen_iorqi3_internal (out, in, val));
      if (out != operands[0])
	emit_move_insn (operands[0], out);
      return;
    }

  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_iorhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}

/* Expand an 8-bit AND.  This either detects the one case we can
   actually do, or uses a 16-bit AND.  */

void
xstormy16_expand_andqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  if (xstormy16_onebit_clr_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
	in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
	out = gen_reg_rtx (QImode);
      emit_insn (gen_andqi3_internal (out, in, val));
      if (out != operands[0])
	emit_move_insn (operands[0], out);
      return;
    }

  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_andhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}
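
/* Example of the split made by the two expanders above: a source-level
   "flags |= 0x04" on a QImode object has exactly one bit set, so it
   maps onto the single-bit iorqi3_internal pattern, whereas
   "flags |= 0x06" (two bits set) falls back to widening the operands
   to HImode and using the ordinary 16-bit IOR.  */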

#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)				\
  (CONST_INT_P (X)							\
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)			\
  (CONST_INT_P (X)							\
   && INTVAL (X) + (OFFSET) >= 0					\
   && INTVAL (X) + (OFFSET) < 0x8000					\
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))

bool
xstormy16_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
				rtx x, bool strict)
{
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
    return true;

  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
    {
      x = XEXP (x, 0);
      /* PR 31232: Do not allow INT+INT as an address.  */
      if (CONST_INT_P (x))
	return false;
    }

  if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
      || GET_CODE (x) == POST_INC
      || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);

  if (REG_P (x)
      && REGNO_OK_FOR_BASE_P (REGNO (x))
      && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
    return true;

  if (xstormy16_below100_symbol (x, mode))
    return true;

  return false;
}

/* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.

   On this chip, this is true if the address is valid with an offset
   of 0 but not of 6, because in that case it cannot be used as an
   address for DImode or DFmode, or if the address is a post-increment
   or pre-decrement address.  */

static bool
xstormy16_mode_dependent_address_p (const_rtx x,
				    addr_space_t as ATTRIBUTE_UNUSED)
{
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
      && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
    return true;

  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
      && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
    return true;

  /* Auto-increment addresses are now treated generically in recog.c.  */
  return false;
}

int
short_memory_operand (rtx x, machine_mode mode)
{
  if (! memory_operand (x, mode))
    return 0;
  return (GET_CODE (XEXP (x, 0)) != PLUS);
}
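
/* Worked example of the two address macros above: a bare constant
   address is legal only in [0x0000, 0x00ff] or [0x7f00, 0x7fff], so
   (mem:HI (const_int 0x0080)) is legitimate while
   (mem:HI (const_int 0x1234)) is not, and a register-plus-constant
   offset must lie in [-2048, 2047].  */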

/* Splitter for the 'move' patterns, for modes not directly implemented
   by hardware.  Emit insns to copy a value of mode MODE from SRC to
   DEST.

   This function is only called when reload_completed.  */

void
xstormy16_split_move (machine_mode mode, rtx dest, rtx src)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int direction, end, i;
  int src_modifies = 0;
  int dest_modifies = 0;
  int src_volatile = 0;
  int dest_volatile = 0;
  rtx mem_operand;
  rtx auto_inc_reg_rtx = NULL_RTX;

  /* Check initial conditions.  */
  gcc_assert (reload_completed
	      && mode != QImode && mode != HImode
	      && nonimmediate_operand (dest, mode)
	      && general_operand (src, mode));

  /* This case is not supported below, and shouldn't be generated.  */
  gcc_assert (! MEM_P (dest) || ! MEM_P (src));

  /* This case is very very bad after reload, so trap it now.  */
  gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);

  /* The general idea is to copy by words, offsetting the source and
     destination.  Normally the least-significant word will be copied
     first, but for pre-dec operations it's better to copy the
     most-significant word first.  Only one operand can be a pre-dec
     or post-inc operand.

     It's also possible that the copy overlaps so that the direction
     must be reversed.  */
  direction = 1;

  if (MEM_P (dest))
    {
      mem_operand = XEXP (dest, 0);
      dest_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      dest_volatile = MEM_VOLATILE_P (dest);
      if (dest_volatile)
	{
	  dest = copy_rtx (dest);
	  MEM_VOLATILE_P (dest) = 0;
	}
    }
  else if (MEM_P (src))
    {
      mem_operand = XEXP (src, 0);
      src_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
	auto_inc_reg_rtx = XEXP (mem_operand, 0);
      src_volatile = MEM_VOLATILE_P (src);
      if (src_volatile)
	{
	  src = copy_rtx (src);
	  MEM_VOLATILE_P (src) = 0;
	}
    }
  else
    mem_operand = NULL_RTX;

  if (mem_operand == NULL_RTX)
    {
      if (REG_P (src)
	  && REG_P (dest)
	  && reg_overlap_mentioned_p (dest, src)
	  && REGNO (dest) > REGNO (src))
	direction = -1;
    }
  else if (GET_CODE (mem_operand) == PRE_DEC
	   || (GET_CODE (mem_operand) == PLUS
	       && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
    direction = -1;
  else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
    {
      int regno;

      gcc_assert (REG_P (dest));
      regno = REGNO (dest);

      gcc_assert (refers_to_regno_p (regno, regno + num_words,
				     mem_operand, 0));

      if (refers_to_regno_p (regno, mem_operand))
	direction = -1;
      else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
				  mem_operand, 0))
	direction = 1;
      else
	/* This means something like
	     (set (reg:DI r0) (mem:DI (reg:HI r1)))
	   which we'd need to support by doing the set of the second word
	   last.  */
	gcc_unreachable ();
    }

  end = direction < 0 ? -1 : num_words;
  for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
    {
      rtx w_src, w_dest, insn;

      if (src_modifies)
	w_src = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
      if (src_volatile)
	MEM_VOLATILE_P (w_src) = 1;
      if (dest_modifies)
	w_dest = gen_rtx_MEM (word_mode, mem_operand);
      else
	w_dest = simplify_gen_subreg (word_mode, dest, mode,
				      i * UNITS_PER_WORD);
      if (dest_volatile)
	MEM_VOLATILE_P (w_dest) = 1;

      /* The simplify_subreg calls must always be able to simplify.  */
      gcc_assert (GET_CODE (w_src) != SUBREG
		  && GET_CODE (w_dest) != SUBREG);

      insn = emit_insn (gen_rtx_SET (w_dest, w_src));
      if (auto_inc_reg_rtx)
	REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
					    auto_inc_reg_rtx,
					    REG_NOTES (insn));
    }
}
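
/* Overlap example for the direction choice above: splitting
   (set (reg:SI r4) (reg:SI r5)) must copy r5->r4 before r6->r5, i.e.
   least-significant word first, whereas (set (reg:SI r5) (reg:SI r4))
   needs the opposite order; the REGNO comparison picks 'direction'
   accordingly.  */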

/* Expander for the 'move' patterns.  Emit insns to copy a value of
   mode MODE from SRC to DEST.  */

void
xstormy16_expand_move (machine_mode mode, rtx dest, rtx src)
{
  if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (dest, 0);
      rtx dest_reg = XEXP (pmv, 0);
      rtx dest_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (dest_reg, dest_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode,
				     gen_rtx_REG (BImode, CARRY_REGNUM));

      dest = gen_rtx_MEM (mode, dest_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }
  else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (src, 0);
      rtx src_reg = XEXP (pmv, 0);
      rtx src_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (src_reg, src_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode,
				     gen_rtx_REG (BImode, CARRY_REGNUM));

      src = gen_rtx_MEM (mode, src_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }

  /* There are only limited immediate-to-memory move instructions.  */
  if (! reload_in_progress
      && ! reload_completed
      && MEM_P (dest)
      && (! CONST_INT_P (XEXP (dest, 0))
	  || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
      && ! xstormy16_below100_operand (dest, mode)
      && ! REG_P (src)
      && GET_CODE (src) != SUBREG)
    src = copy_to_mode_reg (mode, src);

  /* Don't emit something we would immediately split.  */
  if (reload_completed
      && mode != HImode && mode != QImode)
    {
      xstormy16_split_move (mode, dest, src);
      return;
    }

  emit_insn (gen_rtx_SET (dest, src));
}

/* Stack Layout:

   The stack is laid out as follows:

SP->
FP->	Local variables
	Register save area (up to 4 words)
	Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)

AP->	Return address (two words)
	9th procedure parameter word
	10th procedure parameter word
	...
	last procedure parameter word

   The frame pointer location is tuned to make it most likely that all
   parameters and local variables can be accessed using a load-indexed
   instruction.  */

/* A structure to describe the layout.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};

/* Does REGNO need to be saved?  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)					\
  ((df_regs_ever_live_p (REGNUM) && !call_used_or_fixed_reg_p (REGNUM))	\
   || (IFUN && !fixed_regs[REGNUM] && call_used_or_fixed_reg_p (REGNUM)	\
       && (REGNUM != CARRY_REGNUM)					\
       && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))
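
/* Reading the macro: an ordinary function saves exactly those
   call-saved registers that are live somewhere in the function.  An
   interrupt function (IFUN) additionally saves call-clobbered
   registers, either because they are used directly or because a
   nested call could clobber them (the ! crtl->is_leaf case); the
   carry bit is excluded.  */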

/* Compute the stack layout.  */

struct xstormy16_stack_layout
xstormy16_compute_stack_layout (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  layout.locals_size = get_frame_size ();

  layout.register_save_size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      layout.register_save_size += UNITS_PER_WORD;

  if (cfun->stdarg)
    layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
  else
    layout.stdarg_save_size = 0;

  layout.frame_size = (layout.locals_size
		       + layout.register_save_size
		       + layout.stdarg_save_size);

  if (crtl->args.size <= 2048 && crtl->args.size != -1)
    {
      if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
	  + crtl->args.size <= 2048)
	layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
      else
	layout.fp_minus_ap = 2048 - crtl->args.size;
    }
  else
    layout.fp_minus_ap = (layout.stdarg_save_size
			  + layout.register_save_size
			  - INCOMING_FRAME_SP_OFFSET);
  layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
			- layout.fp_minus_ap);
  layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
  return layout;
}

/* Worker function for TARGET_CAN_ELIMINATE.  */

static bool
xstormy16_can_eliminate (const int from, const int to)
{
  return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
	  ? ! frame_pointer_needed
	  : true);
}

/* Determine how all the special registers get eliminated.  */

int
xstormy16_initial_elimination_offset (int from, int to)
{
  struct xstormy16_stack_layout layout;
  int result;

  layout = xstormy16_compute_stack_layout ();

  if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    result = layout.sp_minus_fp - layout.locals_size;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    result = - layout.locals_size;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    result = - layout.fp_minus_ap;
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    result = - (layout.sp_minus_fp + layout.fp_minus_ap);
  else
    gcc_unreachable ();

  return result;
}

static rtx
emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
{
  rtx set, clobber, insn;

  set = gen_rtx_SET (dest, gen_rtx_PLUS (HImode, src0, src1));
  clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
  insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
  return insn;
}

/* Called after register allocation to add any instructions needed for
   the prologue.  Using a prologue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.

   Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
   so that the debug info generation code can handle them properly.  */

void
xstormy16_expand_prologue (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  rtx insn;
  rtx mem_push_rtx;
  const int ifun = xstormy16_interrupt_function_p ();

  mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
  mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);

  layout = xstormy16_compute_stack_layout ();

  if (layout.locals_size >= 32768)
    error ("local variable memory requirements exceed capacity");

  if (flag_stack_usage_info)
    current_function_static_stack_size = layout.frame_size;

  /* Save the argument registers if necessary.  */
  if (layout.stdarg_save_size)
    for (regno = FIRST_ARGUMENT_REGISTER;
	 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
	 regno++)
      {
	rtx dwarf;
	rtx reg = gen_rtx_REG (HImode, regno);

	insn = emit_move_insn (mem_push_rtx, reg);
	RTX_FRAME_RELATED_P (insn) = 1;

	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (gen_rtx_MEM (Pmode,
							  stack_pointer_rtx),
					     reg);
	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (stack_pointer_rtx,
					     plus_constant (Pmode,
							    stack_pointer_rtx,
							    GET_MODE_SIZE (Pmode)));
	add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* Push each of the registers to save.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
	rtx dwarf;
	rtx reg = gen_rtx_REG (HImode, regno);

	insn = emit_move_insn (mem_push_rtx, reg);
	RTX_FRAME_RELATED_P (insn) = 1;

	dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

	XVECEXP (dwarf, 0, 0) = gen_rtx_SET (gen_rtx_MEM (Pmode,
							  stack_pointer_rtx),
					     reg);
	XVECEXP (dwarf, 0, 1) = gen_rtx_SET (stack_pointer_rtx,
					     plus_constant (Pmode,
							    stack_pointer_rtx,
							    GET_MODE_SIZE (Pmode)));
	add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
	RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* It's just possible that the SP here might be what we need for
     the new FP...  */
  if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Allocate space for local variables.  */
  if (layout.locals_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				     GEN_INT (layout.locals_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up the frame pointer, if required.  */
  if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;

      if (layout.sp_minus_fp)
	{
	  insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
					 hard_frame_pointer_rtx,
					 GEN_INT (- layout.sp_minus_fp));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
}

/* Do we need an epilogue at all?  */

int
direct_return (void)
{
  return (reload_completed
	  && xstormy16_compute_stack_layout ().frame_size == 0
	  && ! xstormy16_interrupt_function_p ());
}
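
/* Note that the stack grows upward here: a push is a store through
   (post_inc SP), and locals are allocated by *adding* locals_size to
   SP.  A typical non-interrupt prologue therefore stores each
   register satisfying REG_NEEDS_SAVE through (sp++), adds locals_size
   to sp, and, if a frame pointer is needed, copies sp to fp
   (adjusting fp by -sp_minus_fp when the two differ).  */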

/* Called after register allocation to add any instructions needed for
   the epilogue.  Using an epilogue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_EPILOGUE macro,
   since it allows the scheduler to intermix instructions with the
   restores of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.  */

void
xstormy16_expand_epilogue (void)
{
  struct xstormy16_stack_layout layout;
  rtx mem_pop_rtx;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* Pop the stack for the locals.  */
  if (layout.locals_size)
    {
      if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
	emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
      else
	emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
				GEN_INT (- layout.locals_size));
    }

  /* Restore any call-saved registers.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (REG_NEEDS_SAVE (regno, ifun))
      emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);

  /* Pop the stack for the stdarg save area.  */
  if (layout.stdarg_save_size)
    emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
			    GEN_INT (- layout.stdarg_save_size));

  /* Return.  */
  if (ifun)
    emit_jump_insn (gen_return_internal_interrupt ());
  else
    emit_jump_insn (gen_return_internal ());
}

int
xstormy16_epilogue_uses (int regno)
{
  if (reload_completed && call_used_or_fixed_reg_p (regno))
    {
      const int ifun = xstormy16_interrupt_function_p ();
      return REG_NEEDS_SAVE (regno, ifun);
    }
  return 0;
}

void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}

/* Update CUM to advance past argument ARG.  Once this is done,
   the variable CUM is suitable for analyzing the *following*
   argument with `TARGET_FUNCTION_ARG', etc.

   This function need not do anything if the argument in question was
   passed on the stack.  The compiler knows how to track the amount of
   stack space used for arguments without any special help.  However,
   it makes life easier for xstormy16_build_builtin_va_list if it does
   update the word count.  */

static void
xstormy16_function_arg_advance (cumulative_args_t cum_v,
				const function_arg_info &arg)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  /* If an argument would otherwise be passed partially in registers,
     and partially on the stack, the whole of it is passed on the
     stack.  */
  if (*cum < NUM_ARGUMENT_REGISTERS
      && (*cum + XSTORMY16_WORD_SIZE (arg.type, arg.mode)
	  > NUM_ARGUMENT_REGISTERS))
    *cum = NUM_ARGUMENT_REGISTERS;

  *cum += XSTORMY16_WORD_SIZE (arg.type, arg.mode);
}
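
/* Example: with one register slot left, a one-word argument still goes
   in that register, but a two-word argument would straddle the
   boundary; in that case *cum jumps straight to NUM_ARGUMENT_REGISTERS
   and the whole value goes on the stack, since arguments are never
   split between registers and stack.  */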

static rtx
xstormy16_function_arg (cumulative_args_t cum_v, const function_arg_info &arg)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (arg.end_marker_p ())
    return const0_rtx;
  if (targetm.calls.must_pass_in_stack (arg)
      || (*cum + XSTORMY16_WORD_SIZE (arg.type, arg.mode)
	  > NUM_ARGUMENT_REGISTERS))
    return NULL_RTX;
  return gen_rtx_REG (arg.mode, *cum + FIRST_ARGUMENT_REGISTER);
}

/* Build the va_list type.

   For this chip, va_list is a record containing a counter and a pointer.
   The counter is of type 'unsigned int' and indicates how many bytes
   have been used to date.  The pointer indicates the stack position
   for arguments that have not been passed in registers.
   To keep the layout nice, the pointer is first in the structure.  */

static tree
xstormy16_build_builtin_va_list (void)
{
  tree f_1, f_2, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (BUILTINS_LOCATION,
			  TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_1 = build_decl (BUILTINS_LOCATION,
		    FIELD_DECL, get_identifier ("base"),
		    ptr_type_node);
  f_2 = build_decl (BUILTINS_LOCATION,
		    FIELD_DECL, get_identifier ("count"),
		    unsigned_type_node);

  DECL_FIELD_CONTEXT (f_1) = record;
  DECL_FIELD_CONTEXT (f_2) = record;

  TYPE_STUB_DECL (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_1;
  DECL_CHAIN (f_1) = f_2;

  layout_type (record);

  return record;
}

/* Implement the stdarg/varargs va_start macro.  STDARG_P is nonzero if this
   is stdarg.h instead of varargs.h.  VALIST is the tree of the va_list
   variable to initialize.  NEXTARG is the machine independent notion of the
   'next' argument after the variable arguments.  */

static void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t, u;

  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
  u = fold_convert (TREE_TYPE (count), u);
  t = fold_build_pointer_plus (t, u);
  t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
	      build_int_cst (NULL_TREE,
			     crtl->args.info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}

/* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
   of type va_list as a tree, TYPE is the type passed to va_arg.
   Note:  This algorithm is documented in stormy-abi.  */

static tree
xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
				gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
		  NULL_TREE);

  must_stack = must_pass_va_arg_in_stack (type);
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
  lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
  addr = create_tmp_var (ptr_type_node);

  if (!must_stack)
    {
      tree r;

      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
		  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
		  NULL_TREE);
      gimplify_and_add (t, pre_p);

      t = fold_build_pointer_plus (base, count_tmp);
      gimplify_assign (addr, t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
		+ INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
	      fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (t), fold (t));
  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  t = fold_build_pointer_plus (base, t);
  gimplify_assign (addr, t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  gimplify_assign (count, t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}
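
/* In rough C terms, the GIMPLE emitted above computes (SIZE being the
   argument size rounded up to whole words):

     if (!must_stack && count + SIZE <= NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD)
       addr = base + count;			// register save area
     else
       {
	 if (SIZE > 2 || must_stack)		// value lives wholly on stack
	   count = MAX (count, NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
	 addr = base + NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
		+ INCOMING_FRAME_SP_OFFSET - count - SIZE;
       }
     count += SIZE;

   and the value is then loaded through addr.  */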

/* Worker function for TARGET_TRAMPOLINE_INIT.  */

static void
xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr, reg_addr_mem;

  reg_addr = copy_to_reg (XEXP (m_tramp, 0));
  reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);

  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}

/* Worker function for TARGET_FUNCTION_VALUE.  */

static rtx
xstormy16_function_value (const_tree valtype,
			  const_tree func ATTRIBUTE_UNUSED,
			  bool outgoing ATTRIBUTE_UNUSED)
{
  machine_mode mode;
  mode = TYPE_MODE (valtype);
  PROMOTE_MODE (mode, 0, valtype);
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}

/* Worker function for TARGET_LIBCALL_VALUE.  */

static rtx
xstormy16_libcall_value (machine_mode mode,
			 const_rtx fun ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}

/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.  */

static bool
xstormy16_function_value_regno_p (const unsigned int regno)
{
  return (regno == RETURN_VALUE_REGNUM);
}
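
/* By inspection, the four HImode words that xstormy16_trampoline_init
   stores are: (0x3130 | STATIC_CHAIN_REGNUM), the static chain value,
   (0x0200 | low byte of the target address), and the remaining high
   bits of the target address; i.e. code to load the static chain
   register with an immediate followed by a far jump to FNDECL.  */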

/* A C compound statement that outputs the assembler code for a thunk function,
   used to implement C++ virtual function calls with multiple inheritance.  The
   thunk acts as a wrapper around a virtual function, adjusting the implicit
   object parameter before handing control off to the real function.

   First, emit code to add the integer DELTA to the location that contains the
   incoming first argument.  Assume that this argument contains a pointer, and
   is the one used to pass the `this' pointer in C++.  This is the incoming
   argument *before* the function prologue, e.g. `%o0' on a sparc.  The
   addition must preserve the values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
   the return address.  Hence returning from FUNCTION will return to whoever
   called the current `thunk'.

   The effect must be as if @var{function} had been called directly
   with the adjusted first argument.  This macro is responsible for
   emitting all of the code for a thunk function;
   TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
   not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already been
   extracted from it.)  It might possibly be useful on some targets, but
   probably not.  */

static void
xstormy16_asm_output_mi_thunk (FILE *file,
			       tree thunk_fndecl ATTRIBUTE_UNUSED,
			       HOST_WIDE_INT delta,
			       HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
			       tree function)
{
  const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
  int regnum = FIRST_ARGUMENT_REGISTER;

  assemble_start_function (thunk_fndecl, fnname);
  /* There might be a hidden first argument for a returned structure.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    regnum += 1;

  fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
  fputs ("\tjmpf ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
  assemble_end_function (thunk_fndecl, fnname);
}

/* The purpose of this function is to override the default behavior of
   BSS objects.  Normally, they go into .bss or .sbss via ".common"
   directives, but we need to override that and put them in
   .bss_below100.  We can't just use a section override (like we do
   for .data_below100), because that makes them initialized rather
   than uninitialized.  */

void
xstormy16_asm_output_aligned_common (FILE *stream,
				     tree decl,
				     const char *name,
				     int size,
				     int align,
				     int global)
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  if (mem != NULL_RTX
      && MEM_P (mem)
      && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
      && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
    {
      const char *name2;
      int p2align = 0;

      switch_to_section (bss100_section);

      while (align > 8)
	{
	  align /= 2;
	  p2align++;
	}

      name2 = default_strip_name_encoding (name);
      if (global)
	fprintf (stream, "\t.globl\t%s\n", name2);
      if (p2align)
	fprintf (stream, "\t.p2align %d\n", p2align);
      fprintf (stream, "\t.type\t%s, @object\n", name2);
      fprintf (stream, "\t.size\t%s, %d\n", name2, size);
      fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
      return;
    }

  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}

/* Implement TARGET_ASM_INIT_SECTIONS.  */

static void
xstormy16_asm_init_sections (void)
{
  bss100_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
			   output_section_asm_op,
			   "\t.section \".bss_below100\",\"aw\",@nobits");
}
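
/* For reference, a user places an object under this machinery with the
   below100 attribute (looked up in xstormy16_encode_section_info just
   below), e.g.:

     volatile unsigned char status __attribute__ ((below100));

   Uninitialized objects like this land in .bss_below100 via
   xstormy16_asm_output_aligned_common, and their SYMBOL_REFs are
   flagged so the 8-bit addressing patterns can match them.  */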

/* Mark symbols with the "below100" attribute so that we can use the
   special addressing modes for them.  */

static void
xstormy16_encode_section_info (tree decl, rtx r, int first)
{
  default_encode_section_info (decl, r, first);

  if (TREE_CODE (decl) == VAR_DECL
      && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
	  || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
    {
      rtx symbol = XEXP (r, 0);

      gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
      SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
    }
}

#undef  TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR  xstormy16_asm_out_constructor
#undef  TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR   xstormy16_asm_out_destructor

/* Output constructors and destructors.  Just like
   default_named_section_asm_out_* but don't set the sections writable.  */

static void
xstormy16_asm_out_destructor (rtx symbol, int priority)
{
  const char *section = ".dtors";
  char buf[18];

  /* ??? This only works reliably with the GNU linker.  */
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".dtors.%.5u",
	       /* Invert the numbering so the linker puts us in the proper
		  order; constructors are run from right to left, and the
		  linker sorts in increasing order.  */
	       MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, 0, NULL));
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}

static void
xstormy16_asm_out_constructor (rtx symbol, int priority)
{
  const char *section = ".ctors";
  char buf[18];

  /* ??? This only works reliably with the GNU linker.  */
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".ctors.%.5u",
	       /* Invert the numbering so the linker puts us in the proper
		  order; constructors are run from right to left, and the
		  linker sorts in increasing order.  */
	       MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, 0, NULL));
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}

/* Worker function for TARGET_PRINT_OPERAND_ADDRESS.

   Print a memory address as an operand to reference that memory location.  */

static void
xstormy16_print_operand_address (FILE *file, machine_mode /*mode*/,
				 rtx address)
{
  HOST_WIDE_INT offset;
  int pre_dec, post_inc;

  /* There are a few easy cases.  */
  if (CONST_INT_P (address))
    {
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
      return;
    }

  if (CONSTANT_P (address) || LABEL_P (address))
    {
      output_addr_const (file, address);
      return;
    }

  /* Otherwise, it's hopefully something of the form
     (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)).  */
  if (GET_CODE (address) == PLUS)
    {
      gcc_assert (CONST_INT_P (XEXP (address, 1)));
      offset = INTVAL (XEXP (address, 1));
      address = XEXP (address, 0);
    }
  else
    offset = 0;

  pre_dec = (GET_CODE (address) == PRE_DEC);
  post_inc = (GET_CODE (address) == POST_INC);
  if (pre_dec || post_inc)
    address = XEXP (address, 0);

  gcc_assert (REG_P (address));

  fputc ('(', file);
  if (pre_dec)
    fputs ("--", file);
  fputs (reg_names[REGNO (address)], file);
  if (post_inc)
    fputs ("++", file);
  if (offset != 0)
    fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
  fputc (')', file);
}
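
/* Examples of the syntax produced above: (r5) for a plain register,
   (r5,12) for base plus offset, (--r5) for pre-decrement and (r5++)
   for post-increment; a bare constant address is printed as its
   16-bit value.  */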

/* Worker function for TARGET_PRINT_OPERAND.

   Print an operand to an assembler instruction.  */

static void
xstormy16_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'B':
      /* There is either one bit set, or one bit clear, in X.
	 Print it preceded by '#'.  */
      {
	static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
	HOST_WIDE_INT xx = 1;
	HOST_WIDE_INT l;

	if (CONST_INT_P (x))
	  xx = INTVAL (x);
	else
	  output_operand_lossage ("'B' operand is not constant");

	/* GCC sign-extends masks with the MSB set, so we have to
	   detect all the cases that differ only in sign extension
	   beyond the bits we care about.  Normally, the predicates
	   and constraints ensure that we have the right values.  This
	   works correctly for valid masks.  */
	if (bits_set[xx & 7] <= 1)
	  {
	    /* Remove sign extension bits.  */
	    if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
	      xx &= 0xff;
	    else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
	      xx &= 0xffff;
	    l = exact_log2 (xx);
	  }
	else
	  {
	    /* Add sign extension bits.  */
	    if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
	      xx |= ~(HOST_WIDE_INT)0xff;
	    else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
	      xx |= ~(HOST_WIDE_INT)0xffff;
	    l = exact_log2 (~xx);
	  }

	if (l == -1)
	  output_operand_lossage ("'B' operand has multiple bits set");

	fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
	return;
      }

    case 'C':
      /* Print the symbol without a surrounding @fptr().  */
      if (GET_CODE (x) == SYMBOL_REF)
	assemble_name (file, XSTR (x, 0));
      else if (LABEL_P (x))
	output_asm_label (x);
      else
	xstormy16_print_operand_address (file, VOIDmode, x);
      return;

    case 'o':
    case 'O':
      /* Print the immediate operand less one, preceded by '#'.
	 For 'O', negate it first.  */
      {
	HOST_WIDE_INT xx = 0;

	if (CONST_INT_P (x))
	  xx = INTVAL (x);
	else
	  output_operand_lossage ("'o' operand is not constant");

	if (code == 'O')
	  xx = -xx;

	fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
	return;
      }

    case 'b':
      /* Print the shift mask for bp/bn.  */
      {
	HOST_WIDE_INT xx = 1;
	HOST_WIDE_INT l;

	if (CONST_INT_P (x))
	  xx = INTVAL (x);
	else
	  output_operand_lossage ("'b' operand is not constant");

	l = 7 - xx;

	fputs (IMMEDIATE_PREFIX, file);
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
	return;
      }

    case 0:
      /* Handled below.  */
      break;

    default:
      output_operand_lossage ("xstormy16_print_operand: unknown code");
      return;
    }

  switch (GET_CODE (x))
    {
    case REG:
      fputs (reg_names [REGNO (x)], file);
      break;

    case MEM:
      xstormy16_print_operand_address (file, GET_MODE (x), XEXP (x, 0));
      break;

    default:
      /* Some kind of constant or label; an immediate operand,
	 so prefix it with '#' for the assembler.  */
      fputs (IMMEDIATE_PREFIX, file);
      output_addr_const (file, x);
      break;
    }

  return;
}

/* Expander for the `casesi' pattern.
   INDEX is the index of the switch statement.
   LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
     to the first table entry.
   RANGE is the number of table entries.
   TABLE is an ADDR_VEC that is the jump table.
   DEFAULT_LABEL is the address to branch to if INDEX is outside the
     range LOWER_BOUND to LOWER_BOUND + RANGE - 1.  */

void
xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
			 rtx table, rtx default_label)
{
  HOST_WIDE_INT range_i = INTVAL (range);
  rtx int_index;

  /* This code uses 'br', so it can deal only with tables of size up to
     8192 entries.  */
  if (range_i >= 8192)
    sorry ("switch statement with %lu entries is too large",
	   (unsigned long) range_i);

  index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
			OPTAB_LIB_WIDEN);
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
			   default_label);
  int_index = gen_lowpart_common (HImode, index);
  emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
  emit_jump_insn (gen_tablejump_pcrel (int_index, table));
}

/* Output an ADDR_VEC.  It is output as a sequence of 'jmpf'
   instructions, without label or alignment or any other special
   constructs.  We know that the previous instruction will be the
   `tablejump_pcrel' output above.

   TODO: it might be nice to output 'br' instructions if they could
   all reach.  */

void
xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
{
  int vlen, idx;

  switch_to_section (current_function_section ());

  vlen = XVECLEN (table, 0);
  for (idx = 0; idx < vlen; idx++)
    {
      fputs ("\tjmpf ", file);
      output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
      fputc ('\n', file);
    }
}
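
/* For illustration (label names hypothetical): a three-entry ADDR_VEC
   whose entries point at .L10, .L11 and .L12 is emitted as

	jmpf .L10
	jmpf .L11
	jmpf .L12

   immediately after the `tablejump_pcrel' that indexes into it.  */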

/* Expander for the `call' patterns.
   RETVAL is the RTL for the return register or NULL for void functions.
   DEST is the function to call, expressed as a MEM.
   COUNTER is ignored.  */

void
xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
{
  rtx call, temp;
  machine_mode mode;

  gcc_assert (MEM_P (dest));
  dest = XEXP (dest, 0);

  if (! CONSTANT_P (dest) && ! REG_P (dest))
    dest = force_reg (Pmode, dest);

  if (retval == NULL)
    mode = VOIDmode;
  else
    mode = GET_MODE (retval);

  call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
		       counter);
  if (retval)
    call = gen_rtx_SET (retval, call);

  if (! CONSTANT_P (dest))
    {
      temp = gen_reg_rtx (HImode);
      emit_move_insn (temp, const0_rtx);
    }
  else
    temp = const0_rtx;

  call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
						gen_rtx_USE (VOIDmode, temp)));
  emit_call_insn (call);
}

/* Expanders for multiword computational operations.  */

/* Expander for arithmetic operations; emit insns to compute

   (set DEST (CODE:MODE SRC0 SRC1))

   When CODE is COMPARE, a branch template is generated
   (this saves duplicating code in xstormy16_split_cbranch).  */

void
xstormy16_expand_arith (machine_mode mode, enum rtx_code code,
			rtx dest, rtx src0, rtx src1)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int i;
  int firstloop = 1;

  if (code == NEG)
    emit_move_insn (src0, const0_rtx);

  for (i = 0; i < num_words; i++)
    {
      rtx w_src0, w_src1, w_dest;
      rtx insn;

      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
				    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);

      switch (code)
	{
	case PLUS:
	  if (firstloop
	      && CONST_INT_P (w_src1)
	      && INTVAL (w_src1) == 0)
	    continue;

	  if (firstloop)
	    insn = gen_addchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_addchi5 (w_dest, w_src0, w_src1);
	  break;

	case NEG:
	case MINUS:
	case COMPARE:
	  if (code == COMPARE && i == num_words - 1)
	    {
	      rtx branch, sub, clobber, sub_1;

	      sub_1 = gen_rtx_MINUS (HImode, w_src0,
				     gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
	      sub = gen_rtx_SET (w_dest,
				 gen_rtx_MINUS (HImode, sub_1, w_src1));
	      clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
	      branch = gen_rtx_SET (pc_rtx,
				    gen_rtx_IF_THEN_ELSE (VOIDmode,
							  gen_rtx_EQ (HImode,
								      sub_1,
								      w_src1),
							  pc_rtx,
							  pc_rtx));
	      insn = gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (3, branch, sub, clobber));
	    }
	  else if (firstloop
		   && code != COMPARE
		   && CONST_INT_P (w_src1)
		   && INTVAL (w_src1) == 0)
	    continue;
	  else if (firstloop)
	    insn = gen_subchi4 (w_dest, w_src0, w_src1);
	  else
	    insn = gen_subchi5 (w_dest, w_src0, w_src1);
	  break;

	case IOR:
	case XOR:
	case AND:
	  /* Skip words combined with the operation's identity value:
	     -1 (all ones) for AND, 0 for IOR and XOR.  */
	  if (CONST_INT_P (w_src1)
	      && INTVAL (w_src1) == -(code == AND))
	    continue;

	  insn = gen_rtx_SET (w_dest, gen_rtx_fmt_ee (code, mode,
						      w_src0, w_src1));
	  break;

	case NOT:
	  insn = gen_rtx_SET (w_dest, gen_rtx_NOT (mode, w_src0));
	  break;

	default:
	  gcc_unreachable ();
	}

      firstloop = 0;
      emit (insn);
    }

  /* If we emit nothing, try_split() will think we failed.  So emit
     something that does nothing and can be optimized away.  */
  if (firstloop)
    emit (gen_nop ());
}
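
/* Sketch of the resulting expansion (illustrative; the carry behaviour
   of the addchi patterns is assumed from their use here, not spelled
   out in this file): for an SImode PLUS with register operands,

     addchi4  low(dest),  low(src0),  low(src1)    ; first word, sets carry
     addchi5  high(dest), high(src0), high(src1)   ; later word, uses carry

   i.e. the first word uses the carry-setting pattern and subsequent
   words the carry-consuming one.  */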

/* The shift operations are split at output time for constant values;
   variable-width shifts get handed off to a library routine.

   Generate an output string to do (set X (CODE:MODE X SIZE_R))
   SIZE_R will be a CONST_INT, X will be a hard register.  */

const char *
xstormy16_output_shift (machine_mode mode, enum rtx_code code,
			rtx x, rtx size_r, rtx temp)
{
  HOST_WIDE_INT size;
  const char *r0, *r1, *rt;
  static char r[64];

  gcc_assert (CONST_INT_P (size_r)
	      && REG_P (x)
	      && mode == SImode);

  size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);

  if (size == 0)
    return "";

  r0 = reg_names [REGNO (x)];
  r1 = reg_names [REGNO (x) + 1];

  /* For shifts of size 1, we can use the rotate instructions.  */
  if (size == 1)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
	  break;
	case ASHIFTRT:
	  sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	case LSHIFTRT:
	  sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  /* For large shifts, there are easy special cases.  */
  if (size == 16)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
	  break;
	case ASHIFTRT:
	  sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }
  if (size > 16)
    {
      switch (code)
	{
	case ASHIFT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
		   r1, r0, r0, r1, (int) size - 16);
	  break;
	case ASHIFTRT:
	  sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	case LSHIFTRT:
	  sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
		   r0, r1, r1, r0, (int) size - 16);
	  break;
	default:
	  gcc_unreachable ();
	}
      return r;
    }

  /* For the rest, we have to do more work.  In particular, we
     need a temporary.  */
  rt = reg_names [REGNO (temp)];
  switch (code)
    {
    case ASHIFT:
      sprintf (r,
	       "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
	       rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
	       r1, rt);
      break;
    case ASHIFTRT:
      sprintf (r,
	       "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
	       r0, rt);
      break;
    case LSHIFTRT:
      sprintf (r,
	       "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
	       rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
	       r0, rt);
      break;
    default:
      gcc_unreachable ();
    }
  return r;
}
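
/* Worked example (illustrative register assignment, with the lower
   regno assumed to hold the low word): for an SImode ASHIFT by 4 with
   X in the pair r2/r3 and TEMP in r4, the code above returns

     mov r4,r2 | shl r2,#4 | shl r3,#4 | shr r4,#12 | or r3,r4

   i.e. the bits shifted out of the low word are recovered from the
   saved copy in the temporary and merged into the high word.  */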

/* Attribute handling.  */

/* Return nonzero if the function is an interrupt function.  */

int
xstormy16_interrupt_function_p (void)
{
  tree attributes;

  /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
     any functions are declared, which is demonstrably wrong, but
     it is worked around here.  FIXME.  */
  if (!cfun)
    return 0;

  attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  return lookup_attribute ("interrupt", attributes) != NULL_TREE;
}

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table

static tree xstormy16_handle_interrupt_attribute
  (tree *, tree, tree, int, bool *);
static tree xstormy16_handle_below100_attribute
  (tree *, tree, tree, int, bool *);

static const struct attribute_spec xstormy16_attribute_table[] =
{
  /* name, min_len, max_len, decl_req, type_req, fn_type_req,
     affects_type_identity, handler, exclude.  */
  { "interrupt", 0, 0, false, true, true, false,
    xstormy16_handle_interrupt_attribute, NULL },
  { "BELOW100", 0, 0, false, false, false, false,
    xstormy16_handle_below100_attribute, NULL },
  { "below100", 0, 0, false, false, false, false,
    xstormy16_handle_below100_attribute, NULL },
  { NULL, 0, 0, false, false, false, false, NULL, NULL }
};

/* Handle an "interrupt" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
xstormy16_handle_interrupt_attribute (tree *node, tree name,
				      tree args ATTRIBUTE_UNUSED,
				      int flags ATTRIBUTE_UNUSED,
				      bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
	       name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Handle a "below100" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
xstormy16_handle_below100_attribute (tree *node,
				     tree name ATTRIBUTE_UNUSED,
				     tree args ATTRIBUTE_UNUSED,
				     int flags ATTRIBUTE_UNUSED,
				     bool *no_add_attrs)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (OPT_Wattributes,
	       "%<__BELOW100__%> attribute only applies to variables");
      *no_add_attrs = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
	{
	  warning (OPT_Wattributes, "%<__BELOW100__%> attribute not allowed "
		   "with auto storage class");
	  *no_add_attrs = true;
	}
    }

  return NULL_TREE;
}
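
/* Illustrative uses of these attributes in user code (hypothetical
   declarations, not from the original sources):

     void handler (void) __attribute__ ((interrupt));
     volatile unsigned char port __attribute__ ((below100));

   "interrupt" marks a function type so it is recognized by
   xstormy16_interrupt_function_p above; "below100"/"BELOW100" asks for
   a static-storage variable to be placed in the first 0x100 bytes of
   memory, where the bit-test instructions handled by combine_bnp below
   can address it directly.  */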

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS xstormy16_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin

static struct
{
  const char * name;
  int md_code;
  const char * arg_ops;   /* 0..9, t for temp register, r for return value.  */
  const char * arg_types; /* s=short,l=long, upper case for unsigned.  */
}
s16builtins[] =
{
  { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
  { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
  { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
  { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
  { NULL, 0, NULL, NULL }
};

static void
xstormy16_init_builtins (void)
{
  tree args[2], ret_type, arg = NULL_TREE, ftype;
  int i, a, n_args;

  ret_type = void_type_node;

  for (i = 0; s16builtins[i].name; i++)
    {
      n_args = strlen (s16builtins[i].arg_types) - 1;

      gcc_assert (n_args <= (int) ARRAY_SIZE (args));

      for (a = n_args - 1; a >= 0; a--)
	args[a] = NULL_TREE;

      for (a = n_args; a >= 0; a--)
	{
	  switch (s16builtins[i].arg_types[a])
	    {
	    case 's': arg = short_integer_type_node; break;
	    case 'S': arg = short_unsigned_type_node; break;
	    case 'l': arg = long_integer_type_node; break;
	    case 'L': arg = long_unsigned_type_node; break;
	    default: gcc_unreachable ();
	    }
	  if (a == 0)
	    ret_type = arg;
	  else
	    args[a-1] = arg;
	}
      ftype = build_function_type_list (ret_type, args[0], args[1], NULL_TREE);
      add_builtin_function (s16builtins[i].name, ftype,
			    i, BUILT_IN_MD, NULL, NULL_TREE);
    }
}

static rtx
xstormy16_expand_builtin (tree exp, rtx target,
			  rtx subtarget ATTRIBUTE_UNUSED,
			  machine_mode mode ATTRIBUTE_UNUSED,
			  int ignore ATTRIBUTE_UNUSED)
{
  rtx op[10], args[10], pat, copyto[10], retval = 0;
  tree fndecl, argtree;
  int i, a, o, code;

  fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  argtree = TREE_OPERAND (exp, 1);
  i = DECL_MD_FUNCTION_CODE (fndecl);
  code = s16builtins[i].md_code;

  for (a = 0; a < 10 && argtree; a++)
    {
      args[a] = expand_normal (TREE_VALUE (argtree));
      argtree = TREE_CHAIN (argtree);
    }

  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    {
      char ao = s16builtins[i].arg_ops[o];
      char c = insn_data[code].operand[o].constraint[0];
      machine_mode omode;

      copyto[o] = 0;

      omode = (machine_mode) insn_data[code].operand[o].mode;
      if (ao == 'r')
	op[o] = target ? target : gen_reg_rtx (omode);
      else if (ao == 't')
	op[o] = gen_reg_rtx (omode);
      else
	op[o] = args[(int) hex_value (ao)];

      if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
	{
	  if (c == '+' || c == '=')
	    {
	      copyto[o] = op[o];
	      op[o] = gen_reg_rtx (omode);
	    }
	  else
	    op[o] = copy_to_mode_reg (omode, op[o]);
	}

      if (ao == 'r')
	retval = op[o];
    }

  pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
			op[5], op[6], op[7], op[8], op[9]);
  emit_insn (pat);

  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    if (copyto[o])
      {
	emit_move_insn (copyto[o], op[o]);
	if (op[o] == retval)
	  retval = copyto[o];
      }

  return retval;
}
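
/* How the two encoding strings fit together, by way of example (an
   illustration derived from the loops above): for "__sdivlh",
   arg_types "sls" declares

     short __sdivlh (long, short);

   (the first character is the return type, the rest the arguments),
   while arg_ops "rt01" maps insn operand 0 to the return value,
   operand 1 to a scratch register, and operands 2 and 3 to the two
   user arguments.  "__smodlh" reuses the same sdivlh pattern with
   arg_ops "tr01", so the caller receives the pattern's second operand
   (presumably the remainder) instead of the first (the quotient).  */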

/* Look for combinations of insns that can be converted to BN or BP
   opcodes.  This is, unfortunately, too complex to do with MD
   patterns.  */

static void
combine_bnp (rtx_insn *insn)
{
  int insn_code, regno, need_extend;
  unsigned int mask;
  rtx cond, reg, qireg, mem;
  rtx_insn *and_insn, *load;
  machine_mode load_mode = QImode;
  machine_mode and_mode = QImode;
  rtx_insn *shift = NULL;

  insn_code = recog_memoized (insn);
  if (insn_code != CODE_FOR_cbranchhi
      && insn_code != CODE_FOR_cbranchhi_neg)
    return;

  cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
  cond = XEXP (cond, 1); /* if */
  cond = XEXP (cond, 0); /* cond */
  switch (GET_CODE (cond))
    {
    case NE:
    case EQ:
      need_extend = 0;
      break;
    case LT:
    case GE:
      need_extend = 1;
      break;
    default:
      return;
    }

  reg = XEXP (cond, 0);
  if (! REG_P (reg))
    return;
  regno = REGNO (reg);
  if (XEXP (cond, 1) != const0_rtx)
    return;
  if (! find_regno_note (insn, REG_DEAD, regno))
    return;
  qireg = gen_rtx_REG (QImode, regno);

  if (need_extend)
    {
      /* LT and GE conditionals should have a sign extend before
	 them.  */
      for (and_insn = prev_real_insn (insn);
	   and_insn != NULL_RTX;
	   and_insn = prev_real_insn (and_insn))
	{
	  int and_code = recog_memoized (and_insn);

	  if (and_code == CODE_FOR_extendqihi2
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
	    break;

	  if (and_code == CODE_FOR_movhi_internal
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
	    {
	      /* This is for testing bit 15.  */
	      and_insn = insn;
	      break;
	    }

	  if (reg_mentioned_p (reg, and_insn))
	    return;

	  if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
	    return;
	}
    }
  else
    {
      /* EQ and NE conditionals have an AND before them.  */
      for (and_insn = prev_real_insn (insn);
	   and_insn != NULL_RTX;
	   and_insn = prev_real_insn (and_insn))
	{
	  if (recog_memoized (and_insn) == CODE_FOR_andhi3
	      && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
	      && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
	    break;

	  if (reg_mentioned_p (reg, and_insn))
	    return;

	  if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
	    return;
	}

      if (and_insn)
	{
	  /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
	     followed by an AND like this:

	       (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
			  (clobber (reg:BI carry))])

	       (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))

	     Attempt to detect this here.  */
	  for (shift = prev_real_insn (and_insn); shift;
	       shift = prev_real_insn (shift))
	    {
	      if (recog_memoized (shift) == CODE_FOR_lshrhi3
		  && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
		  && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
		break;

	      if (reg_mentioned_p (reg, shift)
		  || (! NOTE_P (shift) && ! NONJUMP_INSN_P (shift)))
		{
		  shift = NULL;
		  break;
		}
	    }
	}
    }

  if (and_insn == NULL_RTX)
    return;
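
  /* At this point we have found the AND (or sign extend) that feeds
     the branch.  Next, search backwards for the below-100 load that
     supplies the tested register, so that the whole sequence can be
     collapsed into a single bit-test branch on the memory location.  */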
  for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
       load;
       load = prev_real_insn (load))
    {
      int load_code = recog_memoized (load);

      if (load_code == CODE_FOR_movhi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
	  && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
	{
	  load_mode = HImode;
	  break;
	}

      if (load_code == CODE_FOR_movqi_internal
	  && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
	  && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
	{
	  load_mode = QImode;
	  break;
	}

      if (load_code == CODE_FOR_zero_extendqihi2
	  && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
	  && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
	{
	  load_mode = QImode;
	  and_mode = HImode;
	  break;
	}

      if (reg_mentioned_p (reg, load))
	return;

      if (! NOTE_P (load) && ! NONJUMP_INSN_P (load))
	return;
    }
  if (!load)
    return;

  mem = SET_SRC (PATTERN (load));

  if (need_extend)
    {
      mask = (load_mode == HImode) ? 0x8000 : 0x80;

      /* If the mem includes a zero-extend operation and we are
	 going to generate a sign-extend operation then move the
	 mem inside the zero-extend.  */
      if (GET_CODE (mem) == ZERO_EXTEND)
	mem = XEXP (mem, 0);
    }
  else
    {
      if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
					 load_mode))
	return;

      mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));

      if (shift)
	mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
    }

  if (load_mode == HImode)
    {
      rtx addr = XEXP (mem, 0);

      if (! (mask & 0xff))
	{
	  addr = plus_constant (Pmode, addr, 1);
	  mask >>= 8;
	}
      mem = gen_rtx_MEM (QImode, addr);
    }

  if (need_extend)
    XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
  else
    XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));

  INSN_CODE (insn) = -1;
  delete_insn (load);

  if (and_insn != insn)
    delete_insn (and_insn);

  if (shift != NULL_RTX)
    delete_insn (shift);
}

static void
xstormy16_reorg (void)
{
  rtx_insn *insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (! JUMP_P (insn))
	continue;
      combine_bnp (insn);
    }
}

/* Worker function for TARGET_RETURN_IN_MEMORY.  */

static bool
xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  const HOST_WIDE_INT size = int_size_in_bytes (type);
  return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
}

/* Implement TARGET_HARD_REGNO_MODE_OK.  */

static bool
xstormy16_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
{
  return regno != 16 || mode == BImode;
}

/* Implement TARGET_MODES_TIEABLE_P.  */

static bool
xstormy16_modes_tieable_p (machine_mode mode1, machine_mode mode2)
{
  return mode1 != BImode && mode2 != BImode;
}
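
/* A note on the two hooks above (added commentary, assuming register
   number 16 is CARRY_REGNUM as used elsewhere in this file): the carry
   flag can hold only BImode values, and BImode values never tie with
   any other mode; every other hard register accepts any mode.  */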

/* Implement PUSH_ROUNDING.  */

poly_int64
xstormy16_push_rounding (poly_int64 bytes)
{
  return (bytes + 1) & ~1;
}
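
/* For example: pushing a 3-byte aggregate reserves (3 + 1) & ~1 == 4
   bytes, keeping the stack pointer 2-byte aligned; even sizes are
   unchanged.  */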

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

/* Select_section doesn't handle .bss_below100.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND xstormy16_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG xstormy16_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE xstormy16_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE xstormy16_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
#undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
#define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class

#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE xstormy16_can_eliminate

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init

#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK xstormy16_hard_regno_mode_ok
#undef TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P xstormy16_modes_tieable_p

#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings

#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-stormy16.h"