/* Xstormy16 target functions.
   Copyright (C) 1997-2015 Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "obstack.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "varasm.h"
#include "calls.h"
#include "hashtab.h"
#include "function.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "except.h"
#include "target.h"
#include "target-def.h"
#include "tm_p.h"
#include "langhooks.h"
#include "hash-table.h"
#include "ggc.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "lcm.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "df.h"
#include "reload.h"
#include "builtins.h"

static rtx emit_addhi3_postreload (rtx, rtx, rtx);
static void xstormy16_asm_out_constructor (rtx, int);
static void xstormy16_asm_out_destructor (rtx, int);
static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
                                           HOST_WIDE_INT, tree);

static void xstormy16_init_builtins (void);
static rtx xstormy16_expand_builtin (tree, rtx, rtx, machine_mode, int);
static bool xstormy16_rtx_costs (rtx, int, int, int, int *, bool);
static int xstormy16_address_cost (rtx, machine_mode, addr_space_t, bool);
static bool xstormy16_return_in_memory (const_tree, const_tree);

static GTY(()) section *bss100_section;

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
                     int opno ATTRIBUTE_UNUSED, int *total,
                     bool speed ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case CONST_INT:
      if (INTVAL (x) < 16 && INTVAL (x) >= 0)
        *total = COSTS_N_INSNS (1) / 2;
      else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
        *total = COSTS_N_INSNS (1);
      else
        *total = COSTS_N_INSNS (2);
      return true;

    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      *total = COSTS_N_INSNS (2);
      return true;

    case MULT:
      *total = COSTS_N_INSNS (35 + 6);
      return true;
    case DIV:
      *total = COSTS_N_INSNS (51 - 6);
      return true;

    default:
      return false;
    }
}

static int
xstormy16_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
                        addr_space_t as ATTRIBUTE_UNUSED,
                        bool speed ATTRIBUTE_UNUSED)
{
  return (CONST_INT_P (x) ? 2
          : GET_CODE (x) == PLUS ? 7
          : 5);
}

/* Worker function for TARGET_MEMORY_MOVE_COST.  */

static int
xstormy16_memory_move_cost (machine_mode mode, reg_class_t rclass,
                            bool in)
{
  return (5 + memory_move_secondary_cost (mode, rclass, in));
}
/* Branches are handled as follows:

   1. HImode compare-and-branches.  The machine supports these
      natively, so the appropriate pattern is emitted directly.

   2. SImode EQ and NE.  These are emitted as pairs of HImode
      compare-and-branches.

   3. SImode LT, GE, LTU and GEU.  These are emitted as a sequence
      of a SImode subtract followed by a branch (not a compare-and-branch),
      like this:
        sub
        sbc
        blt

   4. SImode GT, LE, GTU, LEU.  These are emitted as a sequence like:
        sub
        sbc
        blt
        or
        bne.  */

/* Emit a branch of kind CODE to location LOC.  */

void
xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
{
  rtx condition_rtx, loc_ref, branch, cy_clobber;
  rtvec vec;
  machine_mode mode;

  mode = GET_MODE (op0);
  gcc_assert (mode == HImode || mode == SImode);

  if (mode == SImode
      && (code == GT || code == LE || code == GTU || code == LEU))
    {
      int unsigned_p = (code == GTU || code == LEU);
      int gt_p = (code == GT || code == GTU);
      rtx lab = NULL_RTX;

      if (gt_p)
        lab = gen_label_rtx ();
      xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1,
                              gt_p ? lab : loc);
      /* This should be generated as a comparison against the temporary
         created by the previous insn, but reload can't handle that.  */
      xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
      if (gt_p)
        emit_label (lab);
      return;
    }
  else if (mode == SImode
           && (code == NE || code == EQ)
           && op1 != const0_rtx)
    {
      rtx op0_word, op1_word;
      rtx lab = NULL_RTX;
      int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
      int i;

      if (code == EQ)
        lab = gen_label_rtx ();

      for (i = 0; i < num_words - 1; i++)
        {
          op0_word = simplify_gen_subreg (word_mode, op0, mode,
                                          i * UNITS_PER_WORD);
          op1_word = simplify_gen_subreg (word_mode, op1, mode,
                                          i * UNITS_PER_WORD);
          xstormy16_emit_cbranch (NE, op0_word, op1_word,
                                  code == EQ ? lab : loc);
        }
      op0_word = simplify_gen_subreg (word_mode, op0, mode,
                                      i * UNITS_PER_WORD);
      op1_word = simplify_gen_subreg (word_mode, op1, mode,
                                      i * UNITS_PER_WORD);
      xstormy16_emit_cbranch (code, op0_word, op1_word, loc);

      if (code == EQ)
        emit_label (lab);
      return;
    }

  /* We can't allow reload to try to generate any reload after a branch,
     so when some register must match we must make the temporary ourselves.  */
  if (mode != HImode)
    {
      rtx tmp;
      tmp = gen_reg_rtx (mode);
      emit_move_insn (tmp, op0);
      op0 = tmp;
    }

  condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  branch = gen_rtx_SET (VOIDmode, pc_rtx,
                        gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
                                              loc_ref, pc_rtx));

  cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

  if (mode == HImode)
    vec = gen_rtvec (2, branch, cy_clobber);
  else if (code == NE || code == EQ)
    vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
  else
    {
      rtx sub;
#if 0
      sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
#else
      sub = gen_rtx_CLOBBER (SImode, op0);
#endif
      vec = gen_rtvec (3, branch, sub, cy_clobber);
    }

  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
}
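/* As an illustration of the GT/LE/GTU/LEU path above: a signed SImode
   GT branch to LOC comes out as an LT branch to a fresh label that
   skips the target, followed by an NE branch to LOC:

       (LT sequence)  op0,op1 -> lab
       (NE sequence)  op0,op1 -> loc
    lab:

   This is only a sketch of the control flow; the actual instruction
   sequences are produced by the recursive calls above.  */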
/* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
   the arithmetic operation.  Most of the work is done by
   xstormy16_expand_arith.  */

void
xstormy16_split_cbranch (machine_mode mode, rtx label, rtx comparison,
                         rtx dest)
{
  rtx op0 = XEXP (comparison, 0);
  rtx op1 = XEXP (comparison, 1);
  rtx_insn *seq, *last_insn;
  rtx compare;

  start_sequence ();
  xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
  seq = get_insns ();
  end_sequence ();

  gcc_assert (INSN_P (seq));

  last_insn = seq;
  while (NEXT_INSN (last_insn) != NULL_RTX)
    last_insn = NEXT_INSN (last_insn);

  compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
  PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
  XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
  emit_insn (seq);
}


/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label.

   OP is the conditional expression, or NULL for branch-always.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed,
                             rtx_insn *insn)
{
  static char string[64];
  int need_longbranch = (op != NULL_RTX
                         ? get_attr_length (insn) == 8
                         : get_attr_length (insn) == 4);
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  const char *operands;
  enum rtx_code code;

  if (! op)
    {
      if (need_longbranch)
        ccode = "jmpf";
      else
        ccode = "br";
      sprintf (string, "%s %s", ccode, label);
      return string;
    }

  code = GET_CODE (op);

  if (! REG_P (XEXP (op, 0)))
    {
      code = swap_condition (code);
      operands = "%3,%2";
    }
  else
    operands = "%2,%3";

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:   ccode = "z";   break;
    case NE:   ccode = "nz";  break;
    case GE:   ccode = "ge";  break;
    case LT:   ccode = "lt";  break;
    case GT:   ccode = "gt";  break;
    case LE:   ccode = "le";  break;
    case GEU:  ccode = "nc";  break;
    case LTU:  ccode = "c";   break;
    case GTU:  ccode = "hi";  break;
    case LEU:  ccode = "ls";  break;

    default:
      gcc_unreachable ();
    }

  if (need_longbranch)
    templ = "b%s %s,.+8 | jmpf %s";
  else
    templ = "b%s %s,%s";
  sprintf (string, templ, ccode, operands, label);

  return string;
}
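/* When the branch target is out of range of a short branch, the
   long-branch template above inverts the condition and hops over a
   far jump.  For an EQ comparison that cannot reach, REALLY_REVERSED
   turns it into NE and the output is roughly

       bnz %2,%3,.+8 | jmpf label

   i.e. skip the jmpf when the original condition fails.  */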
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, but suitable for the tail of a
   SImode branch.

   OP is the conditional expression (OP is never NULL_RTX).

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
xstormy16_output_cbranch_si (rtx op, const char *label, int reversed,
                             rtx_insn *insn)
{
  static char string[64];
  int need_longbranch = get_attr_length (insn) >= 8;
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  char prevop[16];
  enum rtx_code code;

  code = GET_CODE (op);

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:   ccode = "z";   break;
    case NE:   ccode = "nz";  break;
    case GE:   ccode = "ge";  break;
    case LT:   ccode = "lt";  break;
    case GEU:  ccode = "nc";  break;
    case LTU:  ccode = "c";   break;

      /* The missing codes above should never be generated.  */
    default:
      gcc_unreachable ();
    }

  switch (code)
    {
    case EQ: case NE:
      {
        int regnum;

        gcc_assert (REG_P (XEXP (op, 0)));

        regnum = REGNO (XEXP (op, 0));
        sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum + 1]);
      }
      break;

    case GE: case LT: case GEU: case LTU:
      strcpy (prevop, "sbc %2,%3");
      break;

    default:
      gcc_unreachable ();
    }

  if (need_longbranch)
    templ = "%s | b%s .+6 | jmpf %s";
  else
    templ = "%s | b%s %s";
  sprintf (string, templ, prevop, ccode, label);

  return string;
}
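/* The tail of a 32-bit EQ/NE test therefore ORs the two halves of the
   register pair together and branches on the result, roughly

       or r4,r5 | bz label

   (illustrative register names), while the ordered comparisons finish
   the wide subtract with an sbc before branching.  */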
/* Many machines have some registers that cannot be copied directly to or from
   memory or even from other types of registers.  An example is the `MQ'
   register, which, on most machines, can only be copied to or from general
   registers, but not memory.  Some machines allow copying all registers to and
   from memory, but require a scratch register for stores to some memory
   locations (e.g., those with symbolic address on the RT, and those with
   certain symbolic address on the SPARC when compiling PIC).  In some cases,
   both an intermediate and a scratch register are required.

   You should define these macros to indicate to the reload phase that it may
   need to allocate at least one register for a reload in addition to the
   register to contain the data.  Specifically, if copying X to a register
   RCLASS in MODE requires an intermediate register, you should define
   `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
   whose registers can be used as intermediate registers or scratch registers.

   If copying a register RCLASS in MODE to X requires an intermediate or scratch
   register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
   largest register class required.  If the requirements for input and output
   reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
   instead of defining both macros identically.

   The values returned by these macros are often `GENERAL_REGS'.  Return
   `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
   to or from a register of RCLASS in MODE without requiring a scratch register.
   Do not define this macro if it would always return `NO_REGS'.

   If a scratch register is required (either with or without an intermediate
   register), you should define patterns for `reload_inM' or `reload_outM', as
   required.  These patterns, which will normally be implemented with a
   `define_expand', should be similar to the `movM' patterns, except that
   operand 2 is the scratch register.

   Define constraints for the reload register and scratch register that contain
   a single register class.  If the original reload register (whose class is
   RCLASS) can meet the constraint given in the pattern, the value returned by
   these macros is used for the class of the scratch register.  Otherwise, two
   additional reload registers are required.  Their classes are obtained from
   the constraints in the insn pattern.

   X might be a pseudo-register or a `subreg' of a pseudo-register, which could
   either be in a hard register or in memory.  Use `true_regnum' to find out;
   it will return -1 if the pseudo is in memory and the hard register number if
   it is in a register.

   These macros should not be used in the case where a particular class of
   registers can only be copied to memory and not to another class of
   registers.  In that case, secondary reload registers are not needed and
   would not be helpful.  Instead, a stack location must be used to perform the
   copy and the `movM' pattern should use memory as an intermediate storage.
   This case often occurs between floating-point and general registers.  */

enum reg_class
xstormy16_secondary_reload_class (enum reg_class rclass,
                                  machine_mode mode ATTRIBUTE_UNUSED,
                                  rtx x)
{
  /* This chip has the interesting property that only the first eight
     registers can be moved to/from memory.  */
  if ((MEM_P (x)
       || ((GET_CODE (x) == SUBREG || REG_P (x))
           && (true_regnum (x) == -1
               || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
      && ! reg_class_subset_p (rclass, EIGHT_REGS))
    return EIGHT_REGS;

  return NO_REGS;
}

/* Worker function for TARGET_PREFERRED_RELOAD_CLASS
   and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS.  */

static reg_class_t
xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
{
  if (rclass == GENERAL_REGS && MEM_P (x))
    return EIGHT_REGS;

  return rclass;
}
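/* Concretely for this port: a memory operand paired with a reload
   register outside EIGHT_REGS is given an EIGHT_REGS intermediate by
   the hook above, because only the first eight registers have memory
   move instructions.  */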
/* Predicate for symbols and addresses that reflect special 8-bit
   addressing.  */

int
xstormy16_below100_symbol (rtx x,
                           machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
    x = XEXP (x, 0);

  if (GET_CODE (x) == SYMBOL_REF)
    return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;

  if (CONST_INT_P (x))
    {
      HOST_WIDE_INT i = INTVAL (x);

      if ((i >= 0x0000 && i <= 0x00ff)
          || (i >= 0x7f00 && i <= 0x7fff))
        return 1;
    }
  return 0;
}

/* Likewise, but only for non-volatile MEMs, for patterns where the
   MEM will get split into smaller sized accesses.  */

int
xstormy16_splittable_below100_operand (rtx x, machine_mode mode)
{
  if (MEM_P (x) && MEM_VOLATILE_P (x))
    return 0;
  return xstormy16_below100_operand (x, mode);
}

/* Expand an 8-bit IOR.  This either detects the one case we can
   actually do, or uses a 16-bit IOR.  */

void
xstormy16_expand_iorqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  if (xstormy16_onebit_set_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
        in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
        out = gen_reg_rtx (QImode);
      emit_insn (gen_iorqi3_internal (out, in, val));
      if (out != operands[0])
        emit_move_insn (operands[0], out);
      return;
    }

  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_iorhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}

/* Expand an 8-bit AND.  This either detects the one case we can
   actually do, or uses a 16-bit AND.  */

void
xstormy16_expand_andqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  if (xstormy16_onebit_clr_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
        in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
        out = gen_reg_rtx (QImode);
      emit_insn (gen_andqi3_internal (out, in, val));
      if (out != operands[0])
        emit_move_insn (operands[0], out);
      return;
    }

  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_andhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}

#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)                         \
  (CONST_INT_P (X)                                                      \
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)                       \
  (CONST_INT_P (X)                                                      \
   && INTVAL (X) + (OFFSET) >= 0                                        \
   && INTVAL (X) + (OFFSET) < 0x8000                                    \
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
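/* Spelling out the two macros: a register-relative displacement must
   lie in the signed 12-bit range [-2048, 2047], and an absolute
   constant address is only legitimate in the below-100 byte range
   (0x0000-0x00FF) or at the top of the low 32K (0x7F00-0x7FFF).  For
   example, (const_int 0x00ff) and (const_int 0x7f00) are accepted,
   while (const_int 0x0100) is not.  */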
bool
xstormy16_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
                                rtx x, bool strict)
{
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
    return true;

  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
    {
      x = XEXP (x, 0);
      /* PR 31232: Do not allow INT+INT as an address.  */
      if (CONST_INT_P (x))
        return false;
    }

  if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
      || GET_CODE (x) == POST_INC
      || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);

  if (REG_P (x)
      && REGNO_OK_FOR_BASE_P (REGNO (x))
      && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
    return true;

  if (xstormy16_below100_symbol (x, mode))
    return true;

  return false;
}

/* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.

   On this chip, this is true if the address is valid with an offset
   of 0 but not of 6, because in that case it cannot be used as an
   address for DImode or DFmode, or if the address is a post-increment
   or pre-decrement address.  */

static bool
xstormy16_mode_dependent_address_p (const_rtx x,
                                    addr_space_t as ATTRIBUTE_UNUSED)
{
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
      && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
    return true;

  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
      && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
    return true;

  /* Auto-increment addresses are now treated generically in recog.c.  */
  return false;
}
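/* For example, (const_int 0x00fc) is a legitimate address by itself,
   but 0x00fc + 6 = 0x0102 falls outside both constant windows, so a
   DImode access based there would be invalid; the hook above reports
   such an address as mode-dependent.  */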
int
short_memory_operand (rtx x, machine_mode mode)
{
  if (! memory_operand (x, mode))
    return 0;
  return (GET_CODE (XEXP (x, 0)) != PLUS);
}

/* Splitter for the 'move' patterns, for modes not directly implemented
   by hardware.  Emit insns to copy a value of mode MODE from SRC to
   DEST.

   This function is only called when reload_completed.  */

void
xstormy16_split_move (machine_mode mode, rtx dest, rtx src)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int direction, end, i;
  int src_modifies = 0;
  int dest_modifies = 0;
  int src_volatile = 0;
  int dest_volatile = 0;
  rtx mem_operand;
  rtx auto_inc_reg_rtx = NULL_RTX;

  /* Check initial conditions.  */
  gcc_assert (reload_completed
              && mode != QImode && mode != HImode
              && nonimmediate_operand (dest, mode)
              && general_operand (src, mode));

  /* This case is not supported below, and shouldn't be generated.  */
  gcc_assert (! MEM_P (dest) || ! MEM_P (src));

  /* This case is very very bad after reload, so trap it now.  */
  gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);

  /* The general idea is to copy by words, offsetting the source and
     destination.  Normally the least-significant word will be copied
     first, but for pre-dec operations it's better to copy the
     most-significant word first.  Only one operand can be a pre-dec
     or post-inc operand.

     It's also possible that the copy overlaps so that the direction
     must be reversed.  */
  direction = 1;

  if (MEM_P (dest))
    {
      mem_operand = XEXP (dest, 0);
      dest_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
        auto_inc_reg_rtx = XEXP (mem_operand, 0);
      dest_volatile = MEM_VOLATILE_P (dest);
      if (dest_volatile)
        {
          dest = copy_rtx (dest);
          MEM_VOLATILE_P (dest) = 0;
        }
    }
  else if (MEM_P (src))
    {
      mem_operand = XEXP (src, 0);
      src_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
        auto_inc_reg_rtx = XEXP (mem_operand, 0);
      src_volatile = MEM_VOLATILE_P (src);
      if (src_volatile)
        {
          src = copy_rtx (src);
          MEM_VOLATILE_P (src) = 0;
        }
    }
  else
    mem_operand = NULL_RTX;

  if (mem_operand == NULL_RTX)
    {
      if (REG_P (src)
          && REG_P (dest)
          && reg_overlap_mentioned_p (dest, src)
          && REGNO (dest) > REGNO (src))
        direction = -1;
    }
  else if (GET_CODE (mem_operand) == PRE_DEC
           || (GET_CODE (mem_operand) == PLUS
               && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
    direction = -1;
  else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
    {
      int regno;

      gcc_assert (REG_P (dest));
      regno = REGNO (dest);

      gcc_assert (refers_to_regno_p (regno, regno + num_words,
                                     mem_operand, 0));

      if (refers_to_regno_p (regno, mem_operand))
        direction = -1;
      else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
                                  mem_operand, 0))
        direction = 1;
      else
        /* This means something like
             (set (reg:DI r0) (mem:DI (reg:HI r1)))
           which we'd need to support by doing the set of the second word
           last.  */
        gcc_unreachable ();
    }

  end = direction < 0 ? -1 : num_words;
  for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
    {
      rtx w_src, w_dest, insn;

      if (src_modifies)
        w_src = gen_rtx_MEM (word_mode, mem_operand);
      else
        w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
      if (src_volatile)
        MEM_VOLATILE_P (w_src) = 1;
      if (dest_modifies)
        w_dest = gen_rtx_MEM (word_mode, mem_operand);
      else
        w_dest = simplify_gen_subreg (word_mode, dest, mode,
                                      i * UNITS_PER_WORD);
      if (dest_volatile)
        MEM_VOLATILE_P (w_dest) = 1;

      /* The simplify_subreg calls must always be able to simplify.  */
      gcc_assert (GET_CODE (w_src) != SUBREG
                  && GET_CODE (w_dest) != SUBREG);

      insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
      if (auto_inc_reg_rtx)
        REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
                                            auto_inc_reg_rtx,
                                            REG_NOTES (insn));
    }
}
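/* For instance, splitting an overlapping register-to-register SImode
   move such as (set (reg:SI 1) (reg:SI 0)) runs the loop above with
   DIRECTION == -1, copying the high word first so the second source
   word is not clobbered before it has been read.  */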
/* Expander for the 'move' patterns.  Emit insns to copy a value of
   mode MODE from SRC to DEST.  */

void
xstormy16_expand_move (machine_mode mode, rtx dest, rtx src)
{
  if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (dest, 0);
      rtx dest_reg = XEXP (pmv, 0);
      rtx dest_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode,
                                     gen_rtx_REG (BImode, CARRY_REGNUM));

      dest = gen_rtx_MEM (mode, dest_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }
  else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (src, 0);
      rtx src_reg = XEXP (pmv, 0);
      rtx src_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode,
                                     gen_rtx_REG (BImode, CARRY_REGNUM));

      src = gen_rtx_MEM (mode, src_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }

  /* There are only limited immediate-to-memory move instructions.  */
  if (! reload_in_progress
      && ! reload_completed
      && MEM_P (dest)
      && (! CONST_INT_P (XEXP (dest, 0))
          || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
      && ! xstormy16_below100_operand (dest, mode)
      && ! REG_P (src)
      && GET_CODE (src) != SUBREG)
    src = copy_to_mode_reg (mode, src);

  /* Don't emit something we would immediately split.  */
  if (reload_completed
      && mode != HImode && mode != QImode)
    {
      xstormy16_split_move (mode, dest, src);
      return;
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
}

/* Stack Layout:

   The stack is laid out as follows:

SP->
FP->    Local variables
        Register save area (up to 4 words)
        Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)

AP->    Return address (two words)
        9th procedure parameter word
        10th procedure parameter word
        ...
        last procedure parameter word

   The frame pointer location is tuned to make it most likely that all
   parameters and local variables can be accessed using a load-indexed
   instruction.  */

/* A structure to describe the layout.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};

/* Does REGNUM need to be saved?  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)                                    \
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])           \
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]           \
       && (REGNUM != CARRY_REGNUM)                                      \
       && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))
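/* Note the second arm of REG_NEEDS_SAVE: in an interrupt function
   (IFUN) even call-used registers must be preserved, except the carry
   bit, and in a non-leaf interrupt function they are saved whether or
   not this function itself uses them, since a callee might.  */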
/* Compute the stack layout.  */

struct xstormy16_stack_layout
xstormy16_compute_stack_layout (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  layout.locals_size = get_frame_size ();

  layout.register_save_size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      layout.register_save_size += UNITS_PER_WORD;

  if (cfun->stdarg)
    layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
  else
    layout.stdarg_save_size = 0;

  layout.frame_size = (layout.locals_size
                       + layout.register_save_size
                       + layout.stdarg_save_size);

  if (crtl->args.size <= 2048 && crtl->args.size != -1)
    {
      if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
          + crtl->args.size <= 2048)
        layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
      else
        layout.fp_minus_ap = 2048 - crtl->args.size;
    }
  else
    layout.fp_minus_ap = (layout.stdarg_save_size
                          + layout.register_save_size
                          - INCOMING_FRAME_SP_OFFSET);
  layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
                        - layout.fp_minus_ap);
  layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
  return layout;
}

/* Worker function for TARGET_CAN_ELIMINATE.  */

static bool
xstormy16_can_eliminate (const int from, const int to)
{
  return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
          ? ! frame_pointer_needed
          : true);
}

/* Determine how all the special registers get eliminated.  */

int
xstormy16_initial_elimination_offset (int from, int to)
{
  struct xstormy16_stack_layout layout;
  int result;

  layout = xstormy16_compute_stack_layout ();

  if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    result = layout.sp_minus_fp - layout.locals_size;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    result = - layout.locals_size;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    result = - layout.fp_minus_ap;
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    result = - (layout.sp_minus_fp + layout.fp_minus_ap);
  else
    gcc_unreachable ();

  return result;
}

static rtx
emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
{
  rtx set, clobber, insn;

  set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
  clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
  insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
  return insn;
}
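/* HImode additions clobber the carry bit, and after reload nothing
   will add that clobber for us, so the helper above wraps the (set ...)
   and an explicit carry clobber in one PARALLEL, mirroring what the
   addhi3 pattern produces.  */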
/* Called after register allocation to add any instructions needed for
   the prologue.  Using a prologue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.

   Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
   so that the debug info generation code can handle them properly.  */

void
xstormy16_expand_prologue (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  rtx insn;
  rtx mem_push_rtx;
  const int ifun = xstormy16_interrupt_function_p ();

  mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
  mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);

  layout = xstormy16_compute_stack_layout ();

  if (layout.locals_size >= 32768)
    error ("local variable memory requirements exceed capacity");

  if (flag_stack_usage_info)
    current_function_static_stack_size = layout.frame_size;

  /* Save the argument registers if necessary.  */
  if (layout.stdarg_save_size)
    for (regno = FIRST_ARGUMENT_REGISTER;
         regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
         regno++)
      {
        rtx dwarf;
        rtx reg = gen_rtx_REG (HImode, regno);

        insn = emit_move_insn (mem_push_rtx, reg);
        RTX_FRAME_RELATED_P (insn) = 1;

        dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

        XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
                                             gen_rtx_MEM (Pmode,
                                                          stack_pointer_rtx),
                                             reg);
        XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
                                             plus_constant (Pmode,
                                                            stack_pointer_rtx,
                                                            GET_MODE_SIZE (Pmode)));
        add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* Push each of the registers to save.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
        rtx dwarf;
        rtx reg = gen_rtx_REG (HImode, regno);

        insn = emit_move_insn (mem_push_rtx, reg);
        RTX_FRAME_RELATED_P (insn) = 1;

        dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

        XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
                                             gen_rtx_MEM (Pmode,
                                                          stack_pointer_rtx),
                                             reg);
        XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
                                             plus_constant (Pmode,
                                                            stack_pointer_rtx,
                                                            GET_MODE_SIZE (Pmode)));
        add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* It's just possible that the SP here might be what we need for
     the new FP...  */
  if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Allocate space for local variables.  */
  if (layout.locals_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
                                     GEN_INT (layout.locals_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up the frame pointer, if required.  */
  if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;

      if (layout.sp_minus_fp)
        {
          insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
                                         hard_frame_pointer_rtx,
                                         GEN_INT (- layout.sp_minus_fp));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
}
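/* Note that the stack grows upward on this target: a "push" above is
   a POST_INC store through SP, and the REG_FRAME_RELATED_EXPR note
   spells out both halves for the unwinder, the store of the register
   and the one-word SP adjustment.  */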
/* Do we need an epilogue at all?  */

int
direct_return (void)
{
  return (reload_completed
          && xstormy16_compute_stack_layout ().frame_size == 0
          && ! xstormy16_interrupt_function_p ());
}

/* Called after register allocation to add any instructions needed for
   the epilogue.  Using an epilogue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_EPILOGUE macro,
   since it allows the scheduler to intermix instructions with the
   restores of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.  */

void
xstormy16_expand_epilogue (void)
{
  struct xstormy16_stack_layout layout;
  rtx mem_pop_rtx;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* Pop the stack for the locals.  */
  if (layout.locals_size)
    {
      if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
        emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
      else
        emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
                                GEN_INT (- layout.locals_size));
    }

  /* Restore any call-saved registers.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (REG_NEEDS_SAVE (regno, ifun))
      emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);

  /* Pop the stack for the stdarg save area.  */
  if (layout.stdarg_save_size)
    emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
                            GEN_INT (- layout.stdarg_save_size));

  /* Return.  */
  if (ifun)
    emit_jump_insn (gen_return_internal_interrupt ());
  else
    emit_jump_insn (gen_return_internal ());
}

int
xstormy16_epilogue_uses (int regno)
{
  if (reload_completed && call_used_regs[regno])
    {
      const int ifun = xstormy16_interrupt_function_p ();
      return REG_NEEDS_SAVE (regno, ifun);
    }
  return 0;
}
void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}

/* Update CUM to advance past an argument in the argument list.  The
   values MODE, TYPE and NAMED describe that argument.  Once this is
   done, the variable CUM is suitable for analyzing the *following*
   argument with `TARGET_FUNCTION_ARG', etc.

   This function need not do anything if the argument in question was
   passed on the stack.  The compiler knows how to track the amount of
   stack space used for arguments without any special help.  However,
   it makes life easier for xstormy16_build_va_list if it does update
   the word count.  */

static void
xstormy16_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
                                const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  /* If an argument would otherwise be passed partially in registers,
     and partially on the stack, the whole of it is passed on the
     stack.  */
  if (*cum < NUM_ARGUMENT_REGISTERS
      && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
    *cum = NUM_ARGUMENT_REGISTERS;

  *cum += XSTORMY16_WORD_SIZE (type, mode);
}

static rtx
xstormy16_function_arg (cumulative_args_t cum_v, machine_mode mode,
                        const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (mode == VOIDmode)
    return const0_rtx;
  if (targetm.calls.must_pass_in_stack (mode, type)
      || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
    return NULL_RTX;
  return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
}
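/* As an illustration (assuming this port's usual NUM_ARGUMENT_REGISTERS
   of 6): with five argument words already assigned, a two-word argument
   no longer fits, so xstormy16_function_arg returns NULL_RTX (stack)
   and the advance hook bumps *cum straight to 6 before adding the
   argument's own size.  */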
/* Build the va_list type.

   For this chip, va_list is a record containing a counter and a pointer.
   The counter is of type 'int' and indicates how many bytes
   have been used to date.  The pointer indicates the stack position
   for arguments that have not been passed in registers.
   To keep the layout nice, the pointer is first in the structure.  */

static tree
xstormy16_build_builtin_va_list (void)
{
  tree f_1, f_2, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (BUILTINS_LOCATION,
                          TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_1 = build_decl (BUILTINS_LOCATION,
                    FIELD_DECL, get_identifier ("base"),
                    ptr_type_node);
  f_2 = build_decl (BUILTINS_LOCATION,
                    FIELD_DECL, get_identifier ("count"),
                    unsigned_type_node);

  DECL_FIELD_CONTEXT (f_1) = record;
  DECL_FIELD_CONTEXT (f_2) = record;

  TYPE_STUB_DECL (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_1;
  DECL_CHAIN (f_1) = f_2;

  layout_type (record);

  return record;
}

/* Implement the stdarg/varargs va_start macro.  VALIST is the tree of
   the va_list variable to initialize.  NEXTARG is the machine
   independent notion of the 'next' argument after the variable
   arguments.  */

static void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t, u;

  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
                  NULL_TREE);

  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
  u = fold_convert (TREE_TYPE (count), u);
  t = fold_build_pointer_plus (t, u);
  t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
              build_int_cst (NULL_TREE,
                             crtl->args.info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}

/* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
   of type va_list as a tree, TYPE is the type passed to va_arg.
   Note:  This algorithm is documented in stormy-abi.  */

static tree
xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
                                gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
                  NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
  lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
  addr = create_tmp_var (ptr_type_node);

  if (!must_stack)
    {
      tree r;

      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
                  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
                  NULL_TREE);
      gimplify_and_add (t, pre_p);

      t = fold_build_pointer_plus (base, count_tmp);
      gimplify_assign (addr, t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
                + INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
              fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (t), fold (t));
  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  t = fold_build_pointer_plus (base, t);
  gimplify_assign (addr, t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  gimplify_assign (count, t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}
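/* In outline, the GIMPLE emitted above computes (names as in the
   locals; pseudo-code only):

     if (!must_stack && count + size <= size_of_reg_args)
       addr = base + count;                       // register save area
     else
       {
         if (count < size_of_reg_args)
           count = size_of_reg_args;              // skip remaining regs
         addr = base - (count - (size_of_reg_args
                                 + INCOMING_FRAME_SP_OFFSET) + size);
       }
     count += size;

   which follows the argument-passing algorithm in stormy-abi.  */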
/* Worker function for TARGET_TRAMPOLINE_INIT.  */

static void
xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr, reg_addr_mem;

  reg_addr = copy_to_reg (XEXP (m_tramp, 0));
  reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);

  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}
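/* The four HImode words stored above form the trampoline image:

     word 0:  0x3130 | STATIC_CHAIN_REGNUM   (load-immediate opcode)
     word 1:  the static chain value
     word 2:  0x0200 | (fnaddr & 0xff)       (start of a far jump)
     word 3:  fnaddr >> 8

   i.e. "load the static chain register, then jmpf FNDECL".  The
   opcode interpretation here is a reading of the constants above,
   not a separate specification.  */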
/* Worker function for TARGET_FUNCTION_VALUE.  */

static rtx
xstormy16_function_value (const_tree valtype,
                          const_tree func ATTRIBUTE_UNUSED,
                          bool outgoing ATTRIBUTE_UNUSED)
{
  machine_mode mode;
  mode = TYPE_MODE (valtype);
  PROMOTE_MODE (mode, 0, valtype);
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}

/* Worker function for TARGET_LIBCALL_VALUE.  */

static rtx
xstormy16_libcall_value (machine_mode mode,
                         const_rtx fun ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}

/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.  */

static bool
xstormy16_function_value_regno_p (const unsigned int regno)
{
  return (regno == RETURN_VALUE_REGNUM);
}

/* A C compound statement that outputs the assembler code for a thunk function,
   used to implement C++ virtual function calls with multiple inheritance.  The
   thunk acts as a wrapper around a virtual function, adjusting the implicit
   object parameter before handing control off to the real function.

   First, emit code to add the integer DELTA to the location that contains the
   incoming first argument.  Assume that this argument contains a pointer, and
   is the one used to pass the `this' pointer in C++.  This is the incoming
   argument *before* the function prologue, e.g. `%o0' on a sparc.  The
   addition must preserve the values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
   the return address.  Hence returning from FUNCTION will return to whoever
   called the current `thunk'.

   The effect must be as if FUNCTION had been called directly with the
   adjusted first argument.  This macro is responsible for emitting all
   of the code for a thunk function; TARGET_ASM_FUNCTION_PROLOGUE and
   TARGET_ASM_FUNCTION_EPILOGUE are not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already been
   extracted from it.)  It might possibly be useful on some targets, but
   probably not.  */

static void
xstormy16_asm_output_mi_thunk (FILE *file,
                               tree thunk_fndecl ATTRIBUTE_UNUSED,
                               HOST_WIDE_INT delta,
                               HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
                               tree function)
{
  int regnum = FIRST_ARGUMENT_REGISTER;

  /* There might be a hidden first argument for a returned structure.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    regnum += 1;

  fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
  fputs ("\tjmpf ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}
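/* The whole thunk is thus just two instructions, e.g.

       add r2,#0x4       ; adjust `this' in the first argument register
       jmpf _RealFunction

   with the register shifted up by one when a hidden struct-return
   pointer occupies the first slot.  (Register name and symbol are
   illustrative.)  */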
/* The purpose of this function is to override the default behavior of
   BSS objects.  Normally, they go into .bss or .sbss via ".common"
   directives, but we need to override that and put them in
   .bss_below100.  We can't just use a section override (like we do
   for .data_below100), because that makes them initialized rather
   than uninitialized.  */

void
xstormy16_asm_output_aligned_common (FILE *stream,
                                     tree decl,
                                     const char *name,
                                     int size,
                                     int align,
                                     int global)
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  if (mem != NULL_RTX
      && MEM_P (mem)
      && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
      && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
    {
      const char *name2;
      int p2align = 0;

      switch_to_section (bss100_section);

      while (align > 8)
        {
          align /= 2;
          p2align++;
        }

      name2 = default_strip_name_encoding (name);
      if (global)
        fprintf (stream, "\t.globl\t%s\n", name2);
      if (p2align)
        fprintf (stream, "\t.p2align %d\n", p2align);
      fprintf (stream, "\t.type\t%s, @object\n", name2);
      fprintf (stream, "\t.size\t%s, %d\n", name2, size);
      fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
      return;
    }

  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}

/* Implement TARGET_ASM_INIT_SECTIONS.  */

static void
xstormy16_asm_init_sections (void)
{
  bss100_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
                           output_section_asm_op,
                           "\t.section \".bss_below100\",\"aw\",@nobits");
}

/* Mark symbols with the "below100" attribute so that we can use the
   special addressing modes for them.  */

static void
xstormy16_encode_section_info (tree decl, rtx r, int first)
{
  default_encode_section_info (decl, r, first);

  if (TREE_CODE (decl) == VAR_DECL
      && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
          || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
    {
      rtx symbol = XEXP (r, 0);

      gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
      SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
    }
}

#undef  TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
#undef  TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor

/* Output constructors and destructors.  Just like
   default_named_section_asm_out_* but don't set the sections writable.  */

static void
xstormy16_asm_out_destructor (rtx symbol, int priority)
{
  const char *section = ".dtors";
  char buf[16];

  /* ??? This only works reliably with the GNU linker.  */
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".dtors.%.5u",
               /* Invert the numbering so the linker puts us in the proper
                  order; constructors are run from right to left, and the
                  linker sorts in increasing order.  */
               MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, 0, NULL));
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}

static void
xstormy16_asm_out_constructor (rtx symbol, int priority)
{
  const char *section = ".ctors";
  char buf[16];

  /* ??? This only works reliably with the GNU linker.  */
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".ctors.%.5u",
               /* Invert the numbering so the linker puts us in the proper
                  order; constructors are run from right to left, and the
                  linker sorts in increasing order.  */
               MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, 0, NULL));
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
/* Worker function for TARGET_PRINT_OPERAND_ADDRESS.

   Print a memory address as an operand to reference that memory location.  */

static void
xstormy16_print_operand_address (FILE *file, rtx address)
{
  HOST_WIDE_INT offset;
  int pre_dec, post_inc;

  /* There are a few easy cases.  */
  if (CONST_INT_P (address))
    {
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
      return;
    }

  if (CONSTANT_P (address) || LABEL_P (address))
    {
      output_addr_const (file, address);
      return;
    }

  /* Otherwise, it's hopefully something of the form
       (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)).  */
  if (GET_CODE (address) == PLUS)
    {
      gcc_assert (CONST_INT_P (XEXP (address, 1)));
      offset = INTVAL (XEXP (address, 1));
      address = XEXP (address, 0);
    }
  else
    offset = 0;

  pre_dec = (GET_CODE (address) == PRE_DEC);
  post_inc = (GET_CODE (address) == POST_INC);
  if (pre_dec || post_inc)
    address = XEXP (address, 0);

  gcc_assert (REG_P (address));

  fputc ('(', file);
  if (pre_dec)
    fputs ("--", file);
  fputs (reg_names[REGNO (address)], file);
  if (post_inc)
    fputs ("++", file);
  if (offset != 0)
    fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
  fputc (')', file);
}

/* Worker function for TARGET_PRINT_OPERAND.

   Print an operand to an assembler instruction.  */

static void
xstormy16_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'B':
      /* There is either one bit set, or one bit clear, in X.
         Print it preceded by '#'.  */
      {
        static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
        HOST_WIDE_INT xx = 1;
        HOST_WIDE_INT l;

        if (CONST_INT_P (x))
          xx = INTVAL (x);
        else
          output_operand_lossage ("'B' operand is not constant");

        /* GCC sign-extends masks with the MSB set, so we have to
           detect all the cases that differ only in sign extension
           beyond the bits we care about.  Normally, the predicates
           and constraints ensure that we have the right values.  This
           works correctly for valid masks.  */
        if (bits_set[xx & 7] <= 1)
          {
            /* Remove sign extension bits.  */
            if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
              xx &= 0xff;
            else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
              xx &= 0xffff;
            l = exact_log2 (xx);
          }
        else
          {
            /* Add sign extension bits.  */
            if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
              xx |= ~(HOST_WIDE_INT)0xff;
            else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
              xx |= ~(HOST_WIDE_INT)0xffff;
            l = exact_log2 (~xx);
          }

        if (l == -1)
          output_operand_lossage ("'B' operand has multiple bits set");

        fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
        return;
      }

    case 'C':
      /* Print the symbol without a surrounding @fptr().  */
      if (GET_CODE (x) == SYMBOL_REF)
        assemble_name (file, XSTR (x, 0));
      else if (LABEL_P (x))
        output_asm_label (x);
      else
        xstormy16_print_operand_address (file, x);
      return;

    case 'o':
    case 'O':
      /* Print the immediate operand less one, preceded by '#'.
         For 'O', negate it first.  */
      {
        HOST_WIDE_INT xx = 0;

        if (CONST_INT_P (x))
          xx = INTVAL (x);
        else
          output_operand_lossage ("'o' operand is not constant");

        if (code == 'O')
          xx = -xx;

        fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
        return;
      }

    case 'b':
      /* Print the shift mask for bp/bn.  */
      {
        HOST_WIDE_INT xx = 1;
        HOST_WIDE_INT l;

        if (CONST_INT_P (x))
          xx = INTVAL (x);
        else
          output_operand_lossage ("'b' operand is not constant");

        l = 7 - xx;

        fputs (IMMEDIATE_PREFIX, file);
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
        return;
      }

    case 0:
      /* Handled below.  */
      break;

    default:
      output_operand_lossage ("xstormy16_print_operand: unknown code");
      return;
    }

  switch (GET_CODE (x))
    {
    case REG:
      fputs (reg_names[REGNO (x)], file);
      break;

    case MEM:
      xstormy16_print_operand_address (file, XEXP (x, 0));
      break;

    default:
      /* Some kind of constant or label; an immediate operand,
         so prefix it with '#' for the assembler.  */
      fputs (IMMEDIATE_PREFIX, file);
      output_addr_const (file, x);
      break;
    }

  return;
}
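/* Examples for the modifiers above: with a one-bit-set mask such as
   0x08, 'B' prints #3 (exact_log2 of the mask); with a one-bit-clear
   mask such as -9 (0x...fff7), it also prints #3 via the ~xx path.
   'b' prints 7 minus the operand, the bit number counted from the
   opposite end, as needed by bp/bn.  */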
/* Worker function for TARGET_PRINT_OPERAND.

   Print an operand to an assembler instruction.  */

static void
xstormy16_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'B':
      /* There is either one bit set, or one bit clear, in X.
         Print it preceded by '#'.  */
      {
        static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
        HOST_WIDE_INT xx = 1;
        HOST_WIDE_INT l;

        if (CONST_INT_P (x))
          xx = INTVAL (x);
        else
          output_operand_lossage ("'B' operand is not constant");

        /* GCC sign-extends masks with the MSB set, so we have to
           detect all the cases that differ only in sign extension
           beyond the bits we care about.  Normally, the predicates
           and constraints ensure that we have the right values.  This
           works correctly for valid masks.  */
        if (bits_set[xx & 7] <= 1)
          {
            /* Remove sign extension bits.  */
            if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
              xx &= 0xff;
            else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
              xx &= 0xffff;
            l = exact_log2 (xx);
          }
        else
          {
            /* Add sign extension bits.  */
            if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
              xx |= ~(HOST_WIDE_INT)0xff;
            else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
              xx |= ~(HOST_WIDE_INT)0xffff;
            l = exact_log2 (~xx);
          }

        if (l == -1)
          output_operand_lossage ("'B' operand has multiple bits set");

        fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
        return;
      }

    case 'C':
      /* Print the symbol without a surrounding @fptr().  */
      if (GET_CODE (x) == SYMBOL_REF)
        assemble_name (file, XSTR (x, 0));
      else if (LABEL_P (x))
        output_asm_label (x);
      else
        xstormy16_print_operand_address (file, x);
      return;

    case 'o':
    case 'O':
      /* Print the immediate operand less one, preceded by '#'.
         For 'O', negate it first.  */
      {
        HOST_WIDE_INT xx = 0;

        if (CONST_INT_P (x))
          xx = INTVAL (x);
        else
          output_operand_lossage ("'o' operand is not constant");

        if (code == 'O')
          xx = -xx;

        fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
        return;
      }

    case 'b':
      /* Print the shift mask for bp/bn.  */
      {
        HOST_WIDE_INT xx = 1;
        HOST_WIDE_INT l;

        if (CONST_INT_P (x))
          xx = INTVAL (x);
        else
          output_operand_lossage ("'b' operand is not constant");

        l = 7 - xx;

        fputs (IMMEDIATE_PREFIX, file);
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
        return;
      }

    case 0:
      /* Handled below.  */
      break;

    default:
      output_operand_lossage ("xstormy16_print_operand: unknown code");
      return;
    }

  switch (GET_CODE (x))
    {
    case REG:
      fputs (reg_names [REGNO (x)], file);
      break;

    case MEM:
      xstormy16_print_operand_address (file, XEXP (x, 0));
      break;

    default:
      /* Some kind of constant or label; an immediate operand,
         so prefix it with '#' for the assembler.  */
      fputs (IMMEDIATE_PREFIX, file);
      output_addr_const (file, x);
      break;
    }

  return;
}
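/* Examples of the operand codes above, derived from the cases they
   name: '%B' of the mask 0x0008 prints "#3" (the index of the set
   bit); '%o' of 5 prints "#4" and '%O' of 5 prints "#-6"; '%b' of 3
   prints "#4" (7 minus the bit number, the shift count bp/bn
   expect).  */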
/* Expander for the `casesi' pattern.
   INDEX is the index of the switch statement.
   LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
     to the first table entry.
   RANGE is the number of table entries.
   TABLE is an ADDR_VEC that is the jump table.
   DEFAULT_LABEL is the address to branch to if INDEX is outside the
     range LOWER_BOUND to LOWER_BOUND + RANGE - 1.  */

void
xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
                         rtx table, rtx default_label)
{
  HOST_WIDE_INT range_i = INTVAL (range);
  rtx int_index;

  /* This code uses 'br', so it can deal only with tables of size up to
     8192 entries.  */
  if (range_i >= 8192)
    sorry ("switch statement of size %lu entries too large",
           (unsigned long) range_i);

  index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
                        OPTAB_LIB_WIDEN);
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
                           default_label);
  int_index = gen_lowpart_common (HImode, index);
  emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
  emit_jump_insn (gen_tablejump_pcrel (int_index, table));
}

/* Output an ADDR_VEC.  It is output as a sequence of 'jmpf'
   instructions, without label or alignment or any other special
   constructs.  We know that the previous instruction will be the
   `tablejump_pcrel' output above.

   TODO: it might be nice to output 'br' instructions if they could
   all reach.  */

void
xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
{
  int vlen, idx;

  switch_to_section (current_function_section ());

  vlen = XVECLEN (table, 0);
  for (idx = 0; idx < vlen; idx++)
    {
      fputs ("\tjmpf ", file);
      output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
      fputc ('\n', file);
    }
}

/* Expander for the `call' patterns.
   RETVAL is the RTL for the return register or NULL for void functions.
   DEST is the function to call, expressed as a MEM.
   COUNTER is ignored.  */

void
xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
{
  rtx call, temp;
  machine_mode mode;

  gcc_assert (MEM_P (dest));
  dest = XEXP (dest, 0);

  if (! CONSTANT_P (dest) && ! REG_P (dest))
    dest = force_reg (Pmode, dest);

  if (retval == NULL)
    mode = VOIDmode;
  else
    mode = GET_MODE (retval);

  call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
                       counter);
  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  if (! CONSTANT_P (dest))
    {
      temp = gen_reg_rtx (HImode);
      emit_move_insn (temp, const0_rtx);
    }
  else
    temp = const0_rtx;

  call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
                                                gen_rtx_USE (VOIDmode, temp)));
  emit_call_insn (call);
}

/* Expanders for multiword computational operations.  */

/* Expander for arithmetic operations; emit insns to compute

     (set DEST (CODE:MODE SRC0 SRC1))

   When CODE is COMPARE, a branch template is generated
   (this saves duplicating code in xstormy16_split_cbranch).  */

void
xstormy16_expand_arith (machine_mode mode, enum rtx_code code,
                        rtx dest, rtx src0, rtx src1)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int i;
  int firstloop = 1;

  if (code == NEG)
    emit_move_insn (src0, const0_rtx);

  for (i = 0; i < num_words; i++)
    {
      rtx w_src0, w_src1, w_dest;
      rtx insn;

      w_src0 = simplify_gen_subreg (word_mode, src0, mode,
                                    i * UNITS_PER_WORD);
      w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
      w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);

      switch (code)
        {
        case PLUS:
          if (firstloop
              && CONST_INT_P (w_src1)
              && INTVAL (w_src1) == 0)
            continue;

          if (firstloop)
            insn = gen_addchi4 (w_dest, w_src0, w_src1);
          else
            insn = gen_addchi5 (w_dest, w_src0, w_src1);
          break;

        case NEG:
        case MINUS:
        case COMPARE:
          if (code == COMPARE && i == num_words - 1)
            {
              rtx branch, sub, clobber, sub_1;

              sub_1 = gen_rtx_MINUS (HImode, w_src0,
                                     gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
              sub = gen_rtx_SET (VOIDmode, w_dest,
                                 gen_rtx_MINUS (HImode, sub_1, w_src1));
              clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
              branch = gen_rtx_SET (VOIDmode, pc_rtx,
                                    gen_rtx_IF_THEN_ELSE (VOIDmode,
                                                          gen_rtx_EQ (HImode,
                                                                      sub_1,
                                                                      w_src1),
                                                          pc_rtx,
                                                          pc_rtx));
              insn = gen_rtx_PARALLEL (VOIDmode,
                                       gen_rtvec (3, branch, sub, clobber));
            }
          else if (firstloop
                   && code != COMPARE
                   && CONST_INT_P (w_src1)
                   && INTVAL (w_src1) == 0)
            continue;
          else if (firstloop)
            insn = gen_subchi4 (w_dest, w_src0, w_src1);
          else
            insn = gen_subchi5 (w_dest, w_src0, w_src1);
          break;

        case IOR:
        case XOR:
        case AND:
          if (CONST_INT_P (w_src1)
              && INTVAL (w_src1) == -(code == AND))
            continue;

          insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
                                                                w_src0, w_src1));
          break;

        case NOT:
          insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
          break;

        default:
          gcc_unreachable ();
        }

      firstloop = 0;
      emit (insn);
    }

  /* If we emit nothing, try_split () will think we failed.  So emit
     something that does nothing and can be optimized away.  */
  if (firstloop)
    emit (gen_nop ());
}
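/* For instance, an SImode PLUS goes through the loop above twice:
   the low words are combined with addchi4 (a plain add that sets
   carry) and the high words with addchi5 (add with carry), giving
   the usual add/adc word pair.  */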
/* The shift operations are split at output time for constant values;
   variable-width shifts get handed off to a library routine.

   Generate an output string to do (set X (CODE:MODE X SIZE_R))
   SIZE_R will be a CONST_INT, X will be a hard register.  */

const char *
xstormy16_output_shift (machine_mode mode, enum rtx_code code,
                        rtx x, rtx size_r, rtx temp)
{
  HOST_WIDE_INT size;
  const char *r0, *r1, *rt;
  static char r[64];

  gcc_assert (CONST_INT_P (size_r)
              && REG_P (x)
              && mode == SImode);

  size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);

  if (size == 0)
    return "";

  r0 = reg_names [REGNO (x)];
  r1 = reg_names [REGNO (x) + 1];

  /* For shifts of size 1, we can use the rotate instructions.  */
  if (size == 1)
    {
      switch (code)
        {
        case ASHIFT:
          sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
          break;
        case ASHIFTRT:
          sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
          break;
        case LSHIFTRT:
          sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
          break;
        default:
          gcc_unreachable ();
        }
      return r;
    }

  /* For large shifts, there are easy special cases.  */
  if (size == 16)
    {
      switch (code)
        {
        case ASHIFT:
          sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
          break;
        case ASHIFTRT:
          sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
          break;
        case LSHIFTRT:
          sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
          break;
        default:
          gcc_unreachable ();
        }
      return r;
    }
  if (size > 16)
    {
      switch (code)
        {
        case ASHIFT:
          sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
                   r1, r0, r0, r1, (int) size - 16);
          break;
        case ASHIFTRT:
          sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
                   r0, r1, r1, r0, (int) size - 16);
          break;
        case LSHIFTRT:
          sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
                   r0, r1, r1, r0, (int) size - 16);
          break;
        default:
          gcc_unreachable ();
        }
      return r;
    }

  /* For the rest, we have to do more work.  In particular, we
     need a temporary.  */
  rt = reg_names [REGNO (temp)];
  switch (code)
    {
    case ASHIFT:
      sprintf (r,
               "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
               rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
               r1, rt);
      break;
    case ASHIFTRT:
      sprintf (r,
               "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
               rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
               r0, rt);
      break;
    case LSHIFTRT:
      sprintf (r,
               "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
               rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
               r0, rt);
      break;
    default:
      gcc_unreachable ();
    }
  return r;
}
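/* A concrete instance of the general case above: a logical right
   shift by 4, with X in r2/r3 and TEMP in r4, yields

     mov r4,r3 | shr r3,#4 | shr r2,#4 | shl r4,#12 | or r2,r4

   where the temporary carries the four bits that cross the word
   boundary.  */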
/* Attribute handling.  */

/* Return nonzero if the function is an interrupt function.  */

int
xstormy16_interrupt_function_p (void)
{
  tree attributes;

  /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
     any functions are declared, which is demonstrably wrong, but
     it is worked around here.  FIXME.  */
  if (!cfun)
    return 0;

  attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  return lookup_attribute ("interrupt", attributes) != NULL_TREE;
}

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE  xstormy16_attribute_table

static tree xstormy16_handle_interrupt_attribute
  (tree *, tree, tree, int, bool *);
static tree xstormy16_handle_below100_attribute
  (tree *, tree, tree, int, bool *);

static const struct attribute_spec xstormy16_attribute_table[] =
{
  /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
     affects_type_identity.  */
  { "interrupt", 0, 0, false, true,  true,
    xstormy16_handle_interrupt_attribute, false },
  { "BELOW100",  0, 0, false, false, false,
    xstormy16_handle_below100_attribute, false },
  { "below100",  0, 0, false, false, false,
    xstormy16_handle_below100_attribute, false },
  { NULL,        0, 0, false, false, false, NULL, false }
};

/* Handle an "interrupt" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
xstormy16_handle_interrupt_attribute (tree *node, tree name,
                                      tree args ATTRIBUTE_UNUSED,
                                      int flags ATTRIBUTE_UNUSED,
                                      bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
               name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Handle a "below100" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
xstormy16_handle_below100_attribute (tree *node,
                                     tree name ATTRIBUTE_UNUSED,
                                     tree args ATTRIBUTE_UNUSED,
                                     int flags ATTRIBUTE_UNUSED,
                                     bool *no_add_attrs)
{
  if (TREE_CODE (*node) != VAR_DECL
      && TREE_CODE (*node) != POINTER_TYPE
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (OPT_Wattributes,
               "%<__BELOW100__%> attribute only applies to variables");
      *no_add_attrs = true;
    }
  else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
    {
      if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
        {
          warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
                   "with auto storage class");
          *no_add_attrs = true;
        }
    }

  return NULL_TREE;
}
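/* Illustrative uses of the attributes handled above:

     void timer_isr (void) __attribute__ ((interrupt));
     volatile unsigned char status __attribute__ ((below100));

   The first must apply to a function type; the second is rejected
   for variables with automatic storage.  */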
#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS   xstormy16_init_builtins
#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN  xstormy16_expand_builtin

static struct
{
  const char * name;
  int md_code;
  const char * arg_ops;   /* 0..9, t for temp register, r for return value.  */
  const char * arg_types; /* s=short,l=long, upper case for unsigned.  */
}
s16builtins[] =
{
  { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
  { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
  { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
  { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
  { NULL, 0, NULL, NULL }
};

static void
xstormy16_init_builtins (void)
{
  tree args[2], ret_type, arg = NULL_TREE, ftype;
  int i, a, n_args;

  ret_type = void_type_node;

  for (i = 0; s16builtins[i].name; i++)
    {
      n_args = strlen (s16builtins[i].arg_types) - 1;

      gcc_assert (n_args <= (int) ARRAY_SIZE (args));

      for (a = n_args - 1; a >= 0; a--)
        args[a] = NULL_TREE;

      for (a = n_args; a >= 0; a--)
        {
          switch (s16builtins[i].arg_types[a])
            {
            case 's': arg = short_integer_type_node; break;
            case 'S': arg = short_unsigned_type_node; break;
            case 'l': arg = long_integer_type_node; break;
            case 'L': arg = long_unsigned_type_node; break;
            default: gcc_unreachable ();
            }
          if (a == 0)
            ret_type = arg;
          else
            args[a-1] = arg;
        }
      ftype = build_function_type_list (ret_type, args[0], args[1], NULL_TREE);
      add_builtin_function (s16builtins[i].name, ftype,
                            i, BUILT_IN_MD, NULL, NULL_TREE);
    }
}

static rtx
xstormy16_expand_builtin (tree exp, rtx target,
                          rtx subtarget ATTRIBUTE_UNUSED,
                          machine_mode mode ATTRIBUTE_UNUSED,
                          int ignore ATTRIBUTE_UNUSED)
{
  rtx op[10], args[10], pat, copyto[10], retval = 0;
  tree fndecl, argtree;
  int i, a, o, code;

  fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  argtree = TREE_OPERAND (exp, 1);
  i = DECL_FUNCTION_CODE (fndecl);
  code = s16builtins[i].md_code;

  for (a = 0; a < 10 && argtree; a++)
    {
      args[a] = expand_normal (TREE_VALUE (argtree));
      argtree = TREE_CHAIN (argtree);
    }

  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    {
      char ao = s16builtins[i].arg_ops[o];
      char c = insn_data[code].operand[o].constraint[0];
      machine_mode omode;

      copyto[o] = 0;

      omode = (machine_mode) insn_data[code].operand[o].mode;
      if (ao == 'r')
        op[o] = target ? target : gen_reg_rtx (omode);
      else if (ao == 't')
        op[o] = gen_reg_rtx (omode);
      else
        op[o] = args[(int) hex_value (ao)];

      if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
        {
          if (c == '+' || c == '=')
            {
              copyto[o] = op[o];
              op[o] = gen_reg_rtx (omode);
            }
          else
            op[o] = copy_to_mode_reg (omode, op[o]);
        }

      if (ao == 'r')
        retval = op[o];
    }

  pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
                        op[5], op[6], op[7], op[8], op[9]);
  emit_insn (pat);

  for (o = 0; s16builtins[i].arg_ops[o]; o++)
    if (copyto[o])
      {
        emit_move_insn (copyto[o], op[o]);
        if (op[o] == retval)
          retval = copyto[o];
      }

  return retval;
}
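/* Decoding the table above: arg_types "sls" gives the C prototype

     short __sdivlh (long, short);

   (the first letter is the return type, the rest the arguments),
   and arg_ops "rt01" maps the sdivlh pattern's operands to the
   return value, a temporary, and the two arguments.  __smodlh uses
   "tr01", so the same pattern's second output (the remainder)
   becomes the result.  */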
/* Look for combinations of insns that can be converted to BN or BP
   opcodes.  This is, unfortunately, too complex to do with MD
   patterns.  */

static void
combine_bnp (rtx_insn *insn)
{
  int insn_code, regno, need_extend;
  unsigned int mask;
  rtx cond, reg, qireg, mem;
  rtx_insn *and_insn, *load;
  machine_mode load_mode = QImode;
  machine_mode and_mode = QImode;
  rtx_insn *shift = NULL;

  insn_code = recog_memoized (insn);
  if (insn_code != CODE_FOR_cbranchhi
      && insn_code != CODE_FOR_cbranchhi_neg)
    return;

  cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
  cond = XEXP (cond, 1); /* if */
  cond = XEXP (cond, 0); /* cond */
  switch (GET_CODE (cond))
    {
    case NE:
    case EQ:
      need_extend = 0;
      break;
    case LT:
    case GE:
      need_extend = 1;
      break;
    default:
      return;
    }

  reg = XEXP (cond, 0);
  if (! REG_P (reg))
    return;
  regno = REGNO (reg);
  if (XEXP (cond, 1) != const0_rtx)
    return;
  if (! find_regno_note (insn, REG_DEAD, regno))
    return;
  qireg = gen_rtx_REG (QImode, regno);

  if (need_extend)
    {
      /* LT and GE conditionals should have a sign extend before
         them.  */
      for (and_insn = prev_real_insn (insn);
           and_insn != NULL_RTX;
           and_insn = prev_real_insn (and_insn))
        {
          int and_code = recog_memoized (and_insn);

          if (and_code == CODE_FOR_extendqihi2
              && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
              && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
            break;

          if (and_code == CODE_FOR_movhi_internal
              && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
            {
              /* This is for testing bit 15.  */
              and_insn = insn;
              break;
            }

          if (reg_mentioned_p (reg, and_insn))
            return;

          if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
            return;
        }
    }
  else
    {
      /* EQ and NE conditionals have an AND before them.  */
      for (and_insn = prev_real_insn (insn);
           and_insn != NULL_RTX;
           and_insn = prev_real_insn (and_insn))
        {
          if (recog_memoized (and_insn) == CODE_FOR_andhi3
              && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
              && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
            break;

          if (reg_mentioned_p (reg, and_insn))
            return;

          if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
            return;
        }

      if (and_insn)
        {
          /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
             followed by an AND like this:

               (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
                          (clobber (reg:BI carry))])

               (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))

             Attempt to detect this here.  */
          for (shift = prev_real_insn (and_insn); shift;
               shift = prev_real_insn (shift))
            {
              if (recog_memoized (shift) == CODE_FOR_lshrhi3
                  && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
                  && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
                break;

              if (reg_mentioned_p (reg, shift)
                  || (! NOTE_P (shift) && ! NONJUMP_INSN_P (shift)))
                {
                  shift = NULL;
                  break;
                }
            }
        }
    }

  if (and_insn == NULL_RTX)
    return;

  for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
       load;
       load = prev_real_insn (load))
    {
      int load_code = recog_memoized (load);

      if (load_code == CODE_FOR_movhi_internal
          && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
          && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
          && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
        {
          load_mode = HImode;
          break;
        }

      if (load_code == CODE_FOR_movqi_internal
          && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
          && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
        {
          load_mode = QImode;
          break;
        }

      if (load_code == CODE_FOR_zero_extendqihi2
          && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
          && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
        {
          load_mode = QImode;
          and_mode = HImode;
          break;
        }

      if (reg_mentioned_p (reg, load))
        return;

      if (! NOTE_P (load) && ! NONJUMP_INSN_P (load))
        return;
    }
  if (!load)
    return;

  mem = SET_SRC (PATTERN (load));

  if (need_extend)
    {
      mask = (load_mode == HImode) ? 0x8000 : 0x80;

      /* If the mem includes a zero-extend operation and we are
         going to generate a sign-extend operation then move the
         mem inside the zero-extend.  */
      if (GET_CODE (mem) == ZERO_EXTEND)
        mem = XEXP (mem, 0);
    }
  else
    {
      if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
                                         load_mode))
        return;

      mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));

      if (shift)
        mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
    }

  if (load_mode == HImode)
    {
      rtx addr = XEXP (mem, 0);

      if (! (mask & 0xff))
        {
          addr = plus_constant (Pmode, addr, 1);
          mask >>= 8;
        }
      mem = gen_rtx_MEM (QImode, addr);
    }

  if (need_extend)
    XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
  else
    XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));

  INSN_CODE (insn) = -1;
  delete_insn (load);

  if (and_insn != insn)
    delete_insn (and_insn);

  if (shift != NULL_RTX)
    delete_insn (shift);
}
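/* To summarize the transformation: when the sequence is found, the
   branch condition is rewritten in place so that it tests the
   below-100 memory byte directly, either as an AND with a one-bit
   mask (EQ/NE) or as a sign extension (LT/GE, testing the top bit);
   the load, AND and shift insns are then deleted, and the insn is
   re-recognized so the bn/bp branch patterns can match.  */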
/* Worker function for TARGET_MACHINE_DEPENDENT_REORG.  Scan all jump
   insns and give combine_bnp a chance to fold each one into a
   bit-test branch.  */

static void
xstormy16_reorg (void)
{
  rtx_insn *insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (! JUMP_P (insn))
        continue;
      combine_bnp (insn);
    }
}

/* Worker function for TARGET_RETURN_IN_MEMORY.  */

static bool
xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  const HOST_WIDE_INT size = int_size_in_bytes (type);
  return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
}
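/* In other words, a value comes back in registers only if it fits in
   the argument-register block: with this port's 2-byte words and
   NUM_ARGUMENT_REGISTERS argument registers (six in the current
   headers, i.e. 12 bytes), anything larger, or of unknown size
   (int_size_in_bytes returning -1), is returned in memory.  */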
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

/* Select_section doesn't handle .bss_below100.  */
#undef  TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND xstormy16_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address

#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS xstormy16_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST xstormy16_address_cost

#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
#undef  TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
#undef  TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

#undef  TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG xstormy16_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE xstormy16_function_value
#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE xstormy16_libcall_value
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

#undef  TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
#undef  TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
#define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class

#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE xstormy16_can_eliminate

#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-stormy16.h"