xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/config/rx/rx.c (revision cef8759bd76c1b621f8eab8faa6f208faabc2e15)
1 /* Subroutines used for code generation on Renesas RX processors.
2    Copyright (C) 2008-2017 Free Software Foundation, Inc.
3    Contributed by Red Hat.
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify
8    it under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3, or (at your option)
10    any later version.
11 
12    GCC is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15    GNU General Public License for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING3.  If not see
19    <http://www.gnu.org/licenses/>.  */
20 
21 /* To Do:
22 
23  * Re-enable memory-to-memory copies and fix up reload.  */
24 
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "cfghooks.h"
33 #include "df.h"
34 #include "memmodel.h"
35 #include "tm_p.h"
36 #include "regs.h"
37 #include "emit-rtl.h"
38 #include "diagnostic-core.h"
39 #include "varasm.h"
40 #include "stor-layout.h"
41 #include "calls.h"
42 #include "output.h"
43 #include "flags.h"
44 #include "explow.h"
45 #include "expr.h"
46 #include "toplev.h"
47 #include "langhooks.h"
48 #include "opts.h"
49 #include "builtins.h"
50 
51 /* This file should be included last.  */
52 #include "target-def.h"
53 
/* Hard register numbers chosen for small-data (%G) and PID (%P) base
   addressing.  INVALID_REGNUM until a register has been selected
   (selection happens outside this portion of the file).  */
static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
/* Number of registers reserved for interrupt handlers.
   NOTE(review): set/used outside this portion of the file — confirm there.  */
static unsigned int rx_num_interrupt_regs;
57 
58 static unsigned int
59 rx_gp_base_regnum (void)
60 {
61   if (rx_gp_base_regnum_val == INVALID_REGNUM)
62     gcc_unreachable ();
63   return rx_gp_base_regnum_val;
64 }
65 
66 static unsigned int
67 rx_pid_base_regnum (void)
68 {
69   if (rx_pid_base_regnum_val == INVALID_REGNUM)
70     gcc_unreachable ();
71   return rx_pid_base_regnum_val;
72 }
73 
74 /* Find a SYMBOL_REF in a "standard" MEM address and return its decl.  */
75 
76 static tree
77 rx_decl_for_addr (rtx op)
78 {
79   if (GET_CODE (op) == MEM)
80     op = XEXP (op, 0);
81   if (GET_CODE (op) == CONST)
82     op = XEXP (op, 0);
83   while (GET_CODE (op) == PLUS)
84     op = XEXP (op, 0);
85   if (GET_CODE (op) == SYMBOL_REF)
86     return SYMBOL_REF_DECL (op);
87   return NULL_TREE;
88 }
89 
90 static void rx_print_operand (FILE *, rtx, int);
91 
92 #define CC_FLAG_S	(1 << 0)
93 #define CC_FLAG_Z	(1 << 1)
94 #define CC_FLAG_O	(1 << 2)
95 #define CC_FLAG_C	(1 << 3)
96 #define CC_FLAG_FP	(1 << 4)	/* Fake, to differentiate CC_Fmode.  */
97 
98 static unsigned int flags_from_mode (machine_mode mode);
99 static unsigned int flags_from_code (enum rtx_code code);
100 
/* Classification of a reference relative to the PID data area, as
   returned by rx_pid_data_operand below.  */
102 
enum pid_type
{
  PID_NOT_PID = 0,	/* The object is not in the PID data area.  */
  PID_ENCODED,		/* The object is in the PID data area.  */
  PID_UNENCODED		/* The object will be placed in the PID data
			   area, but it has not been placed there yet.  */
};
109 
110 static enum pid_type
111 rx_pid_data_operand (rtx op)
112 {
113   tree op_decl;
114 
115   if (!TARGET_PID)
116     return PID_NOT_PID;
117 
118   if (GET_CODE (op) == PLUS
119       && GET_CODE (XEXP (op, 0)) == REG
120       && GET_CODE (XEXP (op, 1)) == CONST
121       && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
122     return PID_ENCODED;
123 
124   op_decl = rx_decl_for_addr (op);
125 
126   if (op_decl)
127     {
128       if (TREE_READONLY (op_decl))
129 	return PID_UNENCODED;
130     }
131   else
132     {
133       /* Sigh, some special cases.  */
134       if (GET_CODE (op) == SYMBOL_REF
135 	  || GET_CODE (op) == LABEL_REF)
136 	return PID_UNENCODED;
137     }
138 
139   return PID_NOT_PID;
140 }
141 
142 static rtx
143 rx_legitimize_address (rtx x,
144 		       rtx oldx ATTRIBUTE_UNUSED,
145 		       machine_mode mode ATTRIBUTE_UNUSED)
146 {
147   if (rx_pid_data_operand (x) == PID_UNENCODED)
148     {
149       rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
150       return rv;
151     }
152 
153   if (GET_CODE (x) == PLUS
154       && GET_CODE (XEXP (x, 0)) == PLUS
155       && REG_P (XEXP (XEXP (x, 0), 0))
156       && REG_P (XEXP (x, 1)))
157     return force_reg (SImode, x);
158 
159   return x;
160 }
161 
162 /* Return true if OP is a reference to an object in a small data area.  */
163 
164 static bool
165 rx_small_data_operand (rtx op)
166 {
167   if (rx_small_data_limit == 0)
168     return false;
169 
170   if (GET_CODE (op) == SYMBOL_REF)
171     return SYMBOL_REF_SMALL_P (op);
172 
173   return false;
174 }
175 
/* Return true if X is a valid address for a MODE-sized memory
   reference.  STRICT is forwarded to RTX_OK_FOR_BASE to control
   whether pseudo registers are acceptable as base registers.  */

static bool
rx_is_legitimate_address (machine_mode mode, rtx x,
			  bool strict ATTRIBUTE_UNUSED)
{
  if (RTX_OK_FOR_BASE (x, strict))
    /* Register Indirect.  */
    return true;

  if ((GET_MODE_SIZE (mode) == 4
       || GET_MODE_SIZE (mode) == 2
       || GET_MODE_SIZE (mode) == 1)
      && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
    /* Pre-decrement Register Indirect or
       Post-increment Register Indirect.  */
    return RTX_OK_FOR_BASE (XEXP (x, 0), strict);

  /* PID references: already-encoded addresses are valid as-is;
     unencoded ones must be converted first (see rx_legitimize_address).  */
  switch (rx_pid_data_operand (x))
    {
    case PID_UNENCODED:
      return false;
    case PID_ENCODED:
      return true;
    default:
      break;
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx arg1 = XEXP (x, 0);
      rtx arg2 = XEXP (x, 1);
      rtx index = NULL_RTX;

      /* One operand must be a valid base register; the other is
	 treated as the index/displacement.  */
      if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
	index = arg2;
      else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
	index = arg1;
      else
	return false;

      switch (GET_CODE (index))
	{
	case CONST_INT:
	  {
	    /* Register Relative: REG + INT.
	       Only positive, mode-aligned, mode-sized
	       displacements are allowed.  */
	    HOST_WIDE_INT val = INTVAL (index);
	    int factor;

	    if (val < 0)
	      return false;

	    switch (GET_MODE_SIZE (mode))
	      {
	      default:
	      case 4: factor = 4; break;
	      case 2: factor = 2; break;
	      case 1: factor = 1; break;
	      }

	    /* The displacement is encoded as a 16-bit scaled offset,
	       hence the 65535 * factor limit.  */
	    if (val > (65535 * factor))
	      return false;
	    return (val % factor) == 0;
	  }

	case REG:
	  /* Unscaled Indexed Register Indirect: REG + REG
	     Size has to be "QI", REG has to be valid.  */
	  return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);

	case MULT:
	  {
	    /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
	       Factor has to equal the mode size, REG has to be valid.  */
	    rtx factor;

	    factor = XEXP (index, 1);
	    index = XEXP (index, 0);

	    return REG_P (index)
	      && RTX_OK_FOR_BASE (index, strict)
	      && CONST_INT_P (factor)
	      && GET_MODE_SIZE (mode) == INTVAL (factor);
	  }

	default:
	  return false;
	}
    }

  /* Small data area accesses turn into register relative offsets.  */
  return rx_small_data_operand (x);
}
269 
270 /* Returns TRUE for simple memory addresses, ie ones
271    that do not involve register indirect addressing
272    or pre/post increment/decrement.  */
273 
bool
rx_is_restricted_memory_address (rtx mem, machine_mode mode)
{
  /* Despite its name, MEM here is the address expression itself - it is
     passed to rx_is_legitimate_address as the address operand and the
     switch below tests address codes.  */
  if (! rx_is_legitimate_address
      (mode, mem, reload_in_progress || reload_completed))
    return false;

  switch (GET_CODE (mem))
    {
    case REG:
      /* Simple memory addresses are OK.  */
      return true;

    case SUBREG:
      /* A subreg is simple provided the underlying register is.  */
      return RX_REG_P (SUBREG_REG (mem));

    case PRE_DEC:
    case POST_INC:
      /* Auto-modify addresses are not "simple".  */
      return false;

    case PLUS:
      {
	rtx base, index;

	/* Only allow REG+INT addressing.  */
	base = XEXP (mem, 0);
	index = XEXP (mem, 1);

	if (! RX_REG_P (base) || ! CONST_INT_P (index))
	  return false;

	/* Displacement must fit the 16-bit scaled encoding.  */
	return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
      }

    case SYMBOL_REF:
      /* Can happen when small data is being supported.
         Assume that it will be resolved into GP+INT.  */
      return true;

    default:
      gcc_unreachable ();
    }
}
317 
318 /* Implement TARGET_MODE_DEPENDENT_ADDRESS_P.  */
319 
static bool
rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
{
  /* Strip a CONST wrapper so the tests below see the real expression.  */
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
      /* --REG and REG++ only work in SImode.  */
    case PRE_DEC:
    case POST_INC:
      return true;

    case MINUS:
    case PLUS:
      if (! REG_P (XEXP (addr, 0)))
	return true;

      /* Examine the offset part of the REG+offset address.  */
      addr = XEXP (addr, 1);

      switch (GET_CODE (addr))
	{
	case REG:
	  /* REG+REG only works in SImode.  */
	  return true;

	case CONST_INT:
	  /* REG+INT is only mode independent if INT is a
	     multiple of 4, positive and will fit into 16-bits.  */
	  if (((INTVAL (addr) & 3) == 0)
	      && IN_RANGE (INTVAL (addr), 4, 0xfffc))
	    return false;
	  return true;

	case SYMBOL_REF:
	case LABEL_REF:
	  return true;

	case MULT:
	  /* REG+REG*SCALE is always mode dependent.  */
	  return true;

	default:
	  /* Not recognized, so treat as mode dependent.  */
	  return true;
	}

    case CONST_INT:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
      /* These are all mode independent.  */
      return false;

    default:
      /* Everything else is unrecognized,
	 so treat as mode dependent.  */
      return true;
    }
}
380 
381 /* A C compound statement to output to stdio stream FILE the
382    assembler syntax for an instruction operand that is a memory
383    reference whose address is ADDR.  */
384 
static void
rx_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      /* Register indirect: [Rn].  */
      fprintf (file, "[");
      rx_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;

    case PRE_DEC:
      /* Pre-decrement: [-Rn].  */
      fprintf (file, "[-");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "]");
      break;

    case POST_INC:
      /* Post-increment: [Rn+].  */
      fprintf (file, "[");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "+]");
      break;

    case PLUS:
      {
	rtx arg1 = XEXP (addr, 0);
	rtx arg2 = XEXP (addr, 1);
	rtx base, index;

	/* Work out which operand is the base register.  */
	if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
	  base = arg1, index = arg2;
	else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
	  base = arg2, index = arg1;
	else
	  {
	    /* Neither side is a base register - emit a plain sum.  */
	    rx_print_operand (file, arg1, 0);
	    fprintf (file, " + ");
	    rx_print_operand (file, arg2, 0);
	    break;
	  }

	if (REG_P (index) || GET_CODE (index) == MULT)
	  {
	    /* Indexed addressing: [index,base].  */
	    fprintf (file, "[");
	    rx_print_operand (file, index, 'A');
	    fprintf (file, ",");
	  }
	else /* GET_CODE (index) == CONST_INT  */
	  {
	    /* Register relative: disp[base].  */
	    rx_print_operand (file, index, 'A');
	    fprintf (file, "[");
	  }
	rx_print_operand (file, base, 0);
	fprintf (file, "]");
	break;
      }

    case CONST:
      if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
	{
	  /* (const (unspec [const_int] UNSPEC_CONST)) - print the
	     wrapped constant with a leading '#'.  */
	  addr = XEXP (addr, 0);
	  gcc_assert (XINT (addr, 1) == UNSPEC_CONST);

          addr = XVECEXP (addr, 0, 0);
	  gcc_assert (CONST_INT_P (addr));
	  fprintf (file, "#");
	  output_addr_const (file, addr);
	  break;
	}
      fprintf (file, "#");
      output_addr_const (file, XEXP (addr, 0));
      break;

    case UNSPEC:
      addr = XVECEXP (addr, 0, 0);
      /* Fall through.  */
    case LABEL_REF:
    case SYMBOL_REF:
      fprintf (file, "#");
      /* Fall through.  */
    default:
      output_addr_const (file, addr);
      break;
    }
}
470 
471 static void
472 rx_print_integer (FILE * file, HOST_WIDE_INT val)
473 {
474   if (val < 64)
475     fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
476   else
477     fprintf (file,
478 	     TARGET_AS100_SYNTAX
479 	     ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
480 	     val);
481 }
482 
483 static bool
484 rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
485 {
486   const char *  op = integer_asm_op (size, is_aligned);
487 
488   if (! CONST_INT_P (x))
489     return default_assemble_integer (x, size, is_aligned);
490 
491   if (op == NULL)
492     return false;
493   fputs (op, asm_out_file);
494 
495   rx_print_integer (asm_out_file, INTVAL (x));
496   fputc ('\n', asm_out_file);
497   return true;
498 }
499 
500 
501 /* Handles the insertion of a single operand into the assembler output.
502    The %<letter> directives supported are:
503 
504      %A  Print an operand without a leading # character.
505      %B  Print an integer comparison name.
506      %C  Print a control register name.
507      %F  Print a condition code flag name.
508      %G  Register used for small-data-area addressing
509      %H  Print high part of a DImode register, integer or address.
510      %L  Print low part of a DImode register, integer or address.
511      %N  Print the negation of the immediate value.
512      %P  Register used for PID addressing
513      %Q  If the operand is a MEM, then correctly generate
514          register indirect or register relative addressing.
515      %R  Like %Q but for zero-extending loads.  */
516 
/* Output operand OP to FILE, formatted according to LETTER (one of the
   %-directives documented in the table above, or 0 for the default).  */

static void
rx_print_operand (FILE * file, rtx op, int letter)
{
  bool unsigned_load = false;
  bool print_hash = true;

  /* %A on an UNSPEC-based constant is handled by the default case
     below, but without the leading '#'.  */
  if (letter == 'A'
      && ((GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == UNSPEC)
	  || GET_CODE (op) == UNSPEC))
    {
      print_hash = false;
      letter = 0;
    }

  switch (letter)
    {
    case 'A':
      /* Print an operand without a leading #.  */
      if (MEM_P (op))
	op = XEXP (op, 0);

      switch (GET_CODE (op))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  output_addr_const (file, op);
	  break;
	case CONST_INT:
	  fprintf (file, "%ld", (long) INTVAL (op));
	  break;
	default:
	  rx_print_operand (file, op, 0);
	  break;
	}
      break;

    case 'B':
      /* Print the branch-condition name for comparison OP.  */
      {
	enum rtx_code code = GET_CODE (op);
	machine_mode mode = GET_MODE (XEXP (op, 0));
	const char *ret;

	if (mode == CC_Fmode)
	  {
	    /* C flag is undefined, and O flag carries unordered.  None of the
	       branch combinations that include O use it helpfully.  */
	    switch (code)
	      {
	      case ORDERED:
		ret = "no";
		break;
	      case UNORDERED:
		ret = "o";
		break;
	      case LT:
		ret = "n";
		break;
	      case GE:
		ret = "pz";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
	else
	  {
	    unsigned int flags = flags_from_mode (mode);

	    switch (code)
	      {
	      case LT:
		ret = (flags & CC_FLAG_O ? "lt" : "n");
		break;
	      case GE:
		ret = (flags & CC_FLAG_O ? "ge" : "pz");
		break;
	      case GT:
		ret = "gt";
		break;
	      case LE:
		ret = "le";
		break;
	      case GEU:
		ret = "geu";
		break;
	      case LTU:
		ret = "ltu";
		break;
	      case GTU:
		ret = "gtu";
		break;
	      case LEU:
		ret = "leu";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	    /* The mode must provide every flag the comparison needs.  */
	    gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
	  }
	fputs (ret, file);
	break;
      }

    case 'C':
      /* Print the control register named by the integer OP.  */
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case CTRLREG_PSW:   fprintf (file, "psw"); break;
	case CTRLREG_USP:   fprintf (file, "usp"); break;
	case CTRLREG_FPSW:  fprintf (file, "fpsw"); break;
	case CTRLREG_CPEN:  fprintf (file, "cpen"); break;
	case CTRLREG_BPSW:  fprintf (file, "bpsw"); break;
	case CTRLREG_BPC:   fprintf (file, "bpc"); break;
	case CTRLREG_ISP:   fprintf (file, "isp"); break;
	case CTRLREG_FINTV: fprintf (file, "fintv"); break;
	case CTRLREG_INTB:  fprintf (file, "intb"); break;
	default:
	  warning (0, "unrecognized control register number: %d - using 'psw'",
		   (int) INTVAL (op));
	  fprintf (file, "psw");
	  break;
	}
      break;

    case 'F':
      /* Print a condition code flag name; accepts either a flag index
	 or a (case-insensitive) flag letter.  */
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0: case 'c': case 'C': fprintf (file, "C"); break;
	case 1:	case 'z': case 'Z': fprintf (file, "Z"); break;
	case 2: case 's': case 'S': fprintf (file, "S"); break;
	case 3: case 'o': case 'O': fprintf (file, "O"); break;
	case 8: case 'i': case 'I': fprintf (file, "I"); break;
	case 9: case 'u': case 'U': fprintf (file, "U"); break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'G':
      fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
      break;

    case 'H':
      /* High part of a DImode/DFmode value.  */
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
	  break;
	case CONST_INT:
	  {
	    HOST_WIDE_INT v = INTVAL (op);

	    fprintf (file, "#");
	    /* Trickery to avoid problems with shifting 32 bits at a time.  */
	    v = v >> 16;
	    v = v >> 16;
	    rx_print_integer (file, v);
	    break;
	  }
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_HIGH (op));
	  break;
	case MEM:
	  if (! WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'L':
      /* Low part of a DImode/DFmode value.  */
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
	  break;
	case CONST_INT:
	  fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op) & 0xffffffff);
	  break;
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_LOW (op));
	  break;
	case MEM:
	  if (WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'N':
      gcc_assert (CONST_INT_P (op));
      fprintf (file, "#");
      rx_print_integer (file, - INTVAL (op));
      break;

    case 'P':
      fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
      break;

    case 'R':
      gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
      unsigned_load = true;
      /* Fall through.  */
    case 'Q':
      /* Memory operand printed as register indirect or register
	 relative, with a size suffix taken from the MEM's mode.  */
      if (MEM_P (op))
	{
	  HOST_WIDE_INT offset;
	  rtx mem = op;

	  op = XEXP (op, 0);

	  if (REG_P (op))
	    offset = 0;
	  else if (GET_CODE (op) == PLUS)
	    {
	      rtx displacement;

	      if (REG_P (XEXP (op, 0)))
		{
		  displacement = XEXP (op, 1);
		  op = XEXP (op, 0);
		}
	      else
		{
		  displacement = XEXP (op, 0);
		  op = XEXP (op, 1);
		  gcc_assert (REG_P (op));
		}

	      gcc_assert (CONST_INT_P (displacement));
	      offset = INTVAL (displacement);
	      gcc_assert (offset >= 0);

	      fprintf (file, "%ld", offset);
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "[");
	  rx_print_operand (file, op, 0);
	  fprintf (file, "].");

	  switch (GET_MODE_SIZE (GET_MODE (mem)))
	    {
	    case 1:
	      gcc_assert (offset <= 65535 * 1);
	      fprintf (file, unsigned_load ? "UB" : "B");
	      break;
	    case 2:
	      gcc_assert (offset % 2 == 0);
	      gcc_assert (offset <= 65535 * 2);
	      fprintf (file, unsigned_load ? "UW" : "W");
	      break;
	    case 4:
	      gcc_assert (offset % 4 == 0);
	      gcc_assert (offset <= 65535 * 4);
	      fprintf (file, "L");
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  break;
	}

      /* Fall through.  */

    default:
      /* Default formatting; also reached from %Q/%R for non-MEM
	 operands.  */
      if (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC)
	op = XEXP (op, 0);
      else if (GET_CODE (op) == CONST
	       && GET_CODE (XEXP (op, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
	       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
	{
	  /* (const (plus (unspec ...) (const_int))) - print as
	     "#(unspec + offset)".  */
	  if (print_hash)
	    fprintf (file, "#");
	  fprintf (file, "(");
	  rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
	  fprintf (file, " + ");
	  output_addr_const (file, XEXP (XEXP (op, 0), 1));
	  fprintf (file, ")");
	  return;
	}

      switch (GET_CODE (op))
	{
	case MULT:
	  /* Should be the scaled part of an
	     indexed register indirect address.  */
	  {
	    rtx base = XEXP (op, 0);
	    rtx index = XEXP (op, 1);

	    /* Check for a swaped index register and scaling factor.
	       Not sure if this can happen, but be prepared to handle it.  */
	    if (CONST_INT_P (base) && REG_P (index))
	      {
		rtx tmp = base;
		base = index;
		index = tmp;
	      }

	    gcc_assert (REG_P (base));
	    gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
	    gcc_assert (CONST_INT_P (index));
	    /* Do not try to verify the value of the scalar as it is based
	       on the mode of the MEM not the mode of the MULT.  (Which
	       will always be SImode).  */
	    fprintf (file, "%s", reg_names [REGNO (base)]);
	    break;
	  }

	case MEM:
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;

	case PLUS:
	  output_address (VOIDmode, op);
	  break;

	case REG:
	  gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [REGNO (op)]);
	  break;

	case SUBREG:
	  gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [subreg_regno (op)]);
	  break;

	  /* This will only be single precision....  */
	case CONST_DOUBLE:
	  {
	    unsigned long val;

	    REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (op), val);
	    if (print_hash)
	      fprintf (file, "#");
	    fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
	    break;
	  }

	case CONST_INT:
	  if (print_hash)
	    fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op));
	  break;

	case UNSPEC:
	  switch (XINT (op, 1))
	    {
	    case UNSPEC_PID_ADDR:
	      {
		/* A PID address: "#(sym[+addend]-__pid_base)".  */
		rtx sym, add;

		if (print_hash)
		  fprintf (file, "#");
		sym = XVECEXP (op, 0, 0);
		add = NULL_RTX;
		fprintf (file, "(");
		if (GET_CODE (sym) == PLUS)
		  {
		    add = XEXP (sym, 1);
		    sym = XEXP (sym, 0);
		  }
		output_addr_const (file, sym);
		if (add != NULL_RTX)
		  {
		    fprintf (file, "+");
		    output_addr_const (file, add);
		  }
		fprintf (file, "-__pid_base");
		fprintf (file, ")");
		return;
	      }
	    }
	  /* Fall through */

	case CONST:
	case SYMBOL_REF:
	case LABEL_REF:
	case CODE_LABEL:
	  rx_print_operand_address (file, VOIDmode, op);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    }
}
931 
932 /* Maybe convert an operand into its PID format.  */
933 
934 rtx
935 rx_maybe_pidify_operand (rtx op, int copy_to_reg)
936 {
937   if (rx_pid_data_operand (op) == PID_UNENCODED)
938     {
939       if (GET_CODE (op) == MEM)
940 	{
941 	  rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
942 	  op = replace_equiv_address (op, a);
943 	}
944       else
945 	{
946 	  op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
947 	}
948 
949       if (copy_to_reg)
950 	op = copy_to_mode_reg (GET_MODE (op), op);
951     }
952   return op;
953 }
954 
955 /* Returns an assembler template for a move instruction.  */
956 
char *
rx_gen_move_template (rtx * operands, bool is_movu)
{
  /* NB: Returns a pointer to a static buffer which is overwritten on
     every call.  */
  static char  out_template [64];
  const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
  const char * src_template;
  const char * dst_template;
  rtx          dest = operands[0];
  rtx          src  = operands[1];

  /* Decide which extension, if any, should be given to the move instruction.  */
  switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
    {
    case QImode:
      /* The .B extension is not valid when
	 loading an immediate into a register.  */
      if (! REG_P (dest) || ! CONST_INT_P (src))
	extension = ".B";
      break;
    case HImode:
      if (! REG_P (dest) || ! CONST_INT_P (src))
	/* The .W extension is not valid when
	   loading an immediate into a register.  */
	extension = ".W";
      break;
    case DFmode:
    case DImode:
    case SFmode:
    case SImode:
      extension = ".L";
      break;
    case VOIDmode:
      /* This mode is used by constants.  */
      break;
    default:
      debug_rtx (src);
      gcc_unreachable ();
    }

  if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
    {
      /* Loads from not-yet-encoded PID objects are expressed as an
	 offset from __pid_base in the PID base register (%P1).  */
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "(%A1 - __pid_base)[%P1]";
    }
  else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
    {
      /* Small-data loads are relative to the GP base register (%G1).  */
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "%%gp(%A1)[%G1]";
    }
  else
    src_template = "%1";

  if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
    {
      gcc_assert (GET_MODE (dest) != DImode);
      gcc_assert (GET_MODE (dest) != DFmode);

      dst_template = "%%gp(%A0)[%G0]";
    }
  else
    dst_template = "%0";

  if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
    {
      /* 64-bit moves are split into two 32-bit moves; when the register
	 pairs overlap (REGNO (dest) == REGNO (src) + 1) the high part
	 is moved first so the low source is not clobbered early.  */
      gcc_assert (! is_movu);

      if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
	sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
      else
	sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
    }
  else
    sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
	     extension, src_template, dst_template);
  return out_template;
}
1037 
1038 /* Return VALUE rounded up to the next ALIGNMENT boundary.  */
1039 
static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  /* ALIGNMENT must be a power of two.  Unsigned arithmetic makes the
     wrap-around for values near UINT_MAX well defined.  */
  unsigned int mask = alignment - 1;

  return (value + mask) & ~mask;
}
1046 
1047 /* Return the number of bytes in the argument registers
1048    occupied by an argument of type TYPE and mode MODE.  */
1049 
1050 static unsigned int
1051 rx_function_arg_size (machine_mode mode, const_tree type)
1052 {
1053   unsigned int num_bytes;
1054 
1055   num_bytes = (mode == BLKmode)
1056     ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1057   return rx_round_up (num_bytes, UNITS_PER_WORD);
1058 }
1059 
1060 #define NUM_ARG_REGS		4
1061 #define MAX_NUM_ARG_BYTES	(NUM_ARG_REGS * UNITS_PER_WORD)
1062 
1063 /* Return an RTL expression describing the register holding a function
1064    parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
1065    be passed on the stack.  CUM describes the previous parameters to the
1066    function and NAMED is false if the parameter is part of a variable
1067    parameter list, or the last named parameter before the start of a
1068    variable parameter list.  */
1069 
static rtx
rx_function_arg (cumulative_args_t cum, machine_mode mode,
		 const_tree type, bool named)
{
  unsigned int next_reg;
  unsigned int bytes_so_far = *get_cumulative_args (cum);
  unsigned int size;
  unsigned int rounded_size;

  /* An exploded version of rx_function_arg_size.  */
  size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  /* If the size is not known it cannot be passed in registers.
     NOTE(review): SIZE is unsigned, so "< 1" only catches zero;
     int_size_in_bytes's -1 (unknown size) wraps to UINT_MAX and is not
     caught here - presumably variable-sized arguments are passed by
     reference before reaching this point.  TODO confirm.  */
  if (size < 1)
    return NULL_RTX;

  rounded_size = rx_round_up (size, UNITS_PER_WORD);

  /* Don't pass this arg via registers if there
     are insufficient registers to hold all of it.  */
  if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
    return NULL_RTX;

  /* Unnamed arguments and the last named argument in a
     variadic function are always passed on the stack.  */
  if (!named)
    return NULL_RTX;

  /* Structures must occupy an exact number of registers,
     otherwise they are passed on the stack.  */
  if ((type == NULL || AGGREGATE_TYPE_P (type))
      && (size % UNITS_PER_WORD) != 0)
    return NULL_RTX;

  /* Argument registers start at hard register 1, hence the + 1.  */
  next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;

  return gen_rtx_REG (mode, next_reg);
}
1107 
/* Advance CUM past an argument of MODE and TYPE: CUM counts the bytes
   of argument registers consumed so far, rounded up to whole words
   (see rx_function_arg_size).  */

static void
rx_function_arg_advance (cumulative_args_t cum, machine_mode mode,
			 const_tree type, bool named ATTRIBUTE_UNUSED)
{
  *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
}
1114 
1115 static unsigned int
1116 rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
1117 			  const_tree type ATTRIBUTE_UNUSED)
1118 {
1119   /* Older versions of the RX backend aligned all on-stack arguments
1120      to 32-bits.  The RX C ABI however says that they should be
1121      aligned to their natural alignment.  (See section 5.2.2 of the ABI).  */
1122   if (TARGET_GCC_ABI)
1123     return STACK_BOUNDARY;
1124 
1125   if (type)
1126     {
1127       if (DECL_P (type))
1128 	return DECL_ALIGN (type);
1129       return TYPE_ALIGN (type);
1130     }
1131 
1132   return PARM_BOUNDARY;
1133 }
1134 
1135 /* Return an RTL describing where a function return value of type RET_TYPE
1136    is held.  */
1137 
1138 static rtx
1139 rx_function_value (const_tree ret_type,
1140 		   const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1141 		   bool       outgoing ATTRIBUTE_UNUSED)
1142 {
1143   machine_mode mode = TYPE_MODE (ret_type);
1144 
1145   /* RX ABI specifies that small integer types are
1146      promoted to int when returned by a function.  */
1147   if (GET_MODE_SIZE (mode) > 0
1148       && GET_MODE_SIZE (mode) < 4
1149       && ! COMPLEX_MODE_P (mode)
1150       && ! VECTOR_TYPE_P (ret_type)
1151       && ! VECTOR_MODE_P (mode)
1152       )
1153     return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
1154 
1155   return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
1156 }
1157 
1158 /* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
1159    regard to function returns as does TARGET_FUNCTION_VALUE.  */
1160 
1161 static machine_mode
1162 rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1163 			  machine_mode mode,
1164 			  int * punsignedp ATTRIBUTE_UNUSED,
1165 			  const_tree funtype ATTRIBUTE_UNUSED,
1166 			  int for_return)
1167 {
1168   if (for_return != 1
1169       || GET_MODE_SIZE (mode) >= 4
1170       || COMPLEX_MODE_P (mode)
1171       || VECTOR_MODE_P (mode)
1172       || VECTOR_TYPE_P (type)
1173       || GET_MODE_SIZE (mode) < 1)
1174     return mode;
1175 
1176   return SImode;
1177 }
1178 
1179 static bool
1180 rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1181 {
1182   HOST_WIDE_INT size;
1183 
1184   if (TYPE_MODE (type) != BLKmode
1185       && ! AGGREGATE_TYPE_P (type))
1186     return false;
1187 
1188   size = int_size_in_bytes (type);
1189   /* Large structs and those whose size is not an
1190      exact multiple of 4 are returned in memory.  */
1191   return size < 1
1192     || size > 16
1193     || (size % UNITS_PER_WORD) != 0;
1194 }
1195 
/* Return the register (STRUCT_VAL_REGNUM) used to pass the address of
   an aggregate return value.  The same register is used for incoming
   and outgoing calls.  */
static rtx
rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
		     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
}
1202 
1203 static bool
1204 rx_return_in_msb (const_tree valtype)
1205 {
1206   return TARGET_BIG_ENDIAN_DATA
1207     && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
1208 }
1209 
1210 /* Returns true if the provided function has the specified attribute.  */
1211 
1212 static inline bool
1213 has_func_attr (const_tree decl, const char * func_attr)
1214 {
1215   if (decl == NULL_TREE)
1216     decl = current_function_decl;
1217 
1218   return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
1219 }
1220 
1221 /* Returns true if the provided function has the "fast_interrupt" attribute.  */
1222 
bool
is_fast_interrupt_func (const_tree decl)
{
  /* A NULL DECL queries the function currently being compiled.  */
  return has_func_attr (decl, "fast_interrupt");
}
1228 
1229 /* Returns true if the provided function has the "interrupt" attribute.  */
1230 
bool
is_interrupt_func (const_tree decl)
{
  /* A NULL DECL queries the function currently being compiled.  */
  return has_func_attr (decl, "interrupt");
}
1236 
1237 /* Returns true if the provided function has the "naked" attribute.  */
1238 
static inline bool
is_naked_func (const_tree decl)
{
  /* A NULL DECL queries the function currently being compiled.  */
  return has_func_attr (decl, "naked");
}
1244 
/* True when the fast-interrupt register masks should be in effect.
   Set by rx_set_current_function and consumed below.  */
static bool use_fixed_regs = false;

/* Adjust the global fixed_regs[] / call_used_regs[] arrays to reflect
   the -mpid and -msmall-data-limit options, and to swap the register
   masks in or out when entering or leaving a fast interrupt handler.  */
static void
rx_conditional_register_usage (void)
{
  /* Which set of masks is currently installed; the swap below is lazy.  */
  static bool using_fixed_regs = false;

  if (TARGET_PID)
    {
      /* Reserve a register to hold the PID base address.  */
      rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
      fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
    }

  if (rx_small_data_limit > 0)
    {
      /* Reserve a register for the small-data base address; when PID is
	 also enabled it sits just below the PID base register.  */
      if (TARGET_PID)
	rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
      else
	rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;

      fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
    }

  if (use_fixed_regs != using_fixed_regs)
    {
      static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
      static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];

      if (use_fixed_regs)
	{
	  unsigned int r;

	  /* Save the normal masks so that they can be restored when we
	     stop compiling the fast interrupt handler.  */
	  memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
	  memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);

	  /* This is for fast interrupt handlers.  Any register in
	     the range r10 to r13 (inclusive) that is currently
	     marked as fixed is now a viable, call-used register.  */
	  for (r = 10; r <= 13; r++)
	    if (fixed_regs[r])
	      {
		fixed_regs[r] = 0;
		call_used_regs[r] = 1;
	      }

	  /* Mark r7 as fixed.  This is just a hack to avoid
	     altering the reg_alloc_order array so that the newly
	     freed r10-r13 registers are the preferred registers.  */
	  fixed_regs[7] = call_used_regs[7] = 1;
	}
      else
	{
	  /* Restore the normal register masks.  */
	  memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
	  memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
	}

      using_fixed_regs = use_fixed_regs;
    }
}
1305 
/* A node in a singly-linked list of function decls that have already
   triggered a diagnostic.  */
struct decl_chain
{
  tree fndecl;			/* The function declaration.  */
  struct decl_chain * next;	/* Previously pushed entry, or NULL.  */
};

/* Stack of decls for which we have issued warnings.  */
static struct decl_chain * warned_decls = NULL;
1314 
1315 static void
1316 add_warned_decl (tree fndecl)
1317 {
1318   struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);
1319 
1320   warned->fndecl = fndecl;
1321   warned->next = warned_decls;
1322   warned_decls = warned;
1323 }
1324 
1325 /* Returns TRUE if FNDECL is on our list of warned about decls.  */
1326 
1327 static bool
1328 already_warned (tree fndecl)
1329 {
1330   struct decl_chain * warned;
1331 
1332   for (warned = warned_decls;
1333        warned != NULL;
1334        warned = warned->next)
1335     if (warned->fndecl == fndecl)
1336       return true;
1337 
1338   return false;
1339 }
1340 
1341 /* Perform any actions necessary before starting to compile FNDECL.
1342    For the RX we use this to make sure that we have the correct
1343    set of register masks selected.  If FNDECL is NULL then we are
1344    compiling top level things.  */
1345 
static void
rx_set_current_function (tree fndecl)
{
  /* Remember the last target of rx_set_current_function.  */
  static tree rx_previous_fndecl;
  bool prev_was_fast_interrupt;
  bool current_is_fast_interrupt;

  /* Only change the context if the function changes.  This hook is called
     several times in the course of compiling a function, and we don't want
     to slow things down too much or call target_reinit when it isn't safe.  */
  if (fndecl == rx_previous_fndecl)
    return;

  /* A NULL decl (top level code) never counts as a fast interrupt.  */
  prev_was_fast_interrupt
    = rx_previous_fndecl
    ? is_fast_interrupt_func (rx_previous_fndecl) : false;

  current_is_fast_interrupt
    = fndecl ? is_fast_interrupt_func (fndecl) : false;

  /* Entering or leaving a fast interrupt handler changes the register
     masks (see rx_conditional_register_usage), so the backend state
     must be rebuilt via target_reinit.  */
  if (prev_was_fast_interrupt != current_is_fast_interrupt)
    {
      use_fixed_regs = current_is_fast_interrupt;
      target_reinit ();
    }

  if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
    {
      /* We do not warn about the first fast interrupt routine that
	 we see.  Instead we just push it onto the stack.  */
      if (warned_decls == NULL)
	add_warned_decl (fndecl);

      /* Otherwise if this fast interrupt is one for which we have
	 not already issued a warning, generate one and then push
	 it onto the stack as well.  */
      else if (! already_warned (fndecl))
	{
	  warning (0, "multiple fast interrupt routines seen: %qE and %qE",
		   fndecl, warned_decls->fndecl);
	  add_warned_decl (fndecl);
	}
    }

  rx_previous_fndecl = fndecl;
}
1393 
/* Typical stack layout should look like this after the function's prologue:
1395 
1396                             |    |
1397                               --                       ^
1398                             |    | \                   |
1399                             |    |   arguments saved   | Increasing
1400                             |    |   on the stack      |  addresses
1401     PARENT   arg pointer -> |    | /
1402   -------------------------- ---- -------------------
1403     CHILD                   |ret |   return address
1404                               --
1405                             |    | \
1406                             |    |   call saved
1407                             |    |   registers
1408 			    |    | /
1409                               --
1410                             |    | \
1411                             |    |   local
1412                             |    |   variables
1413         frame pointer ->    |    | /
1414                               --
1415                             |    | \
1416                             |    |   outgoing          | Decreasing
1417                             |    |   arguments         |  addresses
1418    current stack pointer -> |    | /                   |
1419   -------------------------- ---- ------------------   V
1420                             |    |                 */
1421 
/* Return the number of set bits in X (population count).  */
static unsigned int
bit_count (unsigned int x)
{
  unsigned int count;

  /* Kernighan's method: each iteration clears the lowest set bit.  */
  for (count = 0; x != 0; x &= x - 1)
    count++;

  return count;
}
1436 
1437 #define MUST_SAVE_ACC_REGISTER			\
1438   (TARGET_SAVE_ACC_REGISTER			\
1439    && (is_interrupt_func (NULL_TREE)		\
1440        || is_fast_interrupt_func (NULL_TREE)))
1441 
1442 /* Returns either the lowest numbered and highest numbered registers that
1443    occupy the call-saved area of the stack frame, if the registers are
1444    stored as a contiguous block, or else a bitmask of the individual
1445    registers if they are stored piecemeal.
1446 
1447    Also computes the size of the frame and the size of the outgoing
1448    arguments block (in bytes).  */
1449 
static void
rx_get_stack_layout (unsigned int * lowest,
		     unsigned int * highest,
		     unsigned int * register_mask,
		     unsigned int * frame_size,
		     unsigned int * stack_size)
{
  unsigned int reg;
  unsigned int low;
  unsigned int high;
  unsigned int fixed_reg = 0;
  unsigned int save_mask;
  unsigned int pushed_mask;
  unsigned int unneeded_pushes;

  if (is_naked_func (NULL_TREE))
    {
      /* Naked functions do not create their own stack frame.
	 Instead the programmer must do that for us.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = 0;
      * frame_size = 0;
      * stack_size = 0;
      return;
    }

  /* Scan registers r1 .. CC_REGNUM-1, collecting in SAVE_MASK the set
     of registers that must be preserved, and in LOW/HIGH the extent of
     that set.  Register 0 is never considered.  */
  for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
    {
      if ((df_regs_ever_live_p (reg)
	   /* Always save all call clobbered registers inside non-leaf
	      interrupt handlers, even if they are not live - they may
	      be used in (non-interrupt aware) routines called from this one.  */
	   || (call_used_regs[reg]
	       && is_interrupt_func (NULL_TREE)
	       && ! crtl->is_leaf))
	  && (! call_used_regs[reg]
	      /* Even call clobbered registers must
		 be pushed inside interrupt handlers.  */
	      || is_interrupt_func (NULL_TREE)
	      /* Likewise for fast interrupt handlers, except registers r10 -
		 r13.  These are normally call-saved, but may have been set
		 to call-used by rx_conditional_register_usage.  If so then
		 they can be used in the fast interrupt handler without
		 saving them on the stack.  */
	      || (is_fast_interrupt_func (NULL_TREE)
		  && ! IN_RANGE (reg, 10, 13))))
	{
	  if (low == 0)
	    low = reg;
	  high = reg;

	  save_mask |= 1 << reg;
	}

      /* Remember if we see a fixed register
	 after having found the low register.  */
      if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
	fixed_reg = reg;
    }

  /* If we have to save the accumulator register, make sure
     that at least two registers are pushed into the frame.
     (The prologue moves the accumulator through a pair of
     pushed registers - see rx_expand_prologue.)  */
  if (MUST_SAVE_ACC_REGISTER
      && bit_count (save_mask) < 2)
    {
      save_mask |= (1 << 13) | (1 << 14);
      if (low == 0)
	low = 13;
      if (high == 0 || low == high)
	high = low + 1;
    }

  /* Decide if it would be faster to fill in the call-saved area of the
     stack frame using multiple PUSH instructions instead of a single
     PUSHM instruction.

     SAVE_MASK is a bitmask of the registers that must be stored in the
     call-save area.  PUSHED_MASK is a bitmask of the registers that would
     be pushed into the area if we used a PUSHM instruction.  UNNEEDED_PUSHES
     is a bitmask of those registers in pushed_mask that are not in
     save_mask.

     We use a simple heuristic that says that it is better to use
     multiple PUSH instructions if the number of unnecessary pushes is
     greater than the number of necessary pushes.

     We also use multiple PUSH instructions if there are any fixed registers
     between LOW and HIGH.  The only way that this can happen is if the user
     has specified --fixed-<reg-name> on the command line and in such
     circumstances we do not want to touch the fixed registers at all.

     Note also that the code in the prologue/epilogue handlers will
     automatically merge multiple PUSHes of adjacent registers into a single
     PUSHM.

     FIXME: Is it worth improving this heuristic ?  */
  pushed_mask = (HOST_WIDE_INT_M1U << low) & ~(HOST_WIDE_INT_M1U << (high + 1));
  /* NOTE(review): the trailing "& pushed_mask" is redundant - the left
     operand is already a subset of PUSHED_MASK.  */
  unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;

  if ((fixed_reg && fixed_reg <= high)
      || (optimize_function_for_speed_p (cfun)
	  && bit_count (save_mask) < bit_count (unneeded_pushes)))
    {
      /* Use multiple pushes.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = save_mask;
    }
  else
    {
      /* Use one push multiple instruction.  */
      * lowest = low;
      * highest = high;
      * register_mask = 0;
    }

  /* The frame covers the local variables plus any pretend (incoming)
     argument space, each rounded up to the stack alignment.  */
  * frame_size = rx_round_up
    (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);

  if (crtl->args.size > 0)
    * frame_size += rx_round_up
      (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);

  * stack_size = rx_round_up
    (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
}
1577 
1578 /* Generate a PUSHM instruction that matches the given operands.  */
1579 
void
rx_emit_stack_pushm (rtx * operands)
{
  HOST_WIDE_INT last_reg;
  rtx first_push;

  /* operands[0] is the number of bytes pushed - one word per register -
     so LAST_REG is the count of registers beyond the first one.  */
  gcc_assert (CONST_INT_P (operands[0]));
  last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;

  /* operands[1] is a PARALLEL (see gen_rx_store_vector): element 0 is
     the stack pointer update, element 1 stores the highest register.  */
  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_SRC (first_push);
  gcc_assert (REG_P (first_push));

  /* PUSHM names the range lowest-highest; the lowest register number is
     the highest one minus the number of extra registers pushed.  */
  asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
	       reg_names [REGNO (first_push) - last_reg],
	       reg_names [REGNO (first_push)]);
}
1599 
1600 /* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate.  */
1601 
static rtx
gen_rx_store_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  /* One SET per register plus one for the stack pointer update.  */
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  /* Element 0: pre-decrement the stack pointer by the block size.  */
  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (stack_pointer_rtx,
		 gen_rtx_MINUS (SImode, stack_pointer_rtx,
				GEN_INT ((count - 1) * UNITS_PER_WORD)));

  /* Elements 1 .. count-1: store registers HIGH down to LOW at
     successively lower addresses below the incoming stack pointer.  */
  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (gen_rtx_MEM (SImode,
				gen_rtx_MINUS (SImode, stack_pointer_rtx,
					       GEN_INT ((i + 1) * UNITS_PER_WORD))),
		   gen_rtx_REG (SImode, high - i));
  return vector;
}
1624 
1625 /* Mark INSN as being frame related.  If it is a PARALLEL
1626    then mark each element as being frame related as well.  */
1627 
1628 static void
1629 mark_frame_related (rtx insn)
1630 {
1631   RTX_FRAME_RELATED_P (insn) = 1;
1632   insn = PATTERN (insn);
1633 
1634   if (GET_CODE (insn) == PARALLEL)
1635     {
1636       unsigned int i;
1637 
1638       for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
1639 	RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1640     }
1641 }
1642 
1643 static bool
1644 ok_for_max_constant (HOST_WIDE_INT val)
1645 {
1646   if (rx_max_constant_size == 0  || rx_max_constant_size == 4)
1647     /* If there is no constraint on the size of constants
1648        used as operands, then any value is legitimate.  */
1649     return true;
1650 
1651   /* rx_max_constant_size specifies the maximum number
1652      of bytes that can be used to hold a signed value.  */
1653   return IN_RANGE (val, (HOST_WIDE_INT_M1U << (rx_max_constant_size * 8)),
1654 		        ( 1 << (rx_max_constant_size * 8)));
1655 }
1656 
1657 /* Generate an ADD of SRC plus VAL into DEST.
1658    Handles the case where VAL is too big for max_constant_value.
1659    Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true.  */
1660 
static void
gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
{
  rtx insn;

  if (val == NULL_RTX || INTVAL (val) == 0)
    {
      /* Adding zero degenerates into a move; an in-place add of zero
	 would be a no-op, so DEST and SRC must differ here.  */
      gcc_assert (dest != src);

      insn = emit_move_insn (dest, src);
    }
  else if (ok_for_max_constant (INTVAL (val)))
    insn = emit_insn (gen_addsi3 (dest, src, val));
  else
    {
      /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
	 will not reject it.  */
      val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
      insn = emit_insn (gen_addsi3 (dest, src, val));

      if (is_frame_related)
	/* We have to provide our own frame related note here
	   as the dwarf2out code cannot be expected to grok
	   our unspec.  */
	add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		      gen_rtx_SET (dest, gen_rtx_PLUS (SImode, src, val)));
      return;
    }

  if (is_frame_related)
    RTX_FRAME_RELATED_P (insn) = 1;
}
1693 
1694 static void
1695 push_regs (unsigned int high, unsigned int low)
1696 {
1697   rtx insn;
1698 
1699   if (low == high)
1700     insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1701   else
1702     insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
1703 				       gen_rx_store_vector (low, high)));
1704   mark_frame_related (insn);
1705 }
1706 
/* Expand the function prologue: push the call-saved registers, save the
   accumulator if required, set up the frame pointer and allocate the
   frame and outgoing argument areas.  */
void
rx_expand_prologue (void)
{
  unsigned int stack_size;
  unsigned int frame_size;
  unsigned int mask;
  unsigned int low;
  unsigned int high;
  unsigned int reg;

  /* Naked functions use their own, programmer provided prologues.  */
  if (is_naked_func (NULL_TREE))
    return;

  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);

  if (flag_stack_usage_info)
    current_function_static_stack_size = frame_size + stack_size;

  /* If we use any of the callee-saved registers, save them now.  */
  if (mask)
    {
      /* Push registers in reverse order.  */
      for (reg = CC_REGNUM; reg --;)
	if (mask & (1 << reg))
	  {
	    low = high = reg;

	    /* Look for a span of registers.
	       Note - we do not have to worry about -Os and whether
	       it is better to use a single, longer PUSHM as
	       rx_get_stack_layout has already done that for us.  */
	    while (reg-- > 0)
	      if ((mask & (1 << reg)) == 0)
		break;
	      else
		--low;

	    push_regs (high, low);
	    /* REG wraps to (unsigned) -1 when the scan ran off the
	       bottom of the register file.  */
	    if (reg == (unsigned) -1)
	      break;
	  }
    }
  else if (low)
    push_regs (high, low);

  if (MUST_SAVE_ACC_REGISTER)
    {
      unsigned int acc_high, acc_low;

      /* Interrupt handlers have to preserve the accumulator
	 register if so requested by the user.  Use the first
         two pushed registers as intermediaries.  */
      if (mask)
	{
	  /* Find the two lowest numbered pushed registers.  */
	  acc_low = acc_high = 0;

	  for (reg = 1; reg < CC_REGNUM; reg ++)
	    if (mask & (1 << reg))
	      {
		if (acc_low == 0)
		  acc_low = reg;
		else
		  {
		    acc_high = reg;
		    break;
		  }
	      }

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high != 0);

	  /* Note - the bottom 16 bits of the accumulator are inaccessible.
	     We just assume that they are zero.  */
	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
	}
      else
	{
	  acc_low = low;
	  acc_high = low + 1;

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high <= high);

	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
				      gen_rx_store_vector (acc_low, acc_high)));
	}
    }

  /* If needed, set up the frame pointer.  */
  if (frame_pointer_needed)
    gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
		  GEN_INT (- (HOST_WIDE_INT) frame_size), true);

  /* Allocate space for the outgoing args.
     If the stack frame has not already been set up then handle this as well.  */
  if (stack_size)
    {
      if (frame_size)
	{
	  if (frame_pointer_needed)
	    gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) stack_size), true);
	  else
	    gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
			  true);
	}
      else
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) stack_size), true);
    }
  else if (frame_size)
    {
      if (! frame_pointer_needed)
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) frame_size), true);
      else
	gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
		      false /* False because the epilogue will use the FP not the SP.  */);
    }
}
1834 
/* Scan the attribute named ANAME ("interrupt" or "vector") on the
   current function and emit a global vector-table entry label into
   FILE for each argument.  String or identifier arguments select the
   vector table name ("$default" emits the default-entry label for the
   currently selected table); integer arguments emit a numbered entry
   label in the currently selected table.  */
static void
add_vector_labels (FILE *file, const char *aname)
{
  tree vec_attr;
  tree val_attr;
  const char *vname = "vect";	/* Currently selected vector table name.  */
  const char *s;
  int vnum;

  /* This node is for the vector/interrupt tag itself */
  vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
  if (!vec_attr)
    return;

  /* Now point it at the first argument */
  vec_attr = TREE_VALUE (vec_attr);

  /* Iterate through the arguments.  */
  while (vec_attr)
    {
      val_attr = TREE_VALUE (vec_attr);
      switch (TREE_CODE (val_attr))
	{
	case STRING_CST:
	  s = TREE_STRING_POINTER (val_attr);
	  goto string_id_common;

	case IDENTIFIER_NODE:
	  s = IDENTIFIER_POINTER (val_attr);

	  /* Shared handling for string and identifier arguments.  */
	string_id_common:
	  if (strcmp (s, "$default") == 0)
	    {
	      fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
	      fprintf (file, "$tableentry$default$%s:\n", vname);
	    }
	  else
	    vname = s;
	  break;

	case INTEGER_CST:
	  vnum = TREE_INT_CST_LOW (val_attr);

	  fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
	  fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
	  break;

	default:
	  /* Any other argument kind is silently ignored.  */
	  ;
	}

      vec_attr = TREE_CHAIN (vec_attr);
    }

}
1890 
/* Emit vector table labels and assorted human-readable notes about the
   current function at the start of its assembly output.  */
static void
rx_output_function_prologue (FILE * file,
			     HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
{
  /* Both attributes can request vector table entries.  */
  add_vector_labels (file, "interrupt");
  add_vector_labels (file, "vector");

  if (is_fast_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");

  if (is_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Interrupt Handler\n");

  if (is_naked_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Naked Function\n");

  if (cfun->static_chain_decl != NULL)
    asm_fprintf (file, "\t; Note: Nested function declared "
		 "inside another function.\n");

  if (crtl->calls_eh_return)
    asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
}
1914 
1915 /* Generate a POPM or RTSD instruction that matches the given operands.  */
1916 
void
rx_emit_stack_popm (rtx * operands, bool is_popm)
{
  HOST_WIDE_INT stack_adjust;
  HOST_WIDE_INT last_reg;
  rtx first_push;

  /* operands[0] is the stack adjustment; it is only printed for RTSD.  */
  gcc_assert (CONST_INT_P (operands[0]));
  stack_adjust = INTVAL (operands[0]);

  /* operands[1] is the PARALLEL built by gen_rx_popm_vector (one extra
     element for the SP update) or gen_rx_rtsd_vector (two extras: the
     SP update and the return), so LAST_REG works out to HIGH - LOW.  */
  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);

  /* Element 1 loads the lowest numbered register of the range.  */
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_DEST (first_push);
  gcc_assert (REG_P (first_push));

  if (is_popm)
    asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
		 reg_names [REGNO (first_push)],
		 reg_names [REGNO (first_push) + last_reg]);
  else
    asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
		 (int) stack_adjust,
		 reg_names [REGNO (first_push)],
		 reg_names [REGNO (first_push) + last_reg]);
}
1945 
1946 /* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate.  */
1947 
static rtx
gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
{
  unsigned int i;
  /* BIAS covers the SP update, one past-the-end register load slot and
     the trailing return rtx.  */
  unsigned int bias = 3;
  unsigned int count = (high - low) + bias;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  /* Element 0: pop the whole frame by adding ADJUST to the stack pointer.  */
  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (stack_pointer_rtx,
		 plus_constant (Pmode, stack_pointer_rtx, adjust));

  /* Elements 1 .. count-2: reload registers LOW .. HIGH from ascending
     stack slots.  */
  for (i = 0; i < count - 2; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (gen_rtx_REG (SImode, low + i),
		   gen_rtx_MEM (SImode,
				i == 0 ? stack_pointer_rtx
				: plus_constant (Pmode, stack_pointer_rtx,
						 i * UNITS_PER_WORD)));

  /* Final element: the return itself.  */
  XVECEXP (vector, 0, count - 1) = ret_rtx;

  return vector;
}
1974 
1975 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate.  */
1976 
static rtx
gen_rx_popm_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  /* One load per register plus one for the stack pointer update.  */
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  /* Element 0: advance the stack pointer over the popped registers.  */
  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (stack_pointer_rtx,
		 plus_constant (Pmode, stack_pointer_rtx,
				(count - 1) * UNITS_PER_WORD));

  /* Elements 1 .. count-1: reload registers LOW .. HIGH from ascending
     stack slots.  */
  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (gen_rtx_REG (SImode, low + i),
		   gen_rtx_MEM (SImode,
				i == 0 ? stack_pointer_rtx
				: plus_constant (Pmode, stack_pointer_rtx,
						 i * UNITS_PER_WORD)));

  return vector;
}
2001 
2002 /* Returns true if a simple return insn can be used.  */
2003 
2004 bool
2005 rx_can_use_simple_return (void)
2006 {
2007   unsigned int low;
2008   unsigned int high;
2009   unsigned int frame_size;
2010   unsigned int stack_size;
2011   unsigned int register_mask;
2012 
2013   if (is_naked_func (NULL_TREE)
2014       || is_fast_interrupt_func (NULL_TREE)
2015       || is_interrupt_func (NULL_TREE))
2016     return false;
2017 
2018   rx_get_stack_layout (& low, & high, & register_mask,
2019 		       & frame_size, & stack_size);
2020 
2021   return (register_mask == 0
2022 	  && (frame_size + stack_size) == 0
2023 	  && low == 0);
2024 }
2025 
2026 static void
2027 pop_regs (unsigned int high, unsigned int low)
2028 {
2029   if (high == low)
2030     emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
2031   else
2032     emit_insn (gen_stack_popm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
2033 			       gen_rx_popm_vector (low, high)));
2034 }
2035 
2036 void
2037 rx_expand_epilogue (bool is_sibcall)
2038 {
2039   unsigned int low;
2040   unsigned int high;
2041   unsigned int frame_size;
2042   unsigned int stack_size;
2043   unsigned int register_mask;
2044   unsigned int regs_size;
2045   unsigned int reg;
2046   unsigned HOST_WIDE_INT total_size;
2047 
  /* FIXME: We do not support indirect sibcalls at the moment because we
2049      cannot guarantee that the register holding the function address is a
2050      call-used register.  If it is a call-saved register then the stack
2051      pop instructions generated in the epilogue will corrupt the address
2052      before it is used.
2053 
2054      Creating a new call-used-only register class works but then the
2055      reload pass gets stuck because it cannot always find a call-used
2056      register for spilling sibcalls.
2057 
2058      The other possible solution is for this pass to scan forward for the
2059      sibcall instruction (if it has been generated) and work out if it
2060      is an indirect sibcall using a call-saved register.  If it is then
2061      the address can copied into a call-used register in this epilogue
2062      code and the sibcall instruction modified to use that register.  */
2063 
2064   if (is_naked_func (NULL_TREE))
2065     {
2066       gcc_assert (! is_sibcall);
2067 
2068       /* Naked functions use their own, programmer provided epilogues.
2069 	 But, in order to keep gcc happy we have to generate some kind of
2070 	 epilogue RTL.  */
2071       emit_jump_insn (gen_naked_return ());
2072       return;
2073     }
2074 
2075   rx_get_stack_layout (& low, & high, & register_mask,
2076 		       & frame_size, & stack_size);
2077 
2078   total_size = frame_size + stack_size;
2079   regs_size = ((high - low) + 1) * UNITS_PER_WORD;
2080 
2081   /* See if we are unable to use the special stack frame deconstruct and
2082      return instructions.  In most cases we can use them, but the exceptions
2083      are:
2084 
2085      - Sibling calling functions deconstruct the frame but do not return to
2086        their caller.  Instead they branch to their sibling and allow their
2087        return instruction to return to this function's parent.
2088 
2089      - Fast and normal interrupt handling functions have to use special
2090        return instructions.
2091 
2092      - Functions where we have pushed a fragmented set of registers into the
2093        call-save area must have the same set of registers popped.  */
2094   if (is_sibcall
2095       || is_fast_interrupt_func (NULL_TREE)
2096       || is_interrupt_func (NULL_TREE)
2097       || register_mask)
2098     {
2099       /* Cannot use the special instructions - deconstruct by hand.  */
2100       if (total_size)
2101 	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2102 		      GEN_INT (total_size), false);
2103 
2104       if (MUST_SAVE_ACC_REGISTER)
2105 	{
2106 	  unsigned int acc_low, acc_high;
2107 
2108 	  /* Reverse the saving of the accumulator register onto the stack.
2109 	     Note we must adjust the saved "low" accumulator value as it
2110 	     is really the middle 32-bits of the accumulator.  */
2111 	  if (register_mask)
2112 	    {
2113 	      acc_low = acc_high = 0;
2114 
2115 	      for (reg = 1; reg < CC_REGNUM; reg ++)
2116 		if (register_mask & (1 << reg))
2117 		  {
2118 		    if (acc_low == 0)
2119 		      acc_low = reg;
2120 		    else
2121 		      {
2122 			acc_high = reg;
2123 			break;
2124 		      }
2125 		  }
2126 	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
2127 	      emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
2128 	    }
2129 	  else
2130 	    {
2131 	      acc_low = low;
2132 	      acc_high = low + 1;
2133 	      emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
2134 					 gen_rx_popm_vector (acc_low, acc_high)));
2135 	    }
2136 
2137 	  emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
2138 				  gen_rtx_REG (SImode, acc_low),
2139 				  GEN_INT (16)));
2140 	  emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
2141 	  emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
2142 	}
2143 
2144       if (register_mask)
2145 	{
2146 	  for (reg = 0; reg < CC_REGNUM; reg ++)
2147 	    if (register_mask & (1 << reg))
2148 	      {
2149 		low = high = reg;
2150 		while (register_mask & (1 << high))
2151 		  high ++;
2152 		pop_regs (high - 1, low);
2153 		reg = high;
2154 	      }
2155 	}
2156       else if (low)
2157 	pop_regs (high, low);
2158 
2159       if (is_fast_interrupt_func (NULL_TREE))
2160 	{
2161 	  gcc_assert (! is_sibcall);
2162 	  emit_jump_insn (gen_fast_interrupt_return ());
2163 	}
2164       else if (is_interrupt_func (NULL_TREE))
2165 	{
2166 	  gcc_assert (! is_sibcall);
2167 	  emit_jump_insn (gen_exception_return ());
2168 	}
2169       else if (! is_sibcall)
2170 	emit_jump_insn (gen_simple_return ());
2171 
2172       return;
2173     }
2174 
2175   /* If we allocated space on the stack, free it now.  */
2176   if (total_size)
2177     {
2178       unsigned HOST_WIDE_INT rtsd_size;
2179 
2180       /* See if we can use the RTSD instruction.  */
2181       rtsd_size = total_size + regs_size;
2182       if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
2183 	{
2184 	  if (low)
2185 	    emit_jump_insn (gen_pop_and_return
2186 			    (GEN_INT (rtsd_size),
2187 			     gen_rx_rtsd_vector (rtsd_size, low, high)));
2188 	  else
2189 	    emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
2190 
2191 	  return;
2192 	}
2193 
2194       gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2195 		    GEN_INT (total_size), false);
2196     }
2197 
2198   if (low)
2199     emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
2200 					gen_rx_rtsd_vector (regs_size,
2201 							    low, high)));
2202   else
2203     emit_jump_insn (gen_simple_return ());
2204 }
2205 
2206 
/* Compute the offset (in bytes) between FROM (arg pointer
   or frame pointer) and TO (frame pointer or stack pointer).
   See ASCII art comment at the start of rx_expand_prologue
   for more information.  */
2211 
2212 int
2213 rx_initial_elimination_offset (int from, int to)
2214 {
2215   unsigned int low;
2216   unsigned int high;
2217   unsigned int frame_size;
2218   unsigned int stack_size;
2219   unsigned int mask;
2220 
2221   rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2222 
2223   if (from == ARG_POINTER_REGNUM)
2224     {
2225       /* Extend the computed size of the stack frame to
2226 	 include the registers pushed in the prologue.  */
2227       if (low)
2228 	frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2229       else
2230 	frame_size += bit_count (mask) * UNITS_PER_WORD;
2231 
2232       /* Remember to include the return address.  */
2233       frame_size += 1 * UNITS_PER_WORD;
2234 
2235       if (to == FRAME_POINTER_REGNUM)
2236 	return frame_size;
2237 
2238       gcc_assert (to == STACK_POINTER_REGNUM);
2239       return frame_size + stack_size;
2240     }
2241 
2242   gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2243   return stack_size;
2244 }
2245 
2246 /* Decide if a variable should go into one of the small data sections.  */
2247 
2248 static bool
2249 rx_in_small_data (const_tree decl)
2250 {
2251   int size;
2252   const char * section;
2253 
2254   if (rx_small_data_limit == 0)
2255     return false;
2256 
2257   if (TREE_CODE (decl) != VAR_DECL)
2258     return false;
2259 
2260   /* We do not put read-only variables into a small data area because
2261      they would be placed with the other read-only sections, far away
2262      from the read-write data sections, and we only have one small
2263      data area pointer.
2264      Similarly commons are placed in the .bss section which might be
2265      far away (and out of alignment with respect to) the .data section.  */
2266   if (TREE_READONLY (decl) || DECL_COMMON (decl))
2267     return false;
2268 
2269   section = DECL_SECTION_NAME (decl);
2270   if (section)
2271     return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
2272 
2273   size = int_size_in_bytes (TREE_TYPE (decl));
2274 
2275   return (size > 0) && (size <= rx_small_data_limit);
2276 }
2277 
2278 /* Return a section for X.
2279    The only special thing we do here is to honor small data.  */
2280 
2281 static section *
2282 rx_select_rtx_section (machine_mode mode,
2283 		       rtx x,
2284 		       unsigned HOST_WIDE_INT align)
2285 {
2286   if (rx_small_data_limit > 0
2287       && GET_MODE_SIZE (mode) <= rx_small_data_limit
2288       && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2289     return sdata_section;
2290 
2291   return default_elf_select_rtx_section (mode, x, align);
2292 }
2293 
2294 static section *
2295 rx_select_section (tree decl,
2296 		   int reloc,
2297 		   unsigned HOST_WIDE_INT align)
2298 {
2299   if (rx_small_data_limit > 0)
2300     {
2301       switch (categorize_decl_for_section (decl, reloc))
2302 	{
2303 	case SECCAT_SDATA:	return sdata_section;
2304 	case SECCAT_SBSS:	return sbss_section;
2305 	case SECCAT_SRODATA:
2306 	  /* Fall through.  We do not put small, read only
2307 	     data into the C_2 section because we are not
2308 	     using the C_2 section.  We do not use the C_2
2309 	     section because it is located with the other
2310 	     read-only data sections, far away from the read-write
2311 	     data sections and we only have one small data
2312 	     pointer (r13).  */
2313 	default:
2314 	  break;
2315 	}
2316     }
2317 
2318   /* If we are supporting the Renesas assembler
2319      we cannot use mergeable sections.  */
2320   if (TARGET_AS100_SYNTAX)
2321     switch (categorize_decl_for_section (decl, reloc))
2322       {
2323       case SECCAT_RODATA_MERGE_CONST:
2324       case SECCAT_RODATA_MERGE_STR_INIT:
2325       case SECCAT_RODATA_MERGE_STR:
2326 	return readonly_data_section;
2327 
2328       default:
2329 	break;
2330       }
2331 
2332   return default_elf_select_section (decl, reloc, align);
2333 }
2334 
/* Machine specific builtin functions.  One enumerator per
   __builtin_rx_* function registered in rx_init_builtins; the values
   index the rx_builtins[] cache and are the function codes seen by
   rx_expand_builtin.  */
enum rx_builtin
{
  RX_BUILTIN_BRK,
  RX_BUILTIN_CLRPSW,
  RX_BUILTIN_INT,
  RX_BUILTIN_MACHI,
  RX_BUILTIN_MACLO,
  RX_BUILTIN_MULHI,
  RX_BUILTIN_MULLO,
  RX_BUILTIN_MVFACHI,
  RX_BUILTIN_MVFACMI,
  RX_BUILTIN_MVFC,
  RX_BUILTIN_MVTACHI,
  RX_BUILTIN_MVTACLO,
  RX_BUILTIN_MVTC,
  RX_BUILTIN_MVTIPL,
  RX_BUILTIN_RACW,
  RX_BUILTIN_REVW,
  RX_BUILTIN_RMPA,
  RX_BUILTIN_ROUND,
  RX_BUILTIN_SETPSW,
  RX_BUILTIN_WAIT,
  RX_BUILTIN_max	/* Count of builtins; must remain last.  */
};
2359 
/* Cache of the builtin function decls, indexed by rx_builtin code.  */
static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2361 
/* Register the RX machine specific builtin functions with the
   middle-end, filling in the rx_builtins[] cache as we go.  */
static void
rx_init_builtins (void)
{
/* The ADD_RX_BUILTINn helper macros register a builtin named
   __builtin_rx_<LC_NAME> taking n arguments.  The *_type_node suffix
   is pasted onto each type argument, so e.g. "intSI" becomes
   intSI_type_node.  */
#define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE)		\
   rx_builtins[RX_BUILTIN_##UC_NAME] =					\
   add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE)		\
   rx_builtins[RX_BUILTIN_##UC_NAME] =					\
   add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  ARG_TYPE##_type_node, \
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
  add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  ARG_TYPE1##_type_node,\
						  ARG_TYPE2##_type_node,\
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
  rx_builtins[RX_BUILTIN_##UC_NAME] =					\
  add_builtin_function ("__builtin_rx_" LC_NAME,			\
			build_function_type_list (RET_TYPE##_type_node, \
						  ARG_TYPE1##_type_node,\
						  ARG_TYPE2##_type_node,\
						  ARG_TYPE3##_type_node,\
						  NULL_TREE),		\
			RX_BUILTIN_##UC_NAME,				\
			BUILT_IN_MD, NULL, NULL_TREE)

  /* One registration per rx_builtin enumerator.  */
  ADD_RX_BUILTIN0 (BRK,     "brk",     void);
  ADD_RX_BUILTIN1 (CLRPSW,  "clrpsw",  void,  integer);
  ADD_RX_BUILTIN1 (SETPSW,  "setpsw",  void,  integer);
  ADD_RX_BUILTIN1 (INT,     "int",     void,  integer);
  ADD_RX_BUILTIN2 (MACHI,   "machi",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MACLO,   "maclo",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MULHI,   "mulhi",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MULLO,   "mullo",   void,  intSI, intSI);
  ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
  ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
  ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void,  intSI);
  ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void,  intSI);
  ADD_RX_BUILTIN0 (RMPA,    "rmpa",    void);
  ADD_RX_BUILTIN1 (MVFC,    "mvfc",    intSI, integer);
  ADD_RX_BUILTIN2 (MVTC,    "mvtc",    void,  integer, integer);
  ADD_RX_BUILTIN1 (MVTIPL,  "mvtipl",  void,  integer);
  ADD_RX_BUILTIN1 (RACW,    "racw",    void,  integer);
  ADD_RX_BUILTIN1 (ROUND,   "round",   intSI, float);
  ADD_RX_BUILTIN1 (REVW,    "revw",    intSI, intSI);
  ADD_RX_BUILTIN0 (WAIT,    "wait",    void);
}
2424 
2425 /* Return the RX builtin for CODE.  */
2426 
2427 static tree
2428 rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2429 {
2430   if (code >= RX_BUILTIN_max)
2431     return error_mark_node;
2432 
2433   return rx_builtins[code];
2434 }
2435 
2436 static rtx
2437 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2438 {
2439   if (reg && ! REG_P (arg))
2440     arg = force_reg (SImode, arg);
2441 
2442   emit_insn (gen_func (arg));
2443 
2444   return NULL_RTX;
2445 }
2446 
2447 static rtx
2448 rx_expand_builtin_mvtc (tree exp)
2449 {
2450   rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2451   rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2452 
2453   if (! CONST_INT_P (arg1))
2454     return NULL_RTX;
2455 
2456   if (! REG_P (arg2))
2457     arg2 = force_reg (SImode, arg2);
2458 
2459   emit_insn (gen_mvtc (arg1, arg2));
2460 
2461   return NULL_RTX;
2462 }
2463 
2464 static rtx
2465 rx_expand_builtin_mvfc (tree t_arg, rtx target)
2466 {
2467   rtx arg = expand_normal (t_arg);
2468 
2469   if (! CONST_INT_P (arg))
2470     return NULL_RTX;
2471 
2472   if (target == NULL_RTX)
2473     return NULL_RTX;
2474 
2475   if (! REG_P (target))
2476     target = force_reg (SImode, target);
2477 
2478   emit_insn (gen_mvfc (target, arg));
2479 
2480   return target;
2481 }
2482 
2483 static rtx
2484 rx_expand_builtin_mvtipl (rtx arg)
2485 {
2486   /* The RX610 does not support the MVTIPL instruction.  */
2487   if (rx_cpu_type == RX610)
2488     return NULL_RTX;
2489 
2490   if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
2491     return NULL_RTX;
2492 
2493   emit_insn (gen_mvtipl (arg));
2494 
2495   return NULL_RTX;
2496 }
2497 
2498 static rtx
2499 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2500 {
2501   rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2502   rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2503 
2504   if (! REG_P (arg1))
2505     arg1 = force_reg (SImode, arg1);
2506 
2507   if (! REG_P (arg2))
2508     arg2 = force_reg (SImode, arg2);
2509 
2510   emit_insn (gen_func (arg1, arg2));
2511 
2512   return NULL_RTX;
2513 }
2514 
2515 static rtx
2516 rx_expand_int_builtin_1_arg (rtx arg,
2517 			     rtx target,
2518 			     rtx (* gen_func)(rtx, rtx),
2519 			     bool mem_ok)
2520 {
2521   if (! REG_P (arg))
2522     if (!mem_ok || ! MEM_P (arg))
2523       arg = force_reg (SImode, arg);
2524 
2525   if (target == NULL_RTX || ! REG_P (target))
2526     target = gen_reg_rtx (SImode);
2527 
2528   emit_insn (gen_func (target, arg));
2529 
2530   return target;
2531 }
2532 
2533 static rtx
2534 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2535 {
2536   if (target == NULL_RTX || ! REG_P (target))
2537     target = gen_reg_rtx (SImode);
2538 
2539   emit_insn (gen_func (target));
2540 
2541   return target;
2542 }
2543 
2544 static rtx
2545 rx_expand_builtin_round (rtx arg, rtx target)
2546 {
2547   if ((! REG_P (arg) && ! MEM_P (arg))
2548       || GET_MODE (arg) != SFmode)
2549     arg = force_reg (SFmode, arg);
2550 
2551   if (target == NULL_RTX || ! REG_P (target))
2552     target = gen_reg_rtx (SImode);
2553 
2554   emit_insn (gen_lrintsf2 (target, arg));
2555 
2556   return target;
2557 }
2558 
2559 static int
2560 valid_psw_flag (rtx op, const char *which)
2561 {
2562   static int mvtc_inform_done = 0;
2563 
2564   if (GET_CODE (op) == CONST_INT)
2565     switch (INTVAL (op))
2566       {
2567       case 0: case 'c': case 'C':
2568       case 1: case 'z': case 'Z':
2569       case 2: case 's': case 'S':
2570       case 3: case 'o': case 'O':
2571       case 8: case 'i': case 'I':
2572       case 9: case 'u': case 'U':
2573 	return 1;
2574       }
2575 
2576   error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2577   if (!mvtc_inform_done)
2578     error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2579   mvtc_inform_done = 1;
2580 
2581   return 0;
2582 }
2583 
/* Expand a call to one of the machine specific builtins registered in
   rx_init_builtins.  TARGET, if non-NULL, is a hint for where to put
   the result.  Returns the result rtx, or NULL_RTX for void builtins
   or when expansion is abandoned.  */
static rtx
rx_expand_builtin (tree exp,
		   rtx target,
		   rtx subtarget ATTRIBUTE_UNUSED,
		   machine_mode mode ATTRIBUTE_UNUSED,
		   int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  /* Pre-expand the first argument, if any; most of the builtins below
     take at most one operand.  Multi-operand builtins (the MAC family
     and MVTC) re-fetch their arguments from EXP themselves.  */
  tree arg    = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
  rtx  op     = arg ? expand_normal (arg) : NULL_RTX;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case RX_BUILTIN_BRK:     emit_insn (gen_brk ()); return NULL_RTX;
    case RX_BUILTIN_CLRPSW:
      if (!valid_psw_flag (op, "clrpsw"))
	return NULL_RTX;
      return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
    case RX_BUILTIN_SETPSW:
      if (!valid_psw_flag (op, "setpsw"))
	return NULL_RTX;
      return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
    case RX_BUILTIN_INT:     return rx_expand_void_builtin_1_arg
	(op, gen_int, false);
    case RX_BUILTIN_MACHI:   return rx_expand_builtin_mac (exp, gen_machi);
    case RX_BUILTIN_MACLO:   return rx_expand_builtin_mac (exp, gen_maclo);
    case RX_BUILTIN_MULHI:   return rx_expand_builtin_mac (exp, gen_mulhi);
    case RX_BUILTIN_MULLO:   return rx_expand_builtin_mac (exp, gen_mullo);
    case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
	(target, gen_mvfachi);
    case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
	(target, gen_mvfacmi);
    case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
	(op, gen_mvtachi, true);
    case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
	(op, gen_mvtaclo, true);
    case RX_BUILTIN_RMPA:
      /* RMPA is a string instruction; it is rejected when string
	 instructions have been disabled on the command line.  */
      if (rx_allow_string_insns)
	emit_insn (gen_rmpa ());
      else
	error ("-mno-allow-string-insns forbids the generation of the RMPA instruction");
      return NULL_RTX;
    case RX_BUILTIN_MVFC:    return rx_expand_builtin_mvfc (arg, target);
    case RX_BUILTIN_MVTC:    return rx_expand_builtin_mvtc (exp);
    case RX_BUILTIN_MVTIPL:  return rx_expand_builtin_mvtipl (op);
    case RX_BUILTIN_RACW:    return rx_expand_void_builtin_1_arg
	(op, gen_racw, false);
    case RX_BUILTIN_ROUND:   return rx_expand_builtin_round (op, target);
    case RX_BUILTIN_REVW:    return rx_expand_int_builtin_1_arg
	(op, target, gen_revw, false);
    case RX_BUILTIN_WAIT:    emit_insn (gen_wait ()); return NULL_RTX;

    default:
      internal_error ("bad builtin code");
      break;
    }

  return NULL_RTX;
}
2644 
2645 /* Place an element into a constructor or destructor section.
2646    Like default_ctor_section_asm_out_constructor in varasm.c
2647    except that it uses .init_array (or .fini_array) and it
2648    handles constructor priorities.  */
2649 
2650 static void
2651 rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2652 {
2653   section * s;
2654 
2655   if (priority != DEFAULT_INIT_PRIORITY)
2656     {
2657       char buf[18];
2658 
2659       sprintf (buf, "%s.%.5u",
2660 	       is_ctor ? ".init_array" : ".fini_array",
2661 	       priority);
2662       s = get_section (buf, SECTION_WRITE, NULL_TREE);
2663     }
2664   else if (is_ctor)
2665     s = ctors_section;
2666   else
2667     s = dtors_section;
2668 
2669   switch_to_section (s);
2670   assemble_align (POINTER_SIZE);
2671   assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2672 }
2673 
2674 static void
2675 rx_elf_asm_constructor (rtx symbol, int priority)
2676 {
2677   rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2678 }
2679 
2680 static void
2681 rx_elf_asm_destructor (rtx symbol, int priority)
2682 {
2683   rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2684 }
2685 
2686 /* Check "fast_interrupt", "interrupt" and "naked" attributes.  */
2687 
2688 static tree
2689 rx_handle_func_attribute (tree * node,
2690 			  tree   name,
2691 			  tree   args ATTRIBUTE_UNUSED,
2692 			  int    flags ATTRIBUTE_UNUSED,
2693 			  bool * no_add_attrs)
2694 {
2695   gcc_assert (DECL_P (* node));
2696 
2697   if (TREE_CODE (* node) != FUNCTION_DECL)
2698     {
2699       warning (OPT_Wattributes, "%qE attribute only applies to functions",
2700 	       name);
2701       * no_add_attrs = true;
2702     }
2703 
2704   /* FIXME: We ought to check for conflicting attributes.  */
2705 
2706   /* FIXME: We ought to check that the interrupt and exception
2707      handler attributes have been applied to void functions.  */
2708   return NULL_TREE;
2709 }
2710 
2711 /* Check "vector" attribute.  */
2712 
2713 static tree
2714 rx_handle_vector_attribute (tree * node,
2715 			    tree   name,
2716 			    tree   args,
2717 			    int    flags ATTRIBUTE_UNUSED,
2718 			    bool * no_add_attrs)
2719 {
2720   gcc_assert (DECL_P (* node));
2721   gcc_assert (args != NULL_TREE);
2722 
2723   if (TREE_CODE (* node) != FUNCTION_DECL)
2724     {
2725       warning (OPT_Wattributes, "%qE attribute only applies to functions",
2726 	       name);
2727       * no_add_attrs = true;
2728     }
2729 
2730   return NULL_TREE;
2731 }
2732 
2733 /* Table of RX specific attributes.  */
const struct attribute_spec rx_attribute_table[] =
{
  /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
     affects_type_identity.  */
  /* All four attributes require a decl; the function handlers reject
     anything that is not a FUNCTION_DECL.  */
  { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
    false },
  { "interrupt",      0, -1, true, false, false, rx_handle_func_attribute,
    false },
  { "naked",          0, 0, true, false, false, rx_handle_func_attribute,
    false },
  /* "vector" requires at least one argument (the vector number).  */
  { "vector",         1, -1, true, false, false, rx_handle_vector_attribute,
    false },
  /* Table terminator.  */
  { NULL,             0, 0, false, false, false, NULL, false }
};
2748 
2749 /* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE.  */
2750 
2751 static void
2752 rx_override_options_after_change (void)
2753 {
2754   static bool first_time = TRUE;
2755 
2756   if (first_time)
2757     {
2758       /* If this is the first time through and the user has not disabled
2759 	 the use of RX FPU hardware then enable -ffinite-math-only,
2760 	 since the FPU instructions do not support NaNs and infinities.  */
2761       if (TARGET_USE_FPU)
2762 	flag_finite_math_only = 1;
2763 
2764       first_time = FALSE;
2765     }
2766   else
2767     {
2768       /* Alert the user if they are changing the optimization options
2769 	 to use IEEE compliant floating point arithmetic with RX FPU insns.  */
2770       if (TARGET_USE_FPU
2771 	  && !flag_finite_math_only)
2772 	warning (0, "RX FPU instructions do not support NaNs and infinities");
2773     }
2774 }
2775 
/* Process the deferred command line options and apply target
   specific defaults for this backend.  */
static void
rx_option_override (void)
{
  unsigned int i;
  cl_deferred_option *opt;
  vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;

  if (v)
    FOR_EACH_VEC_ELT (*v, i, opt)
      {
	switch (opt->opt_index)
	  {
	  case OPT_mint_register_:
	    /* -mint-register=N reserves the top N of r10..r13 for
	       interrupt handlers; each case marks one more register
	       fixed and falls through to the next.  */
	    switch (opt->value)
	      {
	      case 4:
		fixed_regs[10] = call_used_regs [10] = 1;
		/* Fall through.  */
	      case 3:
		fixed_regs[11] = call_used_regs [11] = 1;
		/* Fall through.  */
	      case 2:
		fixed_regs[12] = call_used_regs [12] = 1;
		/* Fall through.  */
	      case 1:
		fixed_regs[13] = call_used_regs [13] = 1;
		/* Fall through.  */
	      case 0:
		rx_num_interrupt_regs = opt->value;
		break;
	      default:
		rx_num_interrupt_regs = 0;
		/* Error message already given because rx_handle_option
		  returned false.  */
		break;
	      }
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }

  /* This target defaults to strict volatile bitfields.  */
  if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
    flag_strict_volatile_bitfields = 1;

  rx_override_options_after_change ();

  /* These values are bytes, not log.  */
  if (align_jumps == 0 && ! optimize_size)
    align_jumps = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
  if (align_loops == 0 && ! optimize_size)
    align_loops = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
  if (align_labels == 0 && ! optimize_size)
    align_labels = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
}
2833 
2834 
2835 static bool
2836 rx_allocate_stack_slots_for_args (void)
2837 {
2838   /* Naked functions should not allocate stack slots for arguments.  */
2839   return ! is_naked_func (NULL_TREE);
2840 }
2841 
2842 static bool
2843 rx_func_attr_inlinable (const_tree decl)
2844 {
2845   return ! is_fast_interrupt_func (decl)
2846     &&   ! is_interrupt_func (decl)
2847     &&   ! is_naked_func (decl);
2848 }
2849 
2850 static bool
2851 rx_warn_func_return (tree decl)
2852 {
2853   /* Naked functions are implemented entirely in assembly, including the
2854      return sequence, so suppress warnings about this.  */
2855   return !is_naked_func (decl);
2856 }
2857 
2858 /* Return nonzero if it is ok to make a tail-call to DECL,
2859    a function_decl or NULL if this is an indirect call, using EXP  */
2860 
2861 static bool
2862 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2863 {
2864   if (TARGET_JSR)
2865     return false;
2866 
2867   /* Do not allow indirect tailcalls.  The
2868      sibcall patterns do not support them.  */
2869   if (decl == NULL)
2870     return false;
2871 
2872   /* Never tailcall from inside interrupt handlers or naked functions.  */
2873   if (is_fast_interrupt_func (NULL_TREE)
2874       || is_interrupt_func (NULL_TREE)
2875       || is_naked_func (NULL_TREE))
2876     return false;
2877 
2878   return true;
2879 }
2880 
2881 static void
2882 rx_file_start (void)
2883 {
2884   if (! TARGET_AS100_SYNTAX)
2885     default_file_start ();
2886 }
2887 
2888 static bool
2889 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2890 {
2891   /* The packed attribute overrides the MS behavior.  */
2892   return ! TYPE_PACKED (record_type);
2893 }
2894 
2895 /* Returns true if X a legitimate constant for an immediate
2896    operand on the RX.  X is already known to satisfy CONSTANT_P.  */
2897 
/* Returns true if X a legitimate constant for an immediate
   operand on the RX.  X is already known to satisfy CONSTANT_P.
   Respects the -mmax-constant-size limit via ok_for_max_constant.  */
bool
rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST:
      x = XEXP (x, 0);

      if (GET_CODE (x) == PLUS)
	{
	  if (! CONST_INT_P (XEXP (x, 1)))
	    return false;

	  /* GCC would not pass us CONST_INT + CONST_INT so we
	     know that we have {SYMBOL|LABEL} + CONST_INT.  */
	  x = XEXP (x, 0);
	  gcc_assert (! CONST_INT_P (x));
	}

      /* Every arm of this inner switch returns (or aborts), so the
	 outer break below is never reached for CONST.  */
      switch (GET_CODE (x))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  return true;

	case UNSPEC:
	  return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;

	default:
	  /* FIXME: Can this ever happen ?  */
	  gcc_unreachable ();
	}
      break;

    case LABEL_REF:
    case SYMBOL_REF:
      return true;
    case CONST_DOUBLE:
      return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
    case CONST_VECTOR:
      return false;
    default:
      /* Only CONST_INT reaches the size check below.  */
      gcc_assert (CONST_INT_P (x));
      break;
    }

  return ok_for_max_constant (INTVAL (x));
}
2946 
2947 static int
2948 rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
2949 		 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
2950 {
2951   rtx a, b;
2952 
2953   if (GET_CODE (addr) != PLUS)
2954     return COSTS_N_INSNS (1);
2955 
2956   a = XEXP (addr, 0);
2957   b = XEXP (addr, 1);
2958 
2959   if (REG_P (a) && REG_P (b))
2960     /* Try to discourage REG+REG addressing as it keeps two registers live.  */
2961     return COSTS_N_INSNS (4);
2962 
2963   if (speed)
2964     /* [REG+OFF] is just as fast as [REG].  */
2965     return COSTS_N_INSNS (1);
2966 
2967   if (CONST_INT_P (b)
2968       && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2969     /* Try to discourage REG + <large OFF> when optimizing for size.  */
2970     return COSTS_N_INSNS (2);
2971 
2972   return COSTS_N_INSNS (1);
2973 }
2974 
2975 static bool
2976 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2977 {
2978   /* We can always eliminate to the frame pointer.
2979      We can eliminate to the stack pointer unless a frame
2980      pointer is needed.  */
2981 
2982   return to == FRAME_POINTER_REGNUM
2983     || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2984 }
2985 
2986 
/* Output assembler for the fixed part of a trampoline to FILE,
   leaving 0xdeadbeef place-holders where rx_trampoline_init will
   later write the static chain value and the target address.  */
static void
rx_trampoline_template (FILE * file)
{
  /* Output assembler code for a block containing the constant
     part of a trampoline, leaving space for the variable parts.

     On the RX, (where r8 is the static chain regnum) the trampoline
     looks like:

	   mov 		#<static chain value>, r8
	   mov          #<function's address>, r9
	   jmp		r9

     In big-endian-data-mode however instructions are read into the CPU
     4 bytes at a time.  These bytes are then swapped around before being
     passed to the decoder.  So...we must partition our trampoline into
     4 byte packets and swap these packets around so that the instruction
     reader will reverse the process.  But, in order to avoid splitting
     the 32-bit constants across these packet boundaries, (making inserting
     them into the constructed trampoline very difficult) we have to pad the
     instruction sequence with NOP insns.  ie:

           nop
	   nop
           mov.l	#<...>, r8
	   nop
	   nop
           mov.l	#<...>, r9
           jmp		r9
	   nop
	   nop             */

  if (! TARGET_BIG_ENDIAN_DATA)
    {
      /* Little-endian data: emit the instructions symbolically with
	 0xdeadbeef place-holders for the two 32-bit constants.  */
      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
      asm_fprintf (file, "\tjmp\tr%d\n",                TRAMPOLINE_TEMP_REGNUM);
    }
  else
    {
      /* Big-endian data: emit the raw, pre-swapped instruction bytes
	 (see the padding discussion above), with de/ad/be/ef marking
	 the constant slots.  The register numbers are patched into
	 the opcode bytes as ASCII digits.  */
      char r8 = '0' + STATIC_CHAIN_REGNUM;
      char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;

      if (TARGET_AS100_SYNTAX)
        {
          asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H,  003H\n", r8);
          asm_fprintf (file, "\t.BYTE 0deH,  0adH, 0beH,  0efH\n");
          asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H,  003H\n", r9);
          asm_fprintf (file, "\t.BYTE 0deH,  0adH, 0beH,  0efH\n");
          asm_fprintf (file, "\t.BYTE 003H,  003H, 00%cH, 07fH\n", r9);
        }
      else
        {
          asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03,  0x03\n", r8);
          asm_fprintf (file, "\t.byte 0xde,  0xad, 0xbe,  0xef\n");
          asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03,  0x03\n", r9);
          asm_fprintf (file, "\t.byte 0xde,  0xad, 0xbe,  0xef\n");
          asm_fprintf (file, "\t.byte 0x03,  0x03, 0x0%c, 0x7f\n", r9);
        }
    }
}
3048 
3049 static void
3050 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
3051 {
3052   rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3053 
3054   emit_block_move (tramp, assemble_trampoline_template (),
3055 		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3056 
3057   if (TARGET_BIG_ENDIAN_DATA)
3058     {
3059       emit_move_insn (adjust_address (tramp, SImode, 4), chain);
3060       emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
3061     }
3062   else
3063     {
3064       emit_move_insn (adjust_address (tramp, SImode, 2), chain);
3065       emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
3066     }
3067 }
3068 
3069 static int
3070 rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
3071 		     reg_class_t regclass ATTRIBUTE_UNUSED,
3072 		     bool in)
3073 {
3074   return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
3075 }
3076 
3077 /* Convert a CC_MODE to the set of flags that it represents.  */
3078 
3079 static unsigned int
3080 flags_from_mode (machine_mode mode)
3081 {
3082   switch (mode)
3083     {
3084     case CC_ZSmode:
3085       return CC_FLAG_S | CC_FLAG_Z;
3086     case CC_ZSOmode:
3087       return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
3088     case CC_ZSCmode:
3089       return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
3090     case CCmode:
3091       return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
3092     case CC_Fmode:
3093       return CC_FLAG_FP;
3094     default:
3095       gcc_unreachable ();
3096     }
3097 }
3098 
3099 /* Convert a set of flags to a CC_MODE that can implement it.  */
3100 
3101 static machine_mode
3102 mode_from_flags (unsigned int f)
3103 {
3104   if (f & CC_FLAG_FP)
3105     return CC_Fmode;
3106   if (f & CC_FLAG_O)
3107     {
3108       if (f & CC_FLAG_C)
3109 	return CCmode;
3110       else
3111 	return CC_ZSOmode;
3112     }
3113   else if (f & CC_FLAG_C)
3114     return CC_ZSCmode;
3115   else
3116     return CC_ZSmode;
3117 }
3118 
3119 /* Convert an RTX_CODE to the set of flags needed to implement it.
3120    This assumes an integer comparison.  */
3121 
3122 static unsigned int
3123 flags_from_code (enum rtx_code code)
3124 {
3125   switch (code)
3126     {
3127     case LT:
3128     case GE:
3129       return CC_FLAG_S;
3130     case GT:
3131     case LE:
3132       return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
3133     case GEU:
3134     case LTU:
3135       return CC_FLAG_C;
3136     case GTU:
3137     case LEU:
3138       return CC_FLAG_C | CC_FLAG_Z;
3139     case EQ:
3140     case NE:
3141       return CC_FLAG_Z;
3142     default:
3143       gcc_unreachable ();
3144     }
3145 }
3146 
3147 /* Return a CC_MODE of which both M1 and M2 are subsets.  */
3148 
3149 static machine_mode
3150 rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
3151 {
3152   unsigned f;
3153 
3154   /* Early out for identical modes.  */
3155   if (m1 == m2)
3156     return m1;
3157 
3158   /* There's no valid combination for FP vs non-FP.  */
3159   f = flags_from_mode (m1) | flags_from_mode (m2);
3160   if (f & CC_FLAG_FP)
3161     return VOIDmode;
3162 
3163   /* Otherwise, see what mode can implement all the flags.  */
3164   return mode_from_flags (f);
3165 }
3166 
3167 /* Return the minimal CC mode needed to implement (CMP_CODE X Y).  */
3168 
3169 machine_mode
3170 rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
3171 {
3172   if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3173     return CC_Fmode;
3174 
3175   if (y != const0_rtx)
3176     return CCmode;
3177 
3178   return mode_from_flags (flags_from_code (cmp_code));
3179 }
3180 
3181 /* Split the conditional branch.  Emit (COMPARE C1 C2) into CC_REG with
3182    CC_MODE, and use that in branches based on that compare.  */
3183 
3184 void
3185 rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
3186 		  rtx c1, rtx c2, rtx label)
3187 {
3188   rtx flags, x;
3189 
3190   flags = gen_rtx_REG (cc_mode, CC_REG);
3191   x = gen_rtx_COMPARE (cc_mode, c1, c2);
3192   x = gen_rtx_SET (flags, x);
3193   emit_insn (x);
3194 
3195   x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
3196   x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
3197   x = gen_rtx_SET (pc_rtx, x);
3198   emit_jump_insn (x);
3199 }
3200 
3201 /* A helper function for matching parallels that set the flags.  */
3202 
3203 bool
3204 rx_match_ccmode (rtx insn, machine_mode cc_mode)
3205 {
3206   rtx op1, flags;
3207   machine_mode flags_mode;
3208 
3209   gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
3210 
3211   op1 = XVECEXP (PATTERN (insn), 0, 1);
3212   gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
3213 
3214   flags = SET_DEST (op1);
3215   flags_mode = GET_MODE (flags);
3216 
3217   if (GET_MODE (SET_SRC (op1)) != flags_mode)
3218     return false;
3219   if (GET_MODE_CLASS (flags_mode) != MODE_CC)
3220     return false;
3221 
3222   /* Ensure that the mode of FLAGS is compatible with CC_MODE.  */
3223   if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
3224     return false;
3225 
3226   return true;
3227 }
3228 
3229 int
3230 rx_align_for_label (rtx lab, int uses_threshold)
3231 {
3232   /* This is a simple heuristic to guess when an alignment would not be useful
3233      because the delay due to the inserted NOPs would be greater than the delay
3234      due to the misaligned branch.  If uses_threshold is zero then the alignment
3235      is always useful.  */
3236   if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
3237     return 0;
3238 
3239   if (optimize_size)
3240     return 0;
3241   /* These values are log, not bytes.  */
3242   if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
3243     return 2; /* 4 bytes */
3244   return 3;   /* 8 bytes */
3245 }
3246 
3247 static int
3248 rx_max_skip_for_label (rtx_insn *lab)
3249 {
3250   int opsize;
3251   rtx_insn *op;
3252 
3253   if (optimize_size)
3254     return 0;
3255 
3256   if (lab == NULL)
3257     return 0;
3258 
3259   op = lab;
3260   do
3261     {
3262       op = next_nonnote_nondebug_insn (op);
3263     }
3264   while (op && (LABEL_P (op)
3265 		|| (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3266   if (!op)
3267     return 0;
3268 
3269   opsize = get_attr_length (op);
3270   if (opsize >= 0 && opsize < 8)
3271     return opsize - 1;
3272   return 0;
3273 }
3274 
/* Compute the real length of the extending load-and-op instructions.

   INSN is expected to be one of the <op>si3_{zero,sign}_extend{qi,hi}
   patterns, which fuse a memory load, an extension and an arithmetic
   operation.  CURRENT_LENGTH is the length derived from the insn
   attributes; the returned value is the accurate byte length, which
   depends on the addressing mode of the memory operand.  */

int
rx_adjust_insn_length (rtx_insn *insn, int current_length)
{
  rtx extend, mem, offset;
  bool zero;    /* True for a zero extension, false for a sign extension.  */
  int factor;   /* Byte size of the memory operand: 1 (QI) or 2 (HI).  */

  if (!INSN_P (insn))
    return current_length;

  /* Classify the insn: is the extension zero or sign, and is the
     memory operand a byte or a halfword?  */
  switch (INSN_CODE (insn))
    {
    default:
      /* Not one of the load-and-op patterns; leave the length alone.  */
      return current_length;

    case CODE_FOR_plussi3_zero_extendhi:
    case CODE_FOR_andsi3_zero_extendhi:
    case CODE_FOR_iorsi3_zero_extendhi:
    case CODE_FOR_xorsi3_zero_extendhi:
    case CODE_FOR_divsi3_zero_extendhi:
    case CODE_FOR_udivsi3_zero_extendhi:
    case CODE_FOR_minussi3_zero_extendhi:
    case CODE_FOR_smaxsi3_zero_extendhi:
    case CODE_FOR_sminsi3_zero_extendhi:
    case CODE_FOR_multsi3_zero_extendhi:
    case CODE_FOR_comparesi3_zero_extendhi:
      zero = true;
      factor = 2;
      break;

    case CODE_FOR_plussi3_sign_extendhi:
    case CODE_FOR_andsi3_sign_extendhi:
    case CODE_FOR_iorsi3_sign_extendhi:
    case CODE_FOR_xorsi3_sign_extendhi:
    case CODE_FOR_divsi3_sign_extendhi:
    case CODE_FOR_udivsi3_sign_extendhi:
    case CODE_FOR_minussi3_sign_extendhi:
    case CODE_FOR_smaxsi3_sign_extendhi:
    case CODE_FOR_sminsi3_sign_extendhi:
    case CODE_FOR_multsi3_sign_extendhi:
    case CODE_FOR_comparesi3_sign_extendhi:
      zero = false;
      factor = 2;
      break;

    case CODE_FOR_plussi3_zero_extendqi:
    case CODE_FOR_andsi3_zero_extendqi:
    case CODE_FOR_iorsi3_zero_extendqi:
    case CODE_FOR_xorsi3_zero_extendqi:
    case CODE_FOR_divsi3_zero_extendqi:
    case CODE_FOR_udivsi3_zero_extendqi:
    case CODE_FOR_minussi3_zero_extendqi:
    case CODE_FOR_smaxsi3_zero_extendqi:
    case CODE_FOR_sminsi3_zero_extendqi:
    case CODE_FOR_multsi3_zero_extendqi:
    case CODE_FOR_comparesi3_zero_extendqi:
      zero = true;
      factor = 1;
      break;

    case CODE_FOR_plussi3_sign_extendqi:
    case CODE_FOR_andsi3_sign_extendqi:
    case CODE_FOR_iorsi3_sign_extendqi:
    case CODE_FOR_xorsi3_sign_extendqi:
    case CODE_FOR_divsi3_sign_extendqi:
    case CODE_FOR_udivsi3_sign_extendqi:
    case CODE_FOR_minussi3_sign_extendqi:
    case CODE_FOR_smaxsi3_sign_extendqi:
    case CODE_FOR_sminsi3_sign_extendqi:
    case CODE_FOR_multsi3_sign_extendqi:
    case CODE_FOR_comparesi3_sign_extendqi:
      zero = false;
      factor = 1;
      break;
    }

  /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))).  */
  extend = single_set (insn);
  gcc_assert (extend != NULL_RTX);

  extend = SET_SRC (extend);
  /* The extend may appear as either operand of the operation.  */
  if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
      || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
    extend = XEXP (extend, 0);
  else
    extend = XEXP (extend, 1);

  /* The extension kind must match what the insn code told us.  */
  gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
	      || (! zero && (GET_CODE (extend) == SIGN_EXTEND)))

  mem = XEXP (extend, 0);
  gcc_checking_assert (MEM_P (mem));
  /* Plain register-indirect addressing: the shortest encoding.  */
  if (REG_P (XEXP (mem, 0)))
    return (zero && factor == 1) ? 2 : 3;

  /* We are expecting: (MEM (PLUS (REG) (CONST_INT))).  */
  gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
  gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));

  offset = XEXP (XEXP (mem, 0), 1);
  gcc_checking_assert (GET_CODE (offset) == CONST_INT);

  /* Displacements that fit in one byte (after scaling by the operand
     size) take one extra byte; larger ones take two.  */
  if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
    return (zero && factor == 1) ? 3 : 4;

  return (zero && factor == 1) ? 4 : 5;
}
3384 
/* Implement TARGET_NARROW_VOLATILE_BITFIELD.  On RX always use the
   narrowest memory access that covers a volatile bit-field.  */

static bool
rx_narrow_volatile_bitfield (void)
{
  return true;
}
3390 
3391 static bool
3392 rx_ok_to_inline (tree caller, tree callee)
3393 {
3394   /* Do not inline functions with local variables
3395      into a naked CALLER - naked function have no stack frame and
3396      locals need a frame in order to have somewhere to live.
3397 
3398      Unfortunately we have no way to determine the presence of
3399      local variables in CALLEE, so we have to be cautious and
3400      assume that there might be some there.
3401 
3402      We do allow inlining when CALLEE has the "inline" type
3403      modifier or the "always_inline" or "gnu_inline" attributes.  */
3404   return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3405     || DECL_DECLARED_INLINE_P (callee)
3406     || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3407     || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3408 }
3409 
/* Implement TARGET_LRA_P.  Use the LRA register allocator only when it
   has been requested on the command line (-menable-lra).  */

static bool
rx_enable_lra (void)
{
  return TARGET_ENABLE_LRA;
}
3415 
3416 rx_atomic_sequence::rx_atomic_sequence (const_tree fun_decl)
3417 {
3418   if (is_fast_interrupt_func (fun_decl) || is_interrupt_func (fun_decl))
3419     {
3420       /* If we are inside an interrupt handler, assume that interrupts are
3421 	 off -- which is the default hardware behavior.  In this case, there
3422 	 is no need to disable the interrupts.  */
3423       m_prev_psw_reg = NULL;
3424     }
3425   else
3426     {
3427       m_prev_psw_reg = gen_reg_rtx (SImode);
3428       emit_insn (gen_mvfc (m_prev_psw_reg, GEN_INT (CTRLREG_PSW)));
3429       emit_insn (gen_clrpsw (GEN_INT ('I')));
3430     }
3431 }
3432 
/* Close the atomic sequence: restore the PSW saved by the constructor,
   which re-enables interrupts if they were enabled on entry.  A NULL
   saved register means we were inside an interrupt handler and nothing
   was changed.  */

rx_atomic_sequence::~rx_atomic_sequence (void)
{
  if (m_prev_psw_reg != NULL)
    emit_insn (gen_mvtc (GEN_INT (CTRLREG_PSW), m_prev_psw_reg));
}
3438 
3439 
/* Target hook overrides.  Each pair below redirects a TARGET_* hook
   macro to the RX-specific implementation defined earlier in this
   file; the table is gathered into `targetm' at the end.  */

#undef  TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD		rx_narrow_volatile_bitfield

#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P			rx_ok_to_inline

/* All four alignment-skip hooks share one heuristic.  */
#undef  TARGET_ASM_JUMP_ALIGN_MAX_SKIP
#define TARGET_ASM_JUMP_ALIGN_MAX_SKIP			rx_max_skip_for_label
#undef  TARGET_ASM_LOOP_ALIGN_MAX_SKIP
#define TARGET_ASM_LOOP_ALIGN_MAX_SKIP			rx_max_skip_for_label
#undef  TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
#define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP	rx_max_skip_for_label
#undef  TARGET_ASM_LABEL_ALIGN_MAX_SKIP
#define TARGET_ASM_LABEL_ALIGN_MAX_SKIP			rx_max_skip_for_label

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE		rx_function_value

#undef  TARGET_RETURN_IN_MSB
#define TARGET_RETURN_IN_MSB		rx_return_in_msb

#undef  TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P		rx_in_small_data

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY		rx_return_in_memory

#undef  TARGET_HAVE_SRODATA_SECTION
#define TARGET_HAVE_SRODATA_SECTION	true

#undef	TARGET_ASM_SELECT_RTX_SECTION
#define	TARGET_ASM_SELECT_RTX_SECTION	rx_select_rtx_section

#undef	TARGET_ASM_SELECT_SECTION
#define	TARGET_ASM_SELECT_SECTION	rx_select_section

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS		rx_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL		rx_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN		rx_expand_builtin

#undef  TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR		rx_elf_asm_constructor

#undef  TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR		rx_elf_asm_destructor

#undef  TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX		rx_struct_value_rtx

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE		rx_attribute_table

#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START			rx_file_start

#undef  TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P		rx_is_ms_bitfield_layout

#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P		rx_is_legitimate_address

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P		rx_mode_dependent_address_p

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS	rx_allocate_stack_slots_for_args

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE 		rx_output_function_prologue

#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P 	rx_func_attr_inlinable

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL		rx_function_ok_for_sibcall

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG     		rx_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE     	rx_function_arg_advance

#undef	TARGET_FUNCTION_ARG_BOUNDARY
#define	TARGET_FUNCTION_ARG_BOUNDARY		rx_function_arg_boundary

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION		rx_set_current_function

#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER			rx_assemble_integer

#undef  TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P	hook_bool_mode_const_rtx_true

#undef  TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET		32

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST			rx_address_cost

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE			rx_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE	rx_conditional_register_usage

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE		rx_trampoline_template

#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT			rx_trampoline_init

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND			rx_print_operand

#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS		rx_print_operand_address

#undef  TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE		rx_cc_modes_compatible

#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST			rx_memory_move_cost

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE			rx_option_override

#undef  TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE		rx_promote_function_mode

#undef  TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
#define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE	rx_override_options_after_change

#undef  TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM			CC_REG

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P		rx_is_legitimate_constant

#undef  TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS		rx_legitimize_address

#undef  TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN 		rx_warn_func_return

#undef  TARGET_LRA_P
#define TARGET_LRA_P 				rx_enable_lra

/* Build the target vector from the hooks selected above.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collection roots generated from this file by gengtype.  */
#include "gt-rx.h"
3596