xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/config/v850/v850.c (revision f0fde9902fd4d72ded2807793acc7bfaa1ebf243)
1 /* Subroutines for insn-output.c for NEC V850 series
2    Copyright (C) 1996-2019 Free Software Foundation, Inc.
3    Contributed by Jeff Law (law@cygnus.com).
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify it
8    under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3, or (at your option)
10    any later version.
11 
12    GCC is distributed in the hope that it will be useful, but WITHOUT
13    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15    for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING3.  If not see
19    <http://www.gnu.org/licenses/>.  */
20 
21 #define IN_TARGET_CODE 1
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "df.h"
31 #include "memmodel.h"
32 #include "tm_p.h"
33 #include "stringpool.h"
34 #include "attribs.h"
35 #include "insn-config.h"
36 #include "regs.h"
37 #include "emit-rtl.h"
38 #include "recog.h"
39 #include "diagnostic-core.h"
40 #include "stor-layout.h"
41 #include "varasm.h"
42 #include "calls.h"
43 #include "conditions.h"
44 #include "output.h"
45 #include "insn-attr.h"
46 #include "expr.h"
47 #include "cfgrtl.h"
48 #include "builtins.h"
49 
50 /* This file should be included last.  */
51 #include "target-def.h"
52 
53 #ifndef streq
54 #define streq(a,b) (strcmp (a, b) == 0)
55 #endif
56 
57 static void v850_print_operand_address (FILE *, machine_mode, rtx);
58 
59 /* Names of the various data areas used on the v850.  */
60 const char * GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
61 const char * GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
62 
63 /* Track the current data area set by the data area pragma (which
64    can be nested).  Tested by check_default_data_area.  */
65 data_area_stack_element * data_area_stack = NULL;
66 
67 /* True if we no longer need to check whether the current
68    function is an interrupt handler.  */
69 static int v850_interrupt_cache_p = FALSE;
70 
71 /* Whether current function is an interrupt handler.  */
72 static int v850_interrupt_p = FALSE;
73 
74 static GTY(()) section * rosdata_section;
75 static GTY(()) section * rozdata_section;
76 static GTY(()) section * tdata_section;
77 static GTY(()) section * zdata_section;
78 static GTY(()) section * zbss_section;
79 
80 /* We use this to wrap all emitted insns in the prologue.  */
81 static rtx
82 F (rtx x)
83 {
84   if (GET_CODE (x) != CLOBBER)
85     RTX_FRAME_RELATED_P (x) = 1;
86   return x;
87 }
88 
89 /* Mark all the subexpressions of the PARALLEL rtx PAR as
90    frame-related.  Return PAR.
91 
92    dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
93    PARALLEL rtx other than the first if they do not have the
94    FRAME_RELATED flag set on them.  */
95 
96 static rtx
97 v850_all_frame_related (rtx par)
98 {
99   int len = XVECLEN (par, 0);
100   int i;
101 
102   gcc_assert (GET_CODE (par) == PARALLEL);
103   for (i = 0; i < len; i++)
104     F (XVECEXP (par, 0, i));
105 
106   return par;
107 }
108 
109 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
110    Specify whether to pass the argument by reference.  */
111 
112 static bool
113 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
114 			machine_mode mode, const_tree type,
115 			bool named ATTRIBUTE_UNUSED)
116 {
117   unsigned HOST_WIDE_INT size;
118 
119   if (!TARGET_GCC_ABI)
120     return 0;
121 
122   if (type)
123     size = int_size_in_bytes (type);
124   else
125     size = GET_MODE_SIZE (mode);
126 
127   return size > 8;
128 }
129 
130 /* Return an RTX to represent where an argument with mode MODE
131    and type TYPE will be passed to a function.  If the result
132    is NULL_RTX, the argument will be pushed.  */
133 
134 static rtx
135 v850_function_arg (cumulative_args_t cum_v, machine_mode mode,
136 		   const_tree type, bool named)
137 {
138   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
139   rtx result = NULL_RTX;
140   int size, align;
141 
142   if (!named)
143     return NULL_RTX;
144 
145   if (mode == BLKmode)
146     size = int_size_in_bytes (type);
147   else
148     size = GET_MODE_SIZE (mode);
149 
150   size = (size + UNITS_PER_WORD - 1) & ~(UNITS_PER_WORD - 1);
151 
152   if (size < 1)
153     {
154       /* Once we have stopped using argument registers, do not start up again.  */
155       cum->nbytes = 4 * UNITS_PER_WORD;
156       return NULL_RTX;
157     }
158 
159   if (!TARGET_GCC_ABI)
160     align = UNITS_PER_WORD;
161   else if (size <= UNITS_PER_WORD && type)
162     align = TYPE_ALIGN (type) / BITS_PER_UNIT;
163   else
164     align = size;
165 
166   cum->nbytes = (cum->nbytes + align - 1) & ~(align - 1);
167 
168   if (cum->nbytes > 4 * UNITS_PER_WORD)
169     return NULL_RTX;
170 
171   if (type == NULL_TREE
172       && cum->nbytes + size > 4 * UNITS_PER_WORD)
173     return NULL_RTX;
174 
175   switch (cum->nbytes / UNITS_PER_WORD)
176     {
177     case 0:
178       result = gen_rtx_REG (mode, 6);
179       break;
180     case 1:
181       result = gen_rtx_REG (mode, 7);
182       break;
183     case 2:
184       result = gen_rtx_REG (mode, 8);
185       break;
186     case 3:
187       result = gen_rtx_REG (mode, 9);
188       break;
189     default:
190       result = NULL_RTX;
191     }
192 
193   return result;
194 }
195 
196 /* Return the number of bytes which must be put into registers
197    for values which are passed partly in registers and partly in memory.  */
198 static int
199 v850_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
200                         tree type, bool named)
201 {
202   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
203   int size, align;
204 
205   if (!named)
206     return 0;
207 
208   if (mode == BLKmode)
209     size = int_size_in_bytes (type);
210   else
211     size = GET_MODE_SIZE (mode);
212 
213   if (size < 1)
214     size = 1;
215 
216   if (!TARGET_GCC_ABI)
217     align = UNITS_PER_WORD;
218   else if (type)
219     align = TYPE_ALIGN (type) / BITS_PER_UNIT;
220   else
221     align = size;
222 
223   cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
224 
225   if (cum->nbytes > 4 * UNITS_PER_WORD)
226     return 0;
227 
228   if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
229     return 0;
230 
231   if (type == NULL_TREE
232       && cum->nbytes + size > 4 * UNITS_PER_WORD)
233     return 0;
234 
235   return 4 * UNITS_PER_WORD - cum->nbytes;
236 }
237 
238 /* Update the data in CUM to advance over an argument
239    of mode MODE and data type TYPE.
240    (TYPE is null for libcalls where that information may not be available.)  */
241 
242 static void
243 v850_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
244 			   const_tree type, bool named ATTRIBUTE_UNUSED)
245 {
246   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
247 
248   if (!TARGET_GCC_ABI)
249     cum->nbytes += (((mode != BLKmode
250 		      ? GET_MODE_SIZE (mode)
251 		      : int_size_in_bytes (type)) + UNITS_PER_WORD - 1)
252 		    & -UNITS_PER_WORD);
253   else
254     cum->nbytes += (((type && int_size_in_bytes (type) > 8
255 		      ? GET_MODE_SIZE (Pmode)
256 		      : (mode != BLKmode
257 			 ? GET_MODE_SIZE (mode)
258 			 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
259 		    & -UNITS_PER_WORD);
260 }
261 
262 /* Return the high and low words of a CONST_DOUBLE.  */
263 
264 static void
265 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
266 {
267   if (GET_CODE (x) == CONST_DOUBLE)
268     {
269       long t[2];
270 
271       switch (GET_MODE (x))
272 	{
273 	case E_DFmode:
274 	  REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (x), t);
275 	  *p_high = t[1];	/* since v850 is little endian */
276 	  *p_low = t[0];	/* high is second word */
277 	  return;
278 
279 	case E_SFmode:
280 	  REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), *p_high);
281 	  *p_low = 0;
282 	  return;
283 
284 	case E_VOIDmode:
285 	case E_DImode:
286 	  *p_high = CONST_DOUBLE_HIGH (x);
287 	  *p_low  = CONST_DOUBLE_LOW (x);
288 	  return;
289 
290 	default:
291 	  break;
292 	}
293     }
294 
295   fatal_insn ("const_double_split got a bad insn:", x);
296 }
297 
298 
299 /* Return the cost of integer constant VALUE (ZERO_COST if it matches constraint I).  */
300 
301 static int
302 const_costs_int (HOST_WIDE_INT value, int zero_cost)
303 {
304   if (CONST_OK_FOR_I (value))
305       return zero_cost;
306   else if (CONST_OK_FOR_J (value))
307     return 1;
308   else if (CONST_OK_FOR_K (value))
309     return 2;
310   else
311     return 4;
312 }
313 
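/* Return the cost of the constant rtx R, whose rtx code is C.  */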
314 static int
315 const_costs (rtx r, enum rtx_code c)
316 {
317   HOST_WIDE_INT high, low;
318 
319   switch (c)
320     {
321     case CONST_INT:
322       return const_costs_int (INTVAL (r), 0);
323 
324     case CONST_DOUBLE:
325       const_double_split (r, &high, &low);
326       if (GET_MODE (r) == SFmode)
327 	return const_costs_int (high, 1);
328       else
329 	return const_costs_int (high, 1) + const_costs_int (low, 1);
330 
331     case SYMBOL_REF:
332     case LABEL_REF:
333     case CONST:
334       return 2;
335 
336     case HIGH:
337       return 1;
338 
339     default:
340       return 4;
341     }
342 }
343 
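/* Compute the cost of rtx X appearing in context OUTER_CODE and store it in
   *TOTAL; return true if *TOTAL is the final cost.  Used for the
   TARGET_RTX_COSTS hook.  */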
344 static bool
345 v850_rtx_costs (rtx x, machine_mode mode, int outer_code,
346 		int opno ATTRIBUTE_UNUSED, int *total, bool speed)
347 {
348   enum rtx_code code = GET_CODE (x);
349 
350   switch (code)
351     {
352     case CONST_INT:
353     case CONST_DOUBLE:
354     case CONST:
355     case SYMBOL_REF:
356     case LABEL_REF:
357       *total = COSTS_N_INSNS (const_costs (x, code));
358       return true;
359 
360     case MOD:
361     case DIV:
362     case UMOD:
363     case UDIV:
364       if (TARGET_V850E && !speed)
365         *total = 6;
366       else
367 	*total = 60;
368       return true;
369 
370     case MULT:
371       if (TARGET_V850E
372 	  && (mode == SImode || mode == HImode || mode == QImode))
373         {
374 	  if (GET_CODE (XEXP (x, 1)) == REG)
375 	    *total = 4;
376 	  else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
377 	    {
378 	      if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
379 	        *total = 6;
380 	      else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
381 	        *total = 10;
382 	    }
383         }
384       else
385 	*total = 20;
386       return true;
387 
388     case ZERO_EXTRACT:
389       if (outer_code == COMPARE)
390 	*total = 0;
391       return false;
392 
393     default:
394       return false;
395     }
396 }
397 
398 /* Print operand X using operand code CODE to assembly language output file
399    FILE.  */
400 
401 static void
402 v850_print_operand (FILE * file, rtx x, int code)
403 {
404   HOST_WIDE_INT high, low;
405 
406   switch (code)
407     {
408     case 'c':
409       /* We use 'c' operands with symbols for .vtinherit.  */
410       if (GET_CODE (x) == SYMBOL_REF)
411         {
412           output_addr_const(file, x);
413           break;
414         }
415       /* Fall through.  */
416     case 'b':
417     case 'B':
418     case 'C':
419     case 'd':
420     case 'D':
421       switch ((code == 'B' || code == 'C' || code == 'D')
422 	      ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
423 	{
424 	  case NE:
425 	    if (code == 'c' || code == 'C')
426 	      fprintf (file, "nz");
427 	    else
428 	      fprintf (file, "ne");
429 	    break;
430 	  case EQ:
431 	    if (code == 'c' || code == 'C')
432 	      fprintf (file, "z");
433 	    else
434 	      fprintf (file, "e");
435 	    break;
436 	  case GE:
437 	    if (code == 'D' || code == 'd')
438 	      fprintf (file, "p");
439 	    else
440 	      fprintf (file, "ge");
441 	    break;
442 	  case GT:
443 	    fprintf (file, "gt");
444 	    break;
445 	  case LE:
446 	    fprintf (file, "le");
447 	    break;
448 	  case LT:
449 	    if (code == 'D' || code == 'd')
450 	      fprintf (file, "n");
451 	    else
452 	      fprintf (file, "lt");
453 	    break;
454 	  case GEU:
455 	    fprintf (file, "nl");
456 	    break;
457 	  case GTU:
458 	    fprintf (file, "h");
459 	    break;
460 	  case LEU:
461 	    fprintf (file, "nh");
462 	    break;
463 	  case LTU:
464 	    fprintf (file, "l");
465 	    break;
466 	  default:
467 	    gcc_unreachable ();
468 	}
469       break;
470     case 'F':			/* High word of CONST_DOUBLE.  */
471       switch (GET_CODE (x))
472 	{
473 	case CONST_INT:
474 	  fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
475 	  break;
476 
477 	case CONST_DOUBLE:
478 	  const_double_split (x, &high, &low);
479 	  fprintf (file, "%ld", (long) high);
480 	  break;
481 
482 	default:
483 	  gcc_unreachable ();
484 	}
485       break;
486     case 'G':			/* Low word of CONST_DOUBLE.  */
487       switch (GET_CODE (x))
488 	{
489 	case CONST_INT:
490 	  fprintf (file, "%ld", (long) INTVAL (x));
491 	  break;
492 
493 	case CONST_DOUBLE:
494 	  const_double_split (x, &high, &low);
495 	  fprintf (file, "%ld", (long) low);
496 	  break;
497 
498 	default:
499 	  gcc_unreachable ();
500 	}
501       break;
502     case 'L':
503       fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
504       break;
505     case 'M':
506       fprintf (file, "%d", exact_log2 (INTVAL (x)));
507       break;
508     case 'O':
509       gcc_assert (special_symbolref_operand (x, VOIDmode));
510 
511       if (GET_CODE (x) == CONST)
512 	x = XEXP (XEXP (x, 0), 0);
513       else
514 	gcc_assert (GET_CODE (x) == SYMBOL_REF);
515 
516       if (SYMBOL_REF_ZDA_P (x))
517 	fprintf (file, "zdaoff");
518       else if (SYMBOL_REF_SDA_P (x))
519 	fprintf (file, "sdaoff");
520       else if (SYMBOL_REF_TDA_P (x))
521 	fprintf (file, "tdaoff");
522       else
523 	gcc_unreachable ();
524       break;
525     case 'P':
526       gcc_assert (special_symbolref_operand (x, VOIDmode));
527       output_addr_const (file, x);
528       break;
529     case 'Q':
530       gcc_assert (special_symbolref_operand (x, VOIDmode));
531 
532       if (GET_CODE (x) == CONST)
533 	x = XEXP (XEXP (x, 0), 0);
534       else
535 	gcc_assert (GET_CODE (x) == SYMBOL_REF);
536 
537       if (SYMBOL_REF_ZDA_P (x))
538 	fprintf (file, "r0");
539       else if (SYMBOL_REF_SDA_P (x))
540 	fprintf (file, "gp");
541       else if (SYMBOL_REF_TDA_P (x))
542 	fprintf (file, "ep");
543       else
544 	gcc_unreachable ();
545       break;
546     case 'R':		/* 2nd word of a double.  */
547       switch (GET_CODE (x))
548 	{
549 	case REG:
550 	  fputs (reg_names[REGNO (x) + 1], file);
551 	  break;
552 	case MEM:
553 	  {
554 	    machine_mode mode = GET_MODE (x);
555 	    x = XEXP (adjust_address (x, SImode, 4), 0);
556 	    v850_print_operand_address (file, mode, x);
557 	    if (GET_CODE (x) == CONST_INT)
558 	      fprintf (file, "[r0]");
559 	  }
560 	  break;
561 
562 	case CONST_INT:
563 	  {
564 	    unsigned HOST_WIDE_INT v = INTVAL (x);
565 
566 	    /* Trickery to avoid problems with shifting
567 	       32-bits at a time on a 32-bit host.  */
568 	    v = v >> 16;
569 	    v = v >> 16;
570 	    fprintf (file, HOST_WIDE_INT_PRINT_HEX, v);
571 	    break;
572 	  }
573 
574 	case CONST_DOUBLE:
575 	  fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_HIGH (x));
576 	  break;
577 
578 	default:
579 	  debug_rtx (x);
580 	  gcc_unreachable ();
581 	}
582       break;
583     case 'S':
584       {
585         /* If it's a reference to a TDA variable, use sst/sld vs. st/ld.  */
586         if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
587           fputs ("s", file);
588 
589         break;
590       }
591     case 'T':
592       {
593 	/* Like an 'S' operand above, but for unsigned loads only.  */
594         if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
595           fputs ("s", file);
596 
597         break;
598       }
599     case 'W':			/* Print the instruction suffix.  */
600       switch (GET_MODE (x))
601 	{
602 	default:
603 	  gcc_unreachable ();
604 
605 	case E_QImode: fputs (".b", file); break;
606 	case E_HImode: fputs (".h", file); break;
607 	case E_SImode: fputs (".w", file); break;
608 	case E_SFmode: fputs (".w", file); break;
609 	}
610       break;
611     case '.':			/* Register r0.  */
612       fputs (reg_names[0], file);
613       break;
614     case 'z':			/* Reg or zero.  */
615       if (REG_P (x))
616 	fputs (reg_names[REGNO (x)], file);
617       else if ((GET_MODE(x) == SImode
618 		|| GET_MODE(x) == DFmode
619 		|| GET_MODE(x) == SFmode)
620 		&& x == CONST0_RTX(GET_MODE(x)))
621 	fputs (reg_names[0], file);
622       else
623 	{
624 	  gcc_assert (x == const0_rtx);
625 	  fputs (reg_names[0], file);
626 	}
627       break;
628     default:
629       switch (GET_CODE (x))
630 	{
631 	case MEM:
632 	  if (GET_CODE (XEXP (x, 0)) == CONST_INT)
633 	    output_address (GET_MODE (x),
634 			    gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
635 					  XEXP (x, 0)));
636 	  else
637 	    output_address (GET_MODE (x), XEXP (x, 0));
638 	  break;
639 
640 	case REG:
641 	  fputs (reg_names[REGNO (x)], file);
642 	  break;
643 	case SUBREG:
644 	  fputs (reg_names[subreg_regno (x)], file);
645 	  break;
646 	case CONST_DOUBLE:
647 	  fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
648 	  break;
649 
650 	case CONST_INT:
651 	case SYMBOL_REF:
652 	case CONST:
653 	case LABEL_REF:
654 	case CODE_LABEL:
655 	  v850_print_operand_address (file, VOIDmode, x);
656 	  break;
657 	default:
658 	  gcc_unreachable ();
659 	}
660       break;
661 
662     }
663 }
664 
665 
666 /* Output assembly language for the address ADDR to FILE.  */
667 
668 static void
669 v850_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
670 {
671   switch (GET_CODE (addr))
672     {
673     case REG:
674       fprintf (file, "0[");
675       v850_print_operand (file, addr, 0);
676       fprintf (file, "]");
677       break;
678     case LO_SUM:
679       if (GET_CODE (XEXP (addr, 0)) == REG)
680 	{
681 	  /* reg,foo */
682 	  fprintf (file, "lo(");
683 	  v850_print_operand (file, XEXP (addr, 1), 0);
684 	  fprintf (file, ")[");
685 	  v850_print_operand (file, XEXP (addr, 0), 0);
686 	  fprintf (file, "]");
687 	}
688       break;
689     case PLUS:
690       if (GET_CODE (XEXP (addr, 0)) == REG
691 	  || GET_CODE (XEXP (addr, 0)) == SUBREG)
692 	{
693 	  /* reg,foo */
694 	  v850_print_operand (file, XEXP (addr, 1), 0);
695 	  fprintf (file, "[");
696 	  v850_print_operand (file, XEXP (addr, 0), 0);
697 	  fprintf (file, "]");
698 	}
699       else
700 	{
701 	  v850_print_operand (file, XEXP (addr, 0), 0);
702 	  fprintf (file, "+");
703 	  v850_print_operand (file, XEXP (addr, 1), 0);
704 	}
705       break;
706     case SYMBOL_REF:
707       {
708         const char *off_name = NULL;
709         const char *reg_name = NULL;
710 
711 	if (SYMBOL_REF_ZDA_P (addr))
712           {
713             off_name = "zdaoff";
714             reg_name = "r0";
715           }
716         else if (SYMBOL_REF_SDA_P (addr))
717           {
718             off_name = "sdaoff";
719             reg_name = "gp";
720           }
721         else if (SYMBOL_REF_TDA_P (addr))
722           {
723             off_name = "tdaoff";
724             reg_name = "ep";
725           }
726 
727 	if (off_name)
728           fprintf (file, "%s(", off_name);
729         output_addr_const (file, addr);
730 	if (reg_name)
731           fprintf (file, ")[%s]", reg_name);
732       }
733       break;
734     case CONST:
735       if (special_symbolref_operand (addr, VOIDmode))
736         {
737 	  rtx x = XEXP (XEXP (addr, 0), 0);
738           const char *off_name;
739           const char *reg_name;
740 
741           if (SYMBOL_REF_ZDA_P (x))
742             {
743               off_name = "zdaoff";
744               reg_name = "r0";
745             }
746           else if (SYMBOL_REF_SDA_P (x))
747             {
748               off_name = "sdaoff";
749               reg_name = "gp";
750             }
751           else if (SYMBOL_REF_TDA_P (x))
752             {
753               off_name = "tdaoff";
754               reg_name = "ep";
755             }
756           else
757             gcc_unreachable ();
758 
759           fprintf (file, "%s(", off_name);
760           output_addr_const (file, addr);
761           fprintf (file, ")[%s]", reg_name);
762         }
763       else
764         output_addr_const (file, addr);
765       break;
766     default:
767       output_addr_const (file, addr);
768       break;
769     }
770 }
771 
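/* Return true if CODE is a valid punctuation character for
   print_operand; only '.' is accepted.  */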
772 static bool
773 v850_print_operand_punct_valid_p (unsigned char code)
774 {
775   return code == '.';
776 }
777 
778 /* When assemble_integer is used to emit the offsets for a switch
779    table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
780    output_addr_const will normally barf at this, but it is OK to omit
781    the truncate and just emit the difference of the two labels.  The
782    .hword directive will automatically handle the truncation for us.
783 
784    Returns true if rtx was handled, false otherwise.  */
785 
786 static bool
787 v850_output_addr_const_extra (FILE * file, rtx x)
788 {
789   if (GET_CODE (x) != TRUNCATE)
790     return false;
791 
792   x = XEXP (x, 0);
793 
794   /* We must also handle the case where the switch table was passed a
795      constant value and so has been collapsed.  In this case the first
796      label will have been deleted.  In such a case it is OK to emit
797      nothing, since the table will not be used.
798      (cf gcc.c-torture/compile/990801-1.c).  */
799   if (GET_CODE (x) == MINUS
800       && GET_CODE (XEXP (x, 0)) == LABEL_REF)
801     {
802       rtx_code_label *label
803 	= dyn_cast<rtx_code_label *> (XEXP (XEXP (x, 0), 0));
804       if (label && label->deleted ())
805 	return true;
806     }
807 
808   output_addr_const (file, x);
809   return true;
810 }
811 
812 /* Return appropriate code to load up a 1, 2, or 4 byte integer/floating
813    point value.  */
814 
815 const char *
816 output_move_single (rtx * operands)
817 {
818   rtx dst = operands[0];
819   rtx src = operands[1];
820 
821   if (REG_P (dst))
822     {
823       if (REG_P (src))
824 	return "mov %1,%0";
825 
826       else if (GET_CODE (src) == CONST_INT)
827 	{
828 	  HOST_WIDE_INT value = INTVAL (src);
829 
830 	  if (CONST_OK_FOR_J (value))		/* Signed 5-bit immediate.  */
831 	    return "mov %1,%0";
832 
833 	  else if (CONST_OK_FOR_K (value))	/* Signed 16-bit immediate.  */
834 	    return "movea %1,%.,%0";
835 
836 	  else if (CONST_OK_FOR_L (value))	/* Upper 16 bits were set.  */
837 	    return "movhi hi0(%1),%.,%0";
838 
839 	  /* A random constant.  */
840 	  else if (TARGET_V850E_UP)
841 	      return "mov %1,%0";
842 	    return "mov %1,%0";
843 	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
844 	}
845 
846       else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
847 	{
848 	  HOST_WIDE_INT high, low;
849 
850 	  const_double_split (src, &high, &low);
851 
852 	  if (CONST_OK_FOR_J (high))		/* Signed 5-bit immediate.  */
853 	    return "mov %F1,%0";
854 
855 	  else if (CONST_OK_FOR_K (high))	/* Signed 16-bit immediate.  */
856 	    return "movea %F1,%.,%0";
857 
858 	  else if (CONST_OK_FOR_L (high))	/* Upper 16 bits were set.  */
859 	    return "movhi hi0(%F1),%.,%0";
860 
861 	  /* A random constant.  */
862 	  else if (TARGET_V850E_UP)
863 	    return "mov %F1,%0";
864 
865 	  else
866 	    return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
867 	}
868 
869       else if (GET_CODE (src) == MEM)
870 	return "%S1ld%W1 %1,%0";
871 
872       else if (special_symbolref_operand (src, VOIDmode))
873 	return "movea %O1(%P1),%Q1,%0";
874 
875       else if (GET_CODE (src) == LABEL_REF
876 	       || GET_CODE (src) == SYMBOL_REF
877 	       || GET_CODE (src) == CONST)
878 	{
879 	  if (TARGET_V850E_UP)
880 	    return "mov hilo(%1),%0";
881 	  else
882 	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
883 	}
884 
885       else if (GET_CODE (src) == HIGH)
886 	return "movhi hi(%1),%.,%0";
887 
888       else if (GET_CODE (src) == LO_SUM)
889 	{
890 	  operands[2] = XEXP (src, 0);
891 	  operands[3] = XEXP (src, 1);
892 	  return "movea lo(%3),%2,%0";
893 	}
894     }
895 
896   else if (GET_CODE (dst) == MEM)
897     {
898       if (REG_P (src))
899 	return "%S0st%W0 %1,%0";
900 
901       else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
902 	return "%S0st%W0 %.,%0";
903 
904       else if (GET_CODE (src) == CONST_DOUBLE
905 	       && CONST0_RTX (GET_MODE (dst)) == src)
906 	return "%S0st%W0 %.,%0";
907     }
908 
909   fatal_insn ("output_move_single:", gen_rtx_SET (dst, src));
910   return "";
911 }
912 
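/* Return the condition code mode to use when comparing OP0 against OP1
   with condition COND.  */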
913 machine_mode
914 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1)
915 {
916   if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
917     {
918       switch (cond)
919 	{
920 	case LE:
921 	  return CC_FPU_LEmode;
922 	case GE:
923 	  return CC_FPU_GEmode;
924 	case LT:
925 	  return CC_FPU_LTmode;
926 	case GT:
927 	  return CC_FPU_GTmode;
928 	case EQ:
929 	  return CC_FPU_EQmode;
930 	case NE:
931 	  return CC_FPU_NEmode;
932 	default:
933 	  gcc_unreachable ();
934 	}
935     }
936 
937   if (op1 == const0_rtx
938       && (cond == EQ || cond == NE || cond == LT || cond == GE)
939       && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
940 	  || GET_CODE (op0) == NEG || GET_CODE (op0) == AND
941 	  || GET_CODE (op0) == IOR || GET_CODE (op0) == XOR
942 	  || GET_CODE (op0) == NOT || GET_CODE (op0) == ASHIFT))
943     return CCNZmode;
944 
945   return CCmode;
946 }
947 
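/* Emit the floating-point compare insn for comparison COND on OP0 and OP1
   and return the condition code mode of the result.  There is no NE compare
   insn, so NE is emitted as an EQ compare and the branch is inverted.  */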
948 machine_mode
949 v850_gen_float_compare (enum rtx_code cond, machine_mode mode, rtx op0, rtx op1)
950 {
951   if (GET_MODE (op0) == DFmode)
952     {
953       switch (cond)
954 	{
955 	case LE:
956 	  emit_insn (gen_cmpdf_le_insn (op0, op1));
957 	  break;
958 	case GE:
959 	  emit_insn (gen_cmpdf_ge_insn (op0, op1));
960 	  break;
961 	case LT:
962 	  emit_insn (gen_cmpdf_lt_insn (op0, op1));
963 	  break;
964 	case GT:
965 	  emit_insn (gen_cmpdf_gt_insn (op0, op1));
966 	  break;
967 	case NE:
968 	  /* Note: There is no NE comparison operator. So we
969 	     perform an EQ comparison and invert the branch.
970 	     See v850_float_nz_comparison for how this is done.  */
971 	case EQ:
972 	  emit_insn (gen_cmpdf_eq_insn (op0, op1));
973 	  break;
974 	default:
975 	  gcc_unreachable ();
976 	}
977     }
978   else if (mode == SFmode)
979     {
980       switch (cond)
981 	{
982 	case LE:
983 	  emit_insn (gen_cmpsf_le_insn(op0, op1));
984 	  break;
985 	case GE:
986 	  emit_insn (gen_cmpsf_ge_insn(op0, op1));
987 	  break;
988 	case LT:
989 	  emit_insn (gen_cmpsf_lt_insn(op0, op1));
990 	  break;
991 	case GT:
992 	  emit_insn (gen_cmpsf_gt_insn(op0, op1));
993 	  break;
994 	case NE:
995 	  /* Note: There is no NE comparison operator. So we
996 	     perform an EQ comparison and invert the branch.
997 	     See v850_float_nz_comparison for how this is done.  */
998 	case EQ:
999 	  emit_insn (gen_cmpsf_eq_insn(op0, op1));
1000 	  break;
1001 	default:
1002 	  gcc_unreachable ();
1003 	}
1004     }
1005   else
1006     gcc_unreachable ();
1007 
1008   return v850_select_cc_mode (cond, op0, op1);
1009 }
1010 
1011 /* Return maximum offset supported for a short EP memory reference of mode
1012    MODE and signedness UNSIGNEDP.  */
1013 
1014 static int
1015 ep_memory_offset (machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
1016 {
1017   int max_offset = 0;
1018 
1019   switch (mode)
1020     {
1021     case E_QImode:
1022       if (TARGET_SMALL_SLD)
1023 	max_offset = (1 << 4);
1024       else if ((TARGET_V850E_UP)
1025 		&& unsignedp)
1026 	max_offset = (1 << 4);
1027       else
1028 	max_offset = (1 << 7);
1029       break;
1030 
1031     case E_HImode:
1032       if (TARGET_SMALL_SLD)
1033 	max_offset = (1 << 5);
1034       else if ((TARGET_V850E_UP)
1035 		&& unsignedp)
1036 	max_offset = (1 << 5);
1037       else
1038 	max_offset = (1 << 8);
1039       break;
1040 
1041     case E_SImode:
1042     case E_SFmode:
1043       max_offset = (1 << 8);
1044       break;
1045 
1046     default:
1047       break;
1048     }
1049 
1050   return max_offset;
1051 }
1052 
1053 /* Return true if OP is a valid short EP memory reference.  */
1054 
1055 int
1056 ep_memory_operand (rtx op, machine_mode mode, int unsigned_load)
1057 {
1058   rtx addr, op0, op1;
1059   int max_offset;
1060   int mask;
1061 
1062   /* If we are not using the EP register on a per-function basis
1063      then do not allow this optimization at all.  This is to
1064      prevent the use of the SLD/SST instructions which cannot be
1065      guaranteed to work properly due to a hardware bug.  */
1066   if (!TARGET_EP)
1067     return FALSE;
1068 
1069   if (GET_CODE (op) != MEM)
1070     return FALSE;
1071 
1072   max_offset = ep_memory_offset (mode, unsigned_load);
1073 
1074   mask = GET_MODE_SIZE (mode) - 1;
1075 
1076   addr = XEXP (op, 0);
1077   if (GET_CODE (addr) == CONST)
1078     addr = XEXP (addr, 0);
1079 
1080   switch (GET_CODE (addr))
1081     {
1082     default:
1083       break;
1084 
1085     case SYMBOL_REF:
1086       return SYMBOL_REF_TDA_P (addr);
1087 
1088     case REG:
1089       return REGNO (addr) == EP_REGNUM;
1090 
1091     case PLUS:
1092       op0 = XEXP (addr, 0);
1093       op1 = XEXP (addr, 1);
1094       if (GET_CODE (op1) == CONST_INT
1095 	  && INTVAL (op1) < max_offset
1096 	  && INTVAL (op1) >= 0
1097 	  && (INTVAL (op1) & mask) == 0)
1098 	{
1099 	  if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1100 	    return TRUE;
1101 
1102 	  if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1103 	    return TRUE;
1104 	}
1105       break;
1106     }
1107 
1108   return FALSE;
1109 }
1110 
1111 /* Substitute memory references involving a pointer, to use the ep pointer,
1112    taking care to save and restore the original value of ep.  */
1113 
1114 static void
1115 substitute_ep_register (rtx_insn *first_insn,
1116                         rtx_insn *last_insn,
1117                         int uses,
1118                         int regno,
1119                         rtx * p_r1,
1120                         rtx * p_ep)
1121 {
1122   rtx reg = gen_rtx_REG (Pmode, regno);
1123   rtx_insn *insn;
1124 
1125   if (!*p_r1)
1126     {
1127       df_set_regs_ever_live (1, true);
1128       *p_r1 = gen_rtx_REG (Pmode, 1);
1129       *p_ep = gen_rtx_REG (Pmode, 30);
1130     }
1131 
1132   if (TARGET_DEBUG)
1133     fprintf (stderr, "\
1134 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1135 	     2 * (uses - 3), uses, reg_names[regno],
1136 	     IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
1137 	     INSN_UID (first_insn), INSN_UID (last_insn));
1138 
1139   if (NOTE_P (first_insn))
1140     first_insn = next_nonnote_insn (first_insn);
1141 
1142   last_insn = next_nonnote_insn (last_insn);
1143   for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
1144     {
1145       if (NONJUMP_INSN_P (insn))
1146 	{
1147 	  rtx pattern = single_set (insn);
1148 
1149 	  /* Replace the memory references.  */
1150 	  if (pattern)
1151 	    {
1152 	      rtx *p_mem;
1153 	      /* Memory operands are signed by default.  */
1154 	      int unsignedp = FALSE;
1155 
1156 	      if (GET_CODE (SET_DEST (pattern)) == MEM
1157 		  && GET_CODE (SET_SRC (pattern)) == MEM)
1158 		p_mem = (rtx *)0;
1159 
1160 	      else if (GET_CODE (SET_DEST (pattern)) == MEM)
1161 		p_mem = &SET_DEST (pattern);
1162 
1163 	      else if (GET_CODE (SET_SRC (pattern)) == MEM)
1164 		p_mem = &SET_SRC (pattern);
1165 
1166 	      else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
1167 		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1168 		p_mem = &XEXP (SET_SRC (pattern), 0);
1169 
1170 	      else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
1171 		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1172 		{
1173 		  p_mem = &XEXP (SET_SRC (pattern), 0);
1174 		  unsignedp = TRUE;
1175 		}
1176 	      else
1177 		p_mem = (rtx *)0;
1178 
1179 	      if (p_mem)
1180 		{
1181 		  rtx addr = XEXP (*p_mem, 0);
1182 
1183 		  if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
1184 		    *p_mem = change_address (*p_mem, VOIDmode, *p_ep);
1185 
1186 		  else if (GET_CODE (addr) == PLUS
1187 			   && GET_CODE (XEXP (addr, 0)) == REG
1188 			   && REGNO (XEXP (addr, 0)) == (unsigned) regno
1189 			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
1190 			   && ((INTVAL (XEXP (addr, 1)))
1191 			       < ep_memory_offset (GET_MODE (*p_mem),
1192 						   unsignedp))
1193 			   && ((INTVAL (XEXP (addr, 1))) >= 0))
1194 		    *p_mem = change_address (*p_mem, VOIDmode,
1195 					     gen_rtx_PLUS (Pmode,
1196 							   *p_ep,
1197 							   XEXP (addr, 1)));
1198 		}
1199 	    }
1200 	}
1201     }
1202 
1203   /* Optimize back to back cases of ep <- r1 & r1 <- ep.  */
1204   insn = prev_nonnote_insn (first_insn);
1205   if (insn && NONJUMP_INSN_P (insn)
1206       && GET_CODE (PATTERN (insn)) == SET
1207       && SET_DEST (PATTERN (insn)) == *p_ep
1208       && SET_SRC (PATTERN (insn)) == *p_r1)
1209     delete_insn (insn);
1210   else
1211     emit_insn_before (gen_rtx_SET (*p_r1, *p_ep), first_insn);
1212 
1213   emit_insn_before (gen_rtx_SET (*p_ep, reg), first_insn);
1214   emit_insn_before (gen_rtx_SET (*p_ep, *p_r1), last_insn);
1215 }
1216 
1217 
1218 /* TARGET_MACHINE_DEPENDENT_REORG.  On the V850 we use it to implement the
1219    -mep mode: heavily used pointers are copied to the ep register so that
1220    the implicit (short sld/sst) addressing forms can be used.  */
1221 
1222 static void
1223 v850_reorg (void)
1224 {
1225   struct
1226   {
1227     int uses;
1228     rtx_insn *first_insn;
1229     rtx_insn *last_insn;
1230   }
1231   regs[FIRST_PSEUDO_REGISTER];
1232 
1233   int i;
1234   int use_ep = FALSE;
1235   rtx r1 = NULL_RTX;
1236   rtx ep = NULL_RTX;
1237   rtx_insn *insn;
1238   rtx pattern;
1239 
1240   /* If not ep mode, just return now.  */
1241   if (!TARGET_EP)
1242     return;
1243 
1244   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1245     {
1246       regs[i].uses = 0;
1247       regs[i].first_insn = NULL;
1248       regs[i].last_insn = NULL;
1249     }
1250 
1251   for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
1252     {
1253       switch (GET_CODE (insn))
1254 	{
1255 	  /* End of basic block */
1256 	default:
1257 	  if (!use_ep)
1258 	    {
1259 	      int max_uses = -1;
1260 	      int max_regno = -1;
1261 
1262 	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1263 		{
1264 		  if (max_uses < regs[i].uses)
1265 		    {
1266 		      max_uses = regs[i].uses;
1267 		      max_regno = i;
1268 		    }
1269 		}
1270 
1271 	      if (max_uses > 3)
1272 		substitute_ep_register (regs[max_regno].first_insn,
1273 					regs[max_regno].last_insn,
1274 					max_uses, max_regno, &r1, &ep);
1275 	    }
1276 
1277 	  use_ep = FALSE;
1278 	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1279 	    {
1280 	      regs[i].uses = 0;
1281 	      regs[i].first_insn = NULL;
1282 	      regs[i].last_insn = NULL;
1283 	    }
1284 	  break;
1285 
1286 	case NOTE:
1287 	  break;
1288 
1289 	case INSN:
1290 	  pattern = single_set (insn);
1291 
1292 	  /* See if there are any memory references we can shorten.  */
1293 	  if (pattern)
1294 	    {
1295 	      rtx src = SET_SRC (pattern);
1296 	      rtx dest = SET_DEST (pattern);
1297 	      rtx mem;
1298 	      /* Memory operands are signed by default.  */
1299 	      int unsignedp = FALSE;
1300 
1301 	      /* We might have (SUBREG (MEM)) here, so just get rid of the
1302 		 subregs to make this code simpler.  */
1303 	      if (GET_CODE (dest) == SUBREG
1304 		  && (GET_CODE (SUBREG_REG (dest)) == MEM
1305 		      || GET_CODE (SUBREG_REG (dest)) == REG))
1306 		alter_subreg (&dest, false);
1307 	      if (GET_CODE (src) == SUBREG
1308 		  && (GET_CODE (SUBREG_REG (src)) == MEM
1309 		      || GET_CODE (SUBREG_REG (src)) == REG))
1310 		alter_subreg (&src, false);
1311 
1312 	      if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
1313 		mem = NULL_RTX;
1314 
1315 	      else if (GET_CODE (dest) == MEM)
1316 		mem = dest;
1317 
1318 	      else if (GET_CODE (src) == MEM)
1319 		mem = src;
1320 
1321 	      else if (GET_CODE (src) == SIGN_EXTEND
1322 		       && GET_CODE (XEXP (src, 0)) == MEM)
1323 		mem = XEXP (src, 0);
1324 
1325 	      else if (GET_CODE (src) == ZERO_EXTEND
1326 		       && GET_CODE (XEXP (src, 0)) == MEM)
1327 		{
1328 		  mem = XEXP (src, 0);
1329 		  unsignedp = TRUE;
1330 		}
1331 	      else
1332 		mem = NULL_RTX;
1333 
1334 	      if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
1335 		use_ep = TRUE;
1336 
1337 	      else if (!use_ep && mem
1338 		       && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
1339 		{
1340 		  rtx addr = XEXP (mem, 0);
1341 		  int regno = -1;
1342 		  int short_p;
1343 
1344 		  if (GET_CODE (addr) == REG)
1345 		    {
1346 		      short_p = TRUE;
1347 		      regno = REGNO (addr);
1348 		    }
1349 
1350 		  else if (GET_CODE (addr) == PLUS
1351 			   && GET_CODE (XEXP (addr, 0)) == REG
1352 			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
1353 			   && ((INTVAL (XEXP (addr, 1)))
1354 			       < ep_memory_offset (GET_MODE (mem), unsignedp))
1355 			   && ((INTVAL (XEXP (addr, 1))) >= 0))
1356 		    {
1357 		      short_p = TRUE;
1358 		      regno = REGNO (XEXP (addr, 0));
1359 		    }
1360 
1361 		  else
1362 		    short_p = FALSE;
1363 
1364 		  if (short_p)
1365 		    {
1366 		      regs[regno].uses++;
1367 		      regs[regno].last_insn = insn;
1368 		      if (!regs[regno].first_insn)
1369 			regs[regno].first_insn = insn;
1370 		    }
1371 		}
1372 
1373 	      /* Loading up a register in the basic block zaps any savings
1374 		 for that register.  */
1375 	      if (GET_CODE (dest) == REG)
1376 		{
1377 		  int regno;
1378 		  int endregno;
1379 
1380 		  regno = REGNO (dest);
1381 		  endregno = END_REGNO (dest);
1382 
1383 		  if (!use_ep)
1384 		    {
1385 		      /* See if we can use the pointer before this
1386 			 modification.  */
1387 		      int max_uses = -1;
1388 		      int max_regno = -1;
1389 
1390 		      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1391 			{
1392 			  if (max_uses < regs[i].uses)
1393 			    {
1394 			      max_uses = regs[i].uses;
1395 			      max_regno = i;
1396 			    }
1397 			}
1398 
1399 		      if (max_uses > 3
1400 			  && max_regno >= regno
1401 			  && max_regno < endregno)
1402 			{
1403 			  substitute_ep_register (regs[max_regno].first_insn,
1404 						  regs[max_regno].last_insn,
1405 						  max_uses, max_regno, &r1,
1406 						  &ep);
1407 
1408 			  /* Since we made a substitution, zap all remembered
1409 			     registers.  */
1410 			  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1411 			    {
1412 			      regs[i].uses = 0;
1413 			      regs[i].first_insn = NULL;
1414 			      regs[i].last_insn = NULL;
1415 			    }
1416 			}
1417 		    }
1418 
1419 		  for (i = regno; i < endregno; i++)
1420 		    {
1421 		      regs[i].uses = 0;
1422 		      regs[i].first_insn = NULL;
1423 		      regs[i].last_insn = NULL;
1424 		    }
1425 		}
1426 	    }
1427 	}
1428     }
1429 }
1430 
1431 /* # of registers saved by the interrupt handler.  */
1432 #define INTERRUPT_FIXED_NUM 5
1433 
1434 /* # of bytes for registers saved by the interrupt handler.  */
1435 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1436 
1437 /* # of words saved for other registers.  */
1438 #define INTERRUPT_ALL_SAVE_NUM \
1439   (30 - INTERRUPT_FIXED_NUM)
1440 
1441 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1442 
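/* Return the number of bytes of stack space needed to save the registers
   of the current function, and record a bit mask of the saved registers in
   *P_REG_SAVED if it is non-NULL.  */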
1443 int
1444 compute_register_save_size (long * p_reg_saved)
1445 {
1446   int size = 0;
1447   int i;
1448   int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1449   int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
1450   long reg_saved = 0;
1451 
1452   /* Count space for the register saves.  */
1453   if (interrupt_handler)
1454     {
1455       for (i = 0; i <= 31; i++)
1456 	switch (i)
1457 	  {
1458 	  default:
1459 	    if (df_regs_ever_live_p (i) || call_p)
1460 	      {
1461 		size += 4;
1462 		reg_saved |= 1L << i;
1463 	      }
1464 	    break;
1465 
1466 	    /* We don't save/restore r0 or the stack pointer */
1467 	  case 0:
1468 	  case STACK_POINTER_REGNUM:
1469 	    break;
1470 
1471 	    /* For registers with fixed use, we save them, set them to the
1472 	       appropriate value, and then restore them.
1473	       These registers are handled specially, so don't include them
1474	       in the list of registers to save in the prologue.  */
1475 	  case 1:		/* temp used to hold ep */
1476 	  case 4:		/* gp */
1477 	  case 10:		/* temp used to call interrupt save/restore */
1478 	  case 11:		/* temp used to call interrupt save/restore (long call) */
1479 	  case EP_REGNUM:	/* ep */
1480 	    size += 4;
1481 	    break;
1482 	  }
1483     }
1484   else
1485     {
1486       /* Find the first register that needs to be saved.  */
1487       for (i = 0; i <= 31; i++)
1488 	if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1489 				  || i == LINK_POINTER_REGNUM))
1490 	  break;
1491 
1492       /* If it is possible that an out-of-line helper function might be
1493 	 used to generate the prologue for the current function, then we
1494 	 need to cover the possibility that such a helper function will
1495 	 be used, despite the fact that there might be gaps in the list of
1496 	 registers that need to be saved.  To detect this we note that the
1497 	 helper functions always push at least register r29 (provided
1498 	 that the function is not an interrupt handler).  */
1499 
1500       if (TARGET_PROLOG_FUNCTION
1501           && (i == 2 || ((i >= 20) && (i < 30))))
1502 	{
1503 	  if (i == 2)
1504 	    {
1505 	      size += 4;
1506 	      reg_saved |= 1L << i;
1507 
1508 	      i = 20;
1509 	    }
1510 
1511 	  /* Helper functions save all registers between the starting
1512 	     register and the last register, regardless of whether they
1513 	     are actually used by the function or not.  */
1514 	  for (; i <= 29; i++)
1515 	    {
1516 	      size += 4;
1517 	      reg_saved |= 1L << i;
1518 	    }
1519 
1520 	  if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
1521 	    {
1522 	      size += 4;
1523 	      reg_saved |= 1L << LINK_POINTER_REGNUM;
1524 	    }
1525 	}
1526       else
1527 	{
1528 	  for (; i <= 31; i++)
1529 	    if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1530 				      || i == LINK_POINTER_REGNUM))
1531 	      {
1532 		size += 4;
1533 		reg_saved |= 1L << i;
1534 	      }
1535 	}
1536     }
1537 
1538   if (p_reg_saved)
1539     *p_reg_saved = reg_saved;
1540 
1541   return size;
1542 }
1543 
1544 /* Typical stack layout should look like this after the function's prologue:
1545 
1546                             |    |
1547                               --                       ^
1548                             |    | \                   |
1549                             |    |   arguments saved   | Increasing
1550                             |    |   on the stack      |  addresses
1551     PARENT   arg pointer -> |    | /
1552   -------------------------- ---- -------------------
1553                             |    | - space for argument split between regs & stack
1554 			      --
1555     CHILD                   |    | \    <-- (return address here)
1556                             |    |   other call
1557                             |    |   saved registers
1558                             |    | /
1559                               --
1560         frame pointer ->    |    | \             ___
1561                             |    |   local        |
1562                             |    |   variables    |f
1563                             |    | /              |r
1564                               --                  |a
1565                             |    | \              |m
1566                             |    |   outgoing     |e
1567                             |    |   arguments    |    | Decreasing
1568     (hard) frame pointer    |    |  /             |    |  addresses
1569        and stack pointer -> |    | /             _|_   |
1570   -------------------------- ---- ------------------   V */
1571 
1572 int
1573 compute_frame_size (poly_int64 size, long * p_reg_saved)
1574 {
1575   return (size
1576 	  + compute_register_save_size (p_reg_saved)
1577 	  + crtl->outgoing_args_size);
1578 }
1579 
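/* Return nonzero if calling an out-of-line prologue/epilogue helper to
   save and restore NUM_SAVE registers with a frame of FRAME_SIZE bytes is
   expected to be shorter than the equivalent inline sequence.  */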
1580 static int
1581 use_prolog_function (int num_save, int frame_size)
1582 {
1583   int alloc_stack = (4 * num_save);
1584   int unalloc_stack = frame_size - alloc_stack;
1585   int save_func_len, restore_func_len;
1586   int save_normal_len, restore_normal_len;
1587 
1588   if (! TARGET_DISABLE_CALLT)
1589       save_func_len = restore_func_len = 2;
1590   else
1591       save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
1592 
1593   if (unalloc_stack)
1594     {
1595       save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1596       restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1597     }
1598 
1599   /* See if we would have used ep to save the stack.  */
1600   if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1601     save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1602   else
1603     save_normal_len = restore_normal_len = 4 * num_save;
1604 
1605   save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1606   restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1607 
1608   /* Don't bother checking if we don't actually save any space.
1609      This happens for instance if one register is saved and additional
1610      stack space is allocated.  */
1611   return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
1612 }
1613 
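/* Adjust the stack pointer by AMOUNT bytes, loading AMOUNT into a scratch
   register first if it does not fit in a 16-bit immediate.  Mark the insns
   as frame related when IN_PROLOGUE is true.  */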
1614 static void
1615 increment_stack (signed int amount, bool in_prologue)
1616 {
1617   rtx inc;
1618 
1619   if (amount == 0)
1620     return;
1621 
1622   inc = GEN_INT (amount);
1623 
1624   if (! CONST_OK_FOR_K (amount))
1625     {
1626       rtx reg = gen_rtx_REG (Pmode, 12);
1627 
1628       inc = emit_move_insn (reg, inc);
1629       if (in_prologue)
1630 	F (inc);
1631       inc = reg;
1632     }
1633 
1634   inc = emit_insn (gen_addsi3_clobber_flags (stack_pointer_rtx, stack_pointer_rtx, inc));
1635   if (in_prologue)
1636     F (inc);
1637 }
1638 
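/* Expand the prologue of the current function: save the registers that
   need saving (using an out-of-line helper function when that is smaller)
   and allocate the stack frame.  */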
1639 void
1640 expand_prologue (void)
1641 {
1642   unsigned int i;
1643   unsigned int size = get_frame_size ();
1644   unsigned int actual_fsize;
1645   unsigned int init_stack_alloc = 0;
1646   rtx save_regs[32];
1647   rtx save_all;
1648   unsigned int num_save;
1649   int code;
1650   int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1651   long reg_saved = 0;
1652 
1653   actual_fsize = compute_frame_size (size, &reg_saved);
1654 
1655   if (flag_stack_usage_info)
1656     current_function_static_stack_size = actual_fsize;
1657 
1658   /* Save/setup global registers for interrupt functions right now.  */
1659   if (interrupt_handler)
1660     {
1661       if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
1662 	emit_insn (gen_callt_save_interrupt ());
1663       else
1664 	emit_insn (gen_save_interrupt ());
1665 
1666       actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1667 
1668       if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1669 	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1670 
1671       /* Interrupt functions are not passed arguments, so no need to
1672 	 allocate space for split structure arguments.  */
1673       gcc_assert (crtl->args.pretend_args_size == 0);
1674     }
1675 
1676   /* Identify all of the saved registers.  */
1677   num_save = 0;
1678   for (i = 1; i < 32; i++)
1679     {
1680       if (((1L << i) & reg_saved) != 0)
1681 	save_regs[num_save++] = gen_rtx_REG (Pmode, i);
1682     }
1683 
1684   if (crtl->args.pretend_args_size)
1685     {
1686       if (num_save == 0)
1687 	{
1688 	  increment_stack (- (actual_fsize + crtl->args.pretend_args_size), true);
1689 	  actual_fsize = 0;
1690 	}
1691       else
1692 	increment_stack (- crtl->args.pretend_args_size, true);
1693     }
1694 
1695   /* See if we have an insn that allocates stack space and saves the particular
1696      registers we want to.  Note that the helpers won't
1697      allocate additional space for registers GCC saves to complete a
1698      "split" structure argument.  */
1699   save_all = NULL_RTX;
1700   if (TARGET_PROLOG_FUNCTION
1701       && !crtl->args.pretend_args_size
1702       && num_save > 0)
1703     {
1704       if (use_prolog_function (num_save, actual_fsize))
1705 	{
1706 	  int alloc_stack = 4 * num_save;
1707 	  int offset = 0;
1708 
1709 	  save_all = gen_rtx_PARALLEL
1710 	    (VOIDmode,
1711 	     rtvec_alloc (num_save + 2
1712 			  + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));
1713 
1714 	  XVECEXP (save_all, 0, 0)
1715 	    = gen_rtx_SET (stack_pointer_rtx,
1716 			   gen_rtx_PLUS (Pmode,
1717 					 stack_pointer_rtx,
1718 					 GEN_INT(-alloc_stack)));
1719 	  for (i = 0; i < num_save; i++)
1720 	    {
1721 	      offset -= 4;
1722 	      XVECEXP (save_all, 0, i+1)
1723 		= gen_rtx_SET (gen_rtx_MEM (Pmode,
1724 					    gen_rtx_PLUS (Pmode,
1725 							  stack_pointer_rtx,
1726 							  GEN_INT(offset))),
1727 			       save_regs[i]);
1728 	    }
1729 
1730 	  XVECEXP (save_all, 0, num_save + 1)
1731 	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, CC_REGNUM));
1732 
1733 	  if (TARGET_DISABLE_CALLT)
1734 	    {
1735 	      XVECEXP (save_all, 0, num_save + 2)
1736 		= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));
1737 
1738 	      if (TARGET_LONG_CALLS)
1739 		XVECEXP (save_all, 0, num_save + 3)
1740 		  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
1741 	    }
1742 
1743 	  v850_all_frame_related (save_all);
1744 
1745 	  code = recog (save_all, NULL, NULL);
1746 	  if (code >= 0)
1747 	    {
1748 	      rtx insn = emit_insn (save_all);
1749 	      INSN_CODE (insn) = code;
1750 	      actual_fsize -= alloc_stack;
1751 
1752 	    }
1753 	  else
1754 	    save_all = NULL_RTX;
1755 	}
1756     }
1757 
1758   /* If no prolog save function is available, store the registers the old
1759      fashioned way (one by one).  */
1760   if (!save_all)
1761     {
1762       /* Special case interrupt functions that save all registers for a call.  */
1763       if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1764 	{
1765 	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
1766 	    emit_insn (gen_callt_save_all_interrupt ());
1767 	  else
1768 	    emit_insn (gen_save_all_interrupt ());
1769 	}
1770       else
1771 	{
1772 	  int offset;
1773 	  /* If the stack is too big, allocate it in chunks so we can do the
1774 	     register saves.  We use the register save size so we use the ep
1775 	     register.  */
1776 	  if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1777 	    init_stack_alloc = compute_register_save_size (NULL);
1778 	  else
1779 	    init_stack_alloc = actual_fsize;
1780 
1781 	  /* Save registers at the beginning of the stack frame.  */
1782 	  offset = init_stack_alloc - 4;
1783 
1784 	  if (init_stack_alloc)
1785 	    increment_stack (- (signed) init_stack_alloc, true);
1786 
1787 	  /* Save the return pointer first.  */
1788 	  if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
1789 	    {
1790 	      F (emit_move_insn (gen_rtx_MEM (SImode,
1791 					      plus_constant (Pmode,
1792 							     stack_pointer_rtx,
1793 							     offset)),
1794 				 save_regs[--num_save]));
1795 	      offset -= 4;
1796 	    }
1797 
1798 	  for (i = 0; i < num_save; i++)
1799 	    {
1800 	      F (emit_move_insn (gen_rtx_MEM (SImode,
1801 					      plus_constant (Pmode,
1802 							     stack_pointer_rtx,
1803 							     offset)),
1804 				 save_regs[i]));
1805 	      offset -= 4;
1806 	    }
1807 	}
1808     }
1809 
1810   /* Allocate the rest of the stack that was not allocated above (either it is
1811      > 32K or we just called a function to save the registers and needed more
1812      stack).  */
1813   if (actual_fsize > init_stack_alloc)
1814     increment_stack (init_stack_alloc - actual_fsize, true);
1815 
1816   /* If we need a frame pointer, set it up now.  */
1817   if (frame_pointer_needed)
1818     F (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
1819 }
1820 
1821 
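/* Expand the epilogue of the current function: deallocate the stack frame,
   restore the saved registers and emit the return insn.  */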
1822 void
1823 expand_epilogue (void)
1824 {
1825   unsigned int i;
1826   unsigned int size = get_frame_size ();
1827   long reg_saved = 0;
1828   int actual_fsize = compute_frame_size (size, &reg_saved);
1829   rtx restore_regs[32];
1830   rtx restore_all;
1831   unsigned int num_restore;
1832   int code;
1833   int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1834 
1835   /* Eliminate the initial stack stored by interrupt functions.  */
1836   if (interrupt_handler)
1837     {
1838       actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1839       if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1840 	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1841     }
1842 
1843   /* Cut off any dynamic stack created.  */
1844   if (frame_pointer_needed)
1845     emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1846 
1847   /* Identify all of the saved registers.  */
1848   num_restore = 0;
1849   for (i = 1; i < 32; i++)
1850     {
1851       if (((1L << i) & reg_saved) != 0)
1852 	restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
1853     }
1854 
1855   /* See if we have an insn that restores the particular registers we
1856      want to.  */
1857   restore_all = NULL_RTX;
1858 
1859   if (TARGET_PROLOG_FUNCTION
1860       && num_restore > 0
1861       && !crtl->args.pretend_args_size
1862       && !interrupt_handler)
1863     {
1864       int alloc_stack = (4 * num_restore);
1865 
1866       /* Don't bother checking if we don't actually save any space.  */
1867       if (use_prolog_function (num_restore, actual_fsize))
1868 	{
1869 	  int offset;
1870 	  restore_all = gen_rtx_PARALLEL (VOIDmode,
1871 					  rtvec_alloc (num_restore + 2));
1872 	  XVECEXP (restore_all, 0, 0) = ret_rtx;
1873 	  XVECEXP (restore_all, 0, 1)
1874 	    = gen_rtx_SET (stack_pointer_rtx,
1875 			   gen_rtx_PLUS (Pmode,
1876 					 stack_pointer_rtx,
1877 					 GEN_INT (alloc_stack)));
1878 
1879 	  offset = alloc_stack - 4;
1880 	  for (i = 0; i < num_restore; i++)
1881 	    {
1882 	      XVECEXP (restore_all, 0, i+2)
1883 		= gen_rtx_SET (restore_regs[i],
1884 			       gen_rtx_MEM (Pmode,
1885                                             gen_rtx_PLUS (Pmode,
1886                                                           stack_pointer_rtx,
1887                                                           GEN_INT(offset))));
1888 	      offset -= 4;
1889 	    }
1890 
1891 	  code = recog (restore_all, NULL, NULL);
1892 
1893 	  if (code >= 0)
1894 	    {
1895 	      rtx insn;
1896 
1897 	      actual_fsize -= alloc_stack;
1898 	      increment_stack (actual_fsize, false);
1899 
1900 	      insn = emit_jump_insn (restore_all);
1901 	      INSN_CODE (insn) = code;
1902 	    }
1903 	  else
1904 	    restore_all = NULL_RTX;
1905 	}
1906     }
1907 
1908   /* If no epilogue save function is available, restore the registers the
1909      old fashioned way (one by one).  */
1910   if (!restore_all)
1911     {
1912       unsigned int init_stack_free;
1913 
1914       /* If the stack is large, we need to cut it down in 2 pieces.  */
1915       if (interrupt_handler)
1916        init_stack_free = 0;
1917 	init_stack_free = 0;
1918 	init_stack_free = 4 * num_restore;
1919       else
1920 	init_stack_free = (signed) actual_fsize;
1921 
1922       /* Deallocate the rest of the stack if it is > 32K.  */
1923       if ((unsigned int) actual_fsize > init_stack_free)
1924 	increment_stack (actual_fsize - init_stack_free, false);
1925 
1926       /* Special case interrupt functions that save all registers
1927 	 for a call.  */
1928       if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1929 	{
1930 	  if (! TARGET_DISABLE_CALLT)
1931 	    emit_insn (gen_callt_restore_all_interrupt ());
1932 	  else
1933 	    emit_insn (gen_restore_all_interrupt ());
1934 	}
1935       else
1936 	{
1937 	  /* Restore registers from the beginning of the stack frame.  */
1938 	  int offset = init_stack_free - 4;
1939 
1940 	  /* Restore the return pointer first.  */
1941 	  if (num_restore > 0
1942 	      && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
1943 	    {
1944 	      emit_move_insn (restore_regs[--num_restore],
1945 			      gen_rtx_MEM (SImode,
1946 					   plus_constant (Pmode,
1947 							  stack_pointer_rtx,
1948 							  offset)));
1949 	      offset -= 4;
1950 	    }
1951 
1952 	  for (i = 0; i < num_restore; i++)
1953 	    {
1954 	      emit_move_insn (restore_regs[i],
1955 			      gen_rtx_MEM (SImode,
1956 					   plus_constant (Pmode,
1957 							  stack_pointer_rtx,
1958 							  offset)));
1959 
1960 	      emit_use (restore_regs[i]);
1961 	      offset -= 4;
1962 	    }
1963 
1964 	  /* Cut back the remainder of the stack.  */
1965 	  increment_stack (init_stack_free + crtl->args.pretend_args_size,
1966 			   false);
1967 	}
1968 
1969       /* And return or use reti for interrupt handlers.  */
1970       if (interrupt_handler)
1971         {
1972           if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
1973             emit_insn (gen_callt_return_interrupt ());
1974           else
1975             emit_jump_insn (gen_return_interrupt ());
1976         }
1977       else if (actual_fsize)
1978 	emit_jump_insn (gen_return_internal ());
1979       else
1980 	emit_jump_insn (gen_return_simple ());
1981     }
1982 
1983   v850_interrupt_cache_p = FALSE;
1984   v850_interrupt_p = FALSE;
1985 }
1986 
1987 /* Retrieve the data area that has been chosen for the given decl.  */
1988 
1989 v850_data_area
1990 v850_get_data_area (tree decl)
1991 {
1992   if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
1993     return DATA_AREA_SDA;
1994 
1995   if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
1996     return DATA_AREA_TDA;
1997 
1998   if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
1999     return DATA_AREA_ZDA;
2000 
2001   return DATA_AREA_NORMAL;
2002 }
2003 
2004 /* Store the indicated data area in the decl's attributes.  */
2005 
2006 static void
2007 v850_set_data_area (tree decl, v850_data_area data_area)
2008 {
2009   tree name;
2010 
2011   switch (data_area)
2012     {
2013     case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2014     case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2015     case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2016     default:
2017       return;
2018     }
2019 
2020   DECL_ATTRIBUTES (decl) = tree_cons
2021     (name, NULL, DECL_ATTRIBUTES (decl));
2022 }
2023 
2024 /* Handle an "interrupt" attribute; arguments as in
2025    struct attribute_spec.handler.  */
2026 static tree
2027 v850_handle_interrupt_attribute (tree *node, tree name,
2028                                  tree args ATTRIBUTE_UNUSED,
2029                                  int flags ATTRIBUTE_UNUSED,
2030                                  bool * no_add_attrs)
2031 {
2032   if (TREE_CODE (*node) != FUNCTION_DECL)
2033     {
2034       warning (OPT_Wattributes, "%qE attribute only applies to functions",
2035 	       name);
2036       *no_add_attrs = true;
2037     }
2038 
2039   return NULL_TREE;
2040 }
2041 
2042 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2043    struct attribute_spec.handler.  */
2044 static tree
2045 v850_handle_data_area_attribute (tree *node, tree name,
2046                                  tree args ATTRIBUTE_UNUSED,
2047                                  int flags ATTRIBUTE_UNUSED,
2048                                  bool * no_add_attrs)
2049 {
2050   v850_data_area data_area;
2051   v850_data_area area;
2052   tree decl = *node;
2053 
2054   /* Implement data area attribute.  */
2055   if (is_attribute_p ("sda", name))
2056     data_area = DATA_AREA_SDA;
2057   else if (is_attribute_p ("tda", name))
2058     data_area = DATA_AREA_TDA;
2059   else if (is_attribute_p ("zda", name))
2060     data_area = DATA_AREA_ZDA;
2061   else
2062     gcc_unreachable ();
2063 
2064   switch (TREE_CODE (decl))
2065     {
2066     case VAR_DECL:
2067       if (current_function_decl != NULL_TREE)
2068 	{
2069           error_at (DECL_SOURCE_LOCATION (decl),
2070 		    "data area attributes cannot be specified for "
2071 		    "local variables");
2072 	  *no_add_attrs = true;
2073 	}
2074 
2075       /* FALLTHRU */
2076 
2077     case FUNCTION_DECL:
2078       area = v850_get_data_area (decl);
2079       if (area != DATA_AREA_NORMAL && data_area != area)
2080 	{
2081 	  error ("data area of %q+D conflicts with previous declaration",
2082                  decl);
2083 	  *no_add_attrs = true;
2084 	}
2085       break;
2086 
2087     default:
2088       break;
2089     }
2090 
2091   return NULL_TREE;
2092 }
2093 
2094 
2095 /* Return nonzero if FUNC is an interrupt function as specified
2096    by the "interrupt" attribute.  */
2097 
2098 int
2099 v850_interrupt_function_p (tree func)
2100 {
2101   tree a;
2102   int ret = 0;
2103 
2104   if (v850_interrupt_cache_p)
2105     return v850_interrupt_p;
2106 
2107   if (TREE_CODE (func) != FUNCTION_DECL)
2108     return 0;
2109 
2110   a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2111   if (a != NULL_TREE)
2112     ret = 1;
2113 
2114   else
2115     {
2116       a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2117       ret = a != NULL_TREE;
2118     }
2119 
2120   /* It's not safe to trust global variables until after function inlining
2121      has been done.  */
2122   if (reload_completed | reload_in_progress)
2123     v850_interrupt_p = ret;
2124 
2125   return ret;
2126 }
2127 
2128 
2129 static void
2130 v850_encode_data_area (tree decl, rtx symbol)
2131 {
2132   int flags;
2133 
2134   /* Map explicit sections into the appropriate attribute.  */
2135   if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2136     {
2137       if (DECL_SECTION_NAME (decl))
2138 	{
2139 	  const char *name = DECL_SECTION_NAME (decl);
2140 
2141 	  if (streq (name, ".zdata") || streq (name, ".zbss"))
2142 	    v850_set_data_area (decl, DATA_AREA_ZDA);
2143 
2144 	  else if (streq (name, ".sdata") || streq (name, ".sbss"))
2145 	    v850_set_data_area (decl, DATA_AREA_SDA);
2146 
2147 	  else if (streq (name, ".tdata"))
2148 	    v850_set_data_area (decl, DATA_AREA_TDA);
2149 	}
2150 
2151       /* If no attribute, support -m{zda,sda,tda}=n.  */
2152       else
2153 	{
2154 	  int size = int_size_in_bytes (TREE_TYPE (decl));
2155 	  if (size <= 0)
2156 	    ;
2157 
2158 	  else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
2159 	    v850_set_data_area (decl, DATA_AREA_TDA);
2160 
2161 	  else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
2162 	    v850_set_data_area (decl, DATA_AREA_SDA);
2163 
2164 	  else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
2165 	    v850_set_data_area (decl, DATA_AREA_ZDA);
2166 	}
2167 
2168       if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2169 	return;
2170     }
2171 
2172   flags = SYMBOL_REF_FLAGS (symbol);
2173   switch (v850_get_data_area (decl))
2174     {
2175     case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
2176     case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
2177     case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
2178     default: gcc_unreachable ();
2179     }
2180   SYMBOL_REF_FLAGS (symbol) = flags;
2181 }
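
/* Illustrative example (not from the original sources): with -mtda=4 a
   4-byte global that carries no data area attribute and names no explicit
   section is given the "tda" attribute by the code above, and its
   SYMBOL_REF is then marked with SYMBOL_FLAG_TDA so that the rest of the
   backend can recognise it as a tiny data area reference.  */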
2182 
2183 static void
2184 v850_encode_section_info (tree decl, rtx rtl, int first)
2185 {
2186   default_encode_section_info (decl, rtl, first);
2187 
2188   if (TREE_CODE (decl) == VAR_DECL
2189       && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2190     v850_encode_data_area (decl, XEXP (rtl, 0));
2191 }
2192 
2193 /* Construct a JR instruction to a routine that will perform the equivalent of
2194    the RTL passed in as an argument.  This RTL is a function epilogue that
2195    pops registers off the stack and possibly releases some extra stack space
2196    as well.  The code has already verified that the RTL matches these
2197    requirements.  */
2198 
2199 char *
2200 construct_restore_jr (rtx op)
2201 {
2202   int count = XVECLEN (op, 0);
2203   int stack_bytes;
2204   unsigned long int mask;
2205   unsigned long int first;
2206   unsigned long int last;
2207   int i;
2208   static char buff [100]; /* XXX */
2209 
2210   if (count <= 2)
2211     {
2212       error ("bogus JR construction: %d", count);
2213       return NULL;
2214     }
2215 
2216   /* Work out how many bytes to pop off the stack before retrieving
2217      registers.  */
2218   gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2219   gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2220   gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2221 
2222   stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2223 
2224   /* Each pop will remove 4 bytes from the stack....  */
2225   stack_bytes -= (count - 2) * 4;
2226 
2227   /* Make sure that the amount of stack we are popping here is zero.  */
2228   if (stack_bytes != 0)
2229     {
2230       error ("bad amount of stack space removal: %d", stack_bytes);
2231       return NULL;
2232     }
2233 
2234   /* Now compute the bit mask of registers to pop.  */
2235   mask = 0;
2236   for (i = 2; i < count; i++)
2237     {
2238       rtx vector_element = XVECEXP (op, 0, i);
2239 
2240       gcc_assert (GET_CODE (vector_element) == SET);
2241       gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2242       gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2243 					       SImode));
2244 
2245       mask |= 1 << REGNO (SET_DEST (vector_element));
2246     }
2247 
2248   /* Scan for the first register to pop.  */
2249   for (first = 0; first < 32; first++)
2250     {
2251       if (mask & (1 << first))
2252 	break;
2253     }
2254 
2255   gcc_assert (first < 32);
2256 
2257   /* Discover the last register to pop.  */
2258   if (mask & (1 << LINK_POINTER_REGNUM))
2259     {
2260       last = LINK_POINTER_REGNUM;
2261     }
2262   else
2263     {
2264       gcc_assert (!stack_bytes);
2265       gcc_assert (mask & (1 << 29));
2266 
2267       last = 29;
2268     }
2269 
2270   /* Note, it is possible to have gaps in the register mask.
2271      We ignore this here, and generate a JR anyway.  We will
2272      be popping more registers than is strictly necessary, but
2273      it does save code space.  */
2274 
2275   if (TARGET_LONG_CALLS)
2276     {
2277       char name[40];
2278 
2279       if (first == last)
2280 	sprintf (name, "__return_%s", reg_names [first]);
2281       else
2282 	sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);
2283 
2284       sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2285 	       name, name);
2286     }
2287   else
2288     {
2289       if (first == last)
2290 	sprintf (buff, "jr __return_%s", reg_names [first]);
2291       else
2292 	sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
2293     }
2294 
2295   return buff;
2296 }
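
/* Illustrative example (not from the original sources): if the epilogue
   pops r29 through r31, the code above emits "jr __return_r29_r31"; with
   -mlong-calls it instead loads the address of __return_r29_r31 into r6
   with a movhi/movea pair and finishes with "jmp r6".  */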
2297 
2298 
2299 /* Construct a JARL instruction to a routine that will perform the equivalent
2300    of the RTL passed as a parameter.  This RTL is a function prologue that
2301    saves some of the registers r20 - r31 onto the stack, and possibly acquires
2302    some stack space as well.  The code has already verified that the RTL
2303    matches these requirements.  */
2304 char *
2305 construct_save_jarl (rtx op)
2306 {
2307   int count = XVECLEN (op, 0);
2308   int stack_bytes;
2309   unsigned long int mask;
2310   unsigned long int first;
2311   unsigned long int last;
2312   int i;
2313   static char buff [100]; /* XXX */
2314 
2315   if (count <= (TARGET_LONG_CALLS ? 3 : 2))
2316     {
2317       error ("bogus JARL construction: %d", count);
2318       return NULL;
2319     }
2320 
2321   /* Paranoia.  */
2322   gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2323   gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2324   gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
2325   gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2326 
2327   /* Work out how many bytes to push onto the stack after storing the
2328      registers.  */
2329   stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2330 
2331   /* Each push will put 4 bytes onto the stack....  */
2332   stack_bytes += (count - (TARGET_LONG_CALLS ? 4 : 3)) * 4;
2333 
2334   /* Make sure that the remaining stack adjustment is zero.  */
2335   if (stack_bytes != 0)
2336     {
2337       error ("bad amount of stack space removal: %d", stack_bytes);
2338       return NULL;
2339     }
2340 
2341   /* Now compute the bit mask of registers to push.  */
2342   mask = 0;
2343   for (i = 1; i < count - (TARGET_LONG_CALLS ? 3 : 2); i++)
2344     {
2345       rtx vector_element = XVECEXP (op, 0, i);
2346 
2347       gcc_assert (GET_CODE (vector_element) == SET);
2348       gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2349       gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2350 					       SImode));
2351 
2352       mask |= 1 << REGNO (SET_SRC (vector_element));
2353     }
2354 
2355   /* Scan for the first register to push.  */
2356   for (first = 0; first < 32; first++)
2357     {
2358       if (mask & (1 << first))
2359 	break;
2360     }
2361 
2362   gcc_assert (first < 32);
2363 
2364   /* Discover the last register to push.  */
2365   if (mask & (1 << LINK_POINTER_REGNUM))
2366     {
2367       last = LINK_POINTER_REGNUM;
2368     }
2369   else
2370     {
2371       gcc_assert (!stack_bytes);
2372       gcc_assert (mask & (1 << 29));
2373 
2374       last = 29;
2375     }
2376 
2377   /* Note, it is possible to have gaps in the register mask.
2378      We ignore this here, and generate a JARL anyway.  We will
2379      be pushing more registers than is strictly necessary, but
2380      it does save code space.  */
2381 
2382   if (TARGET_LONG_CALLS)
2383     {
2384       char name[40];
2385 
2386       if (first == last)
2387 	sprintf (name, "__save_%s", reg_names [first]);
2388       else
2389 	sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);
2390 
2391       if (TARGET_V850E3V5_UP)
2392 	sprintf (buff, "mov hilo(%s), r11\n\tjarl [r11], r10", name);
2393       else
2394 	sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2395 		 name, name);
2396     }
2397   else
2398     {
2399       if (first == last)
2400 	sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
2401       else
2402 	sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
2403 		 reg_names [last]);
2404     }
2405 
2406   return buff;
2407 }
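
/* Illustrative example (not from the original sources): if the prologue
   saves r29 through r31, the code above emits "jarl __save_r29_r31, r10";
   with -mlong-calls the address of __save_r29_r31 is materialised in r11
   first, using a single "mov hilo(...)" on V850E3V5 and later cores.  */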
2408 
2409 /* A version of asm_output_aligned_bss() that copes with the special
2410    data areas of the v850.  */
2411 void
2412 v850_output_aligned_bss (FILE * file,
2413                          tree decl,
2414                          const char * name,
2415                          unsigned HOST_WIDE_INT size,
2416                          int align)
2417 {
2418   switch (v850_get_data_area (decl))
2419     {
2420     case DATA_AREA_ZDA:
2421       switch_to_section (zbss_section);
2422       break;
2423 
2424     case DATA_AREA_SDA:
2425       switch_to_section (sbss_section);
2426       break;
2427 
2428     case DATA_AREA_TDA:
2429       switch_to_section (tdata_section);
2430       break;
2431 
2432     default:
2433       switch_to_section (bss_section);
2434       break;
2435     }
2436 
2437   ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2438 #ifdef ASM_DECLARE_OBJECT_NAME
2439   last_assemble_variable_decl = decl;
2440   ASM_DECLARE_OBJECT_NAME (file, name, decl);
2441 #else
2442   /* The standard thing is just to output a label for the object.  */
2443   ASM_OUTPUT_LABEL (file, name);
2444 #endif /* ASM_DECLARE_OBJECT_NAME */
2445   ASM_OUTPUT_SKIP (file, size ? size : 1);
2446 }
2447 
2448 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2449 void
2450 v850_output_common (FILE * file,
2451                     tree decl,
2452                     const char * name,
2453                     int size,
2454                     int align)
2455 {
2456   if (decl == NULL_TREE)
2457     {
2458       fprintf (file, "%s", COMMON_ASM_OP);
2459     }
2460   else
2461     {
2462       switch (v850_get_data_area (decl))
2463 	{
2464 	case DATA_AREA_ZDA:
2465 	  fprintf (file, "%s", ZCOMMON_ASM_OP);
2466 	  break;
2467 
2468 	case DATA_AREA_SDA:
2469 	  fprintf (file, "%s", SCOMMON_ASM_OP);
2470 	  break;
2471 
2472 	case DATA_AREA_TDA:
2473 	  fprintf (file, "%s", TCOMMON_ASM_OP);
2474 	  break;
2475 
2476 	default:
2477 	  fprintf (file, "%s", COMMON_ASM_OP);
2478 	  break;
2479 	}
2480     }
2481 
2482   assemble_name (file, name);
2483   fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2484 }
2485 
2486 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2487 void
2488 v850_output_local (FILE * file,
2489                    tree decl,
2490                    const char * name,
2491                    int size,
2492                    int align)
2493 {
2494   fprintf (file, "%s", LOCAL_ASM_OP);
2495   assemble_name (file, name);
2496   fprintf (file, "\n");
2497 
2498   ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2499 }
2500 
2501 /* Add data area to the given declaration if a ghs data area pragma is
2502    currently in effect (#pragma ghs startXXX/endXXX).  */
2503 static void
2504 v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
2505 {
2506   if (data_area_stack
2507       && data_area_stack->data_area
2508       && current_function_decl == NULL_TREE
2509       && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
2510       && v850_get_data_area (decl) == DATA_AREA_NORMAL)
2511     v850_set_data_area (decl, data_area_stack->data_area);
2512 
2513   /* Initialize the default names of the v850 specific sections,
2514      if this has not been done before.  */
2515 
2516   if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
2517     {
2518       GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
2519 	= ".sdata";
2520 
2521       GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
2522 	= ".rosdata";
2523 
2524       GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
2525 	= ".tdata";
2526 
2527       GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
2528 	= ".zdata";
2529 
2530       GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
2531 	= ".rozdata";
2532     }
2533 
2534   if (current_function_decl == NULL_TREE
2535       && (TREE_CODE (decl) == VAR_DECL
2536 	  || TREE_CODE (decl) == CONST_DECL
2537 	  || TREE_CODE (decl) == FUNCTION_DECL)
2538       && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
2539       && !DECL_SECTION_NAME (decl))
2540     {
2541       enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
2542       const char * chosen_section;
2543 
2544       if (TREE_CODE (decl) == FUNCTION_DECL)
2545 	kind = GHS_SECTION_KIND_TEXT;
2546       else
2547 	{
2548 	  /* First choose a section kind based on the data area of the decl.  */
2549 	  switch (v850_get_data_area (decl))
2550 	    {
2551 	    default:
2552 	      gcc_unreachable ();
2553 
2554 	    case DATA_AREA_SDA:
2555 	      kind = ((TREE_READONLY (decl))
2556 		      ? GHS_SECTION_KIND_ROSDATA
2557 		      : GHS_SECTION_KIND_SDATA);
2558 	      break;
2559 
2560 	    case DATA_AREA_TDA:
2561 	      kind = GHS_SECTION_KIND_TDATA;
2562 	      break;
2563 
2564 	    case DATA_AREA_ZDA:
2565 	      kind = ((TREE_READONLY (decl))
2566 		      ? GHS_SECTION_KIND_ROZDATA
2567 		      : GHS_SECTION_KIND_ZDATA);
2568 	      break;
2569 
2570 	    case DATA_AREA_NORMAL:		 /* default data area */
2571 	      if (TREE_READONLY (decl))
2572 		kind = GHS_SECTION_KIND_RODATA;
2573 	      else if (DECL_INITIAL (decl))
2574 		kind = GHS_SECTION_KIND_DATA;
2575 	      else
2576 		kind = GHS_SECTION_KIND_BSS;
2577 	    }
2578 	}
2579 
2580       /* Now, if the section kind has been explicitly renamed,
2581          then attach a section attribute.  */
2582       chosen_section = GHS_current_section_names [(int) kind];
2583 
2584       /* Otherwise, if this kind of section needs an explicit section
2585          attribute, then also attach one.  */
2586       if (chosen_section == NULL)
2587         chosen_section = GHS_default_section_names [(int) kind];
2588 
2589       if (chosen_section)
2590 	{
2591 	  /* Only set the section name if specified by a pragma, because
2592 	     otherwise it will force those variables to get allocated storage
2593 	     in this module, rather than by the linker.  */
2594 	  set_decl_section_name (decl, chosen_section);
2595 	}
2596     }
2597 }
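
/* Illustrative example (not from the original sources): a TREE_READONLY
   variable carrying the "zda" attribute is classified above as
   GHS_SECTION_KIND_ROZDATA and, unless that kind has been renamed by a GHS
   section pragma, receives the default section name ".rozdata".  */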
2598 
2599 /* Construct a DISPOSE instruction that is the equivalent of
2600    the given RTX.  We have already verified that this should
2601    be possible.  */
2602 
2603 char *
2604 construct_dispose_instruction (rtx op)
2605 {
2606   int                count = XVECLEN (op, 0);
2607   int                stack_bytes;
2608   unsigned long int  mask;
2609   int		     i;
2610   static char        buff[ 100 ]; /* XXX */
2611   int                use_callt = 0;
2612 
2613   if (count <= 2)
2614     {
2615       error ("bogus DISPOSE construction: %d", count);
2616       return NULL;
2617     }
2618 
2619   /* Work out how many bytes to pop off the
2620      stack before retrieving registers.  */
2621   gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2622   gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2623   gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2624 
2625   stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2626 
2627   /* Each pop will remove 4 bytes from the stack....  */
2628   stack_bytes -= (count - 2) * 4;
2629 
2630   /* Make sure that the amount we are popping
2631      will fit into the DISPOSE instruction.  */
2632   if (stack_bytes > 128)
2633     {
2634       error ("too much stack space to dispose of: %d", stack_bytes);
2635       return NULL;
2636     }
2637 
2638   /* Now compute the bit mask of registers to pop.  */
2639   mask = 0;
2640 
2641   for (i = 2; i < count; i++)
2642     {
2643       rtx vector_element = XVECEXP (op, 0, i);
2644 
2645       gcc_assert (GET_CODE (vector_element) == SET);
2646       gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2647       gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2648 					       SImode));
2649 
2650       if (REGNO (SET_DEST (vector_element)) == 2)
2651 	use_callt = 1;
2652       else
2653         mask |= 1 << REGNO (SET_DEST (vector_element));
2654     }
2655 
2656   if (! TARGET_DISABLE_CALLT
2657       && (use_callt || stack_bytes == 0))
2658     {
2659       if (use_callt)
2660 	{
2661 	  sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2662 	  return buff;
2663 	}
2664       else
2665 	{
2666 	  for (i = 20; i < 32; i++)
2667 	    if (mask & (1 << i))
2668 	      break;
2669 
2670 	  if (i == 31)
2671 	    sprintf (buff, "callt ctoff(__callt_return_r31c)");
2672 	  else
2673 	    sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2674 		     i, (mask & (1 << 31)) ? "31c" : "29");
2675 	}
2676     }
2677   else
2678     {
2679       static char        regs [100]; /* XXX */
2680       int                done_one;
2681 
2682       /* Generate the DISPOSE instruction.  Note we could just issue the
2683 	 bit mask as a number as the assembler can cope with this, but for
2684 	 the sake of our readers we turn it into a textual description.  */
2685       regs[0] = 0;
2686       done_one = 0;
2687 
2688       for (i = 20; i < 32; i++)
2689 	{
2690 	  if (mask & (1 << i))
2691 	    {
2692 	      int first;
2693 
2694 	      if (done_one)
2695 		strcat (regs, ", ");
2696 	      else
2697 		done_one = 1;
2698 
2699 	      first = i;
2700 	      strcat (regs, reg_names[ first ]);
2701 
2702 	      for (i++; i < 32; i++)
2703 		if ((mask & (1 << i)) == 0)
2704 		  break;
2705 
2706 	      if (i > first + 1)
2707 		{
2708 		  strcat (regs, " - ");
2709 		  strcat (regs, reg_names[ i - 1 ] );
2710 		}
2711 	    }
2712 	}
2713 
2714       sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2715     }
2716 
2717   return buff;
2718 }
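
/* Illustrative example (not from the original sources): for a register mask
   covering r20-r23 plus r31 and 16 bytes of extra stack, the code above
   prints "dispose 4 {r20 - r23, r31}, r31"; if r2 is among the restored
   registers and CALLT is enabled, a "callt ctoff(__callt_return_r2_r29)" or
   "callt ctoff(__callt_return_r2_r31)" form is used instead.  */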
2719 
2720 /* Construct a PREPARE instruction that is the equivalent of
2721    the given RTL.  We have already verified that this should
2722    be possible.  */
2723 
2724 char *
2725 construct_prepare_instruction (rtx op)
2726 {
2727   int                count;
2728   int                stack_bytes;
2729   unsigned long int  mask;
2730   int		     i;
2731   static char        buff[ 100 ]; /* XXX */
2732   int		     use_callt = 0;
2733 
2734   if (XVECLEN (op, 0) <= 1)
2735     {
2736       error ("bogus PREPARE construction: %d", XVECLEN (op, 0));
2737       return NULL;
2738     }
2739 
2740   /* Work out how many bytes to push onto
2741      the stack after storing the registers.  */
2742   gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2743   gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2744   gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2745 
2746   stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2747 
2748 
2749   /* Make sure that the amount we are allocating
2750      will fit into the PREPARE instruction.  */
2751   if (stack_bytes < -128)
2752     {
2753       error ("too much stack space to prepare: %d", stack_bytes);
2754       return NULL;
2755     }
2756 
2757   /* Now compute the bit mask of registers to push.  */
2758   count = 0;
2759   mask = 0;
2760   for (i = 1; i < XVECLEN (op, 0); i++)
2761     {
2762       rtx vector_element = XVECEXP (op, 0, i);
2763 
2764       if (GET_CODE (vector_element) == CLOBBER)
2765 	continue;
2766 
2767       gcc_assert (GET_CODE (vector_element) == SET);
2768       gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2769       gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2770 					       SImode));
2771 
2772       if (REGNO (SET_SRC (vector_element)) == 2)
2773 	use_callt = 1;
2774       else
2775 	mask |= 1 << REGNO (SET_SRC (vector_element));
2776       count++;
2777     }
2778 
2779   stack_bytes += count * 4;
2780 
2781   if ((! TARGET_DISABLE_CALLT)
2782       && (use_callt || stack_bytes == 0))
2783     {
2784       if (use_callt)
2785 	{
2786 	  sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2787 	  return buff;
2788 	}
2789 
2790       for (i = 20; i < 32; i++)
2791 	if (mask & (1 << i))
2792 	  break;
2793 
2794       if (i == 31)
2795 	sprintf (buff, "callt ctoff(__callt_save_r31c)");
2796       else
2797 	sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2798 		 i, (mask & (1 << 31)) ? "31c" : "29");
2799     }
2800   else
2801     {
2802       static char        regs [100]; /* XXX */
2803       int                done_one;
2804 
2805 
2806       /* Generate the PREPARE instruction.  Note we could just issue the
2807 	 bit mask as a number as the assembler can cope with this, but for
2808 	 the sake of our readers we turn it into a textual description.  */
2809       regs[0] = 0;
2810       done_one = 0;
2811 
2812       for (i = 20; i < 32; i++)
2813 	{
2814 	  if (mask & (1 << i))
2815 	    {
2816 	      int first;
2817 
2818 	      if (done_one)
2819 		strcat (regs, ", ");
2820 	      else
2821 		done_one = 1;
2822 
2823 	      first = i;
2824 	      strcat (regs, reg_names[ first ]);
2825 
2826 	      for (i++; i < 32; i++)
2827 		if ((mask & (1 << i)) == 0)
2828 		  break;
2829 
2830 	      if (i > first + 1)
2831 		{
2832 		  strcat (regs, " - ");
2833 		  strcat (regs, reg_names[ i - 1 ] );
2834 		}
2835 	    }
2836 	}
2837 
2838       sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2839     }
2840 
2841   return buff;
2842 }
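
/* Illustrative example (not from the original sources): if the prologue
   allocates 32 bytes of stack and stores the four registers r20-r23,
   stack_bytes above works out to -32 + 4 * 4 = -16 and the code prints
   "prepare {r20 - r23}, 4", i.e. four additional words on top of the
   register save area.  */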
2843 
2844 /* Return an RTX indicating where the return address to the
2845    calling function can be found.  */
2846 
2847 rtx
2848 v850_return_addr (int count)
2849 {
2850   if (count != 0)
2851     return const0_rtx;
2852 
2853   return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2854 }
2855 
2856 /* Implement TARGET_ASM_INIT_SECTIONS.  */
2857 
2858 static void
2859 v850_asm_init_sections (void)
2860 {
2861   rosdata_section
2862     = get_unnamed_section (0, output_section_asm_op,
2863 			   "\t.section .rosdata,\"a\"");
2864 
2865   rozdata_section
2866     = get_unnamed_section (0, output_section_asm_op,
2867 			   "\t.section .rozdata,\"a\"");
2868 
2869   tdata_section
2870     = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2871 			   "\t.section .tdata,\"aw\"");
2872 
2873   zdata_section
2874     = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2875 			   "\t.section .zdata,\"aw\"");
2876 
2877   zbss_section
2878     = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2879 			   output_section_asm_op,
2880 			   "\t.section .zbss,\"aw\"");
2881 }
2882 
2883 static section *
2884 v850_select_section (tree exp,
2885                      int reloc ATTRIBUTE_UNUSED,
2886                      unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2887 {
2888   if (TREE_CODE (exp) == VAR_DECL)
2889     {
2890       int is_const;
2891       if (!TREE_READONLY (exp)
2892 	  || TREE_SIDE_EFFECTS (exp)
2893 	  || !DECL_INITIAL (exp)
2894 	  || (DECL_INITIAL (exp) != error_mark_node
2895 	      && !TREE_CONSTANT (DECL_INITIAL (exp))))
2896         is_const = FALSE;
2897       else
2898         is_const = TRUE;
2899 
2900       switch (v850_get_data_area (exp))
2901         {
2902         case DATA_AREA_ZDA:
2903 	  return is_const ? rozdata_section : zdata_section;
2904 
2905         case DATA_AREA_TDA:
2906 	  return tdata_section;
2907 
2908         case DATA_AREA_SDA:
2909 	  return is_const ? rosdata_section : sdata_section;
2910 
2911         default:
2912 	  return is_const ? readonly_data_section : data_section;
2913         }
2914     }
2915   return readonly_data_section;
2916 }
2917 
2918 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.  */
2919 
2920 static bool
2921 v850_function_value_regno_p (const unsigned int regno)
2922 {
2923   return (regno == RV_REGNUM);
2924 }
2925 
2926 /* Worker function for TARGET_RETURN_IN_MEMORY.  */
2927 
2928 static bool
2929 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2930 {
2931   /* Return values > 8 bytes in length in memory.  */
2932   return int_size_in_bytes (type) > 8
2933     || TYPE_MODE (type) == BLKmode
2934     /* With the rh850 ABI return all aggregates in memory.  */
2935     || ((! TARGET_GCC_ABI) && AGGREGATE_TYPE_P (type))
2936     ;
2937 }
2938 
2939 /* Worker function for TARGET_FUNCTION_VALUE.  */
2940 
2941 static rtx
2942 v850_function_value (const_tree valtype,
2943                     const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
2944                     bool outgoing ATTRIBUTE_UNUSED)
2945 {
2946   return gen_rtx_REG (TYPE_MODE (valtype), RV_REGNUM);
2947 }
2948 
2949 /* Implement TARGET_LIBCALL_VALUE.  */
2950 
2951 static rtx
2952 v850_libcall_value (machine_mode mode,
2953 		    const_rtx func ATTRIBUTE_UNUSED)
2954 {
2955   return gen_rtx_REG (mode, RV_REGNUM);
2956 }
2957 
2958 
2959 /* Worker function for TARGET_CAN_ELIMINATE.  */
2960 
2961 static bool
2962 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2963 {
2964   return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
2965 }
2966 
2967 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
2968 
2969    r2 and r5 are treated as fixed registers unless TARGET_APP_REGS is
2970    enabled, in which case they are released for general use.  See PR 14505.  */
2971 
2972 static void
2973 v850_conditional_register_usage (void)
2974 {
2975   if (TARGET_APP_REGS)
2976     {
2977      fixed_regs[2] = 0;  call_used_regs[2] = 0;
2978      fixed_regs[5] = 0;  call_used_regs[5] = 1;
2979     }
2980 }
2981 
2982 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.  */
2983 
2984 static void
2985 v850_asm_trampoline_template (FILE *f)
2986 {
2987   fprintf (f, "\tjarl .+4,r12\n");
2988   fprintf (f, "\tld.w 12[r12],r20\n");
2989   fprintf (f, "\tld.w 16[r12],r12\n");
2990   fprintf (f, "\tjmp [r12]\n");
2991   fprintf (f, "\tnop\n");
2992   fprintf (f, "\t.long 0\n");
2993   fprintf (f, "\t.long 0\n");
2994 }
2995 
2996 /* Worker function for TARGET_TRAMPOLINE_INIT.  */
2997 
2998 static void
2999 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
3000 {
3001   rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);
3002 
3003   emit_block_move (m_tramp, assemble_trampoline_template (),
3004 		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3005 
3006   mem = adjust_address (m_tramp, SImode, 16);
3007   emit_move_insn (mem, chain_value);
3008   mem = adjust_address (m_tramp, SImode, 20);
3009   emit_move_insn (mem, fnaddr);
3010 }
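
/* Illustrative note (not from the original sources): the "jarl .+4,r12" in
   the template above leaves the address of the following instruction, that
   is the template start plus 4, in r12; the subsequent "ld.w 12[r12]" and
   "ld.w 16[r12]" therefore read the two words that v850_trampoline_init
   stores at offsets 16 and 20: the static chain value and the target
   function address.  */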
3011 
3012 static int
3013 v850_issue_rate (void)
3014 {
3015   return (TARGET_V850E2_UP ? 2 : 1);
3016 }
3017 
3018 /* Implement TARGET_LEGITIMATE_CONSTANT_P.  */
3019 
3020 static bool
3021 v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3022 {
3023   return (GET_CODE (x) == CONST_DOUBLE
3024 	  || !(GET_CODE (x) == CONST
3025 	       && GET_CODE (XEXP (x, 0)) == PLUS
3026 	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3027 	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3028 	       && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
3029 }
3030 
3031 /* Helper function for `v850_legitimate_address_p'.  */
3032 
3033 static bool
3034 v850_reg_ok_for_base_p (const_rtx reg, bool strict_p)
3035 {
3036   if (strict_p)
3037     {
3038       return REGNO_OK_FOR_BASE_P (REGNO (reg));
3039     }
3040 
3041   return true;
3042 }
3043 
3044 /* Accept either REG or SUBREG where a register is valid.  */
3045 
3046 static bool
3047 v850_rtx_ok_for_base_p (const_rtx x, bool strict_p)
3048 {
3049   return ((REG_P (x) && v850_reg_ok_for_base_p  (x, strict_p))
3050 	  || (SUBREG_P (x) && REG_P (SUBREG_REG (x))
3051 	      && v850_reg_ok_for_base_p (SUBREG_REG (x), strict_p)));
3052 }
3053 
3054 /* Implement TARGET_LEGITIMATE_ADDRESS_P.  */
3055 
3056 static bool
3057 v850_legitimate_address_p (machine_mode mode, rtx x, bool strict_p,
3058 			   addr_space_t as ATTRIBUTE_UNUSED)
3059 {
3060   gcc_assert (ADDR_SPACE_GENERIC_P (as));
3061 
3062   if (v850_rtx_ok_for_base_p (x, strict_p))
3063     return true;
3064   if (CONSTANT_ADDRESS_P (x)
3065       && (mode == QImode || INTVAL (x) % 2 == 0)
3066       && (GET_MODE_SIZE (mode) <= 4 || INTVAL (x) % 4 == 0))
3067     return true;
3068   if (GET_CODE (x) == LO_SUM
3069       && REG_P (XEXP (x, 0))
3070       && v850_reg_ok_for_base_p (XEXP (x, 0), strict_p)
3071       && CONSTANT_P (XEXP (x, 1))
3072       && (!CONST_INT_P (XEXP (x, 1))
3073 	  || ((mode == QImode || INTVAL (XEXP (x, 1)) % 2 == 0)
3074 	      && constraint_satisfied_p (XEXP (x, 1), CONSTRAINT_K)))
3075       && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (word_mode))
3076     return true;
3077   if (special_symbolref_operand (x, mode)
3078       && (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (word_mode)))
3079     return true;
3080   if (GET_CODE (x) == PLUS
3081       && v850_rtx_ok_for_base_p (XEXP (x, 0), strict_p)
3082       && (constraint_satisfied_p (XEXP (x, 1), CONSTRAINT_K)
3083 	  || (TARGET_V850E2V3_UP
3084 	      && (mode == SImode || mode == HImode || mode == QImode)
3085 	      && constraint_satisfied_p (XEXP (x, 1), CONSTRAINT_W)))
3086       && ((mode == QImode || INTVAL (XEXP (x, 1)) % 2 == 0)
3087 	   && CONST_OK_FOR_K (INTVAL (XEXP (x, 1))
3088 			      + (GET_MODE_NUNITS (mode) * UNITS_PER_WORD))))
3089     return true;
3090 
3091   return false;
3092 }
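
/* Illustrative examples (not from the original sources): a bare base
   register such as (reg r6) is accepted; (plus (reg r6) (const_int 8)) is
   accepted for SImode because the displacement is even, satisfies
   constraint K, and 8 + GET_MODE_NUNITS (mode) * UNITS_PER_WORD still
   satisfies CONST_OK_FOR_K; an odd displacement such as (const_int 3) is
   rejected for anything wider than QImode.  */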
3093 
3094 static int
3095 v850_memory_move_cost (machine_mode mode,
3096 		       reg_class_t reg_class ATTRIBUTE_UNUSED,
3097 		       bool in)
3098 {
3099   switch (GET_MODE_SIZE (mode))
3100     {
3101     case 0:
3102       return in ? 24 : 8;
3103     case 1:
3104     case 2:
3105     case 3:
3106     case 4:
3107       return in ? 6 : 2;
3108     default:
3109       return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
3110     }
3111 }
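
/* Worked example (not from the original sources): with the costs above an
   8-byte DImode load is costed at (8 / 2) * 3 = 12 and the matching store
   at (8 / 2) * 1 = 4, while any access of four bytes or fewer costs 6 to
   load and 2 to store.  */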
3112 
3113 int
3114 v850_adjust_insn_length (rtx_insn *insn, int length)
3115 {
3116   if (TARGET_V850E3V5_UP)
3117     {
3118       if (CALL_P (insn))
3119 	{
3120 	  if (TARGET_LONG_CALLS)
3121 	    {
3122 	      /* call_internal_long, call_value_internal_long.  */
3123 	      if (length == 8)
3124 		length = 4;
3125 	      if (length == 16)
3126 		length = 10;
3127 	    }
3128 	  else
3129 	    {
3130 	      /* call_internal_short, call_value_internal_short.  */
3131 	      if (length == 8)
3132 		length = 4;
3133 	    }
3134 	}
3135     }
3136   return length;
3137 }
3138 
3139 /* V850 specific attributes.  */
3140 
3141 static const struct attribute_spec v850_attribute_table[] =
3142 {
3143   /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
3144        affects_type_identity, handler, exclude } */
3145   { "interrupt_handler", 0, 0, true,  false, false, false,
3146     v850_handle_interrupt_attribute, NULL },
3147   { "interrupt",         0, 0, true,  false, false, false,
3148     v850_handle_interrupt_attribute, NULL },
3149   { "sda",               0, 0, true,  false, false, false,
3150     v850_handle_data_area_attribute, NULL },
3151   { "tda",               0, 0, true,  false, false, false,
3152     v850_handle_data_area_attribute, NULL },
3153   { "zda",               0, 0, true,  false, false, false,
3154     v850_handle_data_area_attribute, NULL },
3155   { NULL,                0, 0, false, false, false, false, NULL, NULL }
3156 };
3157 
3158 static void
3159 v850_option_override (void)
3160 {
3161   if (flag_exceptions || flag_non_call_exceptions)
3162     flag_omit_frame_pointer = 0;
3163 
3164   /* The RH850 ABI does not (currently) support the use of the CALLT instruction.  */
3165   if (! TARGET_GCC_ABI)
3166     target_flags |= MASK_DISABLE_CALLT;
3167 }
3168 
3169 const char *
3170 v850_gen_movdi (rtx * operands)
3171 {
3172   if (REG_P (operands[0]))
3173     {
3174       if (REG_P (operands[1]))
3175 	{
3176 	  if (REGNO (operands[0]) == (REGNO (operands[1]) - 1))
3177 	    return "mov %1, %0; mov %R1, %R0";
3178 
3179 	  return "mov %R1, %R0; mov %1, %0";
3180 	}
3181 
3182       if (MEM_P (operands[1]))
3183 	{
3184 	  if (REGNO (operands[0]) & 1)
3185 	    /* Use two load word instructions to synthesise a load double.  */
3186 	    return "ld.w %1, %0 ; ld.w %R1, %R0" ;
3187 
3188 	  return "ld.dw %1, %0";
3189 	}
3190 
3191       return "mov %1, %0; mov %R1, %R0";
3192     }
3193 
3194   gcc_assert (REG_P (operands[1]));
3195 
3196   if (REGNO (operands[1]) & 1)
3197     /* Use two store word instructions to synthesise a store double.  */
3198     return "st.w %1, %0 ; st.w %R1, %R0 ";
3199 
3200   return "st.dw %1, %0";
3201 }
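
/* Illustrative note (not from the original sources): the ordering of the two
   "mov" (or "ld.w"/"st.w") halves above matters when the source and
   destination register pairs overlap; the half that reads the overlapping
   register is emitted before the half that overwrites it, and the single
   ld.dw/st.dw forms are only used when the register pair starts on an even
   register number.  */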
3202 
3203 /* Implement TARGET_HARD_REGNO_MODE_OK.  */
3204 
3205 static bool
3206 v850_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
3207 {
3208   return GET_MODE_SIZE (mode) <= 4 || ((regno & 1) == 0 && regno != 0);
3209 }
3210 
3211 /* Implement TARGET_MODES_TIEABLE_P.  */
3212 
3213 static bool
3214 v850_modes_tieable_p (machine_mode mode1, machine_mode mode2)
3215 {
3216   return (mode1 == mode2
3217 	  || (GET_MODE_SIZE (mode1) <= 4 && GET_MODE_SIZE (mode2) <= 4));
3218 }
3219 
3220 /* Initialize the GCC target structure.  */
3221 
3222 #undef  TARGET_OPTION_OVERRIDE
3223 #define TARGET_OPTION_OVERRIDE		v850_option_override
3224 
3225 #undef  TARGET_MEMORY_MOVE_COST
3226 #define TARGET_MEMORY_MOVE_COST 	v850_memory_move_cost
3227 
3228 #undef  TARGET_ASM_ALIGNED_HI_OP
3229 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
3230 
3231 #undef  TARGET_PRINT_OPERAND
3232 #define TARGET_PRINT_OPERAND 		v850_print_operand
3233 #undef  TARGET_PRINT_OPERAND_ADDRESS
3234 #define TARGET_PRINT_OPERAND_ADDRESS 		v850_print_operand_address
3235 #undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
3236 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P 	v850_print_operand_punct_valid_p
3237 
3238 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3239 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra
3240 
3241 #undef  TARGET_ATTRIBUTE_TABLE
3242 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
3243 
3244 #undef  TARGET_INSERT_ATTRIBUTES
3245 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
3246 
3247 #undef  TARGET_ASM_SELECT_SECTION
3248 #define TARGET_ASM_SELECT_SECTION  v850_select_section
3249 
3250 /* The assembler supports switchable .bss sections, but
3251    v850_select_section doesn't yet make use of them.  */
3252 #undef  TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
3253 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
3254 
3255 #undef  TARGET_ENCODE_SECTION_INFO
3256 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
3257 
3258 #undef  TARGET_ASM_FILE_START_FILE_DIRECTIVE
3259 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
3260 
3261 #undef  TARGET_RTX_COSTS
3262 #define TARGET_RTX_COSTS v850_rtx_costs
3263 
3264 #undef  TARGET_ADDRESS_COST
3265 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
3266 
3267 #undef  TARGET_MACHINE_DEPENDENT_REORG
3268 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
3269 
3270 #undef  TARGET_SCHED_ISSUE_RATE
3271 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
3272 
3273 #undef  TARGET_FUNCTION_VALUE_REGNO_P
3274 #define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
3275 #undef  TARGET_FUNCTION_VALUE
3276 #define TARGET_FUNCTION_VALUE v850_function_value
3277 #undef  TARGET_LIBCALL_VALUE
3278 #define TARGET_LIBCALL_VALUE v850_libcall_value
3279 
3280 #undef  TARGET_PROMOTE_PROTOTYPES
3281 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3282 
3283 #undef  TARGET_RETURN_IN_MEMORY
3284 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
3285 
3286 #undef  TARGET_PASS_BY_REFERENCE
3287 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
3288 
3289 #undef  TARGET_CALLEE_COPIES
3290 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
3291 
3292 #undef  TARGET_ARG_PARTIAL_BYTES
3293 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
3294 
3295 #undef  TARGET_FUNCTION_ARG
3296 #define TARGET_FUNCTION_ARG v850_function_arg
3297 
3298 #undef  TARGET_FUNCTION_ARG_ADVANCE
3299 #define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance
3300 
3301 #undef  TARGET_CAN_ELIMINATE
3302 #define TARGET_CAN_ELIMINATE v850_can_eliminate
3303 
3304 #undef  TARGET_CONDITIONAL_REGISTER_USAGE
3305 #define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage
3306 
3307 #undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
3308 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
3309 #undef  TARGET_TRAMPOLINE_INIT
3310 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
3311 
3312 #undef  TARGET_LEGITIMATE_CONSTANT_P
3313 #define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p
3314 
3315 #undef  TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
3316 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P v850_legitimate_address_p
3317 
3318 #undef  TARGET_CAN_USE_DOLOOP_P
3319 #define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
3320 
3321 #undef  TARGET_HARD_REGNO_MODE_OK
3322 #define TARGET_HARD_REGNO_MODE_OK v850_hard_regno_mode_ok
3323 
3324 #undef  TARGET_MODES_TIEABLE_P
3325 #define TARGET_MODES_TIEABLE_P v850_modes_tieable_p
3326 
3327 #undef TARGET_FLAGS_REGNUM
3328 #define TARGET_FLAGS_REGNUM 32
3329 
3330 #undef  TARGET_HAVE_SPECULATION_SAFE_VALUE
3331 #define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
3332 
3333 struct gcc_target targetm = TARGET_INITIALIZER;
3334 
3335 #include "gt-v850.h"
3336