/* Subroutines used for code generation on the Lattice Mico32 architecture.
   Contributed by Jon Beniston <jon@beniston.com>

   Copyright (C) 2009-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "lcm.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "insn-codes.h"
#include "recog.h"
#include "output.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "calls.h"
#include "flags.h"
#include "statistics.h"
#include "double-int.h"
#include "real.h"
#include "fixed-value.h"
#include "alias.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "reload.h"
#include "tm_p.h"
#include "diagnostic-core.h"
#include "optabs.h"
#include "libfuncs.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "tm-constrs.h"
#include "df.h"
#include "builtins.h"

struct lm32_frame_info
{
  HOST_WIDE_INT total_size;	/* number of bytes of entire frame.  */
  HOST_WIDE_INT callee_size;	/* number of bytes to save callee saves.  */
  HOST_WIDE_INT pretend_size;	/* number of bytes we pretend caller did.  */
  HOST_WIDE_INT args_size;	/* number of bytes for outgoing arguments.  */
  HOST_WIDE_INT locals_size;	/* number of bytes for local variables.  */
  unsigned int reg_save_mask;	/* mask of saved registers.  */
};
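
/* A sketch of the stack frame layout implied by lm32_compute_frame_size
   and the prologue/epilogue expanders below, from higher to lower
   addresses:

        incoming stack arguments
        pretend arguments               (pretend_size bytes)
     FP ->
        local variables                 (locals_size bytes)
        callee saved registers          (callee_size bytes)
        outgoing arguments              (args_size bytes)
     SP ->

   total_size is the sum of pretend_size, locals_size, callee_size and
   args_size, rounded up to a four byte boundary.  */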

/* Prototypes for static functions.  */
static rtx emit_add (rtx dest, rtx src0, rtx src1);
static void expand_save_restore (struct lm32_frame_info *info, int op);
static void stack_adjust (HOST_WIDE_INT amount);
static bool lm32_in_small_data_p (const_tree);
static void lm32_setup_incoming_varargs (cumulative_args_t cum,
					 machine_mode mode, tree type,
					 int *pretend_size, int no_rtl);
static bool lm32_rtx_costs (rtx x, int code, int outer_code, int opno,
			    int *total, bool speed);
static bool lm32_can_eliminate (const int, const int);
static bool
lm32_legitimate_address_p (machine_mode mode, rtx x, bool strict);
static HOST_WIDE_INT lm32_compute_frame_size (int size);
static void lm32_option_override (void);
static rtx lm32_function_arg (cumulative_args_t cum,
			      machine_mode mode, const_tree type,
			      bool named);
static void lm32_function_arg_advance (cumulative_args_t cum,
				       machine_mode mode,
				       const_tree type, bool named);

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE lm32_option_override
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS lm32_rtx_costs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG lm32_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
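/* Keep section anchors within the signed 16-bit offset range accepted by
   register+offset addresses (see lm32_legitimate_address_p below).  */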
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x8000
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE lm32_can_eliminate
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p

struct gcc_target targetm = TARGET_INITIALIZER;

/* Current frame information calculated by lm32_compute_frame_size.  */
static struct lm32_frame_info current_frame_info;

/* Return non-zero if the given return type should be returned in memory.  */

int
lm32_return_in_memory (tree type)
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    {
      /* All simple types are returned in registers.  */
      return 0;
    }

  size = int_size_in_bytes (type);
  if (size >= 0 && size <= UNITS_PER_WORD)
    {
      /* If it can fit in one register.  */
      return 0;
    }

  return 1;
}

/* Generate and emit a word-sized add instruction.  */

static rtx
emit_add (rtx dest, rtx src0, rtx src1)
{
  rtx insn;
  insn = emit_insn (gen_addsi3 (dest, src0, src1));
  return insn;
}

/* Generate the code to compare (and possibly branch) two integer values.
   CODE is the comparison code we are trying to emulate
     (or implement directly).
   RESULT is where to store the result of the comparison,
     or null to emit a branch.
   CMP0 and CMP1 are the two comparison operands.
   DESTINATION is the destination of the branch, or null to only compare.  */

static void
gen_int_relational (enum rtx_code code,
		    rtx result,
		    rtx cmp0,
		    rtx cmp1,
		    rtx destination)
{
  machine_mode mode;
  int branch_p;

  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Is this a branch or a compare?  */
  branch_p = (destination != 0);

  /* Instruction set doesn't support LE or LT, so swap operands and use
     GE, GT.  */
  switch (code)
    {
    case LE:
    case LT:
    case LEU:
    case LTU:
      {
	rtx temp;

	code = swap_condition (code);
	temp = cmp0;
	cmp0 = cmp1;
	cmp1 = temp;
	break;
      }
    default:
      break;
    }

  if (branch_p)
    {
      rtx insn, cond, label;

      /* Operands must be in registers.  */
      if (!register_operand (cmp0, mode))
	cmp0 = force_reg (mode, cmp0);
      if (!register_operand (cmp1, mode))
	cmp1 = force_reg (mode, cmp1);

      /* Generate conditional branch instruction.  */
      cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
      label = gen_rtx_LABEL_REF (VOIDmode, destination);
      insn = gen_rtx_SET (VOIDmode, pc_rtx,
			  gen_rtx_IF_THEN_ELSE (VOIDmode,
						cond, label, pc_rtx));
      emit_jump_insn (insn);
    }
  else
    {
      /* We can't have const_ints in cmp0, other than 0.  */
      if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
	cmp0 = force_reg (mode, cmp0);

      /* If the comparison is against an integer that is not in the legal
         range for the instruction, move it into a register.  */
      if (GET_CODE (cmp1) == CONST_INT)
	{
	  switch (code)
	    {
	    case EQ:
	    case NE:
	    case LE:
	    case LT:
	    case GE:
	    case GT:
	      if (!satisfies_constraint_K (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    case LEU:
	    case LTU:
	    case GEU:
	    case GTU:
	      if (!satisfies_constraint_L (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}

      /* Generate compare instruction.  */
      emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
    }
}

/* Perform the comparison in OPERANDS[1], whose operands are OPERANDS[2]
   and OPERANDS[3].  Store the result in OPERANDS[0].  */

void
lm32_expand_scc (rtx operands[])
{
  rtx target = operands[0];
  enum rtx_code code = GET_CODE (operands[1]);
  rtx op0 = operands[2];
  rtx op1 = operands[3];

  gen_int_relational (code, target, op0, op1, NULL_RTX);
}

/* Compare OPERANDS[1] with OPERANDS[2] using the comparison code in
   OPERANDS[0] and jump to OPERANDS[3] if the condition holds.  */

void
lm32_expand_conditional_branch (rtx operands[])
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx destination = operands[3];

  gen_int_relational (code, NULL_RTX, op0, op1, destination);
}

/* Generate and emit RTL to save or restore callee save registers.  */
static void
expand_save_restore (struct lm32_frame_info *info, int op)
{
  unsigned int reg_save_mask = info->reg_save_mask;
  int regno;
  HOST_WIDE_INT offset;
  rtx insn;

  /* Callee saves are below locals and above outgoing arguments.  */
  offset = info->args_size + info->callee_size;
  for (regno = 0; regno <= 31; regno++)
    {
      if ((reg_save_mask & (1 << regno)) != 0)
	{
	  rtx offset_rtx;
	  rtx mem;

	  offset_rtx = GEN_INT (offset);
	  if (satisfies_constraint_K (offset_rtx))
	    {
	      mem = gen_rtx_MEM (word_mode,
				 gen_rtx_PLUS (Pmode,
					       stack_pointer_rtx,
					       offset_rtx));
	    }
	  else
	    {
	      /* r10 is caller saved so it can be used as a temp reg.  */
	      rtx r10;

	      r10 = gen_rtx_REG (word_mode, 10);
	      insn = emit_move_insn (r10, offset_rtx);
	      if (op == 0)
		RTX_FRAME_RELATED_P (insn) = 1;
	      insn = emit_add (r10, r10, stack_pointer_rtx);
	      if (op == 0)
		RTX_FRAME_RELATED_P (insn) = 1;
	      mem = gen_rtx_MEM (word_mode, r10);
	    }

	  if (op == 0)
	    insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
	  else
	    insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);

	  /* Only prologue instructions which set the sp or fp, or save a
	     register, should be marked as frame related.  */
	  if (op == 0)
	    RTX_FRAME_RELATED_P (insn) = 1;
	  offset -= UNITS_PER_WORD;
	}
    }
}

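/* Add AMOUNT to the stack pointer, using a temporary register when the
   amount cannot be encoded as an immediate.  Adjustments that allocate
   stack space (AMOUNT < 0) are marked as frame related.  */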
static void
stack_adjust (HOST_WIDE_INT amount)
{
  rtx insn;

  if (!IN_RANGE (amount, -32776, 32768))
    {
      /* r10 is caller saved so it can be used as a temp reg.  */
      rtx r10;
      r10 = gen_rtx_REG (word_mode, 10);
      insn = emit_move_insn (r10, GEN_INT (amount));
      if (amount < 0)
	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
      if (amount < 0)
	RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      insn = emit_add (stack_pointer_rtx,
		       stack_pointer_rtx, GEN_INT (amount));
      if (amount < 0)
	RTX_FRAME_RELATED_P (insn) = 1;
    }
}


/* Create and emit instructions for a function's prologue.  */
void
lm32_expand_prologue (void)
{
  rtx insn;

  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Allocate space on the stack for the new frame.  */
      stack_adjust (-current_frame_info.total_size);

      /* Save callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
	expand_save_restore (&current_frame_info, 0);

      /* Set up the frame pointer if it's needed.  */
      if (frame_pointer_needed == 1)
	{
	  /* Move sp to fp.  */
	  insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
	  RTX_FRAME_RELATED_P (insn) = 1;

	  /* Add the offset.  Don't use total_size, as that includes
	     pretend_size, which isn't part of this frame.  */
	  insn = emit_add (frame_pointer_rtx,
			   frame_pointer_rtx,
			   GEN_INT (current_frame_info.args_size +
				    current_frame_info.callee_size +
				    current_frame_info.locals_size));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Prevent the prologue from being scheduled into the function body.  */
      emit_insn (gen_blockage ());
    }
}

/* Create and emit instructions for a function's epilogue.  */
void
lm32_expand_epilogue (void)
{
  rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);

  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Prevent stack code from being reordered.  */
      emit_insn (gen_blockage ());

      /* Restore callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
	expand_save_restore (&current_frame_info, 1);

      /* Deallocate stack.  */
      stack_adjust (current_frame_info.total_size);

      /* Return to calling function.  */
      emit_jump_insn (gen_return_internal (ra_rtx));
    }
  else
    {
      /* Return to calling function.  */
      emit_jump_insn (gen_return_internal (ra_rtx));
    }
}

/* Compute the frame layout for the current function and return the total
   frame size in bytes.  */
static HOST_WIDE_INT
lm32_compute_frame_size (int size)
{
  int regno;
  HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
  unsigned int reg_save_mask;

  locals_size = size;
  args_size = crtl->outgoing_args_size;
  pretend_size = crtl->args.pretend_args_size;
  callee_size = 0;
  reg_save_mask = 0;

  /* Build the mask that determines which registers we save, and calculate
     the size required to store them on the stack.  */
  for (regno = 1; regno < SP_REGNUM; regno++)
    {
      if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
	{
	  reg_save_mask |= 1 << regno;
	  callee_size += UNITS_PER_WORD;
	}
    }
  if (df_regs_ever_live_p (RA_REGNUM) || ! crtl->is_leaf
      || !optimize)
    {
      reg_save_mask |= 1 << RA_REGNUM;
      callee_size += UNITS_PER_WORD;
    }
  if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
    {
      reg_save_mask |= 1 << FP_REGNUM;
      callee_size += UNITS_PER_WORD;
    }

  /* Compute total frame size.  */
  total_size = pretend_size + args_size + locals_size + callee_size;
  /* Align frame to a four byte (word) boundary.  */
  total_size = (total_size + 3) & ~3;

  /* Save computed information.  */
  current_frame_info.total_size = total_size;
  current_frame_info.callee_size = callee_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.locals_size = locals_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_save_mask = reg_save_mask;

  return total_size;
}

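/* Print operand OP to FILE.  LETTER is the operand letter from the
   assembler template; 'z' prints register r0 when OP is the constant
   zero.  */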
void
lm32_print_operand (FILE * file, rtx op, int letter)
{
  enum rtx_code code;

  code = GET_CODE (op);

  if (code == SIGN_EXTEND)
    op = XEXP (op, 0), code = GET_CODE (op);
  else if (code == REG || code == SUBREG)
    {
      int regnum;

      if (code == REG)
	regnum = REGNO (op);
      else
	regnum = true_regnum (op);

      fprintf (file, "%s", reg_names[regnum]);
    }
  else if (code == HIGH)
    output_addr_const (file, XEXP (op, 0));
  else if (code == MEM)
    output_address (XEXP (op, 0));
  else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
    fprintf (file, "%s", reg_names[0]);
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
	output_operand_lossage ("only 0.0 can be loaded as an immediate");
      else
	fprintf (file, "0");
    }
  else if (code == EQ)
    fprintf (file, "e  ");
  else if (code == NE)
    fprintf (file, "ne ");
  else if (code == GT)
    fprintf (file, "g  ");
  else if (code == GTU)
    fprintf (file, "gu ");
  else if (code == LT)
    fprintf (file, "l  ");
  else if (code == LTU)
    fprintf (file, "lu ");
  else if (code == GE)
    fprintf (file, "ge ");
  else if (code == GEU)
    fprintf (file, "geu");
  else if (code == LE)
    fprintf (file, "le ");
  else if (code == LEU)
    fprintf (file, "leu");
  else
    output_addr_const (file, op);
}

/* A C compound statement to output to stdio stream STREAM the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  ADDR is an RTL expression.

   On some machines, the syntax for a symbolic address depends on
   the section that the address refers to.  On these machines,
   define the macro `ENCODE_SECTION_INFO' to store the information
   into the `symbol_ref', and then check for it here.  */

void
lm32_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
      break;

    case MEM:
      output_address (XEXP (addr, 0));
      break;

    case PLUS:
      {
	rtx arg0 = XEXP (addr, 0);
	rtx arg1 = XEXP (addr, 1);

	if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
	  {
	    if (GET_CODE (arg1) == CONST_INT)
	      fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
		       INTVAL (arg1));
	    else
	      {
		fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
		output_addr_const (file, arg1);
		fprintf (file, ")");
	      }
	  }
	else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
	  output_addr_const (file, addr);
	else
	  fatal_insn ("bad operand", addr);
      }
      break;

    case SYMBOL_REF:
      if (SYMBOL_REF_SMALL_P (addr))
	{
	  fprintf (file, "gp(");
	  output_addr_const (file, addr);
	  fprintf (file, ")");
	}
      else
	fatal_insn ("can't use non gp relative absolute address", addr);
      break;

    default:
      fatal_insn ("invalid addressing mode", addr);
      break;
    }
}

/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

static rtx
lm32_function_arg (cumulative_args_t cum_v, machine_mode mode,
		   const_tree type, bool named)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (0);

  if (targetm.calls.must_pass_in_stack (mode, type))
    return NULL_RTX;

  if (!named || (*cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
    return NULL_RTX;

  return gen_rtx_REG (mode, *cum + LM32_FIRST_ARG_REG);
}

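/* Implement TARGET_FUNCTION_ARG_ADVANCE: advance past the registers
   consumed by an argument of the given MODE and TYPE.  */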
static void
lm32_function_arg_advance (cumulative_args_t cum, machine_mode mode,
			   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  *get_cumulative_args (cum) += LM32_NUM_REGS2 (mode, type);
}

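/* Return the byte offset between elimination source register FROM and its
   replacement register TO.  Only eliminations from the argument pointer to
   the frame or stack pointer are expected.  */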
HOST_WIDE_INT
lm32_compute_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset = 0;

  switch (from)
    {
    case ARG_POINTER_REGNUM:
      switch (to)
	{
	case FRAME_POINTER_REGNUM:
	  offset = 0;
	  break;
	case STACK_POINTER_REGNUM:
	  offset =
	    lm32_compute_frame_size (get_frame_size ()) -
	    current_frame_info.pretend_size;
	  break;
	default:
	  gcc_unreachable ();
	}
      break;
    default:
      gcc_unreachable ();
    }

  return offset;
}

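/* Implement TARGET_SETUP_INCOMING_VARARGS: spill any anonymous argument
   registers to the stack so that a variadic callee can find them.  */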
static void
lm32_setup_incoming_varargs (cumulative_args_t cum_v, machine_mode mode,
			     tree type, int *pretend_size, int no_rtl)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int first_anon_arg;
  tree fntype;

  fntype = TREE_TYPE (current_function_decl);

  if (stdarg_p (fntype))
    first_anon_arg = *cum + LM32_FIRST_ARG_REG;
  else
    {
      /* This is the common case: we have been passed the details set up
         for the last named argument.  We want to skip over any registers
         used in passing this named parameter in order to determine the
         first register used to pass anonymous arguments.  */
      int size;

      if (mode == BLKmode)
	size = int_size_in_bytes (type);
      else
	size = GET_MODE_SIZE (mode);

      first_anon_arg =
	*cum + LM32_FIRST_ARG_REG +
	((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
    }

  if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
    {
      int first_reg_offset = first_anon_arg;
      int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
      rtx regblock;

      regblock = gen_rtx_MEM (BLKmode,
			      plus_constant (Pmode, arg_pointer_rtx,
					     FIRST_PARM_OFFSET (0)));
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = size * UNITS_PER_WORD;
    }
}

/* Override command line options.  */
static void
lm32_option_override (void)
{
  /* We must have sign-extend enabled if barrel-shift isn't.  */
  if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
    target_flags |= MASK_SIGN_EXTEND_ENABLED;
}

/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack
   was created.  */
int
lm32_can_use_return (void)
{
  if (!reload_completed)
    return 0;

  if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
    return 0;

  if (lm32_compute_frame_size (get_frame_size ()) != 0)
    return 0;

  return 1;
}

/* Support function to determine the return address of the function
   'count' frames back up the stack.  */
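/* When COUNT is nonzero, or when the return address may have been saved by
   the prologue, it is read from the stack slot two words below FRAME.  */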
rtx
lm32_return_addr_rtx (int count, rtx frame)
{
  rtx r;
  if (count == 0)
    {
      if (!df_regs_ever_live_p (RA_REGNUM))
	r = gen_rtx_REG (Pmode, RA_REGNUM);
      else
	{
	  r = gen_rtx_MEM (Pmode,
			   gen_rtx_PLUS (Pmode, frame,
					 GEN_INT (-2 * UNITS_PER_WORD)));
	  set_mem_alias_set (r, get_frame_alias_set ());
	}
    }
  else if (flag_omit_frame_pointer)
    r = NULL_RTX;
  else
    {
      r = gen_rtx_MEM (Pmode,
		       gen_rtx_PLUS (Pmode, frame,
				     GEN_INT (-2 * UNITS_PER_WORD)));
      set_mem_alias_set (r, get_frame_alias_set ());
    }
  return r;
}

/* Return true if EXP should be placed in the small data section.  */

static bool
lm32_in_small_data_p (const_tree exp)
{
  /* We want to merge strings, so we never consider them small data.  */
  if (TREE_CODE (exp) == STRING_CST)
    return false;

  /* Functions are never in the small data area.  Duh.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    return false;

  if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
    {
      const char *section = DECL_SECTION_NAME (exp);
      if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
	return true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));

      /* If this is an incomplete type with size 0, then we can't put it
         in sdata because it might be too big when completed.  */
      if (size > 0 && size <= g_switch_value)
	return true;
    }

  return false;
}

/* Emit straight-line code to move LENGTH bytes from SRC to DEST.
   Assume that the areas do not overlap.  */

static void
lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
			HOST_WIDE_INT alignment)
{
  HOST_WIDE_INT offset, delta;
  unsigned HOST_WIDE_INT bits;
  int i;
  machine_mode mode;
  rtx *regs;

  /* Work out how many bits to move at a time.  */
  switch (alignment)
    {
    case 1:
      bits = 8;
      break;
    case 2:
      bits = 16;
      break;
    default:
      bits = 32;
      break;
    }

  mode = mode_for_size (bits, MODE_INT, 0);
  delta = bits / BITS_PER_UNIT;

  /* Allocate a buffer for the temporary registers.  */
  regs = XALLOCAVEC (rtx, length / delta);

  /* Load as many BITS-sized chunks as possible.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    {
      regs[i] = gen_reg_rtx (mode);
      emit_move_insn (regs[i], adjust_address (src, mode, offset));
    }

  /* Copy the chunks to the destination.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    emit_move_insn (adjust_address (dest, mode, offset), regs[i]);

  /* Mop up any left-over bytes.  */
  if (offset < length)
    {
      src = adjust_address (src, BLKmode, offset);
      dest = adjust_address (dest, BLKmode, offset);
      move_by_pieces (dest, src, length - offset,
		      MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
    }
}

/* Expand string/block move operations.

   operands[0] is the pointer to the destination.
   operands[1] is the pointer to the source.
   operands[2] is the number of bytes to move.
   operands[3] is the alignment.

   Return 1 if the move was expanded inline, 0 otherwise.  */

int
lm32_expand_block_move (rtx * operands)
{
  if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
    {
      lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
			      INTVAL (operands[3]));
      return 1;
    }
  return 0;
}

/* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
   isn't protected by a PIC unspec.  */
int
nonpic_symbol_mentioned_p (rtx x)
{
  const char *fmt;
  int i;

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
      || GET_CODE (x) == PC)
    return 1;

  /* We don't want to look into the possible MEM location of a
     CONST_DOUBLE, since we're not going to use it, in general.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return 0;

  if (GET_CODE (x) == UNSPEC)
    return 0;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
lm32_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
		int *total, bool speed)
{
  machine_mode mode = GET_MODE (x);
  bool small_mode;

  const int arithmetic_latency = 1;
  const int shift_latency = 1;
  const int compare_latency = 2;
  const int multiply_latency = 3;
  const int load_latency = 3;
  const int libcall_size_cost = 5;

  /* Determine if we can handle the given mode size in a single instruction.  */
  small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);

  switch (code)
    {

    case PLUS:
    case MINUS:
    case AND:
    case IOR:
    case XOR:
    case NOT:
    case NEG:
      if (!speed)
	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
      else
	*total =
	  COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
      break;

    case COMPARE:
      if (small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (compare_latency);
	}
      else
	{
	  /* FIXME. Guessing here.  */
	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
	}
      break;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (shift_latency);
	}
      else if (TARGET_BARREL_SHIFT_ENABLED)
	{
	  /* FIXME: Guessing here.  */
	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
	}
      else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
	}
      else
	{
	  /* Libcall.  */
	  if (!speed)
	    *total = COSTS_N_INSNS (libcall_size_cost);
	  else
	    *total = COSTS_N_INSNS (100);
	}
      break;

    case MULT:
      if (TARGET_MULTIPLY_ENABLED && small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (multiply_latency);
	}
      else
	{
	  /* Libcall.  */
	  if (!speed)
	    *total = COSTS_N_INSNS (libcall_size_cost);
	  else
	    *total = COSTS_N_INSNS (100);
	}
      break;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (TARGET_DIVIDE_ENABLED && small_mode)
	{
	  if (!speed)
	    *total = COSTS_N_INSNS (1);
	  else
	    {
	      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
		{
		  int cycles = 0;
		  unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

		  while (i)
		    {
		      i >>= 2;
		      cycles++;
		    }
		  if (IN_RANGE (i, 0, 65536))
		    *total = COSTS_N_INSNS (1 + 1 + cycles);
		  else
		    *total = COSTS_N_INSNS (2 + 1 + cycles);
		  return true;
		}
	      else if (GET_CODE (XEXP (x, 1)) == REG)
		{
		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
		  return true;
		}
	      else
		{
		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
		  return false;
		}
	    }
	}
      else
	{
	  /* Libcall.  */
	  if (!speed)
	    *total = COSTS_N_INSNS (libcall_size_cost);
	  else
	    *total = COSTS_N_INSNS (100);
	}
      break;

1099     case LO_SUM:
1100       if (!speed)
1101 	*total = COSTS_N_INSNS (1);
1102       else
1103 	*total = COSTS_N_INSNS (arithmetic_latency);
1104       break;
1105 
1106     case ZERO_EXTEND:
1107       if (MEM_P (XEXP (x, 0)))
1108 	*total = COSTS_N_INSNS (0);
1109       else if (small_mode)
1110 	{
1111 	  if (!speed)
1112 	    *total = COSTS_N_INSNS (1);
1113 	  else
1114 	    *total = COSTS_N_INSNS (arithmetic_latency);
1115 	}
1116       else
1117 	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
1118       break;
1119 
1120     case CONST_INT:
1121       {
1122 	switch (outer_code)
1123 	  {
1124 	  case HIGH:
1125 	  case LO_SUM:
1126 	    *total = COSTS_N_INSNS (0);
1127 	    return true;
1128 
1129 	  case AND:
1130 	  case XOR:
1131 	  case IOR:
1132 	  case ASHIFT:
1133 	  case ASHIFTRT:
1134 	  case LSHIFTRT:
1135 	  case ROTATE:
1136 	  case ROTATERT:
1137 	    if (satisfies_constraint_L (x))
1138 	      *total = COSTS_N_INSNS (0);
1139 	    else
1140 	      *total = COSTS_N_INSNS (2);
1141 	    return true;
1142 
1143 	  case SET:
1144 	  case PLUS:
1145 	  case MINUS:
1146 	  case COMPARE:
1147 	    if (satisfies_constraint_K (x))
1148 	      *total = COSTS_N_INSNS (0);
1149 	    else
1150 	      *total = COSTS_N_INSNS (2);
1151 	    return true;
1152 
1153 	  case MULT:
1154 	    if (TARGET_MULTIPLY_ENABLED)
1155 	      {
1156 	        if (satisfies_constraint_K (x))
1157 	         *total = COSTS_N_INSNS (0);
1158 	        else
1159 	          *total = COSTS_N_INSNS (2);
1160 		return true;
1161 	      }
1162 	    /* Fall through.  */
1163 
1164 	  default:
1165             if (satisfies_constraint_K (x))
1166 	      *total = COSTS_N_INSNS (1);
1167 	    else
1168 	      *total = COSTS_N_INSNS (2);
1169 	    return true;
1170 	  }
1171       }
1172 
1173     case SYMBOL_REF:
1174     case CONST:
1175       switch (outer_code)
1176 	{
1177 	case HIGH:
1178 	case LO_SUM:
1179 	  *total = COSTS_N_INSNS (0);
1180 	  return true;
1181 
1182 	case MEM:
1183 	case SET:
1184 	  if (g_switch_value)
1185 	    {
1186 	      *total = COSTS_N_INSNS (0);
1187 	      return true;
1188 	    }
1189 	  break;
1190 	}
1191       /* Fall through.  */
1192 
1193     case LABEL_REF:
1194     case CONST_DOUBLE:
1195       *total = COSTS_N_INSNS (2);
1196       return true;
1197 
1198     case SET:
1199       *total = COSTS_N_INSNS (1);
1200       break;
1201 
1202     case MEM:
1203       if (!speed)
1204 	*total = COSTS_N_INSNS (1);
1205       else
1206 	*total = COSTS_N_INSNS (load_latency);
1207       break;
1208 
1209     }
1210 
1211   return false;
1212 }
1213 
1214 /* Implemenent TARGET_CAN_ELIMINATE.  */
1215 
1216 bool
1217 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1218 {
1219   return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
1220 }
1221 
1222 /* Implement TARGET_LEGITIMATE_ADDRESS_P.  */
1223 
1224 static bool
1225 lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
1226 {
1227    /* (rM) */
1228   if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
1229     return true;
1230   if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
1231     return true;
1232 
1233   /* (rM)+literal) */
1234   if (GET_CODE (x) == PLUS
1235      && REG_P (XEXP (x, 0))
1236      && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
1237          || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
1238      && GET_CODE (XEXP (x, 1)) == CONST_INT
1239      && satisfies_constraint_K (XEXP ((x), 1)))
1240     return true;
1241 
1242   /* gp(sym)  */
1243   if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
1244     return true;
1245 
1246   return false;
1247 }
1248 
/* Check that a move is not memory to memory.  */

bool
lm32_move_ok (machine_mode mode, rtx operands[2])
{
  if (memory_operand (operands[0], mode))
    return register_or_zero_operand (operands[1], mode);
  return true;
}
1257