xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/config/lm32/lm32.c (revision 946379e7b37692fc43f68eb0d1c10daa0a7f3b6c)
1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2    Contributed by Jon Beniston <jon@beniston.com>
3 
4    Copyright (C) 2009-2013 Free Software Foundation, Inc.
5 
6    This file is part of GCC.
7 
8    GCC is free software; you can redistribute it and/or modify it
9    under the terms of the GNU General Public License as published
10    by the Free Software Foundation; either version 3, or (at your
11    option) any later version.
12 
13    GCC is distributed in the hope that it will be useful, but WITHOUT
14    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
16    License for more details.
17 
18    You should have received a copy of the GNU General Public License
19    along with GCC; see the file COPYING3.  If not see
20    <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
35 #include "recog.h"
36 #include "output.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "flags.h"
40 #include "reload.h"
41 #include "tm_p.h"
42 #include "function.h"
43 #include "diagnostic-core.h"
44 #include "optabs.h"
45 #include "libfuncs.h"
46 #include "ggc.h"
47 #include "target.h"
48 #include "target-def.h"
49 #include "langhooks.h"
50 #include "tm-constrs.h"
51 #include "df.h"
52 
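/* Layout of the current function's stack frame, computed by
   lm32_compute_frame_size and cached in current_frame_info below.  */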
53 struct lm32_frame_info
54 {
55   HOST_WIDE_INT total_size;	/* number of bytes of entire frame.  */
56   HOST_WIDE_INT callee_size;	/* number of bytes to save callee saves.  */
57   HOST_WIDE_INT pretend_size;	/* number of bytes we pretend caller did.  */
58   HOST_WIDE_INT args_size;	/* number of bytes for outgoing arguments.  */
59   HOST_WIDE_INT locals_size;	/* number of bytes for local variables.  */
60   unsigned int reg_save_mask;	/* mask of saved registers.  */
61 };
62 
63 /* Prototypes for static functions.  */
64 static rtx emit_add (rtx dest, rtx src0, rtx src1);
65 static void expand_save_restore (struct lm32_frame_info *info, int op);
66 static void stack_adjust (HOST_WIDE_INT amount);
67 static bool lm32_in_small_data_p (const_tree);
68 static void lm32_setup_incoming_varargs (cumulative_args_t cum,
69 					 enum machine_mode mode, tree type,
70 					 int *pretend_size, int no_rtl);
71 static bool lm32_rtx_costs (rtx x, int code, int outer_code, int opno,
72 			    int *total, bool speed);
73 static bool lm32_can_eliminate (const int, const int);
74 static bool
75 lm32_legitimate_address_p (enum machine_mode mode, rtx x, bool strict);
76 static HOST_WIDE_INT lm32_compute_frame_size (int size);
77 static void lm32_option_override (void);
78 static rtx lm32_function_arg (cumulative_args_t cum,
79 			      enum machine_mode mode, const_tree type,
80 			      bool named);
81 static void lm32_function_arg_advance (cumulative_args_t cum,
82 				       enum machine_mode mode,
83 				       const_tree type, bool named);
84 static bool lm32_legitimate_constant_p (enum machine_mode, rtx);
85 
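/* Initialize the GCC target structure: the hooks overridden below replace
   the defaults supplied by TARGET_INITIALIZER.  */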
86 #undef TARGET_OPTION_OVERRIDE
87 #define TARGET_OPTION_OVERRIDE lm32_option_override
88 #undef TARGET_ADDRESS_COST
89 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
90 #undef TARGET_RTX_COSTS
91 #define TARGET_RTX_COSTS lm32_rtx_costs
92 #undef TARGET_IN_SMALL_DATA_P
93 #define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
94 #undef TARGET_PROMOTE_FUNCTION_MODE
95 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
96 #undef TARGET_SETUP_INCOMING_VARARGS
97 #define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
98 #undef TARGET_FUNCTION_ARG
99 #define TARGET_FUNCTION_ARG lm32_function_arg
100 #undef TARGET_FUNCTION_ARG_ADVANCE
101 #define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
102 #undef TARGET_PROMOTE_PROTOTYPES
103 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
104 #undef TARGET_MIN_ANCHOR_OFFSET
105 #define TARGET_MIN_ANCHOR_OFFSET -0x8000
106 #undef TARGET_MAX_ANCHOR_OFFSET
107 #define TARGET_MAX_ANCHOR_OFFSET 0x7fff
108 #undef TARGET_CAN_ELIMINATE
109 #define TARGET_CAN_ELIMINATE lm32_can_eliminate
110 #undef TARGET_LEGITIMATE_ADDRESS_P
111 #define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
112 #undef TARGET_LEGITIMATE_CONSTANT_P
113 #define TARGET_LEGITIMATE_CONSTANT_P lm32_legitimate_constant_p
114 
115 struct gcc_target targetm = TARGET_INITIALIZER;
116 
117 /* Current frame information calculated by lm32_compute_frame_size.  */
118 static struct lm32_frame_info current_frame_info;
119 
120 /* Return non-zero if the given return type should be returned in memory.  */
121 
122 int
123 lm32_return_in_memory (tree type)
124 {
125   HOST_WIDE_INT size;
126 
127   if (!AGGREGATE_TYPE_P (type))
128     {
129       /* All simple types are returned in registers.  */
130       return 0;
131     }
132 
133   size = int_size_in_bytes (type);
134   if (size >= 0 && size <= UNITS_PER_WORD)
135     {
136       /* If it can fit in one register.  */
137       return 0;
138     }
139 
140   return 1;
141 }
142 
143 /* Generate and emit a word-sized add instruction.  */
144 
145 static rtx
146 emit_add (rtx dest, rtx src0, rtx src1)
147 {
148   rtx insn;
149   insn = emit_insn (gen_addsi3 (dest, src0, src1));
150   return insn;
151 }
152 
153 /* Generate the code to compare (and possibly branch) two integer values.
154    CODE is the comparison code we are trying to emulate
155      (or implement directly).
156    RESULT is where to store the result of the comparison,
157      or null to emit a branch.
158    CMP0 and CMP1 are the two comparison operands.
159    DESTINATION is the destination of the branch,
160    or null to only compare.  */
161 
162 static void
163 gen_int_relational (enum rtx_code code,
164 		    rtx result,
165 		    rtx cmp0,
166 		    rtx cmp1,
167 		    rtx destination)
168 {
169   enum machine_mode mode;
170   int branch_p;
171 
172   mode = GET_MODE (cmp0);
173   if (mode == VOIDmode)
174     mode = GET_MODE (cmp1);
175 
176   /* Is this a branch or a compare?  */
177   branch_p = (destination != 0);
178 
179   /* The instruction set doesn't support LE, LT, LEU or LTU, so swap the
180      operands and use GE, GT, GEU or GTU instead.  */
181   switch (code)
182     {
183     case LE:
184     case LT:
185     case LEU:
186     case LTU:
187       {
188 	rtx temp;
189 
190 	code = swap_condition (code);
191 	temp = cmp0;
192 	cmp0 = cmp1;
193 	cmp1 = temp;
194 	break;
195       }
196     default:
197       break;
198     }
199 
200   if (branch_p)
201     {
202       rtx insn, cond, label;
203 
204       /* Operands must be in registers.  */
205       if (!register_operand (cmp0, mode))
206 	cmp0 = force_reg (mode, cmp0);
207       if (!register_operand (cmp1, mode))
208 	cmp1 = force_reg (mode, cmp1);
209 
210       /* Generate conditional branch instruction.  */
211       cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
212       label = gen_rtx_LABEL_REF (VOIDmode, destination);
213       insn = gen_rtx_SET (VOIDmode, pc_rtx,
214 			  gen_rtx_IF_THEN_ELSE (VOIDmode,
215 						cond, label, pc_rtx));
216       emit_jump_insn (insn);
217     }
218   else
219     {
220       /* We can't have const_ints in cmp0, other than 0.  */
221       if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
222 	cmp0 = force_reg (mode, cmp0);
223 
224       /* If the comparison is against a constant that is not in the
225          legal immediate range, move it into a register.  */
226       if (GET_CODE (cmp1) == CONST_INT)
227 	{
228 	  switch (code)
229 	    {
230 	    case EQ:
231 	    case NE:
232 	    case LE:
233 	    case LT:
234 	    case GE:
235 	    case GT:
236 	      if (!satisfies_constraint_K (cmp1))
237 		cmp1 = force_reg (mode, cmp1);
238 	      break;
239 	    case LEU:
240 	    case LTU:
241 	    case GEU:
242 	    case GTU:
243 	      if (!satisfies_constraint_L (cmp1))
244 		cmp1 = force_reg (mode, cmp1);
245 	      break;
246 	    default:
247 	      gcc_unreachable ();
248 	    }
249 	}
250 
251       /* Generate compare instruction.  */
252       emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
253     }
254 }
255 
256 /* Perform the comparison in OPERANDS[1], whose operands are OPERANDS[2]
257    and OPERANDS[3].  Store the result in OPERANDS[0].  */
258 
259 void
260 lm32_expand_scc (rtx operands[])
261 {
262   rtx target = operands[0];
263   enum rtx_code code = GET_CODE (operands[1]);
264   rtx op0 = operands[2];
265   rtx op1 = operands[3];
266 
267   gen_int_relational (code, target, op0, op1, NULL_RTX);
268 }
269 
270 /* Compare OPERANDS[1] with OPERANDS[2] using the comparison code in
271    OPERANDS[0], and jump to OPERANDS[3] if the condition holds.  */
272 
273 void
274 lm32_expand_conditional_branch (rtx operands[])
275 {
276   enum rtx_code code = GET_CODE (operands[0]);
277   rtx op0 = operands[1];
278   rtx op1 = operands[2];
279   rtx destination = operands[3];
280 
281   gen_int_relational (code, NULL_RTX, op0, op1, destination);
282 }
283 
284 /* Generate and emit RTL to save or restore callee save registers.  OP is
   zero to save the registers (prologue) and non-zero to restore them
   (epilogue).  */
285 static void
286 expand_save_restore (struct lm32_frame_info *info, int op)
287 {
288   unsigned int reg_save_mask = info->reg_save_mask;
289   int regno;
290   HOST_WIDE_INT offset;
291   rtx insn;
292 
293   /* Callee saves are below locals and above outgoing arguments.  */
294   offset = info->args_size + info->callee_size;
295   for (regno = 0; regno <= 31; regno++)
296     {
297       if ((reg_save_mask & (1 << regno)) != 0)
298 	{
299 	  rtx offset_rtx;
300 	  rtx mem;
301 
302 	  offset_rtx = GEN_INT (offset);
303 	  if (satisfies_constraint_K (offset_rtx))
304 	    {
305               mem = gen_rtx_MEM (word_mode,
306                                  gen_rtx_PLUS (Pmode,
307                                                stack_pointer_rtx,
308                                                offset_rtx));
309             }
310           else
311             {
312               /* r10 is caller saved so it can be used as a temp reg.  */
313               rtx r10;
314 
315               r10 = gen_rtx_REG (word_mode, 10);
316               insn = emit_move_insn (r10, offset_rtx);
317               if (op == 0)
318                 RTX_FRAME_RELATED_P (insn) = 1;
319               insn = emit_add (r10, r10, stack_pointer_rtx);
320               if (op == 0)
321                 RTX_FRAME_RELATED_P (insn) = 1;
322               mem = gen_rtx_MEM (word_mode, r10);
323             }
324 
325 	  if (op == 0)
326 	    insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
327 	  else
328 	    insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);
329 
330 	  /* Only prologue instructions which set the sp or fp, or which
331 	     save a register, should be marked as frame related.  */
332 	  if (op == 0)
333 	    RTX_FRAME_RELATED_P (insn) = 1;
334 	  offset -= UNITS_PER_WORD;
335 	}
336     }
337 }
338 
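/* Adjust the stack pointer by AMOUNT bytes.  Insns that allocate stack
   (negative AMOUNT) are marked as frame related so that unwind information
   is generated for them.  */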
339 static void
340 stack_adjust (HOST_WIDE_INT amount)
341 {
342   rtx insn;
343 
344   if (!IN_RANGE (amount, -32768, 32767))
345     {
346       /* r10 is caller saved so it can be used as a temp reg.  */
347       rtx r10;
348       r10 = gen_rtx_REG (word_mode, 10);
349       insn = emit_move_insn (r10, GEN_INT (amount));
350       if (amount < 0)
351 	RTX_FRAME_RELATED_P (insn) = 1;
352       insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
353       if (amount < 0)
354 	RTX_FRAME_RELATED_P (insn) = 1;
355     }
356   else
357     {
358       insn = emit_add (stack_pointer_rtx,
359 		       stack_pointer_rtx, GEN_INT (amount));
360       if (amount < 0)
361 	RTX_FRAME_RELATED_P (insn) = 1;
362     }
363 }
364 
365 
366 /* Create and emit instructions for a function's prologue.  */
367 void
368 lm32_expand_prologue (void)
369 {
370   rtx insn;
371 
372   lm32_compute_frame_size (get_frame_size ());
373 
374   if (current_frame_info.total_size > 0)
375     {
376       /* Allocate stack space for the new frame.  */
377       stack_adjust (-current_frame_info.total_size);
378 
379       /* Save callee save registers.  */
380       if (current_frame_info.reg_save_mask != 0)
381 	expand_save_restore (&current_frame_info, 0);
382 
383       /* Set up the frame pointer if it is needed.  */
384       if (frame_pointer_needed == 1)
385 	{
386 	  /* Move sp to fp.  */
387 	  insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
388 	  RTX_FRAME_RELATED_P (insn) = 1;
389 
390 	  /* Add the offset.  Don't use total_size, as that includes
391 	     pretend_size, which isn't part of this frame.  */
392 	  insn = emit_add (frame_pointer_rtx,
393 			   frame_pointer_rtx,
394 			   GEN_INT (current_frame_info.args_size +
395 				    current_frame_info.callee_size +
396 				    current_frame_info.locals_size));
397 	  RTX_FRAME_RELATED_P (insn) = 1;
398 	}
399 
400       /* Prevent prologue from being scheduled into function body.  */
401       emit_insn (gen_blockage ());
402     }
403 }
404 
405 /* Create and emit instructions for a function's epilogue.  */
406 void
407 lm32_expand_epilogue (void)
408 {
409   rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);
410 
411   lm32_compute_frame_size (get_frame_size ());
412 
413   if (current_frame_info.total_size > 0)
414     {
415       /* Prevent stack code from being reordered.  */
416       emit_insn (gen_blockage ());
417 
418       /* Restore callee save registers.  */
419       if (current_frame_info.reg_save_mask != 0)
420 	expand_save_restore (&current_frame_info, 1);
421 
422       /* Deallocate stack.  */
423       stack_adjust (current_frame_info.total_size);
424 
425       /* Return to calling function.  */
426       emit_jump_insn (gen_return_internal (ra_rtx));
427     }
428   else
429     {
430       /* Return to calling function.  */
431       emit_jump_insn (gen_return_internal (ra_rtx));
432     }
433 }
434 
435 /* Compute the layout of the current function's frame, record it in
436    current_frame_info, and return the total frame size in bytes.  */
437 static HOST_WIDE_INT
438 lm32_compute_frame_size (int size)
439 {
440   int regno;
441   HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
442   unsigned int reg_save_mask;
443 
444   locals_size = size;
445   args_size = crtl->outgoing_args_size;
446   pretend_size = crtl->args.pretend_args_size;
447   callee_size = 0;
448   reg_save_mask = 0;
449 
450   /* Build the mask that determines which registers we save, and
451      calculate the size required to store them on the stack.  */
452   for (regno = 1; regno < SP_REGNUM; regno++)
453     {
454       if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
455 	{
456 	  reg_save_mask |= 1 << regno;
457 	  callee_size += UNITS_PER_WORD;
458 	}
459     }
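  /* The return address must also be saved if it is ever live, if this is
     not a leaf function, or when not optimizing.  */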
460   if (df_regs_ever_live_p (RA_REGNUM) || ! crtl->is_leaf
461       || !optimize)
462     {
463       reg_save_mask |= 1 << RA_REGNUM;
464       callee_size += UNITS_PER_WORD;
465     }
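  /* Save the frame pointer if it is needed and has not already been
     included by the loop above.  */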
466   if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
467     {
468       reg_save_mask |= 1 << FP_REGNUM;
469       callee_size += UNITS_PER_WORD;
470     }
471 
472   /* Compute total frame size.  */
473   total_size = pretend_size + args_size + locals_size + callee_size;
474 
475   /* Align the frame to a 4-byte (word) boundary.  */
476   total_size = (total_size + 3) & ~3;
477 
478   /* Save computed information.  */
479   current_frame_info.total_size = total_size;
480   current_frame_info.callee_size = callee_size;
481   current_frame_info.pretend_size = pretend_size;
482   current_frame_info.locals_size = locals_size;
483   current_frame_info.args_size = args_size;
484   current_frame_info.reg_save_mask = reg_save_mask;
485 
486   return total_size;
487 }
488 
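/* Print operand OP to FILE.  LETTER is the operand modifier; 'z' prints
   r0 when OP is the constant zero.  Comparison codes are printed as their
   condition suffix (e, ne, g, gu, ...).  */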
489 void
490 lm32_print_operand (FILE * file, rtx op, int letter)
491 {
492   enum rtx_code code;
493 
494   code = GET_CODE (op);
495 
496   if (code == SIGN_EXTEND)
497     op = XEXP (op, 0), code = GET_CODE (op);
498   else if (code == REG || code == SUBREG)
499     {
500       int regnum;
501 
502       if (code == REG)
503 	regnum = REGNO (op);
504       else
505 	regnum = true_regnum (op);
506 
507       fprintf (file, "%s", reg_names[regnum]);
508     }
509   else if (code == HIGH)
510     output_addr_const (file, XEXP (op, 0));
511   else if (code == MEM)
512     output_address (XEXP (op, 0));
513   else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
514     fprintf (file, "%s", reg_names[0]);
515   else if (GET_CODE (op) == CONST_DOUBLE)
516     {
517       if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
518 	output_operand_lossage ("only 0.0 can be loaded as an immediate");
519       else
520 	fprintf (file, "0");
521     }
522   else if (code == EQ)
523     fprintf (file, "e  ");
524   else if (code == NE)
525     fprintf (file, "ne ");
526   else if (code == GT)
527     fprintf (file, "g  ");
528   else if (code == GTU)
529     fprintf (file, "gu ");
530   else if (code == LT)
531     fprintf (file, "l  ");
532   else if (code == LTU)
533     fprintf (file, "lu ");
534   else if (code == GE)
535     fprintf (file, "ge ");
536   else if (code == GEU)
537     fprintf (file, "geu");
538   else if (code == LE)
539     fprintf (file, "le ");
540   else if (code == LEU)
541     fprintf (file, "leu");
542   else
543     output_addr_const (file, op);
544 }
545 
546 /* Output to stdio stream FILE the assembler syntax for an instruction
547    operand that is a memory reference whose address is ADDR.  ADDR is an
548    RTL expression.
549 
550    On some machines, the syntax for a symbolic address depends on
551    the section that the address refers to.  On these machines,
552    define the macro `ENCODE_SECTION_INFO' to store the information
553    into the `symbol_ref', and then check for it here.  */
554 
555 void
556 lm32_print_operand_address (FILE * file, rtx addr)
557 {
558   switch (GET_CODE (addr))
559     {
560     case REG:
561       fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
562       break;
563 
564     case MEM:
565       output_address (XEXP (addr, 0));
566       break;
567 
568     case PLUS:
569       {
570 	rtx arg0 = XEXP (addr, 0);
571 	rtx arg1 = XEXP (addr, 1);
572 
573 	if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
574 	  {
575 	    if (GET_CODE (arg1) == CONST_INT)
576 	      fprintf (file, "(%s+" HOST_WIDE_INT_PRINT_DEC ")",
577 		       reg_names[REGNO (arg0)], INTVAL (arg1));
578 	    else
579 	      {
580 		fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
581 		output_addr_const (file, arg1);
582 		fprintf (file, ")");
583 	      }
584 	  }
585 	else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
586 	  output_addr_const (file, addr);
587 	else
588 	  fatal_insn ("bad operand", addr);
589       }
590       break;
591 
592     case SYMBOL_REF:
593       if (SYMBOL_REF_SMALL_P (addr))
594 	{
595 	  fprintf (file, "gp(");
596 	  output_addr_const (file, addr);
597 	  fprintf (file, ")");
598 	}
599       else
600 	fatal_insn ("can't use non gp relative absolute address", addr);
601       break;
602 
603     default:
604       fatal_insn ("invalid addressing mode", addr);
605       break;
606     }
607 }
608 
609 /* Determine where to put an argument to a function.
610    Value is zero to push the argument on the stack,
611    or a hard register in which to store the argument.
612 
613    MODE is the argument's machine mode.
614    TYPE is the data type of the argument (as a tree).
615     This is null for libcalls where that information may
616     not be available.
617    CUM is a variable of type CUMULATIVE_ARGS which gives info about
618     the preceding args and about the function being called.
619    NAMED is nonzero if this argument is a named parameter
620     (otherwise it is an extra parameter matching an ellipsis).  */
621 
622 static rtx
623 lm32_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
624 		   const_tree type, bool named)
625 {
626   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
627 
628   if (mode == VOIDmode)
629     /* Compute operand 2 of the call insn.  */
630     return GEN_INT (0);
631 
632   if (targetm.calls.must_pass_in_stack (mode, type))
633     return NULL_RTX;
634 
635   if (!named || (*cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
636     return NULL_RTX;
637 
638   return gen_rtx_REG (mode, *cum + LM32_FIRST_ARG_REG);
639 }
640 
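/* Implement TARGET_FUNCTION_ARG_ADVANCE: step CUM past the registers used
   by the argument just processed.  */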
641 static void
642 lm32_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
643 			   const_tree type, bool named ATTRIBUTE_UNUSED)
644 {
645   *get_cumulative_args (cum) += LM32_NUM_REGS2 (mode, type);
646 }
647 
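/* Return the initial difference between the specified pair of eliminable
   registers, FROM and TO.  */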
648 HOST_WIDE_INT
649 lm32_compute_initial_elimination_offset (int from, int to)
650 {
651   HOST_WIDE_INT offset = 0;
652 
653   switch (from)
654     {
655     case ARG_POINTER_REGNUM:
656       switch (to)
657 	{
658 	case FRAME_POINTER_REGNUM:
659 	  offset = 0;
660 	  break;
661 	case STACK_POINTER_REGNUM:
662 	  offset =
663 	    lm32_compute_frame_size (get_frame_size ()) -
664 	    current_frame_info.pretend_size;
665 	  break;
666 	default:
667 	  gcc_unreachable ();
668 	}
669       break;
670     default:
671       gcc_unreachable ();
672     }
673 
674   return offset;
675 }
676 
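/* Implement TARGET_SETUP_INCOMING_VARARGS: store any unnamed (anonymous)
   argument registers onto the stack so that a later va_arg can find them.  */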
677 static void
678 lm32_setup_incoming_varargs (cumulative_args_t cum_v, enum machine_mode mode,
679 			     tree type, int *pretend_size, int no_rtl)
680 {
681   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
682   int first_anon_arg;
683   tree fntype;
684 
685   fntype = TREE_TYPE (current_function_decl);
686 
687   if (stdarg_p (fntype))
688     first_anon_arg = *cum + LM32_FIRST_ARG_REG;
689   else
690     {
691       /* This is the common case: we have been passed details set up
692          for the last named argument, and we want to skip over any
693          registers used in passing this named parameter in order to
694          determine which is the first register used to pass anonymous
695          arguments.  */
696       int size;
697 
698       if (mode == BLKmode)
699 	size = int_size_in_bytes (type);
700       else
701 	size = GET_MODE_SIZE (mode);
702 
703       first_anon_arg =
704 	*cum + LM32_FIRST_ARG_REG +
705 	((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
706     }
707 
708   if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
709     {
710       int first_reg_offset = first_anon_arg;
711       int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
712       rtx regblock;
713 
714       regblock = gen_rtx_MEM (BLKmode,
715 			      plus_constant (Pmode, arg_pointer_rtx,
716 					     FIRST_PARM_OFFSET (0)));
717       move_block_from_reg (first_reg_offset, regblock, size);
718 
719       *pretend_size = size * UNITS_PER_WORD;
720     }
721 }
722 
723 /* Override command line options.  */
724 static void
725 lm32_option_override (void)
726 {
727   /* We must have sign-extend enabled if barrel-shift isn't.  */
728   if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
729     target_flags |= MASK_SIGN_EXTEND_ENABLED;
730 }
731 
732 /* Return nonzero if this function is known to have a null epilogue.
733    This allows the optimizer to omit jumps to jumps if no stack
734    was created.  */
735 int
736 lm32_can_use_return (void)
737 {
738   if (!reload_completed)
739     return 0;
740 
741   if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
742     return 0;
743 
744   if (lm32_compute_frame_size (get_frame_size ()) != 0)
745     return 0;
746 
747   return 1;
748 }
749 
750 /* Support function to determine the return address of the function
751    'count' frames back up the stack.  */
752 rtx
753 lm32_return_addr_rtx (int count, rtx frame)
754 {
755   rtx r;
756   if (count == 0)
757     {
758       if (!df_regs_ever_live_p (RA_REGNUM))
759 	r = gen_rtx_REG (Pmode, RA_REGNUM);
760       else
761 	{
762 	  r = gen_rtx_MEM (Pmode,
763 			   gen_rtx_PLUS (Pmode, frame,
764 					 GEN_INT (-2 * UNITS_PER_WORD)));
765 	  set_mem_alias_set (r, get_frame_alias_set ());
766 	}
767     }
768   else if (flag_omit_frame_pointer)
769     r = NULL_RTX;
770   else
771     {
772       r = gen_rtx_MEM (Pmode,
773 		       gen_rtx_PLUS (Pmode, frame,
774 				     GEN_INT (-2 * UNITS_PER_WORD)));
775       set_mem_alias_set (r, get_frame_alias_set ());
776     }
777   return r;
778 }
779 
780 /* Return true if EXP should be placed in the small data section.  */
781 
782 static bool
783 lm32_in_small_data_p (const_tree exp)
784 {
785   /* We want to merge strings, so we never consider them small data.  */
786   if (TREE_CODE (exp) == STRING_CST)
787     return false;
788 
789   /* Functions are never in the small data area.  Duh.  */
790   if (TREE_CODE (exp) == FUNCTION_DECL)
791     return false;
792 
793   if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
794     {
795       const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (exp));
796       if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
797 	return true;
798     }
799   else
800     {
801       HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
802 
803       /* If this is an incomplete type with size 0, then we can't put it
804          in sdata because it might be too big when completed.  */
805       if (size > 0 && size <= g_switch_value)
806 	return true;
807     }
808 
809   return false;
810 }
811 
812 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
813    Assume that the areas do not overlap.  */
814 
815 static void
816 lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
817 			HOST_WIDE_INT alignment)
818 {
819   HOST_WIDE_INT offset, delta;
820   unsigned HOST_WIDE_INT bits;
821   int i;
822   enum machine_mode mode;
823   rtx *regs;
824 
825   /* Work out how many bits to move at a time.  */
826   switch (alignment)
827     {
828     case 1:
829       bits = 8;
830       break;
831     case 2:
832       bits = 16;
833       break;
834     default:
835       bits = 32;
836       break;
837     }
838 
839   mode = mode_for_size (bits, MODE_INT, 0);
840   delta = bits / BITS_PER_UNIT;
841 
842   /* Allocate a buffer for the temporary registers.  */
843   regs = XALLOCAVEC (rtx, length / delta);
844 
845   /* Load as many BITS-sized chunks as possible.  */
846   for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
847     {
848       regs[i] = gen_reg_rtx (mode);
849       emit_move_insn (regs[i], adjust_address (src, mode, offset));
850     }
851 
852   /* Copy the chunks to the destination.  */
853   for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
854     emit_move_insn (adjust_address (dest, mode, offset), regs[i]);
855 
856   /* Mop up any left-over bytes.  */
857   if (offset < length)
858     {
859       src = adjust_address (src, BLKmode, offset);
860       dest = adjust_address (dest, BLKmode, offset);
861       move_by_pieces (dest, src, length - offset,
862 		      MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
863     }
864 }
865 
866 /* Expand string/block move operations.
867 
868    operands[0] is the pointer to the destination.
869    operands[1] is the pointer to the source.
870    operands[2] is the number of bytes to move.
871    operands[3] is the alignment.  */
872 
873 int
874 lm32_expand_block_move (rtx * operands)
875 {
876   if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
877     {
878       lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
879 			      INTVAL (operands[3]));
880       return 1;
881     }
882   return 0;
883 }
884 
885 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
886    isn't protected by a PIC unspec.  */
887 int
888 nonpic_symbol_mentioned_p (rtx x)
889 {
890   const char *fmt;
891   int i;
892 
893   if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
894       || GET_CODE (x) == PC)
895     return 1;
896 
897   /* We don't want to look into the possible MEM location of a
898      CONST_DOUBLE, since we're not going to use it, in general.  */
899   if (GET_CODE (x) == CONST_DOUBLE)
900     return 0;
901 
902   if (GET_CODE (x) == UNSPEC)
903     return 0;
904 
905   fmt = GET_RTX_FORMAT (GET_CODE (x));
906   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
907     {
908       if (fmt[i] == 'E')
909 	{
910 	  int j;
911 
912 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
913 	    if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
914 	      return 1;
915 	}
916       else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
917 	return 1;
918     }
919 
920   return 0;
921 }
922 
923 /* Compute a (partial) cost for rtx X.  Return true if the complete
924    cost has been computed, and false if subexpressions should be
925    scanned.  In either case, *TOTAL contains the cost result.  */
926 
927 static bool
928 lm32_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
929 		int *total, bool speed)
930 {
931   enum machine_mode mode = GET_MODE (x);
932   bool small_mode;
933 
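  /* Approximate operation latencies, used via COSTS_N_INSNS when
     optimizing for speed; libcall_size_cost approximates the size of a
     library call when optimizing for size.  */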
934   const int arithmetic_latency = 1;
935   const int shift_latency = 1;
936   const int compare_latency = 2;
937   const int multiply_latency = 3;
938   const int load_latency = 3;
939   const int libcall_size_cost = 5;
940 
941   /* Determine if we can handle the given mode size in a single instruction.  */
942   small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);
943 
944   switch (code)
945     {
946 
947     case PLUS:
948     case MINUS:
949     case AND:
950     case IOR:
951     case XOR:
952     case NOT:
953     case NEG:
954       if (!speed)
955 	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
956       else
957 	*total =
958 	  COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
959       break;
960 
961     case COMPARE:
962       if (small_mode)
963 	{
964 	  if (!speed)
965 	    *total = COSTS_N_INSNS (1);
966 	  else
967 	    *total = COSTS_N_INSNS (compare_latency);
968 	}
969       else
970 	{
971 	  /* FIXME. Guessing here.  */
972 	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
973 	}
974       break;
975 
976     case ASHIFT:
977     case ASHIFTRT:
978     case LSHIFTRT:
979       if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
980 	{
981 	  if (!speed)
982 	    *total = COSTS_N_INSNS (1);
983 	  else
984 	    *total = COSTS_N_INSNS (shift_latency);
985 	}
986       else if (TARGET_BARREL_SHIFT_ENABLED)
987 	{
988 	  /* FIXME: Guessing here.  */
989 	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
990 	}
991       else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
992 	{
993 	  *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
994 	}
995       else
996 	{
997 	  /* Libcall.  */
998 	  if (!speed)
999 	    *total = COSTS_N_INSNS (libcall_size_cost);
1000 	  else
1001 	    *total = COSTS_N_INSNS (100);
1002 	}
1003       break;
1004 
1005     case MULT:
1006       if (TARGET_MULTIPLY_ENABLED && small_mode)
1007 	{
1008 	  if (!speed)
1009 	    *total = COSTS_N_INSNS (1);
1010 	  else
1011 	    *total = COSTS_N_INSNS (multiply_latency);
1012 	}
1013       else
1014 	{
1015 	  /* Libcall.  */
1016 	  if (!speed)
1017 	    *total = COSTS_N_INSNS (libcall_size_cost);
1018 	  else
1019 	    *total = COSTS_N_INSNS (100);
1020 	}
1021       break;
1022 
1023     case DIV:
1024     case MOD:
1025     case UDIV:
1026     case UMOD:
1027       if (TARGET_DIVIDE_ENABLED && small_mode)
1028 	{
1029 	  if (!speed)
1030 	    *total = COSTS_N_INSNS (1);
1031 	  else
1032 	    {
1033 	      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1034 		{
1035 		  int cycles = 0;
1036 		  unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
1037 
1038 		  while (i)
1039 		    {
1040 		      i >>= 2;
1041 		      cycles++;
1042 		    }
1043 		  if (IN_RANGE (INTVAL (XEXP (x, 1)), 0, 65536))
1044 		    *total = COSTS_N_INSNS (1 + 1 + cycles);
1045 		  else
1046 		    *total = COSTS_N_INSNS (2 + 1 + cycles);
1047 		  return true;
1048 		}
1049 	      else if (GET_CODE (XEXP (x, 1)) == REG)
1050 		{
1051 		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1052 		  return true;
1053 		}
1054 	      else
1055 		{
1056 		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1057 		  return false;
1058 		}
1059 	    }
1060 	}
1061       else
1062 	{
1063 	  /* Libcall.  */
1064 	  if (!speed)
1065 	    *total = COSTS_N_INSNS (libcall_size_cost);
1066 	  else
1067 	    *total = COSTS_N_INSNS (100);
1068 	}
1069       break;
1070 
1071     case HIGH:
1072     case LO_SUM:
1073       if (!speed)
1074 	*total = COSTS_N_INSNS (1);
1075       else
1076 	*total = COSTS_N_INSNS (arithmetic_latency);
1077       break;
1078 
1079     case ZERO_EXTEND:
1080       if (MEM_P (XEXP (x, 0)))
1081 	*total = COSTS_N_INSNS (0);
1082       else if (small_mode)
1083 	{
1084 	  if (!speed)
1085 	    *total = COSTS_N_INSNS (1);
1086 	  else
1087 	    *total = COSTS_N_INSNS (arithmetic_latency);
1088 	}
1089       else
1090 	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
1091       break;
1092 
1093     case CONST_INT:
1094       {
1095 	switch (outer_code)
1096 	  {
1097 	  case HIGH:
1098 	  case LO_SUM:
1099 	    *total = COSTS_N_INSNS (0);
1100 	    return true;
1101 
1102 	  case AND:
1103 	  case XOR:
1104 	  case IOR:
1105 	  case ASHIFT:
1106 	  case ASHIFTRT:
1107 	  case LSHIFTRT:
1108 	  case ROTATE:
1109 	  case ROTATERT:
1110 	    if (satisfies_constraint_L (x))
1111 	      *total = COSTS_N_INSNS (0);
1112 	    else
1113 	      *total = COSTS_N_INSNS (2);
1114 	    return true;
1115 
1116 	  case SET:
1117 	  case PLUS:
1118 	  case MINUS:
1119 	  case COMPARE:
1120 	    if (satisfies_constraint_K (x))
1121 	      *total = COSTS_N_INSNS (0);
1122 	    else
1123 	      *total = COSTS_N_INSNS (2);
1124 	    return true;
1125 
1126 	  case MULT:
1127 	    if (TARGET_MULTIPLY_ENABLED)
1128 	      {
1129 	        if (satisfies_constraint_K (x))
1130 	         *total = COSTS_N_INSNS (0);
1131 	        else
1132 	          *total = COSTS_N_INSNS (2);
1133 		return true;
1134 	      }
1135 	    /* Fall through.  */
1136 
1137 	  default:
1138             if (satisfies_constraint_K (x))
1139 	      *total = COSTS_N_INSNS (1);
1140 	    else
1141 	      *total = COSTS_N_INSNS (2);
1142 	    return true;
1143 	  }
1144       }
1145 
1146     case SYMBOL_REF:
1147     case CONST:
1148       switch (outer_code)
1149 	{
1150 	case HIGH:
1151 	case LO_SUM:
1152 	  *total = COSTS_N_INSNS (0);
1153 	  return true;
1154 
1155 	case MEM:
1156 	case SET:
1157 	  if (g_switch_value)
1158 	    {
1159 	      *total = COSTS_N_INSNS (0);
1160 	      return true;
1161 	    }
1162 	  break;
1163 	}
1164       /* Fall through.  */
1165 
1166     case LABEL_REF:
1167     case CONST_DOUBLE:
1168       *total = COSTS_N_INSNS (2);
1169       return true;
1170 
1171     case SET:
1172       *total = COSTS_N_INSNS (1);
1173       break;
1174 
1175     case MEM:
1176       if (!speed)
1177 	*total = COSTS_N_INSNS (1);
1178       else
1179 	*total = COSTS_N_INSNS (load_latency);
1180       break;
1181 
1182     }
1183 
1184   return false;
1185 }
1186 
1187 /* Implement TARGET_CAN_ELIMINATE.  */
1188 
1189 bool
1190 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1191 {
1192   return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
1193 }
1194 
1195 /* Implement TARGET_LEGITIMATE_ADDRESS_P.  */
1196 
1197 static bool
1198 lm32_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
1199 {
1200    /* (rM) */
1201   if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
1202     return true;
1203   if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
1204     return true;
1205 
1206   /* (rM+literal) */
1207   if (GET_CODE (x) == PLUS
1208      && REG_P (XEXP (x, 0))
1209      && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
1210          || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
1211      && GET_CODE (XEXP (x, 1)) == CONST_INT
1212      && satisfies_constraint_K (XEXP ((x), 1)))
1213     return true;
1214 
1215   /* gp(sym)  */
1216   if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
1217     return true;
1218 
1219   return false;
1220 }
1221 
1222 /* Check that a move is not memory to memory.  */
1223 
1224 bool
1225 lm32_move_ok (enum machine_mode mode, rtx operands[2])
{
1226   if (memory_operand (operands[0], mode))
1227     return register_or_zero_operand (operands[1], mode);
1228   return true;
1229 }
1230 
1231 /* Implement TARGET_LEGITIMATE_CONSTANT_P.  */
1232 
1233 static bool
1234 lm32_legitimate_constant_p (enum machine_mode mode, rtx x)
1235 {
1236   /* 32-bit addresses require multiple instructions.  */
1237   if (!flag_pic && reloc_operand (x, mode))
1238     return false;
1239 
1240   return true;
1241 }
1242