xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/config/lm32/lm32.c (revision 7330f729ccf0bd976a06f95fad452fe774fc7fd1)
1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2    Contributed by Jon Beniston <jon@beniston.com>
3 
4    Copyright (C) 2009-2017 Free Software Foundation, Inc.
5 
6    This file is part of GCC.
7 
8    GCC is free software; you can redistribute it and/or modify it
9    under the terms of the GNU General Public License as published
10    by the Free Software Foundation; either version 3, or (at your
11    option) any later version.
12 
13    GCC is distributed in the hope that it will be useful, but WITHOUT
14    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
16    License for more details.
17 
18    You should have received a copy of the GNU General Public License
19    along with GCC; see the file COPYING3.  If not see
20    <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "optabs.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "output.h"
37 #include "calls.h"
38 #include "alias.h"
39 #include "explow.h"
40 #include "expr.h"
41 #include "tm-constrs.h"
42 #include "builtins.h"
43 
44 /* This file should be included last.  */
45 #include "target-def.h"
46 
/* Layout of the current function's stack frame, as computed by
   lm32_compute_frame_size.  All sizes are in bytes.  */
struct lm32_frame_info
{
  HOST_WIDE_INT total_size;	/* number of bytes of entire frame.  */
  HOST_WIDE_INT callee_size;	/* number of bytes to save callee saves.  */
  HOST_WIDE_INT pretend_size;	/* number of bytes we pretend caller did.  */
  HOST_WIDE_INT args_size;	/* number of bytes for outgoing arguments.  */
  HOST_WIDE_INT locals_size;	/* number of bytes for local variables.  */
  unsigned int reg_save_mask;	/* mask of saved registers.  */
};

/* Prototypes for static functions.  */
static rtx emit_add (rtx dest, rtx src0, rtx src1);
static void expand_save_restore (struct lm32_frame_info *info, int op);
static void stack_adjust (HOST_WIDE_INT amount);
static bool lm32_in_small_data_p (const_tree);
static void lm32_setup_incoming_varargs (cumulative_args_t cum,
					 machine_mode mode, tree type,
					 int *pretend_size, int no_rtl);
static bool lm32_rtx_costs (rtx x, machine_mode mode, int outer_code, int opno,
			    int *total, bool speed);
static bool lm32_can_eliminate (const int, const int);
static bool
lm32_legitimate_address_p (machine_mode mode, rtx x, bool strict);
static HOST_WIDE_INT lm32_compute_frame_size (int size);
static void lm32_option_override (void);
static rtx lm32_function_arg (cumulative_args_t cum,
			      machine_mode mode, const_tree type,
			      bool named);
static void lm32_function_arg_advance (cumulative_args_t cum,
				       machine_mode mode,
				       const_tree type, bool named);

/* Target hook table.  Each #undef/#define pair replaces the default
   hook with the lm32-specific implementation declared above.  */
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE lm32_option_override
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS lm32_rtx_costs
#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG lm32_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
/* Section anchors must lie within a signed 16-bit offset of the anchor
   point (the value range used by the 'K' constraint).  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x8000
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fff
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE lm32_can_eliminate
#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p

/* The target hook vector, built from the macros defined above.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Current frame information calculated by lm32_compute_frame_size.  */
static struct lm32_frame_info current_frame_info;
112 
113 /* Return non-zero if the given return type should be returned in memory.  */
114 
115 int
116 lm32_return_in_memory (tree type)
117 {
118   HOST_WIDE_INT size;
119 
120   if (!AGGREGATE_TYPE_P (type))
121     {
122       /* All simple types are returned in registers.  */
123       return 0;
124     }
125 
126   size = int_size_in_bytes (type);
127   if (size >= 0 && size <= UNITS_PER_WORD)
128     {
129       /* If it can fit in one register.  */
130       return 0;
131     }
132 
133   return 1;
134 }
135 
136 /* Generate an emit a word sized add instruction.  */
137 
138 static rtx
139 emit_add (rtx dest, rtx src0, rtx src1)
140 {
141   rtx insn;
142   insn = emit_insn (gen_addsi3 (dest, src0, src1));
143   return insn;
144 }
145 
146 /* Generate the code to compare (and possibly branch) two integer values
147    TEST_CODE is the comparison code we are trying to emulate
148      (or implement directly)
149    RESULT is where to store the result of the comparison,
150      or null to emit a branch
151    CMP0 CMP1 are the two comparison operands
152    DESTINATION is the destination of the branch, or null to only compare
153    */
154 
static void
gen_int_relational (enum rtx_code code,
		    rtx result,
		    rtx cmp0,
		    rtx cmp1,
		    rtx destination)
{
  machine_mode mode;
  int branch_p;

  /* Infer the comparison mode from the operands; CMP0 may be a
     constant with VOIDmode, in which case fall back to CMP1's mode.  */
  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Is this a branch or compare.  */
  branch_p = (destination != 0);

  /* Instruction set doesn't support LE or LT, so swap operands and use
     GE, GT.  */
  switch (code)
    {
    case LE:
    case LT:
    case LEU:
    case LTU:
      {
	rtx temp;

	code = swap_condition (code);
	temp = cmp0;
	cmp0 = cmp1;
	cmp1 = temp;
	break;
      }
    default:
      break;
    }

  if (branch_p)
    {
      rtx insn, cond, label;

      /* Operands must be in registers.  */
      if (!register_operand (cmp0, mode))
	cmp0 = force_reg (mode, cmp0);
      if (!register_operand (cmp1, mode))
	cmp1 = force_reg (mode, cmp1);

      /* Generate conditional branch instruction:
	 (set pc (if_then_else (cond cmp0 cmp1) (label_ref dest) pc)).  */
      cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
      label = gen_rtx_LABEL_REF (VOIDmode, destination);
      insn = gen_rtx_SET (pc_rtx, gen_rtx_IF_THEN_ELSE (VOIDmode,
							cond, label, pc_rtx));
      emit_jump_insn (insn);
    }
  else
    {
      /* We can't have const_ints in cmp0, other than 0.  */
      if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
	cmp0 = force_reg (mode, cmp0);

      /* If the comparison is against an int not in legal range
         move it into a register.  */
      if (GET_CODE (cmp1) == CONST_INT)
	{
	  switch (code)
	    {
	    case EQ:
	    case NE:
	    case LE:
	    case LT:
	    case GE:
	    case GT:
	      /* Signed compares take the signed immediate ('K').  */
	      if (!satisfies_constraint_K (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    case LEU:
	    case LTU:
	    case GEU:
	    case GTU:
	      /* Unsigned compares take the unsigned immediate ('L').  */
	      if (!satisfies_constraint_L (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}

      /* Generate compare instruction, storing the flag into RESULT.  */
      emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
    }
}
247 
248 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
249    and OPERAND[3].  Store the result in OPERANDS[0].  */
250 
251 void
252 lm32_expand_scc (rtx operands[])
253 {
254   rtx target = operands[0];
255   enum rtx_code code = GET_CODE (operands[1]);
256   rtx op0 = operands[2];
257   rtx op1 = operands[3];
258 
259   gen_int_relational (code, target, op0, op1, NULL_RTX);
260 }
261 
262 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
263    CODE and jump to OPERANDS[3] if the condition holds.  */
264 
265 void
266 lm32_expand_conditional_branch (rtx operands[])
267 {
268   enum rtx_code code = GET_CODE (operands[0]);
269   rtx op0 = operands[1];
270   rtx op1 = operands[2];
271   rtx destination = operands[3];
272 
273   gen_int_relational (code, NULL_RTX, op0, op1, destination);
274 }
275 
/* Generate and emit RTL to save or restore callee save registers.
   INFO is the frame layout computed by lm32_compute_frame_size; OP is
   0 to save registers (prologue) and non-zero to restore them
   (epilogue).  */
static void
expand_save_restore (struct lm32_frame_info *info, int op)
{
  unsigned int reg_save_mask = info->reg_save_mask;
  int regno;
  HOST_WIDE_INT offset;
  rtx insn;

  /* Callee saves are below locals and above outgoing arguments.  */
  offset = info->args_size + info->callee_size;
  for (regno = 0; regno <= 31; regno++)
    {
      if ((reg_save_mask & (1 << regno)) != 0)
	{
	  rtx offset_rtx;
	  rtx mem;

	  offset_rtx = GEN_INT (offset);
	  if (satisfies_constraint_K (offset_rtx))
	    {
	      /* Offset fits the load/store displacement field; address
		 the slot directly off the stack pointer.  */
              mem = gen_rtx_MEM (word_mode,
                                 gen_rtx_PLUS (Pmode,
                                               stack_pointer_rtx,
                                               offset_rtx));
            }
          else
            {
              /* r10 is caller saved so it can be used as a temp reg.  */
              rtx r10;

              r10 = gen_rtx_REG (word_mode, 10);
              insn = emit_move_insn (r10, offset_rtx);
              if (op == 0)
                RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_add (r10, r10, stack_pointer_rtx);
              if (op == 0)
                RTX_FRAME_RELATED_P (insn) = 1;
              mem = gen_rtx_MEM (word_mode, r10);
            }

	  if (op == 0)
	    insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
	  else
	    insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);

	  /* only prologue instructions which set the sp fp or save a
	     register should be marked as frame related.  */
	  if (op == 0)
	    RTX_FRAME_RELATED_P (insn) = 1;
	  offset -= UNITS_PER_WORD;
	}
    }
}
330 
331 static void
332 stack_adjust (HOST_WIDE_INT amount)
333 {
334   rtx insn;
335 
336   if (!IN_RANGE (amount, -32776, 32768))
337     {
338       /* r10 is caller saved so it can be used as a temp reg.  */
339       rtx r10;
340       r10 = gen_rtx_REG (word_mode, 10);
341       insn = emit_move_insn (r10, GEN_INT (amount));
342       if (amount < 0)
343 	RTX_FRAME_RELATED_P (insn) = 1;
344       insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
345       if (amount < 0)
346 	RTX_FRAME_RELATED_P (insn) = 1;
347     }
348   else
349     {
350       insn = emit_add (stack_pointer_rtx,
351 		       stack_pointer_rtx, GEN_INT (amount));
352       if (amount < 0)
353 	RTX_FRAME_RELATED_P (insn) = 1;
354     }
355 }
356 
357 
/* Create and emit instructions for a function's prologue: allocate the
   frame, save callee-saved registers, and set up the frame pointer.  */
void
lm32_expand_prologue (void)
{
  rtx insn;

  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Add space on stack new frame.  */
      stack_adjust (-current_frame_info.total_size);

      /* Save callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
	expand_save_restore (&current_frame_info, 0);

      /* Setup frame pointer if it's needed.  */
      if (frame_pointer_needed == 1)
	{
	  /* Move sp to fp.  */
	  insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
	  RTX_FRAME_RELATED_P (insn) = 1;

	  /* Add offset - Don't use total_size, as that includes pretend_size,
             which isn't part of this frame?  */
	  insn = emit_add (frame_pointer_rtx,
			   frame_pointer_rtx,
			   GEN_INT (current_frame_info.args_size +
				    current_frame_info.callee_size +
				    current_frame_info.locals_size));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Prevent prologue from being scheduled into function body.  */
      emit_insn (gen_blockage ());
    }
}
396 
397 /* Create an emit instructions for a functions epilogue.  */
398 void
399 lm32_expand_epilogue (void)
400 {
401   rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);
402 
403   lm32_compute_frame_size (get_frame_size ());
404 
405   if (current_frame_info.total_size > 0)
406     {
407       /* Prevent stack code from being reordered.  */
408       emit_insn (gen_blockage ());
409 
410       /* Restore callee save registers.  */
411       if (current_frame_info.reg_save_mask != 0)
412 	expand_save_restore (&current_frame_info, 1);
413 
414       /* Deallocate stack.  */
415       stack_adjust (current_frame_info.total_size);
416 
417       /* Return to calling function.  */
418       emit_jump_insn (gen_return_internal (ra_rtx));
419     }
420   else
421     {
422       /* Return to calling function.  */
423       emit_jump_insn (gen_return_internal (ra_rtx));
424     }
425 }
426 
/* Compute the layout of the current function's stack frame and record
   it in current_frame_info.  SIZE is the space needed for local
   variables (get_frame_size ()).  Returns the total frame size in
   bytes, rounded up to a 4-byte boundary.  */
static HOST_WIDE_INT
lm32_compute_frame_size (int size)
{
  int regno;
  HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
  unsigned int reg_save_mask;

  locals_size = size;
  args_size = crtl->outgoing_args_size;
  pretend_size = crtl->args.pretend_args_size;
  callee_size = 0;
  reg_save_mask = 0;

  /* Build mask that actually determines which registers we save
     and calculate size required to store them in the stack.  */
  for (regno = 1; regno < SP_REGNUM; regno++)
    {
      if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
	{
	  reg_save_mask |= 1 << regno;
	  callee_size += UNITS_PER_WORD;
	}
    }
  /* The return address register is also saved whenever it is live or
     the function might make a call (non-leaf, or unoptimized).  */
  if (df_regs_ever_live_p (RA_REGNUM) || ! crtl->is_leaf
      || !optimize)
    {
      reg_save_mask |= 1 << RA_REGNUM;
      callee_size += UNITS_PER_WORD;
    }
  /* Likewise the frame pointer, when one is needed and the loop above
     did not already include it.  */
  if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
    {
      reg_save_mask |= 1 << FP_REGNUM;
      callee_size += UNITS_PER_WORD;
    }

  /* Compute total frame size.  */
  total_size = pretend_size + args_size + locals_size + callee_size;

  /* Align frame to appropriate boundary (4 bytes).  */
  total_size = (total_size + 3) & ~3;

  /* Save computed information.  */
  current_frame_info.total_size = total_size;
  current_frame_info.callee_size = callee_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.locals_size = locals_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_save_mask = reg_save_mask;

  return total_size;
}
480 
/* Print operand OP to FILE using lm32 assembler syntax.  LETTER is the
   operand modifier from the %-spec; only 'z' (print r0 for a zero
   constant) is given special treatment here.  */
void
lm32_print_operand (FILE * file, rtx op, int letter)
{
  enum rtx_code code;

  code = GET_CODE (op);

  /* Look through a sign extension and print its operand instead.  */
  if (code == SIGN_EXTEND)
    op = XEXP (op, 0), code = GET_CODE (op);
  else if (code == REG || code == SUBREG)
    {
      int regnum;

      if (code == REG)
	regnum = REGNO (op);
      else
	regnum = true_regnum (op);

      fprintf (file, "%s", reg_names[regnum]);
    }
  else if (code == HIGH)
    output_addr_const (file, XEXP (op, 0));
  else if (code == MEM)
    output_address (GET_MODE (op), XEXP (op, 0));
  else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
    fprintf (file, "%s", reg_names[0]);
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Only a floating-point zero can be emitted as an immediate.  */
      if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
	output_operand_lossage ("only 0.0 can be loaded as an immediate");
      else
	fprintf (file, "0");
    }
  /* Comparison codes print as the branch condition suffix, padded to
     three characters so operands line up in the output.  */
  else if (code == EQ)
    fprintf (file, "e  ");
  else if (code == NE)
    fprintf (file, "ne ");
  else if (code == GT)
    fprintf (file, "g  ");
  else if (code == GTU)
    fprintf (file, "gu ");
  else if (code == LT)
    fprintf (file, "l  ");
  else if (code == LTU)
    fprintf (file, "lu ");
  else if (code == GE)
    fprintf (file, "ge ");
  else if (code == GEU)
    fprintf (file, "geu");
  else if (code == LE)
    fprintf (file, "le ");
  else if (code == LEU)
    fprintf (file, "leu");
  else
    output_addr_const (file, op);
}
537 
538 /* A C compound statement to output to stdio stream STREAM the
539    assembler syntax for an instruction operand that is a memory
540    reference whose address is ADDR.  ADDR is an RTL expression.
541 
542    On some machines, the syntax for a symbolic address depends on
543    the section that the address refers to.  On these machines,
544    define the macro `ENCODE_SECTION_INFO' to store the information
545    into the `symbol_ref', and then check for it here.  */
546 
547 void
548 lm32_print_operand_address (FILE * file, rtx addr)
549 {
550   switch (GET_CODE (addr))
551     {
552     case REG:
553       fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
554       break;
555 
556     case MEM:
557       output_address (VOIDmode, XEXP (addr, 0));
558       break;
559 
560     case PLUS:
561       {
562 	rtx arg0 = XEXP (addr, 0);
563 	rtx arg1 = XEXP (addr, 1);
564 
565 	if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
566 	  {
567 	    if (GET_CODE (arg1) == CONST_INT)
568 	      fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
569 		       INTVAL (arg1));
570 	    else
571 	      {
572 		fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
573 		output_addr_const (file, arg1);
574 		fprintf (file, ")");
575 	      }
576 	  }
577 	else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
578 	  output_addr_const (file, addr);
579 	else
580 	  fatal_insn ("bad operand", addr);
581       }
582       break;
583 
584     case SYMBOL_REF:
585       if (SYMBOL_REF_SMALL_P (addr))
586 	{
587 	  fprintf (file, "gp(");
588 	  output_addr_const (file, addr);
589 	  fprintf (file, ")");
590 	}
591       else
592 	fatal_insn ("can't use non gp relative absolute address", addr);
593       break;
594 
595     default:
596       fatal_insn ("invalid addressing mode", addr);
597       break;
598     }
599 }
600 
601 /* Determine where to put an argument to a function.
602    Value is zero to push the argument on the stack,
603    or a hard register in which to store the argument.
604 
605    MODE is the argument's machine mode.
606    TYPE is the data type of the argument (as a tree).
607     This is null for libcalls where that information may
608     not be available.
609    CUM is a variable of type CUMULATIVE_ARGS which gives info about
610     the preceding args and about the function being called.
611    NAMED is nonzero if this argument is a named parameter
612     (otherwise it is an extra parameter matching an ellipsis).  */
613 
static rtx
lm32_function_arg (cumulative_args_t cum_v, machine_mode mode,
		   const_tree type, bool named)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  /* A VOIDmode argument marks the end of the list; the value returned
     becomes operand 2 of the call insn.  */
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (0);

  /* Arguments the ABI forces onto the stack never get a register.  */
  if (targetm.calls.must_pass_in_stack (mode, type))
    return NULL_RTX;

  /* Unnamed (variadic) arguments, and arguments that would spill past
     the last argument register, go on the stack.  */
  if (!named || (*cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
    return NULL_RTX;

  /* Otherwise pass in the next available argument register.  */
  return gen_rtx_REG (mode, *cum + LM32_FIRST_ARG_REG);
}
632 
633 static void
634 lm32_function_arg_advance (cumulative_args_t cum, machine_mode mode,
635 			   const_tree type, bool named ATTRIBUTE_UNUSED)
636 {
637   *get_cumulative_args (cum) += LM32_NUM_REGS2 (mode, type);
638 }
639 
640 HOST_WIDE_INT
641 lm32_compute_initial_elimination_offset (int from, int to)
642 {
643   HOST_WIDE_INT offset = 0;
644 
645   switch (from)
646     {
647     case ARG_POINTER_REGNUM:
648       switch (to)
649 	{
650 	case FRAME_POINTER_REGNUM:
651 	  offset = 0;
652 	  break;
653 	case STACK_POINTER_REGNUM:
654 	  offset =
655 	    lm32_compute_frame_size (get_frame_size ()) -
656 	    current_frame_info.pretend_size;
657 	  break;
658 	default:
659 	  gcc_unreachable ();
660 	}
661       break;
662     default:
663       gcc_unreachable ();
664     }
665 
666   return offset;
667 }
668 
/* Implement TARGET_SETUP_INCOMING_VARARGS.  Spill any anonymous
   argument registers to the stack so va_arg can find them, and report
   the number of bytes spilled in *PRETEND_SIZE.  When NO_RTL is
   non-zero, only the size computation is wanted and no insns are
   emitted.  */
static void
lm32_setup_incoming_varargs (cumulative_args_t cum_v, machine_mode mode,
			     tree type, int *pretend_size, int no_rtl)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int first_anon_arg;
  tree fntype;

  fntype = TREE_TYPE (current_function_decl);

  /* For a stdarg prototype every register after those already consumed
     is anonymous.  */
  if (stdarg_p (fntype))
    first_anon_arg = *cum + LM32_FIRST_ARG_REG;
  else
    {
      /* this is the common case, we have been passed details setup
         for the last named argument, we want to skip over the
         registers, if any used in passing this named parameter in
         order to determine which is the first registers used to pass
         anonymous arguments.  */
      int size;

      if (mode == BLKmode)
	size = int_size_in_bytes (type);
      else
	size = GET_MODE_SIZE (mode);

      first_anon_arg =
	*cum + LM32_FIRST_ARG_REG +
	((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
    }

  if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
    {
      int first_reg_offset = first_anon_arg;
      int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
      rtx regblock;

      /* Store the remaining anonymous argument registers as one block
	 at the start of the incoming-argument area.  */
      regblock = gen_rtx_MEM (BLKmode,
			      plus_constant (Pmode, arg_pointer_rtx,
					     FIRST_PARM_OFFSET (0)));
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = size * UNITS_PER_WORD;
    }
}
714 
/* Override command line options.  */
static void
lm32_option_override (void)
{
  /* We must have sign-extend enabled if barrel-shift isn't.
     NOTE(review): the reason is not stated here — presumably sign
     extension is otherwise synthesized with shifts; confirm against
     the lm32 ISA documentation.  */
  if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
    target_flags |= MASK_SIGN_EXTEND_ENABLED;
}
723 
724 /* Return nonzero if this function is known to have a null epilogue.
725    This allows the optimizer to omit jumps to jumps if no stack
726    was created.  */
727 int
728 lm32_can_use_return (void)
729 {
730   if (!reload_completed)
731     return 0;
732 
733   if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
734     return 0;
735 
736   if (lm32_compute_frame_size (get_frame_size ()) != 0)
737     return 0;
738 
739   return 1;
740 }
741 
742 /* Support function to determine the return address of the function
743    'count' frames back up the stack.  */
744 rtx
745 lm32_return_addr_rtx (int count, rtx frame)
746 {
747   rtx r;
748   if (count == 0)
749     {
750       if (!df_regs_ever_live_p (RA_REGNUM))
751 	r = gen_rtx_REG (Pmode, RA_REGNUM);
752       else
753 	{
754 	  r = gen_rtx_MEM (Pmode,
755 			   gen_rtx_PLUS (Pmode, frame,
756 					 GEN_INT (-2 * UNITS_PER_WORD)));
757 	  set_mem_alias_set (r, get_frame_alias_set ());
758 	}
759     }
760   else if (flag_omit_frame_pointer)
761     r = NULL_RTX;
762   else
763     {
764       r = gen_rtx_MEM (Pmode,
765 		       gen_rtx_PLUS (Pmode, frame,
766 				     GEN_INT (-2 * UNITS_PER_WORD)));
767       set_mem_alias_set (r, get_frame_alias_set ());
768     }
769   return r;
770 }
771 
772 /* Return true if EXP should be placed in the small data section.  */
773 
774 static bool
775 lm32_in_small_data_p (const_tree exp)
776 {
777   /* We want to merge strings, so we never consider them small data.  */
778   if (TREE_CODE (exp) == STRING_CST)
779     return false;
780 
781   /* Functions are never in the small data area.  Duh.  */
782   if (TREE_CODE (exp) == FUNCTION_DECL)
783     return false;
784 
785   if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
786     {
787       const char *section = DECL_SECTION_NAME (exp);
788       if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
789 	return true;
790     }
791   else
792     {
793       HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
794 
795       /* If this is an incomplete type with size 0, then we can't put it
796          in sdata because it might be too big when completed.  */
797       if (size > 0 && size <= g_switch_value)
798 	return true;
799     }
800 
801   return false;
802 }
803 
804 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
805    Assume that the areas do not overlap.  */
806 
/* Emit straight-line code to move LENGTH bytes from SRC to DEST, with
   ALIGNMENT the known common alignment of both.  Assumes that the
   areas do not overlap.  */
static void
lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
			HOST_WIDE_INT alignment)
{
  HOST_WIDE_INT offset, delta;
  unsigned HOST_WIDE_INT bits;
  int i;
  machine_mode mode;
  rtx *regs;

  /* Work out how many bits to move at a time: the widest access the
     alignment allows (byte, halfword or word).  */
  switch (alignment)
    {
    case 1:
      bits = 8;
      break;
    case 2:
      bits = 16;
      break;
    default:
      bits = 32;
      break;
    }

  mode = mode_for_size (bits, MODE_INT, 0);
  delta = bits / BITS_PER_UNIT;

  /* Allocate a buffer for the temporary registers.  */
  regs = XALLOCAVEC (rtx, length / delta);

  /* Load as many BITS-sized chunks as possible.  All loads are emitted
     before all stores, which keeps them schedulable independently.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    {
      regs[i] = gen_reg_rtx (mode);
      emit_move_insn (regs[i], adjust_address (src, mode, offset));
    }

  /* Copy the chunks to the destination.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    emit_move_insn (adjust_address (dest, mode, offset), regs[i]);

  /* Mop up any left-over bytes.  */
  if (offset < length)
    {
      src = adjust_address (src, BLKmode, offset);
      dest = adjust_address (dest, BLKmode, offset);
      move_by_pieces (dest, src, length - offset,
		      MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
    }
}
857 
858 /* Expand string/block move operations.
859 
860    operands[0] is the pointer to the destination.
861    operands[1] is the pointer to the source.
862    operands[2] is the number of bytes to move.
863    operands[3] is the alignment.  */
864 
865 int
866 lm32_expand_block_move (rtx * operands)
867 {
868   if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
869     {
870       lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
871 			      INTVAL (operands[3]));
872       return 1;
873     }
874   return 0;
875 }
876 
877 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
878    isn't protected by a PIC unspec.  */
879 int
880 nonpic_symbol_mentioned_p (rtx x)
881 {
882   const char *fmt;
883   int i;
884 
885   if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
886       || GET_CODE (x) == PC)
887     return 1;
888 
889   /* We don't want to look into the possible MEM location of a
890      CONST_DOUBLE, since we're not going to use it, in general.  */
891   if (GET_CODE (x) == CONST_DOUBLE)
892     return 0;
893 
894   if (GET_CODE (x) == UNSPEC)
895     return 0;
896 
897   fmt = GET_RTX_FORMAT (GET_CODE (x));
898   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
899     {
900       if (fmt[i] == 'E')
901 	{
902 	  int j;
903 
904 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
905 	    if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
906 	      return 1;
907 	}
908       else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
909 	return 1;
910     }
911 
912   return 0;
913 }
914 
915 /* Compute a (partial) cost for rtx X.  Return true if the complete
916    cost has been computed, and false if subexpressions should be
917    scanned.  In either case, *TOTAL contains the cost result.  */
918 
919 static bool
920 lm32_rtx_costs (rtx x, machine_mode mode, int outer_code,
921 		int opno ATTRIBUTE_UNUSED, int *total, bool speed)
922 {
923   int code = GET_CODE (x);
924   bool small_mode;
925 
926   const int arithmetic_latency = 1;
927   const int shift_latency = 1;
928   const int compare_latency = 2;
929   const int multiply_latency = 3;
930   const int load_latency = 3;
931   const int libcall_size_cost = 5;
932 
933   /* Determine if we can handle the given mode size in a single instruction.  */
934   small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);
935 
936   switch (code)
937     {
938 
939     case PLUS:
940     case MINUS:
941     case AND:
942     case IOR:
943     case XOR:
944     case NOT:
945     case NEG:
946       if (!speed)
947 	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
948       else
949 	*total =
950 	  COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
951       break;
952 
953     case COMPARE:
954       if (small_mode)
955 	{
956 	  if (!speed)
957 	    *total = COSTS_N_INSNS (1);
958 	  else
959 	    *total = COSTS_N_INSNS (compare_latency);
960 	}
961       else
962 	{
963 	  /* FIXME. Guessing here.  */
964 	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
965 	}
966       break;
967 
968     case ASHIFT:
969     case ASHIFTRT:
970     case LSHIFTRT:
971       if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
972 	{
973 	  if (!speed)
974 	    *total = COSTS_N_INSNS (1);
975 	  else
976 	    *total = COSTS_N_INSNS (shift_latency);
977 	}
978       else if (TARGET_BARREL_SHIFT_ENABLED)
979 	{
980 	  /* FIXME: Guessing here.  */
981 	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
982 	}
983       else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
984 	{
985 	  *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
986 	}
987       else
988 	{
989 	  /* Libcall.  */
990 	  if (!speed)
991 	    *total = COSTS_N_INSNS (libcall_size_cost);
992 	  else
993 	    *total = COSTS_N_INSNS (100);
994 	}
995       break;
996 
997     case MULT:
998       if (TARGET_MULTIPLY_ENABLED && small_mode)
999 	{
1000 	  if (!speed)
1001 	    *total = COSTS_N_INSNS (1);
1002 	  else
1003 	    *total = COSTS_N_INSNS (multiply_latency);
1004 	}
1005       else
1006 	{
1007 	  /* Libcall.  */
1008 	  if (!speed)
1009 	    *total = COSTS_N_INSNS (libcall_size_cost);
1010 	  else
1011 	    *total = COSTS_N_INSNS (100);
1012 	}
1013       break;
1014 
1015     case DIV:
1016     case MOD:
1017     case UDIV:
1018     case UMOD:
1019       if (TARGET_DIVIDE_ENABLED && small_mode)
1020 	{
1021 	  if (!speed)
1022 	    *total = COSTS_N_INSNS (1);
1023 	  else
1024 	    {
1025 	      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1026 		{
1027 		  int cycles = 0;
1028 		  unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
1029 
1030 		  while (i)
1031 		    {
1032 		      i >>= 2;
1033 		      cycles++;
1034 		    }
1035 		  if (IN_RANGE (i, 0, 65536))
1036 		    *total = COSTS_N_INSNS (1 + 1 + cycles);
1037 		  else
1038 		    *total = COSTS_N_INSNS (2 + 1 + cycles);
1039 		  return true;
1040 		}
1041 	      else if (GET_CODE (XEXP (x, 1)) == REG)
1042 		{
1043 		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1044 		  return true;
1045 		}
1046 	      else
1047 		{
1048 		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1049 		  return false;
1050 		}
1051 	    }
1052 	}
1053       else
1054 	{
1055 	  /* Libcall.  */
1056 	  if (!speed)
1057 	    *total = COSTS_N_INSNS (libcall_size_cost);
1058 	  else
1059 	    *total = COSTS_N_INSNS (100);
1060 	}
1061       break;
1062 
1063     case HIGH:
1064     case LO_SUM:
1065       if (!speed)
1066 	*total = COSTS_N_INSNS (1);
1067       else
1068 	*total = COSTS_N_INSNS (arithmetic_latency);
1069       break;
1070 
1071     case ZERO_EXTEND:
1072       if (MEM_P (XEXP (x, 0)))
1073 	*total = COSTS_N_INSNS (0);
1074       else if (small_mode)
1075 	{
1076 	  if (!speed)
1077 	    *total = COSTS_N_INSNS (1);
1078 	  else
1079 	    *total = COSTS_N_INSNS (arithmetic_latency);
1080 	}
1081       else
1082 	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
1083       break;
1084 
1085     case CONST_INT:
1086       {
1087 	switch (outer_code)
1088 	  {
1089 	  case HIGH:
1090 	  case LO_SUM:
1091 	    *total = COSTS_N_INSNS (0);
1092 	    return true;
1093 
1094 	  case AND:
1095 	  case XOR:
1096 	  case IOR:
1097 	  case ASHIFT:
1098 	  case ASHIFTRT:
1099 	  case LSHIFTRT:
1100 	  case ROTATE:
1101 	  case ROTATERT:
1102 	    if (satisfies_constraint_L (x))
1103 	      *total = COSTS_N_INSNS (0);
1104 	    else
1105 	      *total = COSTS_N_INSNS (2);
1106 	    return true;
1107 
1108 	  case SET:
1109 	  case PLUS:
1110 	  case MINUS:
1111 	  case COMPARE:
1112 	    if (satisfies_constraint_K (x))
1113 	      *total = COSTS_N_INSNS (0);
1114 	    else
1115 	      *total = COSTS_N_INSNS (2);
1116 	    return true;
1117 
1118 	  case MULT:
1119 	    if (TARGET_MULTIPLY_ENABLED)
1120 	      {
1121 	        if (satisfies_constraint_K (x))
1122 	         *total = COSTS_N_INSNS (0);
1123 	        else
1124 	          *total = COSTS_N_INSNS (2);
1125 		return true;
1126 	      }
1127 	    /* Fall through.  */
1128 
1129 	  default:
1130             if (satisfies_constraint_K (x))
1131 	      *total = COSTS_N_INSNS (1);
1132 	    else
1133 	      *total = COSTS_N_INSNS (2);
1134 	    return true;
1135 	  }
1136       }
1137 
1138     case SYMBOL_REF:
1139     case CONST:
1140       switch (outer_code)
1141 	{
1142 	case HIGH:
1143 	case LO_SUM:
1144 	  *total = COSTS_N_INSNS (0);
1145 	  return true;
1146 
1147 	case MEM:
1148 	case SET:
1149 	  if (g_switch_value)
1150 	    {
1151 	      *total = COSTS_N_INSNS (0);
1152 	      return true;
1153 	    }
1154 	  break;
1155 	}
1156       /* Fall through.  */
1157 
1158     case LABEL_REF:
1159     case CONST_DOUBLE:
1160       *total = COSTS_N_INSNS (2);
1161       return true;
1162 
1163     case SET:
1164       *total = COSTS_N_INSNS (1);
1165       break;
1166 
1167     case MEM:
1168       if (!speed)
1169 	*total = COSTS_N_INSNS (1);
1170       else
1171 	*total = COSTS_N_INSNS (load_latency);
1172       break;
1173 
1174     }
1175 
1176   return false;
1177 }
1178 
/* Implement TARGET_CAN_ELIMINATE.  */
1180 
1181 bool
1182 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1183 {
1184   return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
1185 }
1186 
1187 /* Implement TARGET_LEGITIMATE_ADDRESS_P.  */
1188 
1189 static bool
1190 lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
1191 {
1192    /* (rM) */
1193   if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
1194     return true;
1195   if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
1196     return true;
1197 
1198   /* (rM)+literal) */
1199   if (GET_CODE (x) == PLUS
1200      && REG_P (XEXP (x, 0))
1201      && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
1202          || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
1203      && GET_CODE (XEXP (x, 1)) == CONST_INT
1204      && satisfies_constraint_K (XEXP ((x), 1)))
1205     return true;
1206 
1207   /* gp(sym)  */
1208   if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
1209     return true;
1210 
1211   return false;
1212 }
1213 
1214 /* Check a move is not memory to memory.  */
1215 
1216 bool
1217 lm32_move_ok (machine_mode mode, rtx operands[2]) {
1218   if (memory_operand (operands[0], mode))
1219     return register_or_zero_operand (operands[1], mode);
1220   return true;
1221 }
1222