xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/config/lm32/lm32.c (revision 53d1339bf7f9c7367b35a9e1ebe693f9b047a47b)
1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2    Contributed by Jon Beniston <jon@beniston.com>
3 
4    Copyright (C) 2009-2019 Free Software Foundation, Inc.
5 
6    This file is part of GCC.
7 
8    GCC is free software; you can redistribute it and/or modify it
9    under the terms of the GNU General Public License as published
10    by the Free Software Foundation; either version 3, or (at your
11    option) any later version.
12 
13    GCC is distributed in the hope that it will be useful, but WITHOUT
14    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
16    License for more details.
17 
18    You should have received a copy of the GNU General Public License
19    along with GCC; see the file COPYING3.  If not see
20    <http://www.gnu.org/licenses/>.  */
21 
22 #define IN_TARGET_CODE 1
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "stringpool.h"
32 #include "attribs.h"
33 #include "df.h"
34 #include "memmodel.h"
35 #include "tm_p.h"
36 #include "optabs.h"
37 #include "regs.h"
38 #include "emit-rtl.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "calls.h"
42 #include "alias.h"
43 #include "explow.h"
44 #include "expr.h"
45 #include "tm-constrs.h"
46 #include "builtins.h"
47 
48 /* This file should be included last.  */
49 #include "target-def.h"
50 
/* Per-function stack frame layout.  Filled in by lm32_compute_frame_size
   and consulted by the prologue/epilogue expanders below.  */
struct lm32_frame_info
{
  HOST_WIDE_INT total_size;	/* number of bytes of entire frame.  */
  HOST_WIDE_INT callee_size;	/* number of bytes to save callee saves.  */
  HOST_WIDE_INT pretend_size;	/* number of bytes we pretend caller did.  */
  HOST_WIDE_INT args_size;	/* number of bytes for outgoing arguments.  */
  HOST_WIDE_INT locals_size;	/* number of bytes for local variables.  */
  unsigned int reg_save_mask;	/* mask of saved registers.  */
};
60 
61 /* Prototypes for static functions.  */
62 static rtx emit_add (rtx dest, rtx src0, rtx src1);
63 static void expand_save_restore (struct lm32_frame_info *info, int op);
64 static void stack_adjust (HOST_WIDE_INT amount);
65 static bool lm32_in_small_data_p (const_tree);
66 static void lm32_setup_incoming_varargs (cumulative_args_t cum,
67 					 machine_mode mode, tree type,
68 					 int *pretend_size, int no_rtl);
69 static bool lm32_rtx_costs (rtx x, machine_mode mode, int outer_code, int opno,
70 			    int *total, bool speed);
71 static bool lm32_can_eliminate (const int, const int);
72 static bool
73 lm32_legitimate_address_p (machine_mode mode, rtx x, bool strict);
74 static HOST_WIDE_INT lm32_compute_frame_size (int size);
75 static void lm32_option_override (void);
76 static rtx lm32_function_arg (cumulative_args_t cum,
77 			      machine_mode mode, const_tree type,
78 			      bool named);
79 static void lm32_function_arg_advance (cumulative_args_t cum,
80 				       machine_mode mode,
81 				       const_tree type, bool named);
82 static bool lm32_hard_regno_mode_ok (unsigned int, machine_mode);
83 static bool lm32_modes_tieable_p (machine_mode, machine_mode);
84 static HOST_WIDE_INT lm32_starting_frame_offset (void);
85 
86 #undef TARGET_OPTION_OVERRIDE
87 #define TARGET_OPTION_OVERRIDE lm32_option_override
88 #undef TARGET_ADDRESS_COST
89 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
90 #undef TARGET_RTX_COSTS
91 #define TARGET_RTX_COSTS lm32_rtx_costs
92 #undef TARGET_IN_SMALL_DATA_P
93 #define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
94 #undef TARGET_PROMOTE_FUNCTION_MODE
95 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
96 #undef TARGET_SETUP_INCOMING_VARARGS
97 #define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
98 #undef TARGET_FUNCTION_ARG
99 #define TARGET_FUNCTION_ARG lm32_function_arg
100 #undef TARGET_FUNCTION_ARG_ADVANCE
101 #define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
102 #undef TARGET_PROMOTE_PROTOTYPES
103 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
104 #undef TARGET_MIN_ANCHOR_OFFSET
105 #define TARGET_MIN_ANCHOR_OFFSET -0x8000
106 #undef TARGET_MAX_ANCHOR_OFFSET
107 #define TARGET_MAX_ANCHOR_OFFSET 0x7fff
108 #undef TARGET_CAN_ELIMINATE
109 #define TARGET_CAN_ELIMINATE lm32_can_eliminate
110 #undef TARGET_LRA_P
111 #define TARGET_LRA_P hook_bool_void_false
112 #undef TARGET_LEGITIMATE_ADDRESS_P
113 #define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
114 #undef TARGET_HARD_REGNO_MODE_OK
115 #define TARGET_HARD_REGNO_MODE_OK lm32_hard_regno_mode_ok
116 #undef TARGET_MODES_TIEABLE_P
117 #define TARGET_MODES_TIEABLE_P lm32_modes_tieable_p
118 
119 #undef TARGET_CONSTANT_ALIGNMENT
120 #define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings
121 
122 #undef TARGET_STARTING_FRAME_OFFSET
123 #define TARGET_STARTING_FRAME_OFFSET lm32_starting_frame_offset
124 
125 struct gcc_target targetm = TARGET_INITIALIZER;
126 
127 /* Current frame information calculated by lm32_compute_frame_size.  */
128 static struct lm32_frame_info current_frame_info;
129 
130 /* Return non-zero if the given return type should be returned in memory.  */
131 
132 int
133 lm32_return_in_memory (tree type)
134 {
135   HOST_WIDE_INT size;
136 
137   if (!AGGREGATE_TYPE_P (type))
138     {
139       /* All simple types are returned in registers.  */
140       return 0;
141     }
142 
143   size = int_size_in_bytes (type);
144   if (size >= 0 && size <= UNITS_PER_WORD)
145     {
146       /* If it can fit in one register.  */
147       return 0;
148     }
149 
150   return 1;
151 }
152 
/* Generate and emit a word sized add instruction, DEST = SRC0 + SRC1.
   Returns the emitted insn so callers can mark it (e.g. as frame
   related).  */

static rtx
emit_add (rtx dest, rtx src0, rtx src1)
{
  rtx insn;
  insn = emit_insn (gen_addsi3 (dest, src0, src1));
  return insn;
}
162 
/* Generate the code to compare (and possibly branch) two integer values
   TEST_CODE is the comparison code we are trying to emulate
     (or implement directly)
   RESULT is where to store the result of the comparison,
     or null to emit a branch
   CMP0 CMP1 are the two comparison operands
   DESTINATION is the destination of the branch, or null to only compare
   */

static void
gen_int_relational (enum rtx_code code,
		    rtx result,
		    rtx cmp0,
		    rtx cmp1,
		    rtx destination)
{
  machine_mode mode;
  int branch_p;

  /* Take the mode from whichever operand has one; constants may be
     VOIDmode.  */
  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);

  /* Is this a branch or compare.  */
  branch_p = (destination != 0);

  /* Instruction set doesn't support LE or LT, so swap operands and use
     GE, GT.  */
  switch (code)
    {
    case LE:
    case LT:
    case LEU:
    case LTU:
      {
	rtx temp;

	/* swap_condition turns LE/LT into GE/GT (and the unsigned
	   variants likewise) once the operands are exchanged.  */
	code = swap_condition (code);
	temp = cmp0;
	cmp0 = cmp1;
	cmp1 = temp;
	break;
      }
    default:
      break;
    }

  if (branch_p)
    {
      rtx insn, cond, label;

      /* Operands must be in registers.  */
      if (!register_operand (cmp0, mode))
	cmp0 = force_reg (mode, cmp0);
      if (!register_operand (cmp1, mode))
	cmp1 = force_reg (mode, cmp1);

      /* Generate conditional branch instruction:
	 (set pc (if_then_else (cond) (label_ref dest) pc)).  */
      cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
      label = gen_rtx_LABEL_REF (VOIDmode, destination);
      insn = gen_rtx_SET (pc_rtx, gen_rtx_IF_THEN_ELSE (VOIDmode,
							cond, label, pc_rtx));
      emit_jump_insn (insn);
    }
  else
    {
      /* We can't have const_ints in cmp0, other than 0.  */
      if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
	cmp0 = force_reg (mode, cmp0);

      /* If the comparison is against an int not in legal range
         move it into a register.  Constraint K covers the immediate
         range of the signed compares, L that of the unsigned ones.  */
      if (GET_CODE (cmp1) == CONST_INT)
	{
	  switch (code)
	    {
	    case EQ:
	    case NE:
	    case LE:
	    case LT:
	    case GE:
	    case GT:
	      if (!satisfies_constraint_K (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    case LEU:
	    case LTU:
	    case GEU:
	    case GTU:
	      if (!satisfies_constraint_L (cmp1))
		cmp1 = force_reg (mode, cmp1);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}

      /* Generate compare instruction, storing the flag in RESULT.  */
      emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
    }
}
264 
265 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
266    and OPERAND[3].  Store the result in OPERANDS[0].  */
267 
268 void
269 lm32_expand_scc (rtx operands[])
270 {
271   rtx target = operands[0];
272   enum rtx_code code = GET_CODE (operands[1]);
273   rtx op0 = operands[2];
274   rtx op1 = operands[3];
275 
276   gen_int_relational (code, target, op0, op1, NULL_RTX);
277 }
278 
279 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
280    CODE and jump to OPERANDS[3] if the condition holds.  */
281 
282 void
283 lm32_expand_conditional_branch (rtx operands[])
284 {
285   enum rtx_code code = GET_CODE (operands[0]);
286   rtx op0 = operands[1];
287   rtx op1 = operands[2];
288   rtx destination = operands[3];
289 
290   gen_int_relational (code, NULL_RTX, op0, op1, destination);
291 }
292 
/* Generate and emit RTL to save (OP == 0) or restore (OP != 0) the
   callee save registers recorded in INFO->reg_save_mask.  */
static void
expand_save_restore (struct lm32_frame_info *info, int op)
{
  unsigned int reg_save_mask = info->reg_save_mask;
  int regno;
  HOST_WIDE_INT offset;
  rtx insn;

  /* Callee saves are below locals and above outgoing arguments.  */
  offset = info->args_size + info->callee_size;
  for (regno = 0; regno <= 31; regno++)
    {
      if ((reg_save_mask & (1 << regno)) != 0)
	{
	  rtx offset_rtx;
	  rtx mem;

	  /* Use an sp-relative address directly when the offset fits
	     the K constraint (load/store displacement), otherwise
	     materialize the address in a scratch register.  */
	  offset_rtx = GEN_INT (offset);
	  if (satisfies_constraint_K (offset_rtx))
	    {
              mem = gen_rtx_MEM (word_mode,
                                 gen_rtx_PLUS (Pmode,
                                               stack_pointer_rtx,
                                               offset_rtx));
            }
          else
            {
              /* r10 is caller saved so it can be used as a temp reg.  */
              rtx r10;

              r10 = gen_rtx_REG (word_mode, 10);
              insn = emit_move_insn (r10, offset_rtx);
              if (op == 0)
                RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_add (r10, r10, stack_pointer_rtx);
              if (op == 0)
                RTX_FRAME_RELATED_P (insn) = 1;
              mem = gen_rtx_MEM (word_mode, r10);
            }

	  /* OP == 0 saves to the stack; otherwise restore from it.  */
	  if (op == 0)
	    insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
	  else
	    insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);

	  /* only prologue instructions which set the sp fp or save a
	     register should be marked as frame related.  */
	  if (op == 0)
	    RTX_FRAME_RELATED_P (insn) = 1;
	  offset -= UNITS_PER_WORD;
	}
    }
}
347 
348 static void
349 stack_adjust (HOST_WIDE_INT amount)
350 {
351   rtx insn;
352 
353   if (!IN_RANGE (amount, -32776, 32768))
354     {
355       /* r10 is caller saved so it can be used as a temp reg.  */
356       rtx r10;
357       r10 = gen_rtx_REG (word_mode, 10);
358       insn = emit_move_insn (r10, GEN_INT (amount));
359       if (amount < 0)
360 	RTX_FRAME_RELATED_P (insn) = 1;
361       insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
362       if (amount < 0)
363 	RTX_FRAME_RELATED_P (insn) = 1;
364     }
365   else
366     {
367       insn = emit_add (stack_pointer_rtx,
368 		       stack_pointer_rtx, GEN_INT (amount));
369       if (amount < 0)
370 	RTX_FRAME_RELATED_P (insn) = 1;
371     }
372 }
373 
374 
/* Create and emit instructions for a function's prologue: allocate the
   frame, save callee-saved registers, and set up the frame pointer if
   one is needed.  */
void
lm32_expand_prologue (void)
{
  rtx insn;

  /* Recompute the frame layout into current_frame_info.  */
  lm32_compute_frame_size (get_frame_size ());

  if (current_frame_info.total_size > 0)
    {
      /* Add space on stack new frame.  */
      stack_adjust (-current_frame_info.total_size);

      /* Save callee save registers.  */
      if (current_frame_info.reg_save_mask != 0)
	expand_save_restore (&current_frame_info, 0);

      /* Setup frame pointer if it's needed.  */
      if (frame_pointer_needed == 1)
	{
	  /* Move sp to fp.  */
	  insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
	  RTX_FRAME_RELATED_P (insn) = 1;

	  /* Add offset - Don't use total_size, as that includes pretend_size,
             which isn't part of this frame?  */
	  insn = emit_add (frame_pointer_rtx,
			   frame_pointer_rtx,
			   GEN_INT (current_frame_info.args_size +
				    current_frame_info.callee_size +
				    current_frame_info.locals_size));
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

      /* Prevent prologue from being scheduled into function body.  */
      emit_insn (gen_blockage ());
    }
}
413 
414 /* Create an emit instructions for a functions epilogue.  */
415 void
416 lm32_expand_epilogue (void)
417 {
418   rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);
419 
420   lm32_compute_frame_size (get_frame_size ());
421 
422   if (current_frame_info.total_size > 0)
423     {
424       /* Prevent stack code from being reordered.  */
425       emit_insn (gen_blockage ());
426 
427       /* Restore callee save registers.  */
428       if (current_frame_info.reg_save_mask != 0)
429 	expand_save_restore (&current_frame_info, 1);
430 
431       /* Deallocate stack.  */
432       stack_adjust (current_frame_info.total_size);
433 
434       /* Return to calling function.  */
435       emit_jump_insn (gen_return_internal (ra_rtx));
436     }
437   else
438     {
439       /* Return to calling function.  */
440       emit_jump_insn (gen_return_internal (ra_rtx));
441     }
442 }
443 
/* Return the bytes needed to compute the frame pointer from the current
   stack pointer.  SIZE is the size of the local variables (from
   get_frame_size).  Also records the full layout in
   current_frame_info as a side effect.  */
static HOST_WIDE_INT
lm32_compute_frame_size (int size)
{
  int regno;
  HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
  unsigned int reg_save_mask;

  locals_size = size;
  args_size = crtl->outgoing_args_size;
  pretend_size = crtl->args.pretend_args_size;
  callee_size = 0;
  reg_save_mask = 0;

  /* Build mask that actually determines which registers we save
     and calculate size required to store them in the stack.  */
  for (regno = 1; regno < SP_REGNUM; regno++)
    {
      if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
	{
	  reg_save_mask |= 1 << regno;
	  callee_size += UNITS_PER_WORD;
	}
    }
  /* The return address must be saved for non-leaf functions, when not
     optimizing, or when it is live in the body.  */
  if (df_regs_ever_live_p (RA_REGNUM) || ! crtl->is_leaf
      || !optimize)
    {
      reg_save_mask |= 1 << RA_REGNUM;
      callee_size += UNITS_PER_WORD;
    }
  /* Likewise the frame pointer, if it is needed but the loop above did
     not already include it.  */
  if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
    {
      reg_save_mask |= 1 << FP_REGNUM;
      callee_size += UNITS_PER_WORD;
    }

  /* Compute total frame size.  */
  total_size = pretend_size + args_size + locals_size + callee_size;

  /* Align frame to appropriate boundary (4 bytes).  */
  total_size = (total_size + 3) & ~3;

  /* Save computed information.  */
  current_frame_info.total_size = total_size;
  current_frame_info.callee_size = callee_size;
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.locals_size = locals_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_save_mask = reg_save_mask;

  return total_size;
}
497 
/* Print operand OP to FILE for the assembler.  LETTER is the operand
   modifier from the template ('z' prints constant zero as register r0).
   Comparison codes are printed as the condition suffixes used by the
   branch/compare mnemonics.  NOTE: the order of these tests matters;
   the 'z' case must be checked only after REG/MEM/HIGH.  */
void
lm32_print_operand (FILE * file, rtx op, int letter)
{
  enum rtx_code code;

  code = GET_CODE (op);

  /* For a sign extension, print the operand being extended.  */
  if (code == SIGN_EXTEND)
    op = XEXP (op, 0), code = GET_CODE (op);
  else if (code == REG || code == SUBREG)
    {
      int regnum;

      if (code == REG)
	regnum = REGNO (op);
      else
	regnum = true_regnum (op);

      fprintf (file, "%s", reg_names[regnum]);
    }
  else if (code == HIGH)
    output_addr_const (file, XEXP (op, 0));
  else if (code == MEM)
    output_address (GET_MODE (op), XEXP (op, 0));
  else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
    fprintf (file, "%s", reg_names[0]);
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Only a floating-point zero can be an immediate.  */
      if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
	output_operand_lossage ("only 0.0 can be loaded as an immediate");
      else
	fprintf (file, "0");
    }
  else if (code == EQ)
    fprintf (file, "e  ");
  else if (code == NE)
    fprintf (file, "ne ");
  else if (code == GT)
    fprintf (file, "g  ");
  else if (code == GTU)
    fprintf (file, "gu ");
  else if (code == LT)
    fprintf (file, "l  ");
  else if (code == LTU)
    fprintf (file, "lu ");
  else if (code == GE)
    fprintf (file, "ge ");
  else if (code == GEU)
    fprintf (file, "geu");
  else if (code == LE)
    fprintf (file, "le ");
  else if (code == LEU)
    fprintf (file, "leu");
  else
    /* Anything else should be a printable constant expression.  */
    output_addr_const (file, op);
}
554 
555 /* A C compound statement to output to stdio stream STREAM the
556    assembler syntax for an instruction operand that is a memory
557    reference whose address is ADDR.  ADDR is an RTL expression.
558 
559    On some machines, the syntax for a symbolic address depends on
560    the section that the address refers to.  On these machines,
561    define the macro `ENCODE_SECTION_INFO' to store the information
562    into the `symbol_ref', and then check for it here.  */
563 
564 void
565 lm32_print_operand_address (FILE * file, rtx addr)
566 {
567   switch (GET_CODE (addr))
568     {
569     case REG:
570       fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
571       break;
572 
573     case MEM:
574       output_address (VOIDmode, XEXP (addr, 0));
575       break;
576 
577     case PLUS:
578       {
579 	rtx arg0 = XEXP (addr, 0);
580 	rtx arg1 = XEXP (addr, 1);
581 
582 	if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
583 	  {
584 	    if (GET_CODE (arg1) == CONST_INT)
585 	      fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
586 		       INTVAL (arg1));
587 	    else
588 	      {
589 		fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
590 		output_addr_const (file, arg1);
591 		fprintf (file, ")");
592 	      }
593 	  }
594 	else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
595 	  output_addr_const (file, addr);
596 	else
597 	  fatal_insn ("bad operand", addr);
598       }
599       break;
600 
601     case SYMBOL_REF:
602       if (SYMBOL_REF_SMALL_P (addr))
603 	{
604 	  fprintf (file, "gp(");
605 	  output_addr_const (file, addr);
606 	  fprintf (file, ")");
607 	}
608       else
609 	fatal_insn ("can't use non gp relative absolute address", addr);
610       break;
611 
612     default:
613       fatal_insn ("invalid addressing mode", addr);
614       break;
615     }
616 }
617 
618 /* Determine where to put an argument to a function.
619    Value is zero to push the argument on the stack,
620    or a hard register in which to store the argument.
621 
622    MODE is the argument's machine mode.
623    TYPE is the data type of the argument (as a tree).
624     This is null for libcalls where that information may
625     not be available.
626    CUM is a variable of type CUMULATIVE_ARGS which gives info about
627     the preceding args and about the function being called.
628    NAMED is nonzero if this argument is a named parameter
629     (otherwise it is an extra parameter matching an ellipsis).  */
630 
631 static rtx
632 lm32_function_arg (cumulative_args_t cum_v, machine_mode mode,
633 		   const_tree type, bool named)
634 {
635   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
636 
637   if (mode == VOIDmode)
638     /* Compute operand 2 of the call insn.  */
639     return GEN_INT (0);
640 
641   if (targetm.calls.must_pass_in_stack (mode, type))
642     return NULL_RTX;
643 
644   if (!named || (*cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
645     return NULL_RTX;
646 
647   return gen_rtx_REG (mode, *cum + LM32_FIRST_ARG_REG);
648 }
649 
650 static void
651 lm32_function_arg_advance (cumulative_args_t cum, machine_mode mode,
652 			   const_tree type, bool named ATTRIBUTE_UNUSED)
653 {
654   *get_cumulative_args (cum) += LM32_NUM_REGS2 (mode, type);
655 }
656 
657 HOST_WIDE_INT
658 lm32_compute_initial_elimination_offset (int from, int to)
659 {
660   HOST_WIDE_INT offset = 0;
661 
662   switch (from)
663     {
664     case ARG_POINTER_REGNUM:
665       switch (to)
666 	{
667 	case FRAME_POINTER_REGNUM:
668 	  offset = 0;
669 	  break;
670 	case STACK_POINTER_REGNUM:
671 	  offset =
672 	    lm32_compute_frame_size (get_frame_size ()) -
673 	    current_frame_info.pretend_size;
674 	  break;
675 	default:
676 	  gcc_unreachable ();
677 	}
678       break;
679     default:
680       gcc_unreachable ();
681     }
682 
683   return offset;
684 }
685 
/* Implement TARGET_SETUP_INCOMING_VARARGS: spill any argument registers
   that may hold anonymous (variadic) arguments to the stack so va_arg
   can find them.  *PRETEND_SIZE is set to the number of bytes spilled;
   no RTL is emitted when NO_RTL is set.  */
static void
lm32_setup_incoming_varargs (cumulative_args_t cum_v, machine_mode mode,
			     tree type, int *pretend_size, int no_rtl)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int first_anon_arg;
  tree fntype;

  fntype = TREE_TYPE (current_function_decl);

  /* For a (...)-only stdarg function every argument register past CUM
     is anonymous.  */
  if (stdarg_p (fntype))
    first_anon_arg = *cum + LM32_FIRST_ARG_REG;
  else
    {
      /* this is the common case, we have been passed details setup
         for the last named argument, we want to skip over the
         registers, if any used in passing this named parameter in
         order to determine which is the first registers used to pass
         anonymous arguments.  */
      int size;

      if (mode == BLKmode)
	size = int_size_in_bytes (type);
      else
	size = GET_MODE_SIZE (mode);

      first_anon_arg =
	*cum + LM32_FIRST_ARG_REG +
	((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
    }

  if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
    {
      int first_reg_offset = first_anon_arg;
      int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
      rtx regblock;

      /* Store the remaining argument registers as a block at the
	 incoming argument pointer.  */
      regblock = gen_rtx_MEM (BLKmode,
			      plus_constant (Pmode, arg_pointer_rtx,
					     FIRST_PARM_OFFSET (0)));
      move_block_from_reg (first_reg_offset, regblock, size);

      *pretend_size = size * UNITS_PER_WORD;
    }
}
731 
732 /* Override command line options.  */
733 static void
734 lm32_option_override (void)
735 {
736   /* We must have sign-extend enabled if barrel-shift isn't.  */
737   if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
738     target_flags |= MASK_SIGN_EXTEND_ENABLED;
739 }
740 
741 /* Return nonzero if this function is known to have a null epilogue.
742    This allows the optimizer to omit jumps to jumps if no stack
743    was created.  */
744 int
745 lm32_can_use_return (void)
746 {
747   if (!reload_completed)
748     return 0;
749 
750   if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
751     return 0;
752 
753   if (lm32_compute_frame_size (get_frame_size ()) != 0)
754     return 0;
755 
756   return 1;
757 }
758 
759 /* Support function to determine the return address of the function
760    'count' frames back up the stack.  */
761 rtx
762 lm32_return_addr_rtx (int count, rtx frame)
763 {
764   rtx r;
765   if (count == 0)
766     {
767       if (!df_regs_ever_live_p (RA_REGNUM))
768 	r = gen_rtx_REG (Pmode, RA_REGNUM);
769       else
770 	{
771 	  r = gen_rtx_MEM (Pmode,
772 			   gen_rtx_PLUS (Pmode, frame,
773 					 GEN_INT (-2 * UNITS_PER_WORD)));
774 	  set_mem_alias_set (r, get_frame_alias_set ());
775 	}
776     }
777   else if (flag_omit_frame_pointer)
778     r = NULL_RTX;
779   else
780     {
781       r = gen_rtx_MEM (Pmode,
782 		       gen_rtx_PLUS (Pmode, frame,
783 				     GEN_INT (-2 * UNITS_PER_WORD)));
784       set_mem_alias_set (r, get_frame_alias_set ());
785     }
786   return r;
787 }
788 
/* Return true if EXP should be placed in the small data section.  */

static bool
lm32_in_small_data_p (const_tree exp)
{
  /* We want to merge strings, so we never consider them small data.  */
  if (TREE_CODE (exp) == STRING_CST)
    return false;

  /* Functions are never in the small data area.  Duh.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    return false;

  /* A variable with an explicit section attribute is small data only
     when placed in .sdata or .sbss.  Note: a VAR_DECL whose section is
     neither falls through to `return false' without the size check.  */
  if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
    {
      const char *section = DECL_SECTION_NAME (exp);
      if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
	return true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));

      /* If this is an incomplete type with size 0, then we can't put it
         in sdata because it might be too big when completed.  */
      if (size > 0 && size <= g_switch_value)
	return true;
    }

  return false;
}
820 
/* Emit straight-line code to move LENGTH bytes from SRC to DEST with
   the given ALIGNMENT (in bytes).  Assume that the areas do not
   overlap.  */

static void
lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
			HOST_WIDE_INT alignment)
{
  HOST_WIDE_INT offset, delta;
  unsigned HOST_WIDE_INT bits;
  int i;
  machine_mode mode;
  rtx *regs;

  /* Work out how many bits to move at a time: the largest chunk the
     alignment permits.  */
  switch (alignment)
    {
    case 1:
      bits = 8;
      break;
    case 2:
      bits = 16;
      break;
    default:
      bits = 32;
      break;
    }

  mode = int_mode_for_size (bits, 0).require ();
  delta = bits / BITS_PER_UNIT;

  /* Allocate a buffer for the temporary registers.  */
  regs = XALLOCAVEC (rtx, length / delta);

  /* Load as many BITS-sized chunks as possible.  All loads are emitted
     before any store so the scheduler can overlap them.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    {
      regs[i] = gen_reg_rtx (mode);
      emit_move_insn (regs[i], adjust_address (src, mode, offset));
    }

  /* Copy the chunks to the destination.  */
  for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
    emit_move_insn (adjust_address (dest, mode, offset), regs[i]);

  /* Mop up any left-over bytes with a piecewise move of the tail.  */
  if (offset < length)
    {
      src = adjust_address (src, BLKmode, offset);
      dest = adjust_address (dest, BLKmode, offset);
      move_by_pieces (dest, src, length - offset,
		      MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), RETURN_BEGIN);
    }
}
874 
875 /* Expand string/block move operations.
876 
877    operands[0] is the pointer to the destination.
878    operands[1] is the pointer to the source.
879    operands[2] is the number of bytes to move.
880    operands[3] is the alignment.  */
881 
882 int
883 lm32_expand_block_move (rtx * operands)
884 {
885   if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
886     {
887       lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
888 			      INTVAL (operands[3]));
889       return 1;
890     }
891   return 0;
892 }
893 
/* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
   isn't protected by a PIC unspec.  Walks the rtx recursively.  */
int
nonpic_symbol_mentioned_p (rtx x)
{
  const char *fmt;
  int i;

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
      || GET_CODE (x) == PC)
    return 1;

  /* We don't want to look into the possible MEM location of a
     CONST_DOUBLE, since we're not going to use it, in general.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return 0;

  /* An UNSPEC wrapper marks the symbol as PIC-protected.  */
  if (GET_CODE (x) == UNSPEC)
    return 0;

  /* Recurse over all sub-expressions ('e') and vectors ('E') of X.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}
931 
932 /* Compute a (partial) cost for rtx X.  Return true if the complete
933    cost has been computed, and false if subexpressions should be
934    scanned.  In either case, *TOTAL contains the cost result.  */
935 
936 static bool
937 lm32_rtx_costs (rtx x, machine_mode mode, int outer_code,
938 		int opno ATTRIBUTE_UNUSED, int *total, bool speed)
939 {
940   int code = GET_CODE (x);
941   bool small_mode;
942 
943   const int arithmetic_latency = 1;
944   const int shift_latency = 1;
945   const int compare_latency = 2;
946   const int multiply_latency = 3;
947   const int load_latency = 3;
948   const int libcall_size_cost = 5;
949 
950   /* Determine if we can handle the given mode size in a single instruction.  */
951   small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);
952 
953   switch (code)
954     {
955 
956     case PLUS:
957     case MINUS:
958     case AND:
959     case IOR:
960     case XOR:
961     case NOT:
962     case NEG:
963       if (!speed)
964 	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
965       else
966 	*total =
967 	  COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
968       break;
969 
970     case COMPARE:
971       if (small_mode)
972 	{
973 	  if (!speed)
974 	    *total = COSTS_N_INSNS (1);
975 	  else
976 	    *total = COSTS_N_INSNS (compare_latency);
977 	}
978       else
979 	{
980 	  /* FIXME. Guessing here.  */
981 	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
982 	}
983       break;
984 
985     case ASHIFT:
986     case ASHIFTRT:
987     case LSHIFTRT:
988       if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
989 	{
990 	  if (!speed)
991 	    *total = COSTS_N_INSNS (1);
992 	  else
993 	    *total = COSTS_N_INSNS (shift_latency);
994 	}
995       else if (TARGET_BARREL_SHIFT_ENABLED)
996 	{
997 	  /* FIXME: Guessing here.  */
998 	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
999 	}
1000       else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
1001 	{
1002 	  *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
1003 	}
1004       else
1005 	{
1006 	  /* Libcall.  */
1007 	  if (!speed)
1008 	    *total = COSTS_N_INSNS (libcall_size_cost);
1009 	  else
1010 	    *total = COSTS_N_INSNS (100);
1011 	}
1012       break;
1013 
1014     case MULT:
1015       if (TARGET_MULTIPLY_ENABLED && small_mode)
1016 	{
1017 	  if (!speed)
1018 	    *total = COSTS_N_INSNS (1);
1019 	  else
1020 	    *total = COSTS_N_INSNS (multiply_latency);
1021 	}
1022       else
1023 	{
1024 	  /* Libcall.  */
1025 	  if (!speed)
1026 	    *total = COSTS_N_INSNS (libcall_size_cost);
1027 	  else
1028 	    *total = COSTS_N_INSNS (100);
1029 	}
1030       break;
1031 
1032     case DIV:
1033     case MOD:
1034     case UDIV:
1035     case UMOD:
1036       if (TARGET_DIVIDE_ENABLED && small_mode)
1037 	{
1038 	  if (!speed)
1039 	    *total = COSTS_N_INSNS (1);
1040 	  else
1041 	    {
1042 	      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1043 		{
1044 		  int cycles = 0;
1045 		  unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
1046 
1047 		  while (i)
1048 		    {
1049 		      i >>= 2;
1050 		      cycles++;
1051 		    }
1052 		  if (IN_RANGE (i, 0, 65536))
1053 		    *total = COSTS_N_INSNS (1 + 1 + cycles);
1054 		  else
1055 		    *total = COSTS_N_INSNS (2 + 1 + cycles);
1056 		  return true;
1057 		}
1058 	      else if (GET_CODE (XEXP (x, 1)) == REG)
1059 		{
1060 		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1061 		  return true;
1062 		}
1063 	      else
1064 		{
1065 		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1066 		  return false;
1067 		}
1068 	    }
1069 	}
1070       else
1071 	{
1072 	  /* Libcall.  */
1073 	  if (!speed)
1074 	    *total = COSTS_N_INSNS (libcall_size_cost);
1075 	  else
1076 	    *total = COSTS_N_INSNS (100);
1077 	}
1078       break;
1079 
1080     case HIGH:
1081     case LO_SUM:
1082       if (!speed)
1083 	*total = COSTS_N_INSNS (1);
1084       else
1085 	*total = COSTS_N_INSNS (arithmetic_latency);
1086       break;
1087 
1088     case ZERO_EXTEND:
1089       if (MEM_P (XEXP (x, 0)))
1090 	*total = COSTS_N_INSNS (0);
1091       else if (small_mode)
1092 	{
1093 	  if (!speed)
1094 	    *total = COSTS_N_INSNS (1);
1095 	  else
1096 	    *total = COSTS_N_INSNS (arithmetic_latency);
1097 	}
1098       else
1099 	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
1100       break;
1101 
1102     case CONST_INT:
1103       {
1104 	switch (outer_code)
1105 	  {
1106 	  case HIGH:
1107 	  case LO_SUM:
1108 	    *total = COSTS_N_INSNS (0);
1109 	    return true;
1110 
1111 	  case AND:
1112 	  case XOR:
1113 	  case IOR:
1114 	  case ASHIFT:
1115 	  case ASHIFTRT:
1116 	  case LSHIFTRT:
1117 	  case ROTATE:
1118 	  case ROTATERT:
1119 	    if (satisfies_constraint_L (x))
1120 	      *total = COSTS_N_INSNS (0);
1121 	    else
1122 	      *total = COSTS_N_INSNS (2);
1123 	    return true;
1124 
1125 	  case SET:
1126 	  case PLUS:
1127 	  case MINUS:
1128 	  case COMPARE:
1129 	    if (satisfies_constraint_K (x))
1130 	      *total = COSTS_N_INSNS (0);
1131 	    else
1132 	      *total = COSTS_N_INSNS (2);
1133 	    return true;
1134 
1135 	  case MULT:
1136 	    if (TARGET_MULTIPLY_ENABLED)
1137 	      {
1138 	        if (satisfies_constraint_K (x))
1139 	         *total = COSTS_N_INSNS (0);
1140 	        else
1141 	          *total = COSTS_N_INSNS (2);
1142 		return true;
1143 	      }
1144 	    /* Fall through.  */
1145 
1146 	  default:
1147             if (satisfies_constraint_K (x))
1148 	      *total = COSTS_N_INSNS (1);
1149 	    else
1150 	      *total = COSTS_N_INSNS (2);
1151 	    return true;
1152 	  }
1153       }
1154 
1155     case SYMBOL_REF:
1156     case CONST:
1157       switch (outer_code)
1158 	{
1159 	case HIGH:
1160 	case LO_SUM:
1161 	  *total = COSTS_N_INSNS (0);
1162 	  return true;
1163 
1164 	case MEM:
1165 	case SET:
1166 	  if (g_switch_value)
1167 	    {
1168 	      *total = COSTS_N_INSNS (0);
1169 	      return true;
1170 	    }
1171 	  break;
1172 	}
1173       /* Fall through.  */
1174 
1175     case LABEL_REF:
1176     case CONST_DOUBLE:
1177       *total = COSTS_N_INSNS (2);
1178       return true;
1179 
1180     case SET:
1181       *total = COSTS_N_INSNS (1);
1182       break;
1183 
1184     case MEM:
1185       if (!speed)
1186 	*total = COSTS_N_INSNS (1);
1187       else
1188 	*total = COSTS_N_INSNS (load_latency);
1189       break;
1190 
1191     }
1192 
1193   return false;
1194 }
1195 
1196 /* Implement TARGET_CAN_ELIMINATE.  */
1197 
1198 bool
1199 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1200 {
1201   return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
1202 }
1203 
1204 /* Implement TARGET_LEGITIMATE_ADDRESS_P.  */
1205 
1206 static bool
1207 lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
1208 {
1209    /* (rM) */
1210   if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
1211     return true;
1212   if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
1213     return true;
1214 
1215   /* (rM)+literal) */
1216   if (GET_CODE (x) == PLUS
1217      && REG_P (XEXP (x, 0))
1218      && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
1219          || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
1220      && GET_CODE (XEXP (x, 1)) == CONST_INT
1221      && satisfies_constraint_K (XEXP ((x), 1)))
1222     return true;
1223 
1224   /* gp(sym)  */
1225   if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
1226     return true;
1227 
1228   return false;
1229 }
1230 
1231 /* Check a move is not memory to memory.  */
1232 
1233 bool
1234 lm32_move_ok (machine_mode mode, rtx operands[2]) {
1235   if (memory_operand (operands[0], mode))
1236     return register_or_zero_operand (operands[1], mode);
1237   return true;
1238 }
1239 
1240 /* Implement TARGET_HARD_REGNO_MODE_OK.  */
1241 
1242 static bool
1243 lm32_hard_regno_mode_ok (unsigned int regno, machine_mode)
1244 {
1245   return G_REG_P (regno);
1246 }
1247 
1248 /* Implement TARGET_MODES_TIEABLE_P.  */
1249 
1250 static bool
1251 lm32_modes_tieable_p (machine_mode mode1, machine_mode mode2)
1252 {
1253   return (GET_MODE_CLASS (mode1) == MODE_INT
1254 	  && GET_MODE_CLASS (mode2) == MODE_INT
1255 	  && GET_MODE_SIZE (mode1) <= UNITS_PER_WORD
1256 	  && GET_MODE_SIZE (mode2) <= UNITS_PER_WORD);
1257 }
1258 
1259 /* Implement TARGET_STARTING_FRAME_OFFSET.  */
1260 
static HOST_WIDE_INT
lm32_starting_frame_offset (void)
{
  /* The frame grows downward and the first automatic variable is
     placed one word above the frame base.  NOTE(review): presumably
     this reserves the word at the frame base for the target's frame
     layout (e.g. a saved-register slot) -- confirm against the
     prologue/frame-size code earlier in this file.  */
  return UNITS_PER_WORD;
}
1266