1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2    Copyright 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3    Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "insn-attr.h"
35 #include "insn-codes.h"
36 #include "recog.h"
37 #include "output.h"
38 #include "tree.h"
39 #include "expr.h"
40 #include "flags.h"
41 #include "reload.h"
42 #include "tm_p.h"
43 #include "function.h"
44 #include "toplev.h"
45 #include "optabs.h"
46 #include "libfuncs.h"
47 #include "ggc.h"
48 #include "target.h"
49 #include "target-def.h"
50 #include "langhooks.h"
51 #include "tree-gimple.h"
52 
53 
54 /* Enumeration for all of the relational tests, so that we can build
55    arrays indexed by the test type, and not worry about the order
56    of EQ, NE, etc.  */
57 
58 enum internal_test
59 {
60   ITEST_EQ,
61   ITEST_NE,
62   ITEST_GT,
63   ITEST_GE,
64   ITEST_LT,
65   ITEST_LE,
66   ITEST_GTU,
67   ITEST_GEU,
68   ITEST_LTU,
69   ITEST_LEU,
70   ITEST_MAX
71 };
72 
73 /* Cached operands, and operator to compare for use in set/branch on
74    condition codes.  */
75 rtx branch_cmp[2];
76 
77 /* What type of branch to use.  */
78 enum cmp_type branch_type;
79 
80 /* Array giving truth value on whether or not a given hard register
81    can support a given mode.  */
82 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
83 
84 /* Current frame size calculated by compute_frame_size.  */
85 unsigned xtensa_current_frame_size;
86 
87 /* Largest block move to handle in-line.  */
88 #define LARGEST_MOVE_RATIO 15
89 
90 /* Define the structure for the machine field in struct function.  */
91 struct machine_function GTY(())
92 {
93   int accesses_prev_frame;
94   bool need_a7_copy;
95   bool vararg_a7;
96   rtx set_frame_ptr_insn;
97 };
98 
99 /* Vector, indexed by hard register number, which contains 1 for a
100    register that is allowable in a candidate for leaf function
101    treatment.  */
102 
103 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
104 {
105   1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
106   1, 1, 1,
107   1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
108   1
109 };
110 
111 /* Map hard register number to register class */
112 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
113 {
114   RL_REGS,	SP_REG,		RL_REGS,	RL_REGS,
115   RL_REGS,	RL_REGS,	RL_REGS,	GR_REGS,
116   RL_REGS,	RL_REGS,	RL_REGS,	RL_REGS,
117   RL_REGS,	RL_REGS,	RL_REGS,	RL_REGS,
118   AR_REGS,	AR_REGS,	BR_REGS,
119   FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
120   FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
121   FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
122   FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
123   ACC_REG,
124 };
125 
126 /* Map register constraint character to register class.  */
127 enum reg_class xtensa_char_to_class[256] =
128 {
129   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
130   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
131   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
132   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
133   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
134   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
135   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
136   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
137   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
138   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
139   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
140   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
141   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
142   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
143   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
144   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
145   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
146   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
147   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
148   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
149   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
150   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
151   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
152   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
153   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
154   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
155   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
156   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
157   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
158   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
159   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
160   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
161   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
162   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
163   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
164   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
165   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
166   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
167   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
168   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
169   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
170   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
171   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
172   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
173   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
174   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
175   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
176   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
177   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
178   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
179   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
180   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
181   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
182   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
183   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
184   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
185   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
186   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
187   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
188   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
189   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
190   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
191   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
192   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
193 };
194 
195 static enum internal_test map_test_to_internal_test (enum rtx_code);
196 static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
197 static rtx gen_float_relational (enum rtx_code, rtx, rtx);
198 static rtx gen_conditional_move (rtx);
199 static rtx fixup_subreg_mem (rtx);
200 static struct machine_function * xtensa_init_machine_status (void);
201 static bool xtensa_return_in_msb (tree);
202 static void printx (FILE *, signed int);
203 static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
204 static rtx xtensa_builtin_saveregs (void);
205 static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
206 							int) ATTRIBUTE_UNUSED;
207 static section *xtensa_select_rtx_section (enum machine_mode, rtx,
208 					   unsigned HOST_WIDE_INT);
209 static bool xtensa_rtx_costs (rtx, int, int, int *);
210 static tree xtensa_build_builtin_va_list (void);
211 static bool xtensa_return_in_memory (tree, tree);
212 static tree xtensa_gimplify_va_arg_expr (tree, tree, tree *, tree *);
213 
214 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
215   REG_ALLOC_ORDER;
216 
217 
218 /* This macro generates the assembly code for function exit,
219    on machines that need it.  If FUNCTION_EPILOGUE is not defined
220    then individual return instructions are generated for each
221    return statement.  Args are same as for FUNCTION_PROLOGUE.  */
222 
223 #undef TARGET_ASM_FUNCTION_EPILOGUE
224 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
225 
226 /* These hooks specify assembly directives for creating certain kinds
227    of integer object.  */
228 
229 #undef TARGET_ASM_ALIGNED_SI_OP
230 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
231 
232 #undef TARGET_ASM_SELECT_RTX_SECTION
233 #define TARGET_ASM_SELECT_RTX_SECTION  xtensa_select_rtx_section
234 
235 #undef TARGET_DEFAULT_TARGET_FLAGS
236 #define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
237 
238 #undef TARGET_RTX_COSTS
239 #define TARGET_RTX_COSTS xtensa_rtx_costs
240 #undef TARGET_ADDRESS_COST
241 #define TARGET_ADDRESS_COST hook_int_rtx_0
242 
243 #undef TARGET_BUILD_BUILTIN_VA_LIST
244 #define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list
245 
246 #undef TARGET_PROMOTE_FUNCTION_ARGS
247 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
248 #undef TARGET_PROMOTE_FUNCTION_RETURN
249 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
250 #undef TARGET_PROMOTE_PROTOTYPES
251 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
252 
253 #undef TARGET_RETURN_IN_MEMORY
254 #define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
255 #undef TARGET_SPLIT_COMPLEX_ARG
256 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
257 #undef TARGET_MUST_PASS_IN_STACK
258 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
259 
260 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
261 #define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs
262 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
263 #define TARGET_GIMPLIFY_VA_ARG_EXPR xtensa_gimplify_va_arg_expr
264 
265 #undef TARGET_RETURN_IN_MSB
266 #define TARGET_RETURN_IN_MSB xtensa_return_in_msb
267 
268 struct gcc_target targetm = TARGET_INITIALIZER;
269 
270 
271 /*
272  * Functions to test Xtensa immediate operand validity.
273  */
274 
275 bool
276 xtensa_simm8 (HOST_WIDE_INT v)
277 {
278   return v >= -128 && v <= 127;
279 }
280 
281 
282 bool
283 xtensa_simm8x256 (HOST_WIDE_INT v)
284 {
285   return (v & 255) == 0 && (v >= -32768 && v <= 32512);
286 }
287 
288 
289 bool
290 xtensa_simm12b (HOST_WIDE_INT v)
291 {
292   return v >= -2048 && v <= 2047;
293 }
294 
295 
296 static bool
297 xtensa_uimm8 (HOST_WIDE_INT v)
298 {
299   return v >= 0 && v <= 255;
300 }
301 
302 
303 static bool
304 xtensa_uimm8x2 (HOST_WIDE_INT v)
305 {
306   return (v & 1) == 0 && (v >= 0 && v <= 510);
307 }
308 
309 
310 static bool
311 xtensa_uimm8x4 (HOST_WIDE_INT v)
312 {
313   return (v & 3) == 0 && (v >= 0 && v <= 1020);
314 }
315 
316 
317 static bool
318 xtensa_b4const (HOST_WIDE_INT v)
319 {
320   switch (v)
321     {
322     case -1:
323     case 1:
324     case 2:
325     case 3:
326     case 4:
327     case 5:
328     case 6:
329     case 7:
330     case 8:
331     case 10:
332     case 12:
333     case 16:
334     case 32:
335     case 64:
336     case 128:
337     case 256:
338       return true;
339     }
340   return false;
341 }
342 
343 
344 bool
345 xtensa_b4const_or_zero (HOST_WIDE_INT v)
346 {
347   if (v == 0)
348     return true;
349   return xtensa_b4const (v);
350 }
351 
352 
353 bool
354 xtensa_b4constu (HOST_WIDE_INT v)
355 {
356   switch (v)
357     {
358     case 32768:
359     case 65536:
360     case 2:
361     case 3:
362     case 4:
363     case 5:
364     case 6:
365     case 7:
366     case 8:
367     case 10:
368     case 12:
369     case 16:
370     case 32:
371     case 64:
372     case 128:
373     case 256:
374       return true;
375     }
376   return false;
377 }
378 
379 
380 bool
381 xtensa_mask_immediate (HOST_WIDE_INT v)
382 {
383 #define MAX_MASK_SIZE 16
384   int mask_size;
385 
386   for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
387     {
388       if ((v & 1) == 0)
389 	return false;
390       v = v >> 1;
391       if (v == 0)
392 	return true;
393     }
394 
395   return false;
396 }
397 
398 
399 bool
400 xtensa_const_ok_for_letter_p (HOST_WIDE_INT v, int c)
401 {
402   switch (c)
403     {
404     case 'I': return xtensa_simm12b (v);
405     case 'J': return xtensa_simm8 (v);
406     case 'K': return (v == 0 || xtensa_b4const (v));
407     case 'L': return xtensa_b4constu (v);
408     case 'M': return (v >= -32 && v <= 95);
409     case 'N': return xtensa_simm8x256 (v);
410     case 'O': return (v == -1 || (v >= 1 && v <= 15));
411     case 'P': return xtensa_mask_immediate (v);
412     default: break;
413     }
414   return false;
415 }
416 
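/* Illustrative sketch (not part of the original port): sample values
   for the constraint letters handled above.  The asserts simply
   restate the ranges implemented in this file and are kept under
   "#if 0" so they have no effect on the build.  */
#if 0
static void
xtensa_constraint_letter_examples (void)
{
  gcc_assert (xtensa_const_ok_for_letter_p (-2048, 'I'));  /* 12-bit signed */
  gcc_assert (!xtensa_const_ok_for_letter_p (2048, 'I'));
  gcc_assert (xtensa_const_ok_for_letter_p (127, 'J'));    /* 8-bit signed */
  gcc_assert (xtensa_const_ok_for_letter_p (256, 'K'));    /* b4const or zero */
  gcc_assert (xtensa_const_ok_for_letter_p (0xffff, 'P')); /* low-order mask */
}
#endif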
417 
418 /* This is just like the standard true_regnum() function except that it
419    works even when reg_renumber is not initialized.  */
420 
421 int
422 xt_true_regnum (rtx x)
423 {
424   if (GET_CODE (x) == REG)
425     {
426       if (reg_renumber
427 	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
428 	  && reg_renumber[REGNO (x)] >= 0)
429 	return reg_renumber[REGNO (x)];
430       return REGNO (x);
431     }
432   if (GET_CODE (x) == SUBREG)
433     {
434       int base = xt_true_regnum (SUBREG_REG (x));
435       if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
436         return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
437                                            GET_MODE (SUBREG_REG (x)),
438                                            SUBREG_BYTE (x), GET_MODE (x));
439     }
440   return -1;
441 }
442 
443 
444 int
445 xtensa_valid_move (enum machine_mode mode, rtx *operands)
446 {
447   /* Either the destination or source must be a register, and the
448      MAC16 accumulator doesn't count.  */
449 
450   if (register_operand (operands[0], mode))
451     {
452       int dst_regnum = xt_true_regnum (operands[0]);
453 
454       /* The stack pointer can only be assigned with a MOVSP opcode.  */
455       if (dst_regnum == STACK_POINTER_REGNUM)
456 	return (mode == SImode
457 		&& register_operand (operands[1], mode)
458 		&& !ACC_REG_P (xt_true_regnum (operands[1])));
459 
460       if (!ACC_REG_P (dst_regnum))
461 	return true;
462     }
463   if (register_operand (operands[1], mode))
464     {
465       int src_regnum = xt_true_regnum (operands[1]);
466       if (!ACC_REG_P (src_regnum))
467 	return true;
468     }
469   return FALSE;
470 }
471 
472 
473 int
474 smalloffset_mem_p (rtx op)
475 {
476   if (GET_CODE (op) == MEM)
477     {
478       rtx addr = XEXP (op, 0);
479       if (GET_CODE (addr) == REG)
480 	return REG_OK_FOR_BASE_P (addr);
481       if (GET_CODE (addr) == PLUS)
482 	{
483 	  rtx offset = XEXP (addr, 0);
484 	  HOST_WIDE_INT val;
485 	  if (GET_CODE (offset) != CONST_INT)
486 	    offset = XEXP (addr, 1);
487 	  if (GET_CODE (offset) != CONST_INT)
488 	    return FALSE;
489 
490 	  val = INTVAL (offset);
491 	  return (val & 3) == 0 && (val >= 0 && val <= 60);
492 	}
493     }
494   return FALSE;
495 }
496 
497 
498 int
499 constantpool_address_p (rtx addr)
500 {
501   rtx sym = addr;
502 
503   if (GET_CODE (addr) == CONST)
504     {
505       rtx offset;
506 
507       /* Only handle (PLUS (SYM, OFFSET)) form.  */
508       addr = XEXP (addr, 0);
509       if (GET_CODE (addr) != PLUS)
510 	return FALSE;
511 
512       /* Make sure the address is word aligned.  */
513       offset = XEXP (addr, 1);
514       if ((GET_CODE (offset) != CONST_INT)
515 	  || ((INTVAL (offset) & 3) != 0))
516 	return FALSE;
517 
518       sym = XEXP (addr, 0);
519     }
520 
521   if ((GET_CODE (sym) == SYMBOL_REF)
522       && CONSTANT_POOL_ADDRESS_P (sym))
523     return TRUE;
524   return FALSE;
525 }
526 
527 
528 int
529 constantpool_mem_p (rtx op)
530 {
531   if (GET_CODE (op) == SUBREG)
532     op = SUBREG_REG (op);
533   if (GET_CODE (op) == MEM)
534     return constantpool_address_p (XEXP (op, 0));
535   return FALSE;
536 }
537 
538 
539 void
540 xtensa_extend_reg (rtx dst, rtx src)
541 {
542   rtx temp = gen_reg_rtx (SImode);
543   rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
544 
545   /* Generate paradoxical subregs as needed so that the modes match.  */
546   src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
547   dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
548 
549   emit_insn (gen_ashlsi3 (temp, src, shift));
550   emit_insn (gen_ashrsi3 (dst, temp, shift));
551 }
552 
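/* For example, sign-extending an HImode source this way uses a shift
   count of 32 - 16 = 16: the value is shifted left 16 bits and then
   arithmetically right 16 bits.  */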
553 
554 bool
555 xtensa_mem_offset (unsigned v, enum machine_mode mode)
556 {
557   switch (mode)
558     {
559     case BLKmode:
560       /* Handle the worst case for block moves.  See xtensa_expand_block_move
561 	 where we emit an optimized block move operation if the block can be
562 	 moved in < "move_ratio" pieces.  The worst case is when the block is
563 	 aligned but has a size of (3 mod 4) (does this happen?) so that the
564 	 last piece requires a byte load/store.  */
565       return (xtensa_uimm8 (v)
566 	      && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
567 
568     case QImode:
569       return xtensa_uimm8 (v);
570 
571     case HImode:
572       return xtensa_uimm8x2 (v);
573 
574     case DFmode:
575       return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
576 
577     default:
578       break;
579     }
580 
581   return xtensa_uimm8x4 (v);
582 }
583 
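/* Worked examples of the ranges accepted above: QImode offsets are
   0..255; HImode offsets are even values up to 510; SImode (the
   default case) allows multiples of 4 up to 1020; DFmode additionally
   requires V+4 to be in range, so 1016 is the largest accepted
   offset.  */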
584 
585 bool
586 xtensa_extra_constraint (rtx op, int c)
587 {
588   /* Allow pseudo registers during reload.  */
589   if (GET_CODE (op) != MEM)
590     return (c >= 'R' && c <= 'U'
591 	    && reload_in_progress && GET_CODE (op) == REG
592 	    && REGNO (op) >= FIRST_PSEUDO_REGISTER);
593 
594   switch (c)
595     {
596     case 'R': return smalloffset_mem_p (op);
597     case 'T': return !TARGET_CONST16 && constantpool_mem_p (op);
598     case 'U': return !constantpool_mem_p (op);
599     default: break;
600     }
601   return false;
602 }
603 
604 
605 /* Make normal rtx_code into something we can index from an array.  */
606 
607 static enum internal_test
608 map_test_to_internal_test (enum rtx_code test_code)
609 {
610   enum internal_test test = ITEST_MAX;
611 
612   switch (test_code)
613     {
614     default:			break;
615     case EQ:  test = ITEST_EQ;  break;
616     case NE:  test = ITEST_NE;  break;
617     case GT:  test = ITEST_GT;  break;
618     case GE:  test = ITEST_GE;  break;
619     case LT:  test = ITEST_LT;  break;
620     case LE:  test = ITEST_LE;  break;
621     case GTU: test = ITEST_GTU; break;
622     case GEU: test = ITEST_GEU; break;
623     case LTU: test = ITEST_LTU; break;
624     case LEU: test = ITEST_LEU; break;
625     }
626 
627   return test;
628 }
629 
630 
631 /* Generate the code to compare two integer values.  The return value is
632    the comparison expression.  */
633 
634 static rtx
635 gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
636 		    rtx cmp0, /* first operand to compare */
637 		    rtx cmp1, /* second operand to compare */
638 		    int *p_invert /* whether branch needs to reverse test */)
639 {
640   struct cmp_info
641   {
642     enum rtx_code test_code;	/* test code to use in insn */
643     bool (*const_range_p) (HOST_WIDE_INT); /* range check function */
644     int const_add;		/* constant to add (convert LE -> LT) */
645     int reverse_regs;		/* reverse registers in test */
646     int invert_const;		/* != 0 if invert value if cmp1 is constant */
647     int invert_reg;		/* != 0 if invert value if cmp1 is register */
648     int unsignedp;		/* != 0 for unsigned comparisons.  */
649   };
650 
651   static struct cmp_info info[ (int)ITEST_MAX ] = {
652 
653     { EQ,	xtensa_b4const_or_zero,	0, 0, 0, 0, 0 },	/* EQ  */
654     { NE,	xtensa_b4const_or_zero,	0, 0, 0, 0, 0 },	/* NE  */
655 
656     { LT,	xtensa_b4const_or_zero,	1, 1, 1, 0, 0 },	/* GT  */
657     { GE,	xtensa_b4const_or_zero,	0, 0, 0, 0, 0 },	/* GE  */
658     { LT,	xtensa_b4const_or_zero,	0, 0, 0, 0, 0 },	/* LT  */
659     { GE,	xtensa_b4const_or_zero,	1, 1, 1, 0, 0 },	/* LE  */
660 
661     { LTU,	xtensa_b4constu,	1, 1, 1, 0, 1 },	/* GTU */
662     { GEU,	xtensa_b4constu,	0, 0, 0, 0, 1 },	/* GEU */
663     { LTU,	xtensa_b4constu,	0, 0, 0, 0, 1 },	/* LTU */
664     { GEU,	xtensa_b4constu,	1, 1, 1, 0, 1 },	/* LEU */
665   };
666 
667   enum internal_test test;
668   enum machine_mode mode;
669   struct cmp_info *p_info;
670 
671   test = map_test_to_internal_test (test_code);
672   gcc_assert (test != ITEST_MAX);
673 
674   p_info = &info[ (int)test ];
675 
676   mode = GET_MODE (cmp0);
677   if (mode == VOIDmode)
678     mode = GET_MODE (cmp1);
679 
680   /* Make sure we can handle any constants given to us.  */
681   if (GET_CODE (cmp1) == CONST_INT)
682     {
683       HOST_WIDE_INT value = INTVAL (cmp1);
684       unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
685 
686       /* If the immediate overflows or does not fit in the immediate field,
687 	 spill it to a register.  */
688 
689       if ((p_info->unsignedp ?
690 	   (uvalue + p_info->const_add > uvalue) :
691 	   (value + p_info->const_add > value)) != (p_info->const_add > 0))
692 	{
693 	  cmp1 = force_reg (mode, cmp1);
694 	}
695       else if (!(p_info->const_range_p) (value + p_info->const_add))
696 	{
697 	  cmp1 = force_reg (mode, cmp1);
698 	}
699     }
700   else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
701     {
702       cmp1 = force_reg (mode, cmp1);
703     }
704 
705   /* See if we need to invert the result.  */
706   *p_invert = ((GET_CODE (cmp1) == CONST_INT)
707 	       ? p_info->invert_const
708 	       : p_info->invert_reg);
709 
710   /* Comparison to a constant may involve adding 1 (e.g., converting LE into LT).
711      Comparison between two registers may involve switching the operands.  */
712   if (GET_CODE (cmp1) == CONST_INT)
713     {
714       if (p_info->const_add != 0)
715 	cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
716 
717     }
718   else if (p_info->reverse_regs)
719     {
720       rtx temp = cmp0;
721       cmp0 = cmp1;
722       cmp1 = temp;
723     }
724 
725   return gen_rtx_fmt_ee (p_info->test_code, VOIDmode, cmp0, cmp1);
726 }
727 
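/* Example of what the table above produces: for (GT a 7), the constant
   case uses the LT entry with const_add 1, so we emit (LT a 8) and set
   *p_invert, i.e. a > 7 is tested as !(a < 8).  For (GT a b) with b in
   a register, the operands are swapped instead and (LT b a) is emitted
   with no inversion.  */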
728 
729 /* Generate the code to compare two float values.  The return value is
730    the comparison expression.  */
731 
732 static rtx
733 gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
734 		      rtx cmp0, /* first operand to compare */
735 		      rtx cmp1 /* second operand to compare */)
736 {
737   rtx (*gen_fn) (rtx, rtx, rtx);
738   rtx brtmp;
739   int reverse_regs, invert;
740 
741   switch (test_code)
742     {
743     case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
744     case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
745     case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
746     case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
747     case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
748     case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
749     default:
750       fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
751       reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
752     }
753 
754   if (reverse_regs)
755     {
756       rtx temp = cmp0;
757       cmp0 = cmp1;
758       cmp1 = temp;
759     }
760 
761   brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
762   emit_insn (gen_fn (brtmp, cmp0, cmp1));
763 
764   return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
765 }
766 
767 
768 void
769 xtensa_expand_conditional_branch (rtx *operands, enum rtx_code test_code)
770 {
771   enum cmp_type type = branch_type;
772   rtx cmp0 = branch_cmp[0];
773   rtx cmp1 = branch_cmp[1];
774   rtx cmp;
775   int invert;
776   rtx label1, label2;
777 
778   switch (type)
779     {
780     case CMP_DF:
781     default:
782       fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
783 
784     case CMP_SI:
785       invert = FALSE;
786       cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
787       break;
788 
789     case CMP_SF:
790       if (!TARGET_HARD_FLOAT)
791 	fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
792       invert = FALSE;
793       cmp = gen_float_relational (test_code, cmp0, cmp1);
794       break;
795     }
796 
797   /* Generate the branch.  */
798 
799   label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
800   label2 = pc_rtx;
801 
802   if (invert)
803     {
804       label2 = label1;
805       label1 = pc_rtx;
806     }
807 
808   emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
809 			       gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
810 						     label1,
811 						     label2)));
812 }
813 
814 
815 static rtx
816 gen_conditional_move (rtx cmp)
817 {
818   enum rtx_code code = GET_CODE (cmp);
819   rtx op0 = branch_cmp[0];
820   rtx op1 = branch_cmp[1];
821 
822   if (branch_type == CMP_SI)
823     {
824       /* Jump optimization calls get_condition() which canonicalizes
825 	 comparisons like (GE x <const>) to (GT x <const-1>).
826 	 Transform those comparisons back to GE, since that is the
827 	 comparison supported in Xtensa.  We shouldn't have to
828 	 transform <LE x const> comparisons, because neither
829 	 xtensa_expand_conditional_branch() nor get_condition() will
830 	 produce them.  */
831 
832       if ((code == GT) && (op1 == constm1_rtx))
833 	{
834 	  code = GE;
835 	  op1 = const0_rtx;
836 	}
837       cmp = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
838 
839       if (boolean_operator (cmp, VOIDmode))
840 	{
841 	  /* Swap the operands to make const0 second.  */
842 	  if (op0 == const0_rtx)
843 	    {
844 	      op0 = op1;
845 	      op1 = const0_rtx;
846 	    }
847 
848 	  /* If not comparing against zero, emit a comparison (subtract).  */
849 	  if (op1 != const0_rtx)
850 	    {
851 	      op0 = expand_binop (SImode, sub_optab, op0, op1,
852 				  0, 0, OPTAB_LIB_WIDEN);
853 	      op1 = const0_rtx;
854 	    }
855 	}
856       else if (branch_operator (cmp, VOIDmode))
857 	{
858 	  /* Swap the operands to make const0 second.  */
859 	  if (op0 == const0_rtx)
860 	    {
861 	      op0 = op1;
862 	      op1 = const0_rtx;
863 
864 	      switch (code)
865 		{
866 		case LT: code = GE; break;
867 		case GE: code = LT; break;
868 		default: gcc_unreachable ();
869 		}
870 	    }
871 
872 	  if (op1 != const0_rtx)
873 	    return 0;
874 	}
875       else
876 	return 0;
877 
878       return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
879     }
880 
881   if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
882     return gen_float_relational (code, op0, op1);
883 
884   return 0;
885 }
886 
887 
888 int
889 xtensa_expand_conditional_move (rtx *operands, int isflt)
890 {
891   rtx cmp;
892   rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
893 
894   if (!(cmp = gen_conditional_move (operands[1])))
895     return 0;
896 
897   if (isflt)
898     gen_fn = (branch_type == CMP_SI
899 	      ? gen_movsfcc_internal0
900 	      : gen_movsfcc_internal1);
901   else
902     gen_fn = (branch_type == CMP_SI
903 	      ? gen_movsicc_internal0
904 	      : gen_movsicc_internal1);
905 
906   emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
907 		     operands[2], operands[3], cmp));
908   return 1;
909 }
910 
911 
912 int
913 xtensa_expand_scc (rtx *operands)
914 {
915   rtx dest = operands[0];
916   rtx cmp = operands[1];
917   rtx one_tmp, zero_tmp;
918   rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
919 
920   if (!(cmp = gen_conditional_move (cmp)))
921     return 0;
922 
923   one_tmp = gen_reg_rtx (SImode);
924   zero_tmp = gen_reg_rtx (SImode);
925   emit_insn (gen_movsi (one_tmp, const_true_rtx));
926   emit_insn (gen_movsi (zero_tmp, const0_rtx));
927 
928   gen_fn = (branch_type == CMP_SI
929 	    ? gen_movsicc_internal0
930 	    : gen_movsicc_internal1);
931   emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
932   return 1;
933 }
934 
935 
936 /* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1].  MODE is
937    for the output, i.e., the input operands are twice as big as MODE.  */
938 
939 void
940 xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
941 {
942   switch (GET_CODE (operands[1]))
943     {
944     case REG:
945       operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
946       operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
947       break;
948 
949     case MEM:
950       operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
951       operands[2] = adjust_address (operands[1], mode, 0);
952       break;
953 
954     case CONST_INT:
955     case CONST_DOUBLE:
956       split_double (operands[1], &operands[2], &operands[3]);
957       break;
958 
959     default:
960       gcc_unreachable ();
961     }
962 
963   switch (GET_CODE (operands[0]))
964     {
965     case REG:
966       operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
967       operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
968       break;
969 
970     case MEM:
971       operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
972       operands[0] = adjust_address (operands[0], mode, 0);
973       break;
974 
975     default:
976       gcc_unreachable ();
977     }
978 }
979 
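/* For example, when splitting a DImode move into SImode halves: a
   register pair starting at a2 becomes a2 and a3, and a memory operand
   becomes two SImode references at byte offsets 0 and 4.  */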
980 
981 /* Emit insns to move operands[1] into operands[0].
982    Return 1 if we have written out everything that needs to be done to
983    do the move.  Otherwise, return 0 and the caller will emit the move
984    normally.  */
985 
986 int
987 xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
988 {
989   if (CONSTANT_P (operands[1])
990       && (GET_CODE (operands[1]) != CONST_INT
991 	  || !xtensa_simm12b (INTVAL (operands[1]))))
992     {
993       if (!TARGET_CONST16)
994 	operands[1] = force_const_mem (SImode, operands[1]);
995 
996       /* PC-relative loads are always SImode, and CONST16 is only
997 	 supported in the movsi pattern, so add a SUBREG for any other
998 	 (smaller) mode.  */
999 
1000       if (mode != SImode)
1001 	{
1002 	  if (register_operand (operands[0], mode))
1003 	    {
1004 	      operands[0] = simplify_gen_subreg (SImode, operands[0], mode, 0);
1005 	      emit_move_insn (operands[0], operands[1]);
1006 	      return 1;
1007 	    }
1008 	  else
1009 	    {
1010 	      operands[1] = force_reg (SImode, operands[1]);
1011 	      operands[1] = gen_lowpart_SUBREG (mode, operands[1]);
1012 	    }
1013 	}
1014     }
1015 
1016   if (!(reload_in_progress | reload_completed)
1017       && !xtensa_valid_move (mode, operands))
1018     operands[1] = force_reg (mode, operands[1]);
1019 
1020   operands[1] = xtensa_copy_incoming_a7 (operands[1]);
1021 
1022   /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1023      instruction won't be recognized after reload, so we remove the
1024      subreg and adjust mem accordingly.  */
1025   if (reload_in_progress)
1026     {
1027       operands[0] = fixup_subreg_mem (operands[0]);
1028       operands[1] = fixup_subreg_mem (operands[1]);
1029     }
1030   return 0;
1031 }
1032 
1033 
1034 static rtx
1035 fixup_subreg_mem (rtx x)
1036 {
1037   if (GET_CODE (x) == SUBREG
1038       && GET_CODE (SUBREG_REG (x)) == REG
1039       && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1040     {
1041       rtx temp =
1042 	gen_rtx_SUBREG (GET_MODE (x),
1043 			reg_equiv_mem [REGNO (SUBREG_REG (x))],
1044 			SUBREG_BYTE (x));
1045       x = alter_subreg (&temp);
1046     }
1047   return x;
1048 }
1049 
1050 
1051 /* Check if an incoming argument in a7 is expected to be used soon and
1052    if OPND is a register or register pair that includes a7.  If so,
1053    create a new pseudo and copy a7 into that pseudo at the very
1054    beginning of the function, followed by the special "set_frame_ptr"
1055    unspec_volatile insn.  The return value is either the original
1056    operand, if it is not a7, or the new pseudo containing a copy of
1057    the incoming argument.  This is necessary because the register
1058    allocator will ignore conflicts with a7 and may either assign some
1059    other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
1060    the incoming argument in a7.  By copying the argument out of a7 as
1061    the very first thing, and then immediately following that with an
1062    unspec_volatile to keep the scheduler away, we should avoid any
1063    problems.  Putting the set_frame_ptr insn at the beginning, with
1064    only the a7 copy before it, also makes it easier for the prologue
1065    expander to initialize the frame pointer after the a7 copy and to
1066    fix up the a7 copy to use the stack pointer instead of the frame
1067    pointer.  */
1068 
1069 rtx
1070 xtensa_copy_incoming_a7 (rtx opnd)
1071 {
1072   rtx entry_insns = 0;
1073   rtx reg, tmp;
1074   enum machine_mode mode;
1075 
1076   if (!cfun->machine->need_a7_copy)
1077     return opnd;
1078 
1079   /* This function should never be called again once a7 has been copied.  */
1080   gcc_assert (!cfun->machine->set_frame_ptr_insn);
1081 
1082   mode = GET_MODE (opnd);
1083 
1084   /* The operand using a7 may come in a later instruction, so just return
1085      the original operand if it doesn't use a7.  */
1086   reg = opnd;
1087   if (GET_CODE (reg) == SUBREG)
1088     {
1089       gcc_assert (SUBREG_BYTE (reg) == 0);
1090       reg = SUBREG_REG (reg);
1091     }
1092   if (GET_CODE (reg) != REG
1093       || REGNO (reg) > A7_REG
1094       || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG)
1095     return opnd;
1096 
1097   /* 1-word args will always be in a7; 2-word args in a6/a7.  */
1098   gcc_assert (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 == A7_REG);
1099 
1100   cfun->machine->need_a7_copy = false;
1101 
1102   /* Copy a7 to a new pseudo at the function entry.  Use gen_raw_REG to
1103      create the REG for a7 so that hard_frame_pointer_rtx is not used.  */
1104 
1105   push_to_sequence (entry_insns);
1106   tmp = gen_reg_rtx (mode);
1107 
1108   switch (mode)
1109     {
1110     case DFmode:
1111     case DImode:
1112       emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
1113 				     gen_rtx_REG (SImode, A7_REG - 1)));
1114       emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
1115 				     gen_raw_REG (SImode, A7_REG)));
1116       break;
1117     case SFmode:
1118       emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
1119       break;
1120     case SImode:
1121       emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1122       break;
1123     case HImode:
1124       emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1125       break;
1126     case QImode:
1127       emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1128       break;
1129     default:
1130       gcc_unreachable ();
1131     }
1132 
1133   cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());
1134   entry_insns = get_insns ();
1135   end_sequence ();
1136 
1137   if (cfun->machine->vararg_a7)
1138     {
1139       /* This is called from within xtensa_builtin_saveregs, so we're already
1140 	 inside a start_sequence that will be placed at the start of
1141 	 the function.  */
1142       emit_insn (entry_insns);
1143     }
1144   else
1145     {
1146       /* Put entry_insns after the NOTE that starts the function.  If
1147 	 this is inside a start_sequence, make the outer-level insn
1148 	 chain current, so the code is placed at the start of the
1149 	 function.  */
1150       push_topmost_sequence ();
1151       emit_insn_after (entry_insns, get_insns ());
1152       pop_topmost_sequence ();
1153     }
1154 
1155   return tmp;
1156 }
1157 
1158 
1159 /* Try to expand a block move operation to a sequence of RTL move
1160    instructions.  If not optimizing, or if the block size is not a
1161    constant, or if the block is too large, the expansion fails and GCC
1162    falls back to calling memcpy().
1163 
1164    operands[0] is the destination
1165    operands[1] is the source
1166    operands[2] is the length
1167    operands[3] is the alignment */
1168 
1169 int
1170 xtensa_expand_block_move (rtx *operands)
1171 {
1172   static const enum machine_mode mode_from_align[] =
1173   {
1174     VOIDmode, QImode, HImode, VOIDmode, SImode,
1175   };
1176 
1177   rtx dst_mem = operands[0];
1178   rtx src_mem = operands[1];
1179   HOST_WIDE_INT bytes, align;
1180   int num_pieces, move_ratio;
1181   rtx temp[2];
1182   enum machine_mode mode[2];
1183   int amount[2];
1184   bool active[2];
1185   int phase = 0;
1186   int next;
1187   int offset_ld = 0;
1188   int offset_st = 0;
1189   rtx x;
1190 
1191   /* If this is not a fixed size move, just call memcpy.  */
1192   if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1193     return 0;
1194 
1195   bytes = INTVAL (operands[2]);
1196   align = INTVAL (operands[3]);
1197 
1198   /* Anything to move?  */
1199   if (bytes <= 0)
1200     return 0;
1201 
1202   if (align > MOVE_MAX)
1203     align = MOVE_MAX;
1204 
1205   /* Decide whether to expand inline based on the optimization level.  */
1206   move_ratio = 4;
1207   if (optimize > 2)
1208     move_ratio = LARGEST_MOVE_RATIO;
1209   num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway.  */
1210   if (num_pieces > move_ratio)
1211     return 0;
1212 
1213   x = XEXP (dst_mem, 0);
1214   if (!REG_P (x))
1215     {
1216       x = force_reg (Pmode, x);
1217       dst_mem = replace_equiv_address (dst_mem, x);
1218     }
1219 
1220   x = XEXP (src_mem, 0);
1221   if (!REG_P (x))
1222     {
1223       x = force_reg (Pmode, x);
1224       src_mem = replace_equiv_address (src_mem, x);
1225     }
1226 
1227   active[0] = active[1] = false;
1228 
1229   do
1230     {
1231       next = phase;
1232       phase ^= 1;
1233 
1234       if (bytes > 0)
1235 	{
1236 	  int next_amount;
1237 
1238 	  next_amount = (bytes >= 4 ? 4 : (bytes >= 2 ? 2 : 1));
1239 	  next_amount = MIN (next_amount, align);
1240 
1241 	  amount[next] = next_amount;
1242 	  mode[next] = mode_from_align[next_amount];
1243 	  temp[next] = gen_reg_rtx (mode[next]);
1244 
1245 	  x = adjust_address (src_mem, mode[next], offset_ld);
1246 	  emit_insn (gen_rtx_SET (VOIDmode, temp[next], x));
1247 
1248 	  offset_ld += next_amount;
1249 	  bytes -= next_amount;
1250 	  active[next] = true;
1251 	}
1252 
1253       if (active[phase])
1254 	{
1255 	  active[phase] = false;
1256 
1257 	  x = adjust_address (dst_mem, mode[phase], offset_st);
1258 	  emit_insn (gen_rtx_SET (VOIDmode, x, temp[phase]));
1259 
1260 	  offset_st += amount[phase];
1261 	}
1262     }
1263   while (active[next]);
1264 
1265   return 1;
1266 }
1267 
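/* Worked example: a 6-byte copy with 4-byte alignment gives
   num_pieces = 6/4 + 6%4 = 3, within the default move_ratio of 4, so
   the loop above emits an SImode load, then an HImode load followed by
   the SImode store, and finally the HImode store.  */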
1268 
1269 void
1270 xtensa_expand_nonlocal_goto (rtx *operands)
1271 {
1272   rtx goto_handler = operands[1];
1273   rtx containing_fp = operands[3];
1274 
1275   /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1276      is too big to generate in-line.  */
1277 
1278   if (GET_CODE (containing_fp) != REG)
1279     containing_fp = force_reg (Pmode, containing_fp);
1280 
1281   goto_handler = replace_rtx (copy_rtx (goto_handler),
1282 			      virtual_stack_vars_rtx,
1283 			      containing_fp);
1284 
1285   emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1286 		     0, VOIDmode, 2,
1287 		     containing_fp, Pmode,
1288 		     goto_handler, Pmode);
1289 }
1290 
1291 
1292 static struct machine_function *
1293 xtensa_init_machine_status (void)
1294 {
1295   return ggc_alloc_cleared (sizeof (struct machine_function));
1296 }
1297 
1298 
1299 void
1300 xtensa_setup_frame_addresses (void)
1301 {
1302   /* Set flag to cause FRAME_POINTER_REQUIRED to be set.  */
1303   cfun->machine->accesses_prev_frame = 1;
1304 
1305   emit_library_call
1306     (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1307      0, VOIDmode, 0);
1308 }
1309 
1310 
1311 /* Emit the assembly for the end of a zero-cost loop.  Normally we just emit
1312    a comment showing where the end of the loop is.  However, if there is a
1313    label or a branch at the end of the loop then we need to place a nop
1314    there.  If the loop ends with a label we need the nop so that branches
1315    targeting that label will target the nop (and thus remain in the loop),
1316    instead of targeting the instruction after the loop (and thus exiting
1317    the loop).  If the loop ends with a branch, we need the nop in case the
1318    branch is targeting a location inside the loop.  When the branch
1319    executes it will cause the loop count to be decremented even if it is
1320    taken (because it is the last instruction in the loop), so we need to
1321    nop after the branch to prevent the loop count from being decremented
1322    when the branch is taken.  */
1323 
1324 void
1325 xtensa_emit_loop_end (rtx insn, rtx *operands)
1326 {
1327   char done = 0;
1328 
1329   for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1330     {
1331       switch (GET_CODE (insn))
1332 	{
1333 	case NOTE:
1334 	case BARRIER:
1335 	  break;
1336 
1337 	case CODE_LABEL:
1338 	  output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1339 	  done = 1;
1340 	  break;
1341 
1342 	default:
1343 	  {
1344 	    rtx body = PATTERN (insn);
1345 
1346 	    if (GET_CODE (body) == JUMP_INSN)
1347 	      {
1348 		output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1349 		done = 1;
1350 	      }
1351 	    else if ((GET_CODE (body) != USE)
1352 		     && (GET_CODE (body) != CLOBBER))
1353 	      done = 1;
1354 	  }
1355 	  break;
1356         }
1357     }
1358 
1359   output_asm_insn ("# loop end for %0", operands);
1360 }
1361 
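/* A hypothetical example of the resulting output when a zero-cost loop
   body ends in a conditional branch:

	bnez	a4, .Linner
	nop			# emitted by this function
	# loop end for a3

   The nop keeps the branch from being the last instruction of the
   loop.  */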
1362 
1363 char *
1364 xtensa_emit_call (int callop, rtx *operands)
1365 {
1366   static char result[64];
1367   rtx tgt = operands[callop];
1368 
1369   if (GET_CODE (tgt) == CONST_INT)
1370     sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1371   else if (register_operand (tgt, VOIDmode))
1372     sprintf (result, "callx8\t%%%d", callop);
1373   else
1374     sprintf (result, "call8\t%%%d", callop);
1375 
1376   return result;
1377 }
1378 
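/* For example, with operand 1 as the call target: a register target
   yields "callx8\t%1", a symbolic target yields "call8\t%1", and a
   constant address such as 0x1000 yields "call8\t0x1000".  */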
1379 
1380 /* Return the debugger register number to use for 'regno'.  */
1381 
1382 int
1383 xtensa_dbx_register_number (int regno)
1384 {
1385   int first = -1;
1386 
1387   if (GP_REG_P (regno))
1388     {
1389       regno -= GP_REG_FIRST;
1390       first = 0;
1391     }
1392   else if (BR_REG_P (regno))
1393     {
1394       regno -= BR_REG_FIRST;
1395       first = 16;
1396     }
1397   else if (FP_REG_P (regno))
1398     {
1399       regno -= FP_REG_FIRST;
1400       first = 48;
1401     }
1402   else if (ACC_REG_P (regno))
1403     {
1404       first = 0x200;	/* Start of Xtensa special registers.  */
1405       regno = 16;	/* ACCLO is special register 16.  */
1406     }
1407 
1408   /* When optimizing, we sometimes get asked about pseudo-registers
1409      that don't represent hard registers.  Return 0 for these.  */
1410   if (first == -1)
1411     return 0;
1412 
1413   return first + regno;
1414 }
1415 
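/* For example: a3 maps to 3, b2 to 18, f5 to 53, and the MAC16
   accumulator (ACCLO) to 0x210.  */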
1416 
1417 /* Argument support functions.  */
1418 
1419 /* Initialize CUMULATIVE_ARGS for a function.  */
1420 
1421 void
1422 init_cumulative_args (CUMULATIVE_ARGS *cum, int incoming)
1423 {
1424   cum->arg_words = 0;
1425   cum->incoming = incoming;
1426 }
1427 
1428 
1429 /* Advance the argument to the next argument position.  */
1430 
1431 void
1432 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
1433 {
1434   int words, max;
1435   int *arg_words;
1436 
1437   arg_words = &cum->arg_words;
1438   max = MAX_ARGS_IN_REGISTERS;
1439 
1440   words = (((mode != BLKmode)
1441 	    ? (int) GET_MODE_SIZE (mode)
1442 	    : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1443 
1444   if (*arg_words < max
1445       && (targetm.calls.must_pass_in_stack (mode, type)
1446 	  || *arg_words + words > max))
1447     *arg_words = max;
1448 
1449   *arg_words += words;
1450 }
1451 
1452 
1453 /* Return an RTL expression containing the register for the given mode,
1454    or 0 if the argument is to be passed on the stack.  INCOMING_P is nonzero
1455    if this is an incoming argument to the current function.  */
1456 
1457 rtx
1458 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1459 	      int incoming_p)
1460 {
1461   int regbase, words, max;
1462   int *arg_words;
1463   int regno;
1464 
1465   arg_words = &cum->arg_words;
1466   regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1467   max = MAX_ARGS_IN_REGISTERS;
1468 
1469   words = (((mode != BLKmode)
1470 	    ? (int) GET_MODE_SIZE (mode)
1471 	    : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1472 
1473   if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1474     {
1475       int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_WORD;
1476       *arg_words = (*arg_words + align - 1) & -align;
1477     }
1478 
1479   if (*arg_words + words > max)
1480     return (rtx)0;
1481 
1482   regno = regbase + *arg_words;
1483 
1484   if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
1485     cfun->machine->need_a7_copy = true;
1486 
1487   return gen_rtx_REG (mode, regno);
1488 }
1489 
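/* Worked example, assuming MAX_ARGS_IN_REGISTERS is 6 (the usual a2-a7
   window for incoming arguments): after five word-sized arguments, a
   DImode argument no longer fits (5 + 2 > 6) and is passed entirely on
   the stack; a type aligned to more than a word first rounds arg_words
   up to the required alignment.  */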
1490 
1491 int
1492 function_arg_boundary (enum machine_mode mode, tree type)
1493 {
1494   unsigned int alignment;
1495 
1496   alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
1497   if (alignment < PARM_BOUNDARY)
1498     alignment = PARM_BOUNDARY;
1499   if (alignment > STACK_BOUNDARY)
1500     alignment = STACK_BOUNDARY;
1501   return alignment;
1502 }
1503 
1504 
1505 static bool
1506 xtensa_return_in_msb (tree valtype)
1507 {
1508   return (TARGET_BIG_ENDIAN
1509 	  && AGGREGATE_TYPE_P (valtype)
1510 	  && int_size_in_bytes (valtype) >= UNITS_PER_WORD);
1511 }
1512 
1513 
1514 void
1515 override_options (void)
1516 {
1517   int regno;
1518   enum machine_mode mode;
1519 
1520   if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1521     error ("boolean registers required for the floating-point option");
1522 
1523   xtensa_char_to_class['q'] = SP_REG;
1524   xtensa_char_to_class['a'] = GR_REGS;
1525   xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1526   xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1527   xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1528   xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1529   xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1530   xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1531   xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1532   xtensa_char_to_class['W'] = ((TARGET_CONST16) ? GR_REGS: NO_REGS);
1533 
1534   /* Set up array giving whether a given register can hold a given mode.  */
1535   for (mode = VOIDmode;
1536        mode != MAX_MACHINE_MODE;
1537        mode = (enum machine_mode) ((int) mode + 1))
1538     {
1539       int size = GET_MODE_SIZE (mode);
1540       enum mode_class class = GET_MODE_CLASS (mode);
1541 
1542       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1543 	{
1544 	  int temp;
1545 
1546 	  if (ACC_REG_P (regno))
1547 	    temp = (TARGET_MAC16
1548 		    && (class == MODE_INT) && (size <= UNITS_PER_WORD));
1549 	  else if (GP_REG_P (regno))
1550 	    temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1551 	  else if (FP_REG_P (regno))
1552 	    temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1553 	  else if (BR_REG_P (regno))
1554 	    temp = (TARGET_BOOLEANS && (mode == CCmode));
1555 	  else
1556 	    temp = FALSE;
1557 
1558 	  xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1559 	}
1560     }
1561 
1562   init_machine_status = xtensa_init_machine_status;
1563 
1564   /* Check PIC settings.  PIC is only supported when using L32R
1565      instructions, and some targets need to always use PIC.  */
1566   if (flag_pic && TARGET_CONST16)
1567     error ("-f%s is not supported with CONST16 instructions",
1568 	   (flag_pic > 1 ? "PIC" : "pic"));
1569   else if (XTENSA_ALWAYS_PIC)
1570     {
1571       if (TARGET_CONST16)
1572 	error ("PIC is required but not supported with CONST16 instructions");
1573       flag_pic = 1;
1574     }
1575   /* There's no need for -fPIC (as opposed to -fpic) on Xtensa.  */
1576   if (flag_pic > 1)
1577     flag_pic = 1;
1578 
1579   /* Hot/cold partitioning does not work on this architecture, because of
1580      constant pools (the load instruction cannot necessarily reach that far).
1581      Therefore disable it on this architecture.  */
1582   if (flag_reorder_blocks_and_partition)
1583     {
1584       flag_reorder_blocks_and_partition = 0;
1585       flag_reorder_blocks = 1;
1586     }
1587 }
1588 
1589 
1590 /* A C compound statement to output to stdio stream STREAM the
1591    assembler syntax for an instruction operand X.  X is an RTL
1592    expression.
1593 
1594    CODE is a value that can be used to specify one of several ways
1595    of printing the operand.  It is used when identical operands
1596    must be printed differently depending on the context.  CODE
1597    comes from the '%' specification that was used to request
1598    printing of the operand.  If the specification was just '%DIGIT'
1599    then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1600    is the ASCII code for LTR.
1601 
1602    If X is a register, this macro should print the register's name.
1603    The names can be found in an array 'reg_names' whose type is
1604    'char *[]'.  'reg_names' is initialized from 'REGISTER_NAMES'.
1605 
1606    When the machine description has a specification '%PUNCT' (a '%'
1607    followed by a punctuation character), this macro is called with
1608    a null pointer for X and the punctuation character for CODE.
1609 
1610    'a', 'c', 'l', and 'n' are reserved.
1611 
1612    The Xtensa specific codes are:
1613 
1614    'd'  CONST_INT, print as signed decimal
1615    'x'  CONST_INT, print as signed hexadecimal
1616    'K'  CONST_INT, print number of bits in mask for EXTUI
1617    'R'  CONST_INT, print (X & 0x1f)
1618    'L'  CONST_INT, print ((32 - X) & 0x1f)
1619    'D'  REG, print second register of double-word register operand
1620    'N'  MEM, print address of next word following a memory operand
1621    'v'  MEM, if memory reference is volatile, output a MEMW before it
1622    't'  any constant, add "@h" suffix for top 16 bits
1623    'b'  any constant, add "@l" suffix for bottom 16 bits
1624 */
1625 
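/* For example, given (const_int 255), '%x' prints 0xff and '%K' prints
   8 (the number of low-order one bits); given (const_int 8), '%R'
   prints 8 and '%L' prints 24.  */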
1626 static void
1627 printx (FILE *file, signed int val)
1628 {
1629   /* Print a hexadecimal value in a nice way.  */
1630   if ((val > -0xa) && (val < 0xa))
1631     fprintf (file, "%d", val);
1632   else if (val < 0)
1633     fprintf (file, "-0x%x", -val);
1634   else
1635     fprintf (file, "0x%x", val);
1636 }
1637 
1638 
1639 void
1640 print_operand (FILE *file, rtx x, int letter)
1641 {
1642   if (!x)
1643     error ("PRINT_OPERAND null pointer");
1644 
1645   switch (letter)
1646     {
1647     case 'D':
1648       if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
1649 	fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
1650       else
1651 	output_operand_lossage ("invalid %%D value");
1652       break;
1653 
1654     case 'v':
1655       if (GET_CODE (x) == MEM)
1656 	{
1657 	  /* For a volatile memory reference, emit a MEMW before the
1658 	     load or store.  */
1659 	  if (MEM_VOLATILE_P (x))
1660 	    fprintf (file, "memw\n\t");
1661 	}
1662       else
1663 	output_operand_lossage ("invalid %%v value");
1664       break;
1665 
1666     case 'N':
1667       if (GET_CODE (x) == MEM
1668 	  && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
1669 	{
1670 	  x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
1671 	  output_address (XEXP (x, 0));
1672 	}
1673       else
1674 	output_operand_lossage ("invalid %%N value");
1675       break;
1676 
1677     case 'K':
1678       if (GET_CODE (x) == CONST_INT)
1679 	{
1680 	  int num_bits = 0;
1681 	  unsigned val = INTVAL (x);
1682 	  while (val & 1)
1683 	    {
1684 	      num_bits += 1;
1685 	      val = val >> 1;
1686 	    }
1687 	  if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1688 	    fatal_insn ("invalid mask", x);
1689 
1690 	  fprintf (file, "%d", num_bits);
1691 	}
1692       else
1693 	output_operand_lossage ("invalid %%K value");
1694       break;
1695 
1696     case 'L':
1697       if (GET_CODE (x) == CONST_INT)
1698 	fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
1699       else
1700 	output_operand_lossage ("invalid %%L value");
1701       break;
1702 
1703     case 'R':
1704       if (GET_CODE (x) == CONST_INT)
1705 	fprintf (file, "%ld", INTVAL (x) & 0x1f);
1706       else
1707 	output_operand_lossage ("invalid %%R value");
1708       break;
1709 
1710     case 'x':
1711       if (GET_CODE (x) == CONST_INT)
1712 	printx (file, INTVAL (x));
1713       else
1714 	output_operand_lossage ("invalid %%x value");
1715       break;
1716 
1717     case 'd':
1718       if (GET_CODE (x) == CONST_INT)
1719 	fprintf (file, "%ld", INTVAL (x));
1720       else
1721 	output_operand_lossage ("invalid %%d value");
1722       break;
1723 
1724     case 't':
1725     case 'b':
1726       if (GET_CODE (x) == CONST_INT)
1727 	{
1728 	  printx (file, INTVAL (x));
1729 	  fputs (letter == 't' ? "@h" : "@l", file);
1730 	}
1731       else if (GET_CODE (x) == CONST_DOUBLE)
1732 	{
1733 	  REAL_VALUE_TYPE r;
1734 	  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1735 	  if (GET_MODE (x) == SFmode)
1736 	    {
1737 	      long l;
1738 	      REAL_VALUE_TO_TARGET_SINGLE (r, l);
1739 	      fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
1740 	    }
1741 	  else
1742 	    output_operand_lossage ("invalid %%t/%%b value");
1743 	}
1744       else if (GET_CODE (x) == CONST)
1745 	{
1746 	  /* X must be a symbolic constant on ELF.  Write an expression
1747 	     suitable for 'const16' that sets the high or low 16 bits.  */
1748 	  if (GET_CODE (XEXP (x, 0)) != PLUS
1749 	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1750 		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
1751 	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
1752 	    output_operand_lossage ("invalid %%t/%%b value");
1753 	  print_operand (file, XEXP (XEXP (x, 0), 0), 0);
1754 	  fputs (letter == 't' ? "@h" : "@l", file);
1755 	  /* There must be a non-alphanumeric character between 'h' or 'l'
1756 	     and the number.  The '-' is added by print_operand() already.  */
1757 	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
1758 	    fputs ("+", file);
1759 	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
1760 	}
1761       else
1762 	{
1763 	  output_addr_const (file, x);
1764 	  fputs (letter == 't' ? "@h" : "@l", file);
1765 	}
1766       break;
1767 
1768     default:
1769       if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
1770 	fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
1771       else if (GET_CODE (x) == MEM)
1772 	output_address (XEXP (x, 0));
1773       else if (GET_CODE (x) == CONST_INT)
1774 	fprintf (file, "%ld", INTVAL (x));
1775       else
1776 	output_addr_const (file, x);
1777     }
1778 }
1779 
1780 
1781 /* A C compound statement to output to stdio stream STREAM the
1782    assembler syntax for an instruction operand that is a memory
1783    reference whose address is ADDR.  ADDR is an RTL expression.  */
1784 
1785 void
1786 print_operand_address (FILE *file, rtx addr)
1787 {
1788   if (!addr)
1789     error ("PRINT_OPERAND_ADDRESS, null pointer");
1790 
1791   switch (GET_CODE (addr))
1792     {
1793     default:
1794       fatal_insn ("invalid address", addr);
1795       break;
1796 
1797     case REG:
1798       fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
1799       break;
1800 
1801     case PLUS:
1802       {
1803 	rtx reg = (rtx)0;
1804 	rtx offset = (rtx)0;
1805 	rtx arg0 = XEXP (addr, 0);
1806 	rtx arg1 = XEXP (addr, 1);
1807 
1808 	if (GET_CODE (arg0) == REG)
1809 	  {
1810 	    reg = arg0;
1811 	    offset = arg1;
1812 	  }
1813 	else if (GET_CODE (arg1) == REG)
1814 	  {
1815 	    reg = arg1;
1816 	    offset = arg0;
1817 	  }
1818 	else
1819 	  fatal_insn ("no register in address", addr);
1820 
1821 	if (CONSTANT_P (offset))
1822 	  {
1823 	    fprintf (file, "%s, ", reg_names [REGNO (reg)]);
1824 	    output_addr_const (file, offset);
1825 	  }
1826 	else
1827 	  fatal_insn ("address offset not a constant", addr);
1828       }
1829       break;
1830 
1831     case LABEL_REF:
1832     case SYMBOL_REF:
1833     case CONST_INT:
1834     case CONST:
1835       output_addr_const (file, addr);
1836       break;
1837     }
1838 }
1839 
1840 
1841 void
1842 xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
1843 {
1844   long value_long[2];
1845   REAL_VALUE_TYPE r;
1846   int size;
1847 
1848   fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
1849 
1850   switch (GET_MODE_CLASS (mode))
1851     {
1852     case MODE_FLOAT:
1853       gcc_assert (GET_CODE (x) == CONST_DOUBLE);
1854 
1855       REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1856       switch (mode)
1857 	{
1858 	case SFmode:
1859 	  REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
1860 	  fprintf (file, "0x%08lx\n", value_long[0]);
1861 	  break;
1862 
1863 	case DFmode:
1864 	  REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
1865 	  fprintf (file, "0x%08lx, 0x%08lx\n",
1866 		   value_long[0], value_long[1]);
1867 	  break;
1868 
1869 	default:
1870 	  gcc_unreachable ();
1871 	}
1872 
1873       break;
1874 
1875     case MODE_INT:
1876     case MODE_PARTIAL_INT:
1877       size = GET_MODE_SIZE (mode);
1878       switch (size)
1879 	{
1880 	case 4:
1881 	  output_addr_const (file, x);
1882 	  fputs ("\n", file);
1883 	  break;
1884 
1885 	case 8:
1886 	  output_addr_const (file, operand_subword (x, 0, 0, DImode));
1887 	  fputs (", ", file);
1888 	  output_addr_const (file, operand_subword (x, 1, 0, DImode));
1889 	  fputs ("\n", file);
1890 	  break;
1891 
1892 	default:
1893 	  gcc_unreachable ();
1894 	}
1895       break;
1896 
1897     default:
1898       gcc_unreachable ();
1899     }
1900 }
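/* For illustration only (a sketch, not part of the original source):
   for an SFmode constant 1.0 at literal label 3, the code above emits

	.literal .LC3, 0x3f800000

   and a DFmode or DImode constant is emitted as two comma-separated
   32-bit words.  */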
1901 
1902 
1903 /* Return the bytes needed to compute the frame pointer from the current
1904    stack pointer.  */
1905 
1906 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
1907 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
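/* A worked example of the alignment arithmetic above, assuming
   STACK_BOUNDARY is 128 bits so that STACK_BYTES is 16:

     XTENSA_STACK_ALIGN (20) == (20 + 15) & ~15 == 32
     XTENSA_STACK_ALIGN (48) == (48 + 15) & ~15 == 48

   i.e. sizes are rounded up to the next multiple of the stack
   alignment.  */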
1908 
1909 long
1910 compute_frame_size (int size)
1911 {
1912   /* Add space for the incoming static chain value.  */
1913   if (cfun->static_chain_decl != NULL)
1914     size += (1 * UNITS_PER_WORD);
1915 
1916   xtensa_current_frame_size =
1917     XTENSA_STACK_ALIGN (size
1918 			+ current_function_outgoing_args_size
1919 			+ (WINDOW_SIZE * UNITS_PER_WORD));
1920   return xtensa_current_frame_size;
1921 }
1922 
1923 
1924 int
1925 xtensa_frame_pointer_required (void)
1926 {
1927   /* The code to expand builtin_frame_addr and builtin_return_addr
1928      currently uses the hard_frame_pointer instead of frame_pointer.
1929      This seems wrong but maybe it's necessary for other architectures.
1930      This function is derived from the i386 code.  */
1931 
1932   if (cfun->machine->accesses_prev_frame)
1933     return 1;
1934 
1935   return 0;
1936 }
1937 
1938 
1939 void
1940 xtensa_expand_prologue (void)
1941 {
1942   HOST_WIDE_INT total_size;
1943   rtx size_rtx;
1944 
1945   total_size = compute_frame_size (get_frame_size ());
1946   size_rtx = GEN_INT (total_size);
1947 
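  /* The ENTRY instruction encodes its stack adjustment as a 12-bit
     immediate scaled by 8 (hence the 1 << (12+3) limit below), so
     larger frames need an explicit stack-pointer adjustment.  */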
1948   if (total_size < (1 << (12+3)))
1949     emit_insn (gen_entry (size_rtx, size_rtx));
1950   else
1951     {
1952       /* Use a8 as a temporary since a0-a7 may be live.  */
1953       rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
1954       emit_insn (gen_entry (size_rtx, GEN_INT (MIN_FRAME_SIZE)));
1955       emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
1956       emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
1957       emit_move_insn (stack_pointer_rtx, tmp_reg);
1958     }
1959 
1960   if (frame_pointer_needed)
1961     {
1962       if (cfun->machine->set_frame_ptr_insn)
1963 	{
1964 	  rtx first, insn;
1965 
1966 	  push_topmost_sequence ();
1967 	  first = get_insns ();
1968 	  pop_topmost_sequence ();
1969 
1970 	  /* For all instructions prior to set_frame_ptr_insn, replace
1971 	     hard_frame_pointer references with stack_pointer.  */
1972 	  for (insn = first;
1973 	       insn != cfun->machine->set_frame_ptr_insn;
1974 	       insn = NEXT_INSN (insn))
1975 	    {
1976 	      if (INSN_P (insn))
1977 		PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
1978 					      hard_frame_pointer_rtx,
1979 					      stack_pointer_rtx);
1980 	    }
1981 	}
1982       else
1983 	emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1984     }
1985 }
1986 
1987 
1988 /* Clear variables at function end.  */
1989 
1990 void
1991 xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
1992 			  HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1993 {
1994   xtensa_current_frame_size = 0;
1995 }
1996 
1997 
1998 rtx
1999 xtensa_return_addr (int count, rtx frame)
2000 {
2001   rtx result, retaddr;
2002 
2003   if (count == -1)
2004     retaddr = gen_rtx_REG (Pmode, A0_REG);
2005   else
2006     {
2007       rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2008       addr = memory_address (Pmode, addr);
2009       retaddr = gen_reg_rtx (Pmode);
2010       emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2011     }
2012 
2013   /* The 2 most-significant bits of the return address on Xtensa hold
2014      the register window size.  To get the real return address, these
2015      bits must be replaced with the high bits from the current PC.  */
2016 
2017   result = gen_reg_rtx (Pmode);
2018   emit_insn (gen_fix_return_addr (result, retaddr));
2019   return result;
2020 }
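/* Illustrative note (a sketch, not from the original source): since the
   saved return address keeps the caller's window size in its two
   most-significant bits, the fix_return_addr expansion above
   effectively computes

     result = (retaddr & 0x3fffffff) | (current_pc & 0xc0000000);
*/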
2021 
2022 
2023 /* Create the va_list data type.
2024 
2025    This structure is set up by __builtin_saveregs.  The __va_reg field
2026    points to a stack-allocated region holding the contents of the
2027    incoming argument registers.  The __va_ndx field is an index
2028    initialized to the position of the first unnamed (variable)
2029    argument.  This same index is also used to address the arguments
2030    passed in memory.  Thus, the __va_stk field is initialized to point
2031    to the position of the first argument in memory, offset to account
2032    for the arguments passed in registers and to account for the size
2033    of the argument registers not being 16-byte aligned.  E.g., there
2034    are 6 argument registers of 4 bytes each, but we want the __va_ndx
2035    for the first stack argument to have the maximal alignment of 16
2036    bytes, so we offset the __va_stk address by 32 bytes so that
2037    __va_stk[32] references the first argument on the stack.  */
2038 
2039 static tree
2040 xtensa_build_builtin_va_list (void)
2041 {
2042   tree f_stk, f_reg, f_ndx, record, type_decl;
2043 
2044   record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2045   type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2046 
2047   f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2048 		      ptr_type_node);
2049   f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2050 		      ptr_type_node);
2051   f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2052 		      integer_type_node);
2053 
2054   DECL_FIELD_CONTEXT (f_stk) = record;
2055   DECL_FIELD_CONTEXT (f_reg) = record;
2056   DECL_FIELD_CONTEXT (f_ndx) = record;
2057 
2058   TREE_CHAIN (record) = type_decl;
2059   TYPE_NAME (record) = type_decl;
2060   TYPE_FIELDS (record) = f_stk;
2061   TREE_CHAIN (f_stk) = f_reg;
2062   TREE_CHAIN (f_reg) = f_ndx;
2063 
2064   layout_type (record);
2065   return record;
2066 }
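/* For reference, the record built above corresponds roughly to the
   following C-level declaration (an illustrative sketch, not part of
   the original source):

     typedef struct __va_list_tag
     {
       void *__va_stk;     pointer into the stack argument area
       void *__va_reg;     pointer to the saved argument registers
       int   __va_ndx;     byte index of the next argument
     } __va_list_tag;
*/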
2067 
2068 
2069 /* Save the incoming argument registers on the stack.  Returns the
2070    address of the saved registers.  */
2071 
2072 static rtx
2073 xtensa_builtin_saveregs (void)
2074 {
2075   rtx gp_regs, dest;
2076   int arg_words = current_function_args_info.arg_words;
2077   int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2078 
2079   if (gp_left <= 0)
2080     return const0_rtx;
2081 
2082   /* Allocate the general-purpose register space.  */
2083   gp_regs = assign_stack_local
2084     (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2085   set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2086 
2087   /* Now store the incoming registers.  */
2088   dest = change_address (gp_regs, SImode,
2089 			 plus_constant (XEXP (gp_regs, 0),
2090 					arg_words * UNITS_PER_WORD));
2091   cfun->machine->need_a7_copy = true;
2092   cfun->machine->vararg_a7 = true;
2093   move_block_from_reg (GP_ARG_FIRST + arg_words, dest, gp_left);
2094 
2095   return XEXP (gp_regs, 0);
2096 }
2097 
2098 
2099 /* Implement `va_start' for varargs and stdarg.  We look at the
2100    current function to fill in an initial va_list.  */
2101 
2102 void
2103 xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
2104 {
2105   tree f_stk, stk;
2106   tree f_reg, reg;
2107   tree f_ndx, ndx;
2108   tree t, u;
2109   int arg_words;
2110 
2111   arg_words = current_function_args_info.arg_words;
2112 
2113   f_stk = TYPE_FIELDS (va_list_type_node);
2114   f_reg = TREE_CHAIN (f_stk);
2115   f_ndx = TREE_CHAIN (f_reg);
2116 
2117   stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
2118   reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg, NULL_TREE);
2119   ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx, NULL_TREE);
2120 
2121   /* Call __builtin_saveregs; save the result in __va_reg */
2122   u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2123   t = build2 (MODIFY_EXPR, ptr_type_node, reg, u);
2124   TREE_SIDE_EFFECTS (t) = 1;
2125   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2126 
2127   /* Set the __va_stk member to ($arg_ptr - 32).  */
2128   u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2129   u = fold_build2 (PLUS_EXPR, ptr_type_node, u,
2130 		   build_int_cst (NULL_TREE, -32));
2131   t = build2 (MODIFY_EXPR, ptr_type_node, stk, u);
2132   TREE_SIDE_EFFECTS (t) = 1;
2133   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2134 
2135   /* Set the __va_ndx member.  If the first variable argument is on
2136      the stack, adjust __va_ndx by 2 words to account for the extra
2137      alignment offset for __va_stk.  */
2138   if (arg_words >= MAX_ARGS_IN_REGISTERS)
2139     arg_words += 2;
2140   u = build_int_cst (NULL_TREE, arg_words * UNITS_PER_WORD);
2141   t = build2 (MODIFY_EXPR, integer_type_node, ndx, u);
2142   TREE_SIDE_EFFECTS (t) = 1;
2143   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2144 }
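/* In effect (a sketch of the expansions above, using the field names
   from the comment before xtensa_build_builtin_va_list), va_start does:

     ap.__va_reg = __builtin_saveregs ();
     ap.__va_stk = (char *) arg_ptr - 32;
     ap.__va_ndx = arg_words * UNITS_PER_WORD;
                   plus 2 more words if the first variable argument is
                   already on the stack
*/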
2145 
2146 
2147 /* Implement `va_arg'.  */
2148 
2149 static tree
2150 xtensa_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
2151 			     tree *post_p ATTRIBUTE_UNUSED)
2152 {
2153   tree f_stk, stk;
2154   tree f_reg, reg;
2155   tree f_ndx, ndx;
2156   tree type_size, array, orig_ndx, addr, size, va_size, t;
2157   tree lab_false, lab_over, lab_false2;
2158   bool indirect;
2159 
2160   indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
2161   if (indirect)
2162     type = build_pointer_type (type);
2163 
2164   /* Handle complex values as separate real and imaginary parts.  */
2165   if (TREE_CODE (type) == COMPLEX_TYPE)
2166     {
2167       tree real_part, imag_part;
2168 
2169       real_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
2170 					       pre_p, NULL);
2171       real_part = get_initialized_tmp_var (real_part, pre_p, NULL);
2172 
2173       imag_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
2174 					       pre_p, NULL);
2175       imag_part = get_initialized_tmp_var (imag_part, pre_p, NULL);
2176 
2177       return build2 (COMPLEX_EXPR, type, real_part, imag_part);
2178     }
2179 
2180   f_stk = TYPE_FIELDS (va_list_type_node);
2181   f_reg = TREE_CHAIN (f_stk);
2182   f_ndx = TREE_CHAIN (f_reg);
2183 
2184   stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
2185   reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg, NULL_TREE);
2186   ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx, NULL_TREE);
2187 
2188   type_size = size_in_bytes (type);
2189   va_size = round_up (type_size, UNITS_PER_WORD);
2190   gimplify_expr (&va_size, pre_p, NULL, is_gimple_val, fb_rvalue);
2191 
2192 
2193   /* First align __va_ndx if necessary for this arg:
2194 
2195      orig_ndx = (AP).__va_ndx;
2196      if (__alignof__ (TYPE) > 4 )
2197        orig_ndx = ((orig_ndx + __alignof__ (TYPE) - 1)
2198 			& -__alignof__ (TYPE)); */
2199 
2200   orig_ndx = get_initialized_tmp_var (ndx, pre_p, NULL);
2201 
2202   if (TYPE_ALIGN (type) > BITS_PER_WORD)
2203     {
2204       int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_UNIT;
2205 
2206       t = build2 (PLUS_EXPR, integer_type_node, orig_ndx,
2207 		  build_int_cst (NULL_TREE, align - 1));
2208       t = build2 (BIT_AND_EXPR, integer_type_node, t,
2209 		  build_int_cst (NULL_TREE, -align));
2210       t = build2 (MODIFY_EXPR, integer_type_node, orig_ndx, t);
2211       gimplify_and_add (t, pre_p);
2212     }
2213 
2214 
2215   /* Increment __va_ndx to point past the argument:
2216 
2217      (AP).__va_ndx = orig_ndx + __va_size (TYPE); */
2218 
2219   t = fold_convert (integer_type_node, va_size);
2220   t = build2 (PLUS_EXPR, integer_type_node, orig_ndx, t);
2221   t = build2 (MODIFY_EXPR, integer_type_node, ndx, t);
2222   gimplify_and_add (t, pre_p);
2223 
2224 
2225   /* Check if the argument is in registers:
2226 
2227      if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2228          && !must_pass_in_stack (type))
2229         __array = (AP).__va_reg; */
2230 
2231   array = create_tmp_var (ptr_type_node, NULL);
2232 
2233   lab_over = NULL;
2234   if (!targetm.calls.must_pass_in_stack (TYPE_MODE (type), type))
2235     {
2236       lab_false = create_artificial_label ();
2237       lab_over = create_artificial_label ();
2238 
2239       t = build_int_cst (NULL_TREE, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD);
2240       t = build2 (GT_EXPR, boolean_type_node, ndx, t);
2241       t = build3 (COND_EXPR, void_type_node, t,
2242 		  build1 (GOTO_EXPR, void_type_node, lab_false),
2243 		  NULL_TREE);
2244       gimplify_and_add (t, pre_p);
2245 
2246       t = build2 (MODIFY_EXPR, void_type_node, array, reg);
2247       gimplify_and_add (t, pre_p);
2248 
2249       t = build1 (GOTO_EXPR, void_type_node, lab_over);
2250       gimplify_and_add (t, pre_p);
2251 
2252       t = build1 (LABEL_EXPR, void_type_node, lab_false);
2253       gimplify_and_add (t, pre_p);
2254     }
2255 
2256 
2257   /* ...otherwise, the argument is on the stack (never split between
2258      registers and the stack -- change __va_ndx if necessary):
2259 
2260      else
2261        {
2262 	 if (orig_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
2263 	     (AP).__va_ndx = 32 + __va_size (TYPE);
2264 	 __array = (AP).__va_stk;
2265        } */
2266 
2267   lab_false2 = create_artificial_label ();
2268 
2269   t = build_int_cst (NULL_TREE, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD);
2270   t = build2 (GT_EXPR, boolean_type_node, orig_ndx, t);
2271   t = build3 (COND_EXPR, void_type_node, t,
2272 	      build1 (GOTO_EXPR, void_type_node, lab_false2),
2273 	      NULL_TREE);
2274   gimplify_and_add (t, pre_p);
2275 
2276   t = size_binop (PLUS_EXPR, va_size, size_int (32));
2277   t = fold_convert (integer_type_node, t);
2278   t = build2 (MODIFY_EXPR, integer_type_node, ndx, t);
2279   gimplify_and_add (t, pre_p);
2280 
2281   t = build1 (LABEL_EXPR, void_type_node, lab_false2);
2282   gimplify_and_add (t, pre_p);
2283 
2284   t = build2 (MODIFY_EXPR, void_type_node, array, stk);
2285   gimplify_and_add (t, pre_p);
2286 
2287   if (lab_over)
2288     {
2289       t = build1 (LABEL_EXPR, void_type_node, lab_over);
2290       gimplify_and_add (t, pre_p);
2291     }
2292 
2293 
2294   /* Given the base array pointer (__array) and index to the subsequent
2295      argument (__va_ndx), find the address:
2296 
2297      __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2298 				? sizeof (TYPE)
2299 				: __va_size (TYPE))
2300 
2301      The results are endian-dependent because values smaller than one word
2302      are aligned differently.  */
2303 
2304 
2305   if (BYTES_BIG_ENDIAN && TREE_CODE (type_size) == INTEGER_CST)
2306     {
2307       t = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
2308       t = fold_build2 (GE_EXPR, boolean_type_node, type_size, t);
2309       t = fold_build3 (COND_EXPR, sizetype, t, va_size, type_size);
2310       size = t;
2311     }
2312   else
2313     size = va_size;
2314 
2315   t = fold_convert (ptr_type_node, ndx);
2316   addr = build2 (PLUS_EXPR, ptr_type_node, array, t);
2317   t = fold_convert (ptr_type_node, size);
2318   addr = build2 (MINUS_EXPR, ptr_type_node, addr, t);
2319 
2320   addr = fold_convert (build_pointer_type (type), addr);
2321   if (indirect)
2322     addr = build_va_arg_indirect_ref (addr);
2323   return build_va_arg_indirect_ref (addr);
2324 }
2325 
2326 
2327 enum reg_class
2328 xtensa_preferred_reload_class (rtx x, enum reg_class class, int isoutput)
2329 {
2330   if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2331     return NO_REGS;
2332 
2333   /* Don't use the stack pointer or hard frame pointer for reloads!
2334      The hard frame pointer would normally be OK except that it may
2335      briefly hold an incoming argument in the prologue, and reload
2336      won't know that it is live because the hard frame pointer is
2337      treated specially.  */
2338 
2339   if (class == AR_REGS || class == GR_REGS)
2340     return RL_REGS;
2341 
2342   return class;
2343 }
2344 
2345 
2346 enum reg_class
2347 xtensa_secondary_reload_class (enum reg_class class,
2348 			       enum machine_mode mode ATTRIBUTE_UNUSED,
2349 			       rtx x, int isoutput)
2350 {
2351   int regno;
2352 
2353   if (GET_CODE (x) == SIGN_EXTEND)
2354     x = XEXP (x, 0);
2355   regno = xt_true_regnum (x);
2356 
2357   if (!isoutput)
2358     {
2359       if (class == FP_REGS && constantpool_mem_p (x))
2360 	return RL_REGS;
2361     }
2362 
2363   if (ACC_REG_P (regno))
2364     return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2365   if (class == ACC_REG)
2366     return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
2367 
2368   return NO_REGS;
2369 }
2370 
2371 
2372 void
2373 order_regs_for_local_alloc (void)
2374 {
2375   if (!leaf_function_p ())
2376     {
2377       memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2378 	      FIRST_PSEUDO_REGISTER * sizeof (int));
2379     }
2380   else
2381     {
2382       int i, num_arg_regs;
2383       int nxt = 0;
2384 
2385       /* Use the AR registers in increasing order (skipping a0 and a1)
2386 	 but save the incoming argument registers for a last resort.  */
2387       num_arg_regs = current_function_args_info.arg_words;
2388       if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2389 	num_arg_regs = MAX_ARGS_IN_REGISTERS;
2390       for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2391 	reg_alloc_order[nxt++] = i + num_arg_regs;
2392       for (i = 0; i < num_arg_regs; i++)
2393 	reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2394 
2395       /* List the coprocessor registers in order.  */
2396       for (i = 0; i < BR_REG_NUM; i++)
2397 	reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2398 
2399       /* List the FP registers in order for now.  */
2400       for (i = 0; i < 16; i++)
2401 	reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2402 
2403       /* GCC requires that we list *all* the registers....  */
2404       reg_alloc_order[nxt++] = 0;	/* a0 = return address */
2405       reg_alloc_order[nxt++] = 1;	/* a1 = stack pointer */
2406       reg_alloc_order[nxt++] = 16;	/* pseudo frame pointer */
2407       reg_alloc_order[nxt++] = 17;	/* pseudo arg pointer */
2408 
2409       reg_alloc_order[nxt++] = ACC_REG_FIRST;	/* MAC16 accumulator */
2410     }
2411 }
2412 
2413 
2414 /* Some Xtensa targets support multiple bss sections.  If the section
2415    name ends with ".bss", add SECTION_BSS to the flags.  */
2416 
2417 static unsigned int
2418 xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
2419 {
2420   unsigned int flags = default_section_type_flags (decl, name, reloc);
2421   const char *suffix;
2422 
2423   suffix = strrchr (name, '.');
2424   if (suffix && strcmp (suffix, ".bss") == 0)
2425     {
2426       if (!decl || (TREE_CODE (decl) == VAR_DECL
2427 		    && DECL_INITIAL (decl) == NULL_TREE))
2428 	flags |= SECTION_BSS;  /* @nobits */
2429       else
2430 	warning (0, "only uninitialized variables can be placed in a "
2431 		 ".bss section");
2432     }
2433 
2434   return flags;
2435 }
2436 
2437 
2438 /* The literal pool stays with the function.  */
2439 
2440 static section *
2441 xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
2442 			   rtx x ATTRIBUTE_UNUSED,
2443 			   unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2444 {
2445   return function_section (current_function_decl);
2446 }
2447 
2448 
2449 /* Compute a (partial) cost for rtx X.  Return true if the complete
2450    cost has been computed, and false if subexpressions should be
2451    scanned.  In either case, *TOTAL contains the cost result.  */
2452 
2453 static bool
2454 xtensa_rtx_costs (rtx x, int code, int outer_code, int *total)
2455 {
2456   switch (code)
2457     {
2458     case CONST_INT:
2459       switch (outer_code)
2460 	{
2461 	case SET:
2462 	  if (xtensa_simm12b (INTVAL (x)))
2463 	    {
2464 	      *total = 4;
2465 	      return true;
2466 	    }
2467 	  break;
2468 	case PLUS:
2469 	  if (xtensa_simm8 (INTVAL (x))
2470 	      || xtensa_simm8x256 (INTVAL (x)))
2471 	    {
2472 	      *total = 0;
2473 	      return true;
2474 	    }
2475 	  break;
2476 	case AND:
2477 	  if (xtensa_mask_immediate (INTVAL (x)))
2478 	    {
2479 	      *total = 0;
2480 	      return true;
2481 	    }
2482 	  break;
2483 	case COMPARE:
2484 	  if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2485 	    {
2486 	      *total = 0;
2487 	      return true;
2488 	    }
2489 	  break;
2490 	case ASHIFT:
2491 	case ASHIFTRT:
2492 	case LSHIFTRT:
2493 	case ROTATE:
2494 	case ROTATERT:
2495 	  /* No way to tell if X is the 2nd operand so be conservative.  */
2496 	default: break;
2497 	}
2498       if (xtensa_simm12b (INTVAL (x)))
2499 	*total = 5;
2500       else if (TARGET_CONST16)
2501 	*total = COSTS_N_INSNS (2);
2502       else
2503 	*total = 6;
2504       return true;
2505 
2506     case CONST:
2507     case LABEL_REF:
2508     case SYMBOL_REF:
2509       if (TARGET_CONST16)
2510 	*total = COSTS_N_INSNS (2);
2511       else
2512 	*total = 5;
2513       return true;
2514 
2515     case CONST_DOUBLE:
2516       if (TARGET_CONST16)
2517 	*total = COSTS_N_INSNS (4);
2518       else
2519 	*total = 7;
2520       return true;
2521 
2522     case MEM:
2523       {
2524 	int num_words =
2525 	  (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ?  2 : 1;
2526 
2527 	if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2528 	  *total = COSTS_N_INSNS (num_words);
2529 	else
2530 	  *total = COSTS_N_INSNS (2*num_words);
2531 	return true;
2532       }
2533 
2534     case FFS:
2535       *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
2536       return true;
2537 
2538     case NOT:
2539       *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
2540       return true;
2541 
2542     case AND:
2543     case IOR:
2544     case XOR:
2545       if (GET_MODE (x) == DImode)
2546 	*total = COSTS_N_INSNS (2);
2547       else
2548 	*total = COSTS_N_INSNS (1);
2549       return true;
2550 
2551     case ASHIFT:
2552     case ASHIFTRT:
2553     case LSHIFTRT:
2554       if (GET_MODE (x) == DImode)
2555 	*total = COSTS_N_INSNS (50);
2556       else
2557 	*total = COSTS_N_INSNS (1);
2558       return true;
2559 
2560     case ABS:
2561       {
2562 	enum machine_mode xmode = GET_MODE (x);
2563 	if (xmode == SFmode)
2564 	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2565 	else if (xmode == DFmode)
2566 	  *total = COSTS_N_INSNS (50);
2567 	else
2568 	  *total = COSTS_N_INSNS (4);
2569 	return true;
2570       }
2571 
2572     case PLUS:
2573     case MINUS:
2574       {
2575 	enum machine_mode xmode = GET_MODE (x);
2576 	if (xmode == SFmode)
2577 	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2578 	else if (xmode == DFmode || xmode == DImode)
2579 	  *total = COSTS_N_INSNS (50);
2580 	else
2581 	  *total = COSTS_N_INSNS (1);
2582 	return true;
2583       }
2584 
2585     case NEG:
2586       *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
2587       return true;
2588 
2589     case MULT:
2590       {
2591 	enum machine_mode xmode = GET_MODE (x);
2592 	if (xmode == SFmode)
2593 	  *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
2594 	else if (xmode == DFmode || xmode == DImode)
2595 	  *total = COSTS_N_INSNS (50);
2596 	else if (TARGET_MUL32)
2597 	  *total = COSTS_N_INSNS (4);
2598 	else if (TARGET_MAC16)
2599 	  *total = COSTS_N_INSNS (16);
2600 	else if (TARGET_MUL16)
2601 	  *total = COSTS_N_INSNS (12);
2602 	else
2603 	  *total = COSTS_N_INSNS (50);
2604 	return true;
2605       }
2606 
2607     case DIV:
2608     case MOD:
2609       {
2610 	enum machine_mode xmode = GET_MODE (x);
2611 	if (xmode == SFmode)
2612 	  {
2613 	    *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
2614 	    return true;
2615 	  }
2616 	else if (xmode == DFmode)
2617 	  {
2618 	    *total = COSTS_N_INSNS (50);
2619 	    return true;
2620 	  }
2621       }
2622       /* Fall through.  */
2623 
2624     case UDIV:
2625     case UMOD:
2626       {
2627 	enum machine_mode xmode = GET_MODE (x);
2628 	if (xmode == DImode)
2629 	  *total = COSTS_N_INSNS (50);
2630 	else if (TARGET_DIV32)
2631 	  *total = COSTS_N_INSNS (32);
2632 	else
2633 	  *total = COSTS_N_INSNS (50);
2634 	return true;
2635       }
2636 
2637     case SQRT:
2638       if (GET_MODE (x) == SFmode)
2639 	*total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
2640       else
2641 	*total = COSTS_N_INSNS (50);
2642       return true;
2643 
2644     case SMIN:
2645     case UMIN:
2646     case SMAX:
2647     case UMAX:
2648       *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
2649       return true;
2650 
2651     case SIGN_EXTRACT:
2652     case SIGN_EXTEND:
2653       *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
2654       return true;
2655 
2656     case ZERO_EXTRACT:
2657     case ZERO_EXTEND:
2658       *total = COSTS_N_INSNS (1);
2659       return true;
2660 
2661     default:
2662       return false;
2663     }
2664 }
2665 
2666 /* Worker function for TARGET_RETURN_IN_MEMORY.  */
2667 
2668 static bool
2669 xtensa_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
2670 {
2671   return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
2672 	  > 4 * UNITS_PER_WORD);
2673 }
2674 
2675 #include "gt-xtensa.h"
2676