1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2    Copyright 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3    Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "regs.h"
26 #include "machmode.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
35 #include "recog.h"
36 #include "output.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "flags.h"
40 #include "reload.h"
41 #include "tm_p.h"
42 #include "function.h"
43 #include "toplev.h"
44 #include "optabs.h"
45 #include "output.h"
46 #include "libfuncs.h"
47 #include "ggc.h"
48 #include "target.h"
49 #include "target-def.h"
50 #include "langhooks.h"
51 
52 /* Enumeration for all of the relational tests, so that we can build
53    arrays indexed by the test type, and not worry about the order
54    of EQ, NE, etc. */
55 
56 enum internal_test {
57     ITEST_EQ,
58     ITEST_NE,
59     ITEST_GT,
60     ITEST_GE,
61     ITEST_LT,
62     ITEST_LE,
63     ITEST_GTU,
64     ITEST_GEU,
65     ITEST_LTU,
66     ITEST_LEU,
67     ITEST_MAX
68   };
69 
70 /* Cached operands, and operator to compare for use in set/branch on
71    condition codes.  */
72 rtx branch_cmp[2];
73 
74 /* what type of branch to use */
75 enum cmp_type branch_type;
76 
77 /* Array giving truth value on whether or not a given hard register
78    can support a given mode.  */
79 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
80 
81 /* Current frame size calculated by compute_frame_size.  */
82 unsigned xtensa_current_frame_size;
83 
84 /* Tables of ld/st opcode names for block moves */
85 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
86 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
87 #define LARGEST_MOVE_RATIO 15
88 
89 /* Define the structure for the machine field in struct function.  */
90 struct machine_function GTY(())
91 {
92   int accesses_prev_frame;
93   bool need_a7_copy;
94   bool vararg_a7;
95   rtx set_frame_ptr_insn;
96 };
97 
98 /* Vector, indexed by hard register number, which contains 1 for a
99    register that is allowable in a candidate for leaf function
100    treatment. */
101 
102 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
103 {
104   1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
105   1, 1, 1,
106   1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
107   1
108 };
109 
110 /* Map hard register number to register class */
111 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
112 {
113   RL_REGS,	SP_REG,		RL_REGS,	RL_REGS,
114   RL_REGS,	RL_REGS,	RL_REGS,	GR_REGS,
115   RL_REGS,	RL_REGS,	RL_REGS,	RL_REGS,
116   RL_REGS,	RL_REGS,	RL_REGS,	RL_REGS,
117   AR_REGS,	AR_REGS,	BR_REGS,
118   FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
119   FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
120   FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
121   FP_REGS,	FP_REGS,	FP_REGS,	FP_REGS,
122   ACC_REG,
123 };
124 
125 /* Map register constraint character to register class.  */
126 enum reg_class xtensa_char_to_class[256] =
127 {
128   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
129   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
130   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
131   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
132   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
133   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
134   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
135   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
136   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
137   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
138   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
139   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
140   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
141   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
142   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
143   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
144   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
145   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
146   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
147   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
148   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
149   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
150   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
151   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
152   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
153   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
154   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
155   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
156   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
157   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
158   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
159   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
160   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
161   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
162   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
163   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
164   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
165   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
166   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
167   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
168   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
169   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
170   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
171   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
172   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
173   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
174   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
175   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
176   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
177   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
178   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
179   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
180   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
181   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
182   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
183   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
184   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
185   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
186   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
187   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
188   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
189   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
190   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
191   NO_REGS,	NO_REGS,	NO_REGS,	NO_REGS,
192 };
193 
194 static int b4const_or_zero PARAMS ((int));
195 static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
196 static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
197 static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
198 static rtx gen_conditional_move PARAMS ((rtx));
199 static rtx fixup_subreg_mem PARAMS ((rtx x));
200 static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
201 static struct machine_function * xtensa_init_machine_status PARAMS ((void));
202 static void printx PARAMS ((FILE *, signed int));
203 static unsigned int xtensa_multibss_section_type_flags
204   PARAMS ((tree, const char *, int));
205 static void xtensa_select_rtx_section
206   PARAMS ((enum machine_mode, rtx, unsigned HOST_WIDE_INT));
207 static void xtensa_encode_section_info PARAMS ((tree, int));
208 
209 static rtx frame_size_const;
210 static int current_function_arg_words;
211 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
212   REG_ALLOC_ORDER;
213 
214 /* This macro generates the assembly code for function entry.
215    FILE is a stdio stream to output the code to.
216    SIZE is an int: how many units of temporary storage to allocate.
217    Refer to the array 'regs_ever_live' to determine which registers
218    to save; 'regs_ever_live[I]' is nonzero if register number I
219    is ever used in the function.  This macro is responsible for
220    knowing which registers should not be saved even if used.  */
221 
222 #undef TARGET_ASM_FUNCTION_PROLOGUE
223 #define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
224 
225 /* This macro generates the assembly code for function exit,
226    on machines that need it.  If FUNCTION_EPILOGUE is not defined
227    then individual return instructions are generated for each
228    return statement.  Args are same as for FUNCTION_PROLOGUE.  */
229 
230 #undef TARGET_ASM_FUNCTION_EPILOGUE
231 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
232 
233 /* These hooks specify assembly directives for creating certain kinds
234    of integer object.  */
235 
236 #undef TARGET_ASM_ALIGNED_SI_OP
237 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
238 
239 #undef TARGET_ASM_SELECT_RTX_SECTION
240 #define TARGET_ASM_SELECT_RTX_SECTION  xtensa_select_rtx_section
241 #undef TARGET_ENCODE_SECTION_INFO
242 #define TARGET_ENCODE_SECTION_INFO  xtensa_encode_section_info
243 
244 struct gcc_target targetm = TARGET_INITIALIZER;
245 
246 
247 /*
248  * Functions to test Xtensa immediate operand validity.
249  */
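/* Editor's note (illustrative, not part of the original sources): these
   predicates appear to mirror the immediate fields of the corresponding
   Xtensa instructions, e.g. simm8 for ADDI, simm8x256 for ADDMI,
   simm12b for MOVI, simm7 for MOVI.N, ai4const for ADDI.N, b4const and
   b4constu for the BEQI/BNEI/BLTI/BGEI and BLTUI/BGEUI immediate
   tables, lsi4x4 for the L32I.N/S32I.N offset field, and uimm8,
   uimm8x2, uimm8x4 for the scaled L8UI, L16UI/L16SI, and L32I/S32I
   offsets.  */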
250 
251 int
252 xtensa_b4constu (v)
253      int v;
254 {
255   switch (v)
256     {
257     case 32768:
258     case 65536:
259     case 2:
260     case 3:
261     case 4:
262     case 5:
263     case 6:
264     case 7:
265     case 8:
266     case 10:
267     case 12:
268     case 16:
269     case 32:
270     case 64:
271     case 128:
272     case 256:
273       return 1;
274     }
275   return 0;
276 }
277 
278 int
279 xtensa_simm8x256 (v)
280      int v;
281 {
282   return (v & 255) == 0 && (v >= -32768 && v <= 32512);
283 }
284 
285 int
286 xtensa_ai4const (v)
287      int v;
288 {
289   return (v == -1 || (v >= 1 && v <= 15));
290 }
291 
292 int
293 xtensa_simm7 (v)
294      int v;
295 {
296   return v >= -32 && v <= 95;
297 }
298 
299 int
300 xtensa_b4const (v)
301      int v;
302 {
303   switch (v)
304     {
305     case -1:
306     case 1:
307     case 2:
308     case 3:
309     case 4:
310     case 5:
311     case 6:
312     case 7:
313     case 8:
314     case 10:
315     case 12:
316     case 16:
317     case 32:
318     case 64:
319     case 128:
320     case 256:
321       return 1;
322     }
323   return 0;
324 }
325 
326 int
327 xtensa_simm8 (v)
328      int v;
329 {
330   return v >= -128 && v <= 127;
331 }
332 
333 int
334 xtensa_tp7 (v)
335      int v;
336 {
337   return (v >= 7 && v <= 22);
338 }
339 
340 int
341 xtensa_lsi4x4 (v)
342      int v;
343 {
344   return (v & 3) == 0 && (v >= 0 && v <= 60);
345 }
346 
347 int
348 xtensa_simm12b (v)
349      int v;
350 {
351   return v >= -2048 && v <= 2047;
352 }
353 
354 int
355 xtensa_uimm8 (v)
356      int v;
357 {
358   return v >= 0 && v <= 255;
359 }
360 
361 int
362 xtensa_uimm8x2 (v)
363      int v;
364 {
365   return (v & 1) == 0 && (v >= 0 && v <= 510);
366 }
367 
368 int
369 xtensa_uimm8x4 (v)
370      int v;
371 {
372   return (v & 3) == 0 && (v >= 0 && v <= 1020);
373 }
374 
375 
376 /* This is just like the standard true_regnum() function except that it
377    works even when reg_renumber is not initialized. */
378 
379 int
380 xt_true_regnum (x)
381      rtx x;
382 {
383   if (GET_CODE (x) == REG)
384     {
385       if (reg_renumber
386 	  && REGNO (x) >= FIRST_PSEUDO_REGISTER
387 	  && reg_renumber[REGNO (x)] >= 0)
388 	return reg_renumber[REGNO (x)];
389       return REGNO (x);
390     }
391   if (GET_CODE (x) == SUBREG)
392     {
393       int base = xt_true_regnum (SUBREG_REG (x));
394       if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
395         return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
396                                            GET_MODE (SUBREG_REG (x)),
397                                            SUBREG_BYTE (x), GET_MODE (x));
398     }
399   return -1;
400 }
401 
402 
403 int
404 add_operand (op, mode)
405     rtx op;
406     enum machine_mode mode;
407 {
408     if (GET_CODE (op) == CONST_INT)
409 	return (xtensa_simm8 (INTVAL (op)) ||
410 		xtensa_simm8x256 (INTVAL (op)));
411 
412     return register_operand (op, mode);
413 }
414 
415 
416 int
417 arith_operand (op, mode)
418     rtx op;
419     enum machine_mode mode;
420 {
421     if (GET_CODE (op) == CONST_INT)
422 	return xtensa_simm8 (INTVAL (op));
423 
424     return register_operand (op, mode);
425 }
426 
427 
428 int
429 nonimmed_operand (op, mode)
430     rtx op;
431     enum machine_mode mode;
432 {
433     /* We cannot use the standard nonimmediate_operand() predicate because
434        it includes constant pool memory operands. */
435 
436     if (memory_operand (op, mode))
437 	return !constantpool_address_p (XEXP (op, 0));
438 
439     return register_operand (op, mode);
440 }
441 
442 
443 int
444 mem_operand (op, mode)
445     rtx op;
446     enum machine_mode mode;
447 {
448     /* We cannot use the standard memory_operand() predicate because
449        it includes constant pool memory operands. */
450 
451     if (memory_operand (op, mode))
452 	return !constantpool_address_p (XEXP (op, 0));
453 
454     return FALSE;
455 }
456 
457 
458 int
459 xtensa_valid_move (mode, operands)
460      enum machine_mode mode;
461      rtx *operands;
462 {
463   /* Either the destination or source must be a register, and the
464      MAC16 accumulator doesn't count.  */
465 
466   if (register_operand (operands[0], mode))
467     {
468       int dst_regnum = xt_true_regnum (operands[0]);
469 
470       /* The stack pointer can only be assigned with a MOVSP opcode. */
471       if (dst_regnum == STACK_POINTER_REGNUM)
472 	return (mode == SImode
473 		&& register_operand (operands[1], mode)
474 		&& !ACC_REG_P (xt_true_regnum (operands[1])));
475 
476       if (!ACC_REG_P (dst_regnum))
477 	return true;
478     }
479   if (register_operand (operands[1], mode))
480     {
481       int src_regnum = xt_true_regnum (operands[1]);
482       if (!ACC_REG_P (src_regnum))
483 	return true;
484     }
485   return FALSE;
486 }
487 
488 
489 int
490 mask_operand (op, mode)
491      rtx op;
492      enum machine_mode mode;
493 {
494   if (GET_CODE (op) == CONST_INT)
495     return xtensa_mask_immediate (INTVAL (op));
496 
497   return register_operand (op, mode);
498 }
499 
500 
501 int
502 extui_fldsz_operand (op, mode)
503      rtx op;
504      enum machine_mode mode ATTRIBUTE_UNUSED;
505 {
506   return ((GET_CODE (op) == CONST_INT)
507 	  && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
508 }
509 
510 
511 int
512 sext_operand (op, mode)
513      rtx op;
514      enum machine_mode mode;
515 {
516   if (TARGET_SEXT)
517     return nonimmed_operand (op, mode);
518   return mem_operand (op, mode);
519 }
520 
521 
522 int
523 sext_fldsz_operand (op, mode)
524      rtx op;
525      enum machine_mode mode ATTRIBUTE_UNUSED;
526 {
527   return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
528 }
529 
530 
531 int
532 lsbitnum_operand (op, mode)
533      rtx op;
534      enum machine_mode mode ATTRIBUTE_UNUSED;
535 {
536   if (GET_CODE (op) == CONST_INT)
537     {
538       return (BITS_BIG_ENDIAN
539 	      ? (INTVAL (op) == BITS_PER_WORD-1)
540 	      : (INTVAL (op) == 0));
541     }
542   return FALSE;
543 }
544 
545 
546 static int
547 b4const_or_zero (v)
548      int v;
549 {
550   if (v == 0)
551     return TRUE;
552   return xtensa_b4const (v);
553 }
554 
555 
556 int
557 branch_operand (op, mode)
558      rtx op;
559      enum machine_mode mode;
560 {
561   if (GET_CODE (op) == CONST_INT)
562     return b4const_or_zero (INTVAL (op));
563 
564   return register_operand (op, mode);
565 }
566 
567 
568 int
569 ubranch_operand (op, mode)
570      rtx op;
571      enum machine_mode mode;
572 {
573   if (GET_CODE (op) == CONST_INT)
574     return xtensa_b4constu (INTVAL (op));
575 
576   return register_operand (op, mode);
577 }
578 
579 
580 int
581 call_insn_operand (op, mode)
582      rtx op;
583      enum machine_mode mode ATTRIBUTE_UNUSED;
584 {
585   if ((GET_CODE (op) == REG)
586       && (op != arg_pointer_rtx)
587       && ((REGNO (op) < FRAME_POINTER_REGNUM)
588 	  || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
589     return TRUE;
590 
591   if (CONSTANT_ADDRESS_P (op))
592     {
593       /* Direct calls only allowed to static functions with PIC.  */
594       return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
595 			    && SYMBOL_REF_FLAG (op)));
596     }
597 
598   return FALSE;
599 }
600 
601 
602 int
603 move_operand (op, mode)
604      rtx op;
605      enum machine_mode mode;
606 {
607   if (register_operand (op, mode))
608     return TRUE;
609 
610   /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
611      result in 0/1. */
612   if (GET_CODE (op) == CONSTANT_P_RTX)
613     return TRUE;
614 
615   if (GET_CODE (op) == CONST_INT)
616     return xtensa_simm12b (INTVAL (op));
617 
618   if (GET_CODE (op) == MEM)
619     return memory_address_p (mode, XEXP (op, 0));
620 
621   return FALSE;
622 }
623 
624 
625 int
626 smalloffset_mem_p (op)
627      rtx op;
628 {
629   if (GET_CODE (op) == MEM)
630     {
631       rtx addr = XEXP (op, 0);
632       if (GET_CODE (addr) == REG)
633 	return REG_OK_FOR_BASE_P (addr);
634       if (GET_CODE (addr) == PLUS)
635 	{
636 	  rtx offset = XEXP (addr, 0);
637 	  if (GET_CODE (offset) != CONST_INT)
638 	    offset = XEXP (addr, 1);
639 	  if (GET_CODE (offset) != CONST_INT)
640 	    return FALSE;
641 	  return xtensa_lsi4x4 (INTVAL (offset));
642 	}
643     }
644   return FALSE;
645 }
646 
647 
648 int
649 smalloffset_double_mem_p (op)
650      rtx op;
651 {
652   if (!smalloffset_mem_p (op))
653     return FALSE;
654   return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
655 }
656 
657 
658 int
659 constantpool_address_p (addr)
660      rtx addr;
661 {
662   rtx sym = addr;
663 
664   if (GET_CODE (addr) == CONST)
665     {
666       rtx offset;
667 
668       /* only handle (PLUS (SYM, OFFSET)) form */
669       addr = XEXP (addr, 0);
670       if (GET_CODE (addr) != PLUS)
671 	return FALSE;
672 
673       /* make sure the address is word aligned */
674       offset = XEXP (addr, 1);
675       if ((GET_CODE (offset) != CONST_INT)
676 	  || ((INTVAL (offset) & 3) != 0))
677 	return FALSE;
678 
679       sym = XEXP (addr, 0);
680     }
681 
682   if ((GET_CODE (sym) == SYMBOL_REF)
683       && CONSTANT_POOL_ADDRESS_P (sym))
684     return TRUE;
685   return FALSE;
686 }
687 
688 
689 int
690 constantpool_mem_p (op)
691      rtx op;
692 {
693   if (GET_CODE (op) == MEM)
694     return constantpool_address_p (XEXP (op, 0));
695   return FALSE;
696 }
697 
698 
699 int
700 non_const_move_operand (op, mode)
701      rtx op;
702      enum machine_mode mode;
703 {
704   if (register_operand (op, mode))
705     return 1;
706   if (GET_CODE (op) == SUBREG)
707     op = SUBREG_REG (op);
708   if (GET_CODE (op) == MEM)
709     return memory_address_p (mode, XEXP (op, 0));
710   return FALSE;
711 }
712 
713 
714 /* Accept the floating point constant 1 in the appropriate mode.  */
715 
716 int
717 const_float_1_operand (op, mode)
718      rtx op;
719      enum machine_mode mode;
720 {
721   REAL_VALUE_TYPE d;
722   static REAL_VALUE_TYPE onedf;
723   static REAL_VALUE_TYPE onesf;
724   static int one_initialized;
725 
726   if ((GET_CODE (op) != CONST_DOUBLE)
727       || (mode != GET_MODE (op))
728       || (mode != DFmode && mode != SFmode))
729     return FALSE;
730 
731   REAL_VALUE_FROM_CONST_DOUBLE (d, op);
732 
733   if (! one_initialized)
734     {
735       onedf = REAL_VALUE_ATOF ("1.0", DFmode);
736       onesf = REAL_VALUE_ATOF ("1.0", SFmode);
737       one_initialized = TRUE;
738     }
739 
740   if (mode == DFmode)
741     return REAL_VALUES_EQUAL (d, onedf);
742   else
743     return REAL_VALUES_EQUAL (d, onesf);
744 }
745 
746 
747 int
748 fpmem_offset_operand (op, mode)
749      rtx op;
750      enum machine_mode mode ATTRIBUTE_UNUSED;
751 {
752   if (GET_CODE (op) == CONST_INT)
753     return xtensa_mem_offset (INTVAL (op), SFmode);
754   return 0;
755 }
756 
757 
758 void
759 xtensa_extend_reg (dst, src)
760      rtx dst;
761      rtx src;
762 {
763   rtx temp = gen_reg_rtx (SImode);
764   rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
765 
766   /* generate paradoxical subregs as needed so that the modes match */
767   src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
768   dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
769 
770   emit_insn (gen_ashlsi3 (temp, src, shift));
771   emit_insn (gen_ashrsi3 (dst, temp, shift));
772 }
773 
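/* Illustrative example (assuming BITS_PER_WORD is 32 on this target):
   sign-extending an HImode source uses a shift count of 32 - 16 = 16,
   so the two insns emitted above amount to

	slli	temp, src, 16
	srai	dst, temp, 16
 */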
774 
775 void
776 xtensa_load_constant (dst, src)
777      rtx dst;
778      rtx src;
779 {
780   enum machine_mode mode = GET_MODE (dst);
781   src = force_const_mem (SImode, src);
782 
783   /* PC-relative loads are always SImode so we have to add a SUBREG if that
784      is not the desired mode */
785 
786   if (mode != SImode)
787     {
788       if (register_operand (dst, mode))
789 	dst = simplify_gen_subreg (SImode, dst, mode, 0);
790       else
791 	{
792 	  src = force_reg (SImode, src);
793 	  src = gen_lowpart_SUBREG (mode, src);
794 	}
795     }
796 
797   emit_move_insn (dst, src);
798 }
799 
800 
801 int
802 branch_operator (x, mode)
803      rtx x;
804      enum machine_mode mode;
805 {
806   if (GET_MODE (x) != mode)
807     return FALSE;
808 
809   switch (GET_CODE (x))
810     {
811     case EQ:
812     case NE:
813     case LT:
814     case GE:
815       return TRUE;
816     default:
817       break;
818     }
819   return FALSE;
820 }
821 
822 
823 int
824 ubranch_operator (x, mode)
825      rtx x;
826      enum machine_mode mode;
827 {
828   if (GET_MODE (x) != mode)
829     return FALSE;
830 
831   switch (GET_CODE (x))
832     {
833     case LTU:
834     case GEU:
835       return TRUE;
836     default:
837       break;
838     }
839   return FALSE;
840 }
841 
842 
843 int
844 boolean_operator (x, mode)
845      rtx x;
846      enum machine_mode mode;
847 {
848   if (GET_MODE (x) != mode)
849     return FALSE;
850 
851   switch (GET_CODE (x))
852     {
853     case EQ:
854     case NE:
855       return TRUE;
856     default:
857       break;
858     }
859   return FALSE;
860 }
861 
862 
863 int
864 xtensa_mask_immediate (v)
865      int v;
866 {
867 #define MAX_MASK_SIZE 16
868   int mask_size;
869 
870   for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
871     {
872       if ((v & 1) == 0)
873 	return FALSE;
874       v = v >> 1;
875       if (v == 0)
876 	return TRUE;
877     }
878 
879   return FALSE;
880 }
881 
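/* In other words, the values accepted above are the contiguous
   low-order masks 0x1, 0x3, 0x7, ... 0xffff (at most MAX_MASK_SIZE
   bits); anything with a hole in it, such as 0x5, is rejected.  */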
882 
883 int
884 xtensa_mem_offset (v, mode)
885      unsigned v;
886      enum machine_mode mode;
887 {
888   switch (mode)
889     {
890     case BLKmode:
891       /* Handle the worst case for block moves.  See xtensa_expand_block_move
892 	 where we emit an optimized block move operation if the block can be
893 	 moved in < "move_ratio" pieces.  The worst case is when the block is
894 	 aligned but has a size of (3 mod 4) (does this happen?) so that the
895 	 last piece requires a byte load/store. */
896       return (xtensa_uimm8 (v) &&
897 	      xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
898 
899     case QImode:
900       return xtensa_uimm8 (v);
901 
902     case HImode:
903       return xtensa_uimm8x2 (v);
904 
905     case DFmode:
906       return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
907 
908     default:
909       break;
910     }
911 
912   return xtensa_uimm8x4 (v);
913 }
914 
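/* Worked example for the BLKmode case above (illustrative, assuming
   MOVE_MAX is 4 on this target): both v and
   v + 4 * LARGEST_MOVE_RATIO = v + 60 must fit the unsigned 8-bit
   offset field, so a worst-case block move is only considered
   offsettable when the starting offset v is at most 255 - 60 = 195.  */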
915 
916 /* Make normal rtx_code into something we can index from an array */
917 
918 static enum internal_test
919 map_test_to_internal_test (test_code)
920      enum rtx_code test_code;
921 {
922   enum internal_test test = ITEST_MAX;
923 
924   switch (test_code)
925     {
926     default:			break;
927     case EQ:  test = ITEST_EQ;  break;
928     case NE:  test = ITEST_NE;  break;
929     case GT:  test = ITEST_GT;  break;
930     case GE:  test = ITEST_GE;  break;
931     case LT:  test = ITEST_LT;  break;
932     case LE:  test = ITEST_LE;  break;
933     case GTU: test = ITEST_GTU; break;
934     case GEU: test = ITEST_GEU; break;
935     case LTU: test = ITEST_LTU; break;
936     case LEU: test = ITEST_LEU; break;
937     }
938 
939   return test;
940 }
941 
942 
943 /* Generate the code to compare two integer values.  The return value is
944    the comparison expression. */
945 
946 static rtx
947 gen_int_relational (test_code, cmp0, cmp1, p_invert)
948      enum rtx_code test_code;	/* relational test (EQ, etc) */
949      rtx cmp0;			/* first operand to compare */
950      rtx cmp1;			/* second operand to compare */
951      int *p_invert;		/* whether branch needs to reverse its test */
952 {
953   struct cmp_info {
954     enum rtx_code test_code;	/* test code to use in insn */
955     int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
956     int const_add;		/* constant to add (convert LE -> LT) */
957     int reverse_regs;		/* reverse registers in test */
958     int invert_const;		/* != 0 if invert value if cmp1 is constant */
959     int invert_reg;		/* != 0 if invert value if cmp1 is register */
960     int unsignedp;		/* != 0 for unsigned comparisons.  */
961   };
962 
963   static struct cmp_info info[ (int)ITEST_MAX ] = {
964 
965     { EQ,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* EQ  */
966     { NE,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* NE  */
967 
968     { LT,	b4const_or_zero,	1, 1, 1, 0, 0 },	/* GT  */
969     { GE,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* GE  */
970     { LT,	b4const_or_zero,	0, 0, 0, 0, 0 },	/* LT  */
971     { GE,	b4const_or_zero,	1, 1, 1, 0, 0 },	/* LE  */
972 
973     { LTU,	xtensa_b4constu,	1, 1, 1, 0, 1 },	/* GTU */
974     { GEU,	xtensa_b4constu,	0, 0, 0, 0, 1 },	/* GEU */
975     { LTU,	xtensa_b4constu,	0, 0, 0, 0, 1 },	/* LTU */
976     { GEU,	xtensa_b4constu,	1, 1, 1, 0, 1 },	/* LEU */
977   };
978 
979   enum internal_test test;
980   enum machine_mode mode;
981   struct cmp_info *p_info;
982 
983   test = map_test_to_internal_test (test_code);
984   if (test == ITEST_MAX)
985     abort ();
986 
987   p_info = &info[ (int)test ];
988 
989   mode = GET_MODE (cmp0);
990   if (mode == VOIDmode)
991     mode = GET_MODE (cmp1);
992 
993   /* Make sure we can handle any constants given to us.  */
994   if (GET_CODE (cmp1) == CONST_INT)
995     {
996       HOST_WIDE_INT value = INTVAL (cmp1);
997       unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
998 
999       /* if the immediate overflows or does not fit in the immediate field,
1000 	 spill it to a register */
1001 
1002       if ((p_info->unsignedp ?
1003 	   (uvalue + p_info->const_add > uvalue) :
1004 	   (value + p_info->const_add > value)) != (p_info->const_add > 0))
1005 	{
1006 	  cmp1 = force_reg (mode, cmp1);
1007 	}
1008       else if (!(p_info->const_range_p) (value + p_info->const_add))
1009 	{
1010 	  cmp1 = force_reg (mode, cmp1);
1011 	}
1012     }
1013   else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
1014     {
1015       cmp1 = force_reg (mode, cmp1);
1016     }
1017 
1018   /* See if we need to invert the result.  */
1019   *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1020 	       ? p_info->invert_const
1021 	       : p_info->invert_reg);
1022 
1023   /* Comparison to constants may involve adding 1 to change an LT into an LE.
1024      Comparison between two registers may involve switching operands.  */
1025   if (GET_CODE (cmp1) == CONST_INT)
1026     {
1027       if (p_info->const_add != 0)
1028 	cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
1029 
1030     }
1031   else if (p_info->reverse_regs)
1032     {
1033       rtx temp = cmp0;
1034       cmp0 = cmp1;
1035       cmp1 = temp;
1036     }
1037 
1038   return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1039 }
1040 
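/* Example of how the table above is applied (illustrative): for
   (GT x 7) with a constant operand, the GT entry selects LT with
   const_add = 1 and invert_const = 1, so the constant becomes 8, the
   function returns (LT x 8), and *p_invert tells the caller to branch
   on the inverse -- exactly x > 7.  For (GT x y) with a register
   operand, reverse_regs swaps the operands instead, giving (LT y x)
   with no inversion.  */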
1041 
1042 /* Generate the code to compare two float values.  The return value is
1043    the comparison expression. */
1044 
1045 static rtx
1046 gen_float_relational (test_code, cmp0, cmp1)
1047      enum rtx_code test_code;	/* relational test (EQ, etc) */
1048      rtx cmp0;			/* first operand to compare */
1049      rtx cmp1;			/* second operand to compare */
1050 {
1051   rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1052   rtx brtmp;
1053   int reverse_regs, invert;
1054 
1055   switch (test_code)
1056     {
1057     case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1058     case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1059     case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1060     case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1061     case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1062     case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1063     default:
1064       fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1065       reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1066     }
1067 
1068   if (reverse_regs)
1069     {
1070       rtx temp = cmp0;
1071       cmp0 = cmp1;
1072       cmp1 = temp;
1073     }
1074 
1075   brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1076   emit_insn (gen_fn (brtmp, cmp0, cmp1));
1077 
1078   return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1079 }
1080 
1081 
1082 void
1083 xtensa_expand_conditional_branch (operands, test_code)
1084      rtx *operands;
1085      enum rtx_code test_code;
1086 {
1087   enum cmp_type type = branch_type;
1088   rtx cmp0 = branch_cmp[0];
1089   rtx cmp1 = branch_cmp[1];
1090   rtx cmp;
1091   int invert;
1092   rtx label1, label2;
1093 
1094   switch (type)
1095     {
1096     case CMP_DF:
1097     default:
1098       fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1099 
1100     case CMP_SI:
1101       invert = FALSE;
1102       cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1103       break;
1104 
1105     case CMP_SF:
1106       if (!TARGET_HARD_FLOAT)
1107 	fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1108       invert = FALSE;
1109       cmp = gen_float_relational (test_code, cmp0, cmp1);
1110       break;
1111     }
1112 
1113   /* Generate the branch.  */
1114 
1115   label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1116   label2 = pc_rtx;
1117 
1118   if (invert)
1119     {
1120       label2 = label1;
1121       label1 = pc_rtx;
1122     }
1123 
1124   emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1125 			       gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1126 						     label1,
1127 						     label2)));
1128 }
1129 
1130 
1131 static rtx
1132 gen_conditional_move (cmp)
1133      rtx cmp;
1134 {
1135   enum rtx_code code = GET_CODE (cmp);
1136   rtx op0 = branch_cmp[0];
1137   rtx op1 = branch_cmp[1];
1138 
1139   if (branch_type == CMP_SI)
1140     {
1141       /* Jump optimization calls get_condition() which canonicalizes
1142 	 comparisons like (GE x <const>) to (GT x <const-1>).
1143 	 Transform those comparisons back to GE, since that is the
1144 	 comparison supported in Xtensa.  We shouldn't have to
1145 	 transform <LE x const> comparisons, because neither
1146 	 xtensa_expand_conditional_branch() nor get_condition() will
1147 	 produce them. */
1148 
1149       if ((code == GT) && (op1 == constm1_rtx))
1150 	{
1151 	  code = GE;
1152 	  op1 = const0_rtx;
1153 	}
1154       cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1155 
1156       if (boolean_operator (cmp, VOIDmode))
1157 	{
1158 	  /* swap the operands to make const0 second */
1159 	  if (op0 == const0_rtx)
1160 	    {
1161 	      op0 = op1;
1162 	      op1 = const0_rtx;
1163 	    }
1164 
1165 	  /* if not comparing against zero, emit a comparison (subtract) */
1166 	  if (op1 != const0_rtx)
1167 	    {
1168 	      op0 = expand_binop (SImode, sub_optab, op0, op1,
1169 				  0, 0, OPTAB_LIB_WIDEN);
1170 	      op1 = const0_rtx;
1171 	    }
1172 	}
1173       else if (branch_operator (cmp, VOIDmode))
1174 	{
1175 	  /* swap the operands to make const0 second */
1176 	  if (op0 == const0_rtx)
1177 	    {
1178 	      op0 = op1;
1179 	      op1 = const0_rtx;
1180 
1181 	      switch (code)
1182 		{
1183 		case LT: code = GE; break;
1184 		case GE: code = LT; break;
1185 		default: abort ();
1186 		}
1187 	    }
1188 
1189 	  if (op1 != const0_rtx)
1190 	    return 0;
1191 	}
1192       else
1193 	return 0;
1194 
1195       return gen_rtx (code, VOIDmode, op0, op1);
1196     }
1197 
1198   if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1199     return gen_float_relational (code, op0, op1);
1200 
1201   return 0;
1202 }
1203 
1204 
1205 int
1206 xtensa_expand_conditional_move (operands, isflt)
1207     rtx *operands;
1208     int isflt;
1209 {
1210   rtx cmp;
1211   rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1212 
1213   if (!(cmp = gen_conditional_move (operands[1])))
1214     return 0;
1215 
1216   if (isflt)
1217     gen_fn = (branch_type == CMP_SI
1218 	      ? gen_movsfcc_internal0
1219 	      : gen_movsfcc_internal1);
1220   else
1221     gen_fn = (branch_type == CMP_SI
1222 	      ? gen_movsicc_internal0
1223 	      : gen_movsicc_internal1);
1224 
1225   emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1226 		     operands[2], operands[3], cmp));
1227   return 1;
1228 }
1229 
1230 
1231 int
1232 xtensa_expand_scc (operands)
1233      rtx *operands;
1234 {
1235   rtx dest = operands[0];
1236   rtx cmp = operands[1];
1237   rtx one_tmp, zero_tmp;
1238   rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1239 
1240   if (!(cmp = gen_conditional_move (cmp)))
1241     return 0;
1242 
1243   one_tmp = gen_reg_rtx (SImode);
1244   zero_tmp = gen_reg_rtx (SImode);
1245   emit_insn (gen_movsi (one_tmp, const_true_rtx));
1246   emit_insn (gen_movsi (zero_tmp, const0_rtx));
1247 
1248   gen_fn = (branch_type == CMP_SI
1249 	    ? gen_movsicc_internal0
1250 	    : gen_movsicc_internal1);
1251   emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1252   return 1;
1253 }
1254 
1255 
1256 /* Emit insns to move operands[1] into operands[0].
1257 
1258    Return 1 if we have written out everything that needs to be done to
1259    do the move.  Otherwise, return 0 and the caller will emit the move
1260    normally.  */
1261 
1262 int
1263 xtensa_emit_move_sequence (operands, mode)
1264      rtx *operands;
1265      enum machine_mode mode;
1266 {
1267   if (CONSTANT_P (operands[1])
1268       && GET_CODE (operands[1]) != CONSTANT_P_RTX
1269       && (GET_CODE (operands[1]) != CONST_INT
1270 	  || !xtensa_simm12b (INTVAL (operands[1]))))
1271     {
1272       xtensa_load_constant (operands[0], operands[1]);
1273       return 1;
1274     }
1275 
1276   if (!(reload_in_progress | reload_completed))
1277     {
1278       if (!xtensa_valid_move (mode, operands))
1279 	operands[1] = force_reg (mode, operands[1]);
1280 
1281       operands[1] = xtensa_copy_incoming_a7 (operands[1]);
1282     }
1283 
1284   /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1285      instruction won't be recognized after reload. So we remove the
1286      subreg and adjust mem accordingly. */
1287   if (reload_in_progress)
1288     {
1289       operands[0] = fixup_subreg_mem (operands[0]);
1290       operands[1] = fixup_subreg_mem (operands[1]);
1291     }
1292   return 0;
1293 }
1294 
1295 static rtx
1296 fixup_subreg_mem (x)
1297      rtx x;
1298 {
1299   if (GET_CODE (x) == SUBREG
1300       && GET_CODE (SUBREG_REG (x)) == REG
1301       && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1302     {
1303       rtx temp =
1304 	gen_rtx_SUBREG (GET_MODE (x),
1305 			reg_equiv_mem [REGNO (SUBREG_REG (x))],
1306 			SUBREG_BYTE (x));
1307       x = alter_subreg (&temp);
1308     }
1309   return x;
1310 }
1311 
1312 
1313 /* Check if an incoming argument in a7 is expected to be used soon and
1314    if OPND is a register or register pair that includes a7.  If so,
1315    create a new pseudo and copy a7 into that pseudo at the very
1316    beginning of the function, followed by the special "set_frame_ptr"
1317    unspec_volatile insn.  The return value is either the original
1318    operand, if it is not a7, or the new pseudo containing a copy of
1319    the incoming argument.  This is necessary because the register
1320    allocator will ignore conflicts with a7 and may either assign some
1321    other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
1322    the incoming argument in a7.  By copying the argument out of a7 as
1323    the very first thing, and then immediately following that with an
1324    unspec_volatile to keep the scheduler away, we should avoid any
1325    problems.  Putting the set_frame_ptr insn at the beginning, with
1326    only the a7 copy before it, also makes it easier for the prologue
1327    expander to initialize the frame pointer after the a7 copy and to
1328    fix up the a7 copy to use the stack pointer instead of the frame
1329    pointer.  */
1330 
1331 rtx
1332 xtensa_copy_incoming_a7 (opnd)
1333      rtx opnd;
1334 {
1335   rtx entry_insns = 0;
1336   rtx reg, tmp;
1337   enum machine_mode mode;
1338 
1339   if (!cfun->machine->need_a7_copy)
1340     return opnd;
1341 
1342   /* This function should never be called again once a7 has been copied.  */
1343   if (cfun->machine->set_frame_ptr_insn)
1344     abort ();
1345 
1346   mode = GET_MODE (opnd);
1347 
1348   /* The operand using a7 may come in a later instruction, so just return
1349      the original operand if it doesn't use a7.  */
1350   reg = opnd;
1351   if (GET_CODE (reg) == SUBREG)
1352     {
1353       if (SUBREG_BYTE (reg) != 0)
1354 	abort ();
1355       reg = SUBREG_REG (reg);
1356     }
1357   if (GET_CODE (reg) != REG
1358       || REGNO (reg) > A7_REG
1359       || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG)
1360     return opnd;
1361 
1362   /* 1-word args will always be in a7; 2-word args in a6/a7.  */
1363   if (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 != A7_REG)
1364     abort ();
1365 
1366   cfun->machine->need_a7_copy = false;
1367 
1368   /* Copy a7 to a new pseudo at the function entry.  Use gen_raw_REG to
1369      create the REG for a7 so that hard_frame_pointer_rtx is not used.  */
1370 
1371   push_to_sequence (entry_insns);
1372   tmp = gen_reg_rtx (mode);
1373 
1374   switch (mode)
1375     {
1376     case DFmode:
1377     case DImode:
1378       emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
1379 				     gen_rtx_REG (SImode, A7_REG - 1)));
1380       emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
1381 				     gen_raw_REG (SImode, A7_REG)));
1382       break;
1383     case SFmode:
1384       emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
1385       break;
1386     case SImode:
1387       emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1388       break;
1389     case HImode:
1390       emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1391       break;
1392     case QImode:
1393       emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1394       break;
1395     default:
1396       abort ();
1397     }
1398 
1399   cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());
1400   entry_insns = get_insns ();
1401   end_sequence ();
1402 
1403   if (cfun->machine->vararg_a7)
1404     {
1405       /* This is called from within builtin_savereg, so we're already
1406 	 inside a start_sequence that will be placed at the start of
1407 	 the function.  */
1408       emit_insn (entry_insns);
1409     }
1410   else
1411     {
1412       /* Put entry_insns after the NOTE that starts the function.  If
1413 	 this is inside a start_sequence, make the outer-level insn
1414 	 chain current, so the code is placed at the start of the
1415 	 function.  */
1416       push_topmost_sequence ();
1417       emit_insn_after (entry_insns, get_insns ());
1418       pop_topmost_sequence ();
1419     }
1420 
1421   return tmp;
1422 }
1423 
1424 
1425 /* Try to expand a block move operation to an RTL block move instruction.
1426    If not optimizing or if the block size is not a constant or if the
1427    block is small, the expansion fails and GCC falls back to calling
1428    memcpy().
1429 
1430    operands[0] is the destination
1431    operands[1] is the source
1432    operands[2] is the length
1433    operands[3] is the alignment */
1434 
1435 int
1436 xtensa_expand_block_move (operands)
1437      rtx *operands;
1438 {
1439   rtx dest = operands[0];
1440   rtx src = operands[1];
1441   int bytes = INTVAL (operands[2]);
1442   int align = XINT (operands[3], 0);
1443   int num_pieces, move_ratio;
1444 
1445   /* If this is not a fixed size move, just call memcpy */
1446   if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1447     return 0;
1448 
1449   /* Anything to move? */
1450   if (bytes <= 0)
1451     return 1;
1452 
1453   if (align > MOVE_MAX)
1454     align = MOVE_MAX;
1455 
1456   /* decide whether to expand inline based on the optimization level */
1457   move_ratio = 4;
1458   if (optimize > 2)
1459     move_ratio = LARGEST_MOVE_RATIO;
1460   num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1461   if (num_pieces >= move_ratio)
1462     return 0;
1463 
1464   /* make sure the memory addresses are valid */
1465   operands[0] = validize_mem (dest);
1466   operands[1] = validize_mem (src);
1467 
1468   emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1469 				    operands[2], operands[3]));
1470   return 1;
1471 }
1472 
1473 
1474 /*  Emit a sequence of instructions to implement a block move, trying
1475     to hide load delay slots as much as possible.  Load N values into
1476     temporary registers, store those N values, and repeat until the
1477     complete block has been moved.  N=delay_slots+1 */
1478 
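/* Illustrative schedule (editor's sketch): with one load delay slot,
   chunk_size is 2, so an aligned 8-byte copy is emitted as

	l32i	tmp0, src, 0
	l32i	tmp1, src, 4
	s32i	tmp0, dst, 0
	s32i	tmp1, dst, 4

   so that no load result is consumed in its delay slot.  The "src",
   "dst", and "tmp" names are placeholders for the actual operands.  */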
1479 struct meminsnbuf {
1480   char template[30];
1481   rtx operands[2];
1482 };
1483 
1484 void
1485 xtensa_emit_block_move (operands, tmpregs, delay_slots)
1486      rtx *operands;
1487      rtx *tmpregs;
1488      int delay_slots;
1489 {
1490   rtx dest = operands[0];
1491   rtx src = operands[1];
1492   int bytes = INTVAL (operands[2]);
1493   int align = XINT (operands[3], 0);
1494   rtx from_addr = XEXP (src, 0);
1495   rtx to_addr = XEXP (dest, 0);
1496   int from_struct = MEM_IN_STRUCT_P (src);
1497   int to_struct = MEM_IN_STRUCT_P (dest);
1498   int offset = 0;
1499   int chunk_size, item_size;
1500   struct meminsnbuf *ldinsns, *stinsns;
1501   const char *ldname, *stname;
1502   enum machine_mode mode;
1503 
1504   if (align > MOVE_MAX)
1505     align = MOVE_MAX;
1506   item_size = align;
1507   chunk_size = delay_slots + 1;
1508 
1509   ldinsns = (struct meminsnbuf *)
1510     alloca (chunk_size * sizeof (struct meminsnbuf));
1511   stinsns = (struct meminsnbuf *)
1512     alloca (chunk_size * sizeof (struct meminsnbuf));
1513 
1514   mode = xtensa_find_mode_for_size (item_size);
1515   item_size = GET_MODE_SIZE (mode);
1516   ldname = xtensa_ld_opcodes[(int) mode];
1517   stname = xtensa_st_opcodes[(int) mode];
1518 
1519   while (bytes > 0)
1520     {
1521       int n;
1522 
1523       for (n = 0; n < chunk_size; n++)
1524 	{
1525 	  rtx addr, mem;
1526 
1527 	  if (bytes == 0)
1528 	    {
1529 	      chunk_size = n;
1530 	      break;
1531 	    }
1532 
1533 	  if (bytes < item_size)
1534 	    {
1535 	      /* find a smaller item_size which we can load & store */
1536 	      item_size = bytes;
1537 	      mode = xtensa_find_mode_for_size (item_size);
1538 	      item_size = GET_MODE_SIZE (mode);
1539 	      ldname = xtensa_ld_opcodes[(int) mode];
1540 	      stname = xtensa_st_opcodes[(int) mode];
1541 	    }
1542 
1543 	  /* record the load instruction opcode and operands */
1544 	  addr = plus_constant (from_addr, offset);
1545 	  mem = gen_rtx_MEM (mode, addr);
1546 	  if (! memory_address_p (mode, addr))
1547 	    abort ();
1548 	  MEM_IN_STRUCT_P (mem) = from_struct;
1549 	  ldinsns[n].operands[0] = tmpregs[n];
1550 	  ldinsns[n].operands[1] = mem;
1551 	  sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1552 
1553 	  /* record the store instruction opcode and operands */
1554 	  addr = plus_constant (to_addr, offset);
1555 	  mem = gen_rtx_MEM (mode, addr);
1556 	  if (! memory_address_p (mode, addr))
1557 	    abort ();
1558 	  MEM_IN_STRUCT_P (mem) = to_struct;
1559 	  stinsns[n].operands[0] = tmpregs[n];
1560 	  stinsns[n].operands[1] = mem;
1561 	  sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1562 
1563 	  offset += item_size;
1564 	  bytes -= item_size;
1565 	}
1566 
1567       /* now output the loads followed by the stores */
1568       for (n = 0; n < chunk_size; n++)
1569 	output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1570       for (n = 0; n < chunk_size; n++)
1571 	output_asm_insn (stinsns[n].template, stinsns[n].operands);
1572     }
1573 }
1574 
1575 
1576 static enum machine_mode
1577 xtensa_find_mode_for_size (item_size)
1578      unsigned item_size;
1579 {
1580   enum machine_mode mode, tmode;
1581 
1582   while (1)
1583     {
1584       mode = VOIDmode;
1585 
1586       /* find mode closest to but not bigger than item_size */
1587       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1588 	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1589 	if (GET_MODE_SIZE (tmode) <= item_size)
1590 	  mode = tmode;
1591       if (mode == VOIDmode)
1592 	abort ();
1593 
1594       item_size = GET_MODE_SIZE (mode);
1595 
1596       if (xtensa_ld_opcodes[(int) mode]
1597 	  && xtensa_st_opcodes[(int) mode])
1598 	break;
1599 
1600       /* cannot load & store this mode; try something smaller */
1601       item_size -= 1;
1602     }
1603 
1604   return mode;
1605 }
1606 
1607 
1608 void
1609 xtensa_expand_nonlocal_goto (operands)
1610      rtx *operands;
1611 {
1612   rtx goto_handler = operands[1];
1613   rtx containing_fp = operands[3];
1614 
1615   /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1616      is too big to generate in-line */
1617 
1618   if (GET_CODE (containing_fp) != REG)
1619     containing_fp = force_reg (Pmode, containing_fp);
1620 
1621   goto_handler = replace_rtx (copy_rtx (goto_handler),
1622 			      virtual_stack_vars_rtx,
1623 			      containing_fp);
1624 
1625   emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1626 		     0, VOIDmode, 2,
1627 		     containing_fp, Pmode,
1628 		     goto_handler, Pmode);
1629 }
1630 
1631 
1632 static struct machine_function *
1633 xtensa_init_machine_status ()
1634 {
1635   return ggc_alloc_cleared (sizeof (struct machine_function));
1636 }
1637 
1638 
1639 void
1640 xtensa_setup_frame_addresses ()
1641 {
1642   /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1643   cfun->machine->accesses_prev_frame = 1;
1644 
1645   emit_library_call
1646     (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1647      0, VOIDmode, 0);
1648 }
1649 
1650 
1651 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1652    a comment showing where the end of the loop is. However, if there is a
1653    label or a branch at the end of the loop then we need to place a nop
1654    there. If the loop ends with a label we need the nop so that branches
1655    targeting that label will target the nop (and thus remain in the loop),
1656    instead of targeting the instruction after the loop (and thus exiting
1657    the loop). If the loop ends with a branch, we need the nop in case the
1658    branch is targeting a location inside the loop. When the branch
1659    executes it will cause the loop count to be decremented even if it is
1660    taken (because it is the last instruction in the loop), so we need to
1661    nop after the branch to prevent the loop count from being decremented
1662    when the branch is taken. */
1663 
1664 void
1665 xtensa_emit_loop_end (insn, operands)
1666      rtx insn;
1667      rtx *operands;
1668 {
1669   char done = 0;
1670 
1671   for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1672     {
1673       switch (GET_CODE (insn))
1674 	{
1675 	case NOTE:
1676 	case BARRIER:
1677 	  break;
1678 
1679 	case CODE_LABEL:
1680 	  output_asm_insn ("nop.n", operands);
1681 	  done = 1;
1682 	  break;
1683 
1684 	default:
1685 	  {
1686 	    rtx body = PATTERN (insn);
1687 
1688 	    if (GET_CODE (body) == JUMP_INSN)
1689 	      {
1690 		output_asm_insn ("nop.n", operands);
1691 		done = 1;
1692 	      }
1693 	    else if ((GET_CODE (body) != USE)
1694 		     && (GET_CODE (body) != CLOBBER))
1695 	      done = 1;
1696 	  }
1697 	  break;
1698         }
1699     }
1700 
1701   output_asm_insn ("# loop end for %0", operands);
1702 }
1703 
1704 
1705 char *
1706 xtensa_emit_call (callop, operands)
1707      int callop;
1708      rtx *operands;
1709 {
1710   static char result[64];
1711   rtx tgt = operands[callop];
1712 
1713   if (GET_CODE (tgt) == CONST_INT)
1714     sprintf (result, "call8\t0x%x", INTVAL (tgt));
1715   else if (register_operand (tgt, VOIDmode))
1716     sprintf (result, "callx8\t%%%d", callop);
1717   else
1718     sprintf (result, "call8\t%%%d", callop);
1719 
1720   return result;
1721 }
1722 
1723 
1724 /* Return the stabs register number to use for 'regno'. */
1725 
1726 int
1727 xtensa_dbx_register_number (regno)
1728      int regno;
1729 {
1730   int first = -1;
1731 
1732   if (GP_REG_P (regno)) {
1733     regno -= GP_REG_FIRST;
1734     first = 0;
1735   }
1736   else if (BR_REG_P (regno)) {
1737     regno -= BR_REG_FIRST;
1738     first = 16;
1739   }
1740   else if (FP_REG_P (regno)) {
1741     regno -= FP_REG_FIRST;
1742     /* The current numbering convention is that TIE registers are
1743        numbered in libcc order beginning with 256.  We can't guarantee
1744        that the FP registers will come first, so the following is just
1745        a guess.  It seems like we should make a special case for FP
1746        registers and give them fixed numbers < 256. */
1747     first = 256;
1748   }
1749   else if (ACC_REG_P (regno))
1750     {
1751       first = 0;
1752       regno = -1;
1753     }
1754 
1755   /* When optimizing, we sometimes get asked about pseudo-registers
1756      that don't represent hard registers. Return 0 for these. */
1757   if (first == -1)
1758     return 0;
1759 
1760   return first + regno;
1761 }
1762 
1763 
1764 /* Argument support functions.  */
1765 
1766 /* Initialize CUMULATIVE_ARGS for a function.  */
1767 
1768 void
1769 init_cumulative_args (cum, incoming)
1770      CUMULATIVE_ARGS *cum;	/* argument info to initialize */
1771      int incoming;
1772 {
1773   cum->arg_words = 0;
1774   cum->incoming = incoming;
1775 }
1776 
1777 /* Advance the argument to the next argument position.  */
1778 
1779 void
1780 function_arg_advance (cum, mode, type)
1781      CUMULATIVE_ARGS *cum;	/* current arg information */
1782      enum machine_mode mode;	/* current arg mode */
1783      tree type;			/* type of the argument or 0 if lib support */
1784 {
1785   int words, max;
1786   int *arg_words;
1787 
1788   arg_words = &cum->arg_words;
1789   max = MAX_ARGS_IN_REGISTERS;
1790 
1791   words = (((mode != BLKmode)
1792 	    ? (int) GET_MODE_SIZE (mode)
1793 	    : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1794 
1795   if ((*arg_words + words > max) && (*arg_words < max))
1796     *arg_words = max;
1797 
1798   *arg_words += words;
1799 }
1800 
1801 
1802 /* Return an RTL expression containing the register for the given mode,
1803    or 0 if the argument is to be passed on the stack.  */
1804 
1805 rtx
1806 function_arg (cum, mode, type, incoming_p)
1807      CUMULATIVE_ARGS *cum;	/* current arg information */
1808      enum machine_mode mode;	/* current arg mode */
1809      tree type;			/* type of the argument or 0 if lib support */
1810      int incoming_p;		/* computing the incoming registers? */
1811 {
1812   int regbase, words, max;
1813   int *arg_words;
1814   int regno;
1815 
1816   arg_words = &cum->arg_words;
1817   regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1818   max = MAX_ARGS_IN_REGISTERS;
1819 
1820   words = (((mode != BLKmode)
1821 	    ? (int) GET_MODE_SIZE (mode)
1822 	    : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1823 
1824   if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1825     *arg_words += (*arg_words & 1);
1826 
1827   if (*arg_words + words > max)
1828     return (rtx)0;
1829 
1830   regno = regbase + *arg_words;
1831 
1832   if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
1833     cfun->machine->need_a7_copy = true;
1834 
1835   return gen_rtx_REG (mode, regno);
1836 }
1837 
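/* Illustrative example (assuming the windowed-ABI values
   MAX_ARGS_IN_REGISTERS == 6, GP_ARG_FIRST == 2, and
   GP_OUTGOING_ARG_FIRST == 10): for f (int a, double b), 'a' is
   assigned incoming word 0 (a2); 'b' has double-word alignment, so its
   word index is bumped from 1 to 2 and it lands in a4/a5.  On the
   caller's side the same arguments go out in a10 and a12/a13 for a
   CALL8.  */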
1838 
1839 void
1840 override_options ()
1841 {
1842   int regno;
1843   enum machine_mode mode;
1844 
1845   if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1846     error ("boolean registers required for the floating-point option");
1847 
1848   /* set up the tables of ld/st opcode names for block moves */
1849   xtensa_ld_opcodes[(int) SImode] = "l32i";
1850   xtensa_ld_opcodes[(int) HImode] = "l16ui";
1851   xtensa_ld_opcodes[(int) QImode] = "l8ui";
1852   xtensa_st_opcodes[(int) SImode] = "s32i";
1853   xtensa_st_opcodes[(int) HImode] = "s16i";
1854   xtensa_st_opcodes[(int) QImode] = "s8i";
1855 
1856   xtensa_char_to_class['q'] = SP_REG;
1857   xtensa_char_to_class['a'] = GR_REGS;
1858   xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1859   xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1860   xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1861   xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1862   xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1863   xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1864   xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1865 
1866   /* Set up array giving whether a given register can hold a given mode. */
1867   for (mode = VOIDmode;
1868        mode != MAX_MACHINE_MODE;
1869        mode = (enum machine_mode) ((int) mode + 1))
1870     {
1871       int size = GET_MODE_SIZE (mode);
1872       enum mode_class class = GET_MODE_CLASS (mode);
1873 
1874       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1875 	{
1876 	  int temp;
1877 
1878 	  if (ACC_REG_P (regno))
1879 	    temp = (TARGET_MAC16 &&
1880 		    (class == MODE_INT) && (size <= UNITS_PER_WORD));
1881 	  else if (GP_REG_P (regno))
1882 	    temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1883 	  else if (FP_REG_P (regno))
1884 	    temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1885 	  else if (BR_REG_P (regno))
1886 	    temp = (TARGET_BOOLEANS && (mode == CCmode));
1887 	  else
1888 	    temp = FALSE;
1889 
1890 	  xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1891 	}
1892     }
1893 
1894   init_machine_status = xtensa_init_machine_status;
1895 
1896   /* Check PIC settings.  There's no need for -fPIC on Xtensa and
1897      some targets need to always use PIC.  */
1898   if (flag_pic > 1 || (XTENSA_ALWAYS_PIC))
1899     flag_pic = 1;
1900 }
1901 
1902 
1903 /* A C compound statement to output to stdio stream STREAM the
1904    assembler syntax for an instruction operand X.  X is an RTL
1905    expression.
1906 
1907    CODE is a value that can be used to specify one of several ways
1908    of printing the operand.  It is used when identical operands
1909    must be printed differently depending on the context.  CODE
1910    comes from the '%' specification that was used to request
1911    printing of the operand.  If the specification was just '%DIGIT'
1912    then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1913    is the ASCII code for LTR.
1914 
1915    If X is a register, this macro should print the register's name.
1916    The names can be found in an array 'reg_names' whose type is
1917    'char *[]'.  'reg_names' is initialized from 'REGISTER_NAMES'.
1918 
1919    When the machine description has a specification '%PUNCT' (a '%'
1920    followed by a punctuation character), this macro is called with
1921    a null pointer for X and the punctuation character for CODE.
1922 
1923    'a', 'c', 'l', and 'n' are reserved.
1924 
1925    The Xtensa specific codes are:
1926 
1927    'd'  CONST_INT, print as signed decimal
1928    'x'  CONST_INT, print as signed hexadecimal
1929    'K'  CONST_INT, print number of bits in mask for EXTUI
1930    'R'  CONST_INT, print (X & 0x1f)
1931    'L'  CONST_INT, print ((32 - X) & 0x1f)
1932    'D'  REG, print second register of double-word register operand
1933    'N'  MEM, print address of next word following a memory operand
1934    'v'  MEM, if memory reference is volatile, output a MEMW before it
1935 */
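/* Worked examples of the codes above: given a CONST_INT of 0xff, '%K'
   prints 8 (the number of low-order mask bits); given 8, '%L' prints 24
   ((32 - 8) & 0x1f); and given 35, '%R' prints 3 (35 & 0x1f).  */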
1936 
1937 static void
1938 printx (file, val)
1939      FILE *file;
1940      signed int val;
1941 {
1942   /* print a hexadecimal value in a nice way */
1943   if ((val > -0xa) && (val < 0xa))
1944     fprintf (file, "%d", val);
1945   else if (val < 0)
1946     fprintf (file, "-0x%x", -val);
1947   else
1948     fprintf (file, "0x%x", val);
1949 }
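/* For instance, printx writes 9 as "9", 10 as "0xa" and -300 as "-0x12c":
   small magnitudes stay decimal, larger ones are shown in hexadecimal.  */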
1950 
1951 
1952 void
1953 print_operand (file, op, letter)
1954      FILE *file;		/* file to write to */
1955      rtx op;		/* operand to print */
1956      int letter;		/* %<letter> or 0 */
1957 {
1958   enum rtx_code code;
1959 
1960   if (! op)
1961     error ("PRINT_OPERAND null pointer");
1962 
1963   code = GET_CODE (op);
1964   switch (code)
1965     {
1966     case REG:
1967     case SUBREG:
1968       {
1969 	int regnum = xt_true_regnum (op);
1970 	if (letter == 'D')
1971 	  regnum++;
1972 	fprintf (file, "%s", reg_names[regnum]);
1973 	break;
1974       }
1975 
1976     case MEM:
1977       /* For a volatile memory reference, emit a MEMW before the
1978 	 load or store.  */
1979  	if (letter == 'v')
1980 	  {
1981 	    if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
1982 	      fprintf (file, "memw\n\t");
1983 	    break;
1984 	  }
1985  	else if (letter == 'N')
1986 	  {
1987 	    enum machine_mode mode;
1988 	    switch (GET_MODE (op))
1989 	      {
1990 	      case DFmode: mode = SFmode; break;
1991 	      case DImode: mode = SImode; break;
1992 	      default: abort ();
1993 	      }
1994 	    op = adjust_address (op, mode, 4);
1995 	  }
1996 
1997 	output_address (XEXP (op, 0));
1998 	break;
1999 
2000     case CONST_INT:
2001       switch (letter)
2002 	{
2003 	case 'K':
2004 	  {
2005 	    int num_bits = 0;
2006 	    unsigned val = INTVAL (op);
2007 	    while (val & 1)
2008 	      {
2009 		num_bits += 1;
2010 		val = val >> 1;
2011 	      }
2012 	    if ((val != 0) || (num_bits == 0) || (num_bits > 16))
2013 	      fatal_insn ("invalid mask", op);
2014 
2015 	    fprintf (file, "%d", num_bits);
2016 	    break;
2017 	  }
2018 
2019 	case 'L':
2020 	  fprintf (file, "%d", (32 - INTVAL (op)) & 0x1f);
2021 	  break;
2022 
2023 	case 'R':
2024 	  fprintf (file, "%d", INTVAL (op) & 0x1f);
2025 	  break;
2026 
2027 	case 'x':
2028 	  printx (file, INTVAL (op));
2029 	  break;
2030 
2031 	case 'd':
2032 	default:
2033 	  fprintf (file, "%d", INTVAL (op));
2034 	  break;
2035 
2036 	}
2037       break;
2038 
2039     default:
2040       output_addr_const (file, op);
2041     }
2042 }
2043 
2044 
2045 /* A C compound statement to output to stdio stream STREAM the
2046    assembler syntax for an instruction operand that is a memory
2047    reference whose address is ADDR.  ADDR is an RTL expression.  */
2048 
2049 void
2050 print_operand_address (file, addr)
2051      FILE *file;
2052      rtx addr;
2053 {
2054   if (!addr)
2055     error ("PRINT_OPERAND_ADDRESS, null pointer");
2056 
2057   switch (GET_CODE (addr))
2058     {
2059     default:
2060       fatal_insn ("invalid address", addr);
2061       break;
2062 
2063     case REG:
2064       fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2065       break;
2066 
2067     case PLUS:
2068       {
2069 	rtx reg = (rtx)0;
2070 	rtx offset = (rtx)0;
2071 	rtx arg0 = XEXP (addr, 0);
2072 	rtx arg1 = XEXP (addr, 1);
2073 
2074 	if (GET_CODE (arg0) == REG)
2075 	  {
2076 	    reg = arg0;
2077 	    offset = arg1;
2078 	  }
2079 	else if (GET_CODE (arg1) == REG)
2080 	  {
2081 	    reg = arg1;
2082 	    offset = arg0;
2083 	  }
2084 	else
2085 	  fatal_insn ("no register in address", addr);
2086 
2087 	if (CONSTANT_P (offset))
2088 	  {
2089 	    fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2090 	    output_addr_const (file, offset);
2091 	  }
2092 	else
2093 	  fatal_insn ("address offset not a constant", addr);
2094       }
2095       break;
2096 
2097     case LABEL_REF:
2098     case SYMBOL_REF:
2099     case CONST_INT:
2100     case CONST:
2101       output_addr_const (file, addr);
2102       break;
2103     }
2104 }
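/* For example, a base-plus-offset address such as (plus (reg a3) (const_int 8))
   is printed as "a3, 8", the operand form expected by the Xtensa load and
   store instructions.  */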
2105 
2106 
2107 void
2108 xtensa_output_literal (file, x, mode, labelno)
2109      FILE *file;
2110      rtx x;
2111      enum machine_mode mode;
2112      int labelno;
2113 {
2114   long value_long[2];
2115   REAL_VALUE_TYPE r;
2116   int size;
2117 
2118   fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2119 
2120   switch (GET_MODE_CLASS (mode))
2121     {
2122     case MODE_FLOAT:
2123       if (GET_CODE (x) != CONST_DOUBLE)
2124 	abort ();
2125 
2126       REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2127       switch (mode)
2128 	{
2129 	case SFmode:
2130 	  REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2131 	  fprintf (file, "0x%08lx\n", value_long[0]);
2132 	  break;
2133 
2134 	case DFmode:
2135 	  REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2136 	  fprintf (file, "0x%08lx, 0x%08lx\n",
2137 		   value_long[0], value_long[1]);
2138 	  break;
2139 
2140 	default:
2141 	  abort ();
2142 	}
2143 
2144       break;
2145 
2146     case MODE_INT:
2147     case MODE_PARTIAL_INT:
2148       size = GET_MODE_SIZE (mode);
2149       if (size == 4)
2150 	{
2151 	  output_addr_const (file, x);
2152 	  fputs ("\n", file);
2153 	}
2154       else if (size == 8)
2155 	{
2156 	  output_addr_const (file, operand_subword (x, 0, 0, DImode));
2157 	  fputs (", ", file);
2158 	  output_addr_const (file, operand_subword (x, 1, 0, DImode));
2159 	  fputs ("\n", file);
2160 	}
2161       else
2162 	abort ();
2163       break;
2164 
2165     default:
2166       abort ();
2167     }
2168 }
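/* For example (illustration only), an SFmode constant 1.0 for literal
   number 0 is emitted as

	.literal .LC0, 0x3f800000

   while a DFmode constant produces two comma-separated 32-bit words.  */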
2169 
2170 
2171 /* Return the bytes needed to compute the frame pointer from the current
2172    stack pointer. */
2173 
2174 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2175 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
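/* For instance, assuming a 128-bit STACK_BOUNDARY, STACK_BYTES is 16 and
   XTENSA_STACK_ALIGN (20) rounds a 20-byte request up to 32 bytes.  */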
2176 
2177 long
2178 compute_frame_size (size)
2179      int size;			/* # of var. bytes allocated */
2180 {
2181   /* add space for the incoming static chain value */
2182   if (current_function_needs_context)
2183     size += (1 * UNITS_PER_WORD);
2184 
2185   xtensa_current_frame_size =
2186     XTENSA_STACK_ALIGN (size
2187 			+ current_function_outgoing_args_size
2188 			+ (WINDOW_SIZE * UNITS_PER_WORD));
2189   return xtensa_current_frame_size;
2190 }
2191 
2192 
2193 int
2194 xtensa_frame_pointer_required ()
2195 {
2196   /* The code to expand builtin_frame_addr and builtin_return_addr
2197      currently uses the hard_frame_pointer instead of frame_pointer.
2198      This seems wrong but maybe it's necessary for other architectures.
2199      This function is derived from the i386 code. */
2200 
2201   if (cfun->machine->accesses_prev_frame)
2202     return 1;
2203 
2204   return 0;
2205 }
2206 
2207 
2208 void
2209 xtensa_reorg (first)
2210     rtx first;
2211 {
2212   unsigned long tsize = compute_frame_size (get_frame_size ());
2213   if (tsize < (1 << (12+3)))
2214     frame_size_const = 0;
2215   else
2216     {
2217       frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));
2218 
2219       /* make sure the constant is used so it doesn't get eliminated
2220 	 from the constant pool */
2221       emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2222     }
2223 
2224   if (!frame_pointer_needed)
2225     return;
2226 
2227   if (cfun->machine->set_frame_ptr_insn)
2228     {
2229       rtx insn;
2230 
2231       /* for all instructions prior to set_frame_ptr_insn, replace
2232 	 hard_frame_pointer references with stack_pointer */
2233       for (insn = first;
2234 	   insn != cfun->machine->set_frame_ptr_insn;
2235 	   insn = NEXT_INSN (insn))
2236 	{
2237 	  if (INSN_P (insn))
2238 	    PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2239 					  hard_frame_pointer_rtx,
2240 					  stack_pointer_rtx);
2241 	}
2242     }
2243   else
2244     {
2245       /* emit the frame pointer move immediately after the NOTE that starts
2246 	 the function */
2247       emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2248 				  stack_pointer_rtx), first);
2249     }
2250 }
2251 
2252 
2253 /* Set up the stack and frame (if desired) for the function.  */
2254 
2255 void
2256 xtensa_function_prologue (file, size)
2257      FILE *file;
2258      int size ATTRIBUTE_UNUSED;
2259 {
2260   unsigned long tsize = compute_frame_size (get_frame_size ());
2261 
2262   if (frame_pointer_needed)
2263     fprintf (file, "\t.frame\ta7, %ld\n", tsize);
2264   else
2265     fprintf (file, "\t.frame\tsp, %ld\n", tsize);
2266 
2267 
2268   if (tsize < (1 << (12+3)))
2269     {
2270       fprintf (file, "\tentry\tsp, %ld\n", tsize);
2271     }
2272   else
2273     {
2274       fprintf (file, "\tentry\tsp, 16\n");
2275 
2276       /* use a8 as a temporary since a0-a7 may be live */
2277       fprintf (file, "\tl32r\ta8, ");
2278       print_operand (file, frame_size_const, 0);
2279       fprintf (file, "\n\tsub\ta8, sp, a8\n");
2280       fprintf (file, "\tmovsp\tsp, a8\n");
2281     }
2282 }
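/* When the frame is too large for the entry instruction's immediate, the
   prologue above emits a sequence along the lines of

	entry	sp, 16
	l32r	a8, <frame-size literal>
	sub	a8, sp, a8
	movsp	sp, a8

   where the literal holds the frame size minus the 16 bytes already
   allocated by the entry instruction.  */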
2283 
2284 
2285 /* Do any necessary cleanup after a function to restore
2286    stack, frame, and regs. */
2287 
2288 void
2289 xtensa_function_epilogue (file, size)
2290      FILE *file;
2291      int size ATTRIBUTE_UNUSED;
2292 {
2293   rtx insn = get_last_insn ();
2294   /* If the last insn was a BARRIER, we don't have to write anything. */
2295   if (GET_CODE (insn) == NOTE)
2296     insn = prev_nonnote_insn (insn);
2297   if (insn == 0 || GET_CODE (insn) != BARRIER)
2298     fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
2299 
2300   xtensa_current_frame_size = 0;
2301 }
2302 
2303 
2304 rtx
2305 xtensa_return_addr (count, frame)
2306      int count;
2307      rtx frame;
2308 {
2309   rtx result, retaddr;
2310 
2311   if (count == -1)
2312     retaddr = gen_rtx_REG (Pmode, 0);
2313   else
2314     {
2315       rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2316       addr = memory_address (Pmode, addr);
2317       retaddr = gen_reg_rtx (Pmode);
2318       emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2319     }
2320 
2321   /* The 2 most-significant bits of the return address on Xtensa hold
2322      the register window size.  To get the real return address, these
2323      bits must be replaced with the high bits from the current PC.  */
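  /* Numerically (illustration only) that amounts to something like
	result = (retaddr & 0x3fffffff) | (pc & 0xc0000000);
     the actual combination is emitted by the fix_return_addr insn pattern.  */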
2324 
2325   result = gen_reg_rtx (Pmode);
2326   emit_insn (gen_fix_return_addr (result, retaddr));
2327   return result;
2328 }
2329 
2330 
2331 /* Create the va_list data type.
2332    This structure is set up by __builtin_saveregs.  The __va_reg
2333    field points to a stack-allocated region holding the contents of the
2334    incoming argument registers.  The __va_ndx field is an index initialized
2335    to the position of the first unnamed (variable) argument.  This same index
2336    is also used to address the arguments passed in memory.  Thus, the
2337    __va_stk field is initialized to point to the position of the first
2338    argument in memory offset to account for the arguments passed in
2339    registers.  E.g., if there are 6 argument registers, and each register is
2340    4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2341    references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2342    argument word N for N >= 6. */
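/* A rough C-level picture of that record (illustration only; the real type
   is built below from tree nodes):

	struct __va_list_tag {
	  void *__va_stk;	// args passed in memory, biased back by the
				//   size of the register-save area
	  void *__va_reg;	// stack copy of the incoming argument registers
	  int __va_ndx;		// byte index of the next argument
	};
*/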
2343 
2344 tree
2345 xtensa_build_va_list ()
2346 {
2347   tree f_stk, f_reg, f_ndx, record, type_decl;
2348 
2349   record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2350   type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2351 
2352   f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2353 		      ptr_type_node);
2354   f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2355 		      ptr_type_node);
2356   f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2357 		      integer_type_node);
2358 
2359   DECL_FIELD_CONTEXT (f_stk) = record;
2360   DECL_FIELD_CONTEXT (f_reg) = record;
2361   DECL_FIELD_CONTEXT (f_ndx) = record;
2362 
2363   TREE_CHAIN (record) = type_decl;
2364   TYPE_NAME (record) = type_decl;
2365   TYPE_FIELDS (record) = f_stk;
2366   TREE_CHAIN (f_stk) = f_reg;
2367   TREE_CHAIN (f_reg) = f_ndx;
2368 
2369   layout_type (record);
2370   return record;
2371 }
2372 
2373 
2374 /* Save the incoming argument registers on the stack.  Returns the
2375    address of the saved registers. */
2376 
2377 rtx
2378 xtensa_builtin_saveregs ()
2379 {
2380   rtx gp_regs, dest;
2381   int arg_words = current_function_arg_words;
2382   int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2383 
2384   if (gp_left <= 0)
2385     return const0_rtx;
2386 
2387   /* allocate the general-purpose register space */
2388   gp_regs = assign_stack_local
2389     (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2390   set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2391 
2392   /* Now store the incoming registers.  */
2393   dest = change_address (gp_regs, SImode,
2394 			 plus_constant (XEXP (gp_regs, 0),
2395 					arg_words * UNITS_PER_WORD));
2396 
2397   cfun->machine->need_a7_copy = true;
2398   cfun->machine->vararg_a7 = true;
2399   move_block_from_reg (GP_ARG_FIRST + arg_words, dest, gp_left,
2400 		       gp_left * UNITS_PER_WORD);
2401 
2402   return XEXP (gp_regs, 0);
2403 }
2404 
2405 
2406 /* Implement `va_start' for varargs and stdarg.  We look at the
2407    current function to fill in an initial va_list. */
2408 
2409 void
2410 xtensa_va_start (valist, nextarg)
2411      tree valist;
2412      rtx nextarg ATTRIBUTE_UNUSED;
2413 {
2414   tree f_stk, stk;
2415   tree f_reg, reg;
2416   tree f_ndx, ndx;
2417   tree t, u;
2418   int arg_words;
2419 
2420   arg_words = current_function_args_info.arg_words;
2421 
2422   f_stk = TYPE_FIELDS (va_list_type_node);
2423   f_reg = TREE_CHAIN (f_stk);
2424   f_ndx = TREE_CHAIN (f_reg);
2425 
2426   stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2427   reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2428   ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2429 
2430   /* Call __builtin_saveregs; save the result in __va_reg */
2431   current_function_arg_words = arg_words;
2432   u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2433   t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2434   TREE_SIDE_EFFECTS (t) = 1;
2435   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2436 
2437   /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2438   u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2439   u = fold (build (PLUS_EXPR, ptr_type_node, u,
2440 		   build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2441   t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2442   TREE_SIDE_EFFECTS (t) = 1;
2443   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2444 
2445   /* Set the __va_ndx member. */
2446   u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2447   t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2448   TREE_SIDE_EFFECTS (t) = 1;
2449   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2450 }
2451 
2452 
2453 /* Implement `va_arg'.  */
2454 
2455 rtx
2456 xtensa_va_arg (valist, type)
2457      tree valist, type;
2458 {
2459   tree f_stk, stk;
2460   tree f_reg, reg;
2461   tree f_ndx, ndx;
2462   tree tmp, addr_tree, type_size;
2463   rtx array, orig_ndx, r, addr, size, va_size;
2464   rtx lab_false, lab_over, lab_false2;
2465 
2466   f_stk = TYPE_FIELDS (va_list_type_node);
2467   f_reg = TREE_CHAIN (f_stk);
2468   f_ndx = TREE_CHAIN (f_reg);
2469 
2470   stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2471   reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2472   ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2473 
2474   type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
2475 
2476   va_size = gen_reg_rtx (SImode);
2477   tmp = fold (build (MULT_EXPR, sizetype,
2478 		     fold (build (TRUNC_DIV_EXPR, sizetype,
2479 				  fold (build (PLUS_EXPR, sizetype,
2480 					       type_size,
2481 					       size_int (UNITS_PER_WORD - 1))),
2482 				  size_int (UNITS_PER_WORD))),
2483 		     size_int (UNITS_PER_WORD)));
2484   r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2485   if (r != va_size)
2486     emit_move_insn (va_size, r);
2487 
2488 
2489   /* First align __va_ndx to a double word boundary if necessary for this arg:
2490 
2491      if (__alignof__ (TYPE) > 4)
2492        (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2493   */
2494 
2495   if (TYPE_ALIGN (type) > BITS_PER_WORD)
2496     {
2497       tmp = build (PLUS_EXPR, integer_type_node, ndx,
2498 		   build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2499       tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2500 		   build_int_2 (-2 * UNITS_PER_WORD, -1));
2501       tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2502       TREE_SIDE_EFFECTS (tmp) = 1;
2503       expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2504     }
2505 
2506 
2507   /* Increment __va_ndx to point past the argument:
2508 
2509      orig_ndx = (AP).__va_ndx;
2510      (AP).__va_ndx += __va_size (TYPE);
2511   */
2512 
2513   orig_ndx = gen_reg_rtx (SImode);
2514   r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2515   if (r != orig_ndx)
2516     emit_move_insn (orig_ndx, r);
2517 
2518   tmp = build (PLUS_EXPR, integer_type_node, ndx,
2519 	       make_tree (intSI_type_node, va_size));
2520   tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2521   TREE_SIDE_EFFECTS (tmp) = 1;
2522   expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2523 
2524 
2525   /* Check if the argument is in registers:
2526 
2527      if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2528          && !MUST_PASS_IN_STACK (type))
2529         __array = (AP).__va_reg;
2530   */
2531 
2532   array = gen_reg_rtx (Pmode);
2533 
2534   lab_over = NULL_RTX;
2535   if (!MUST_PASS_IN_STACK (VOIDmode, type))
2536     {
2537       lab_false = gen_label_rtx ();
2538       lab_over = gen_label_rtx ();
2539 
2540       emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2541 					    EXPAND_NORMAL),
2542 			       GEN_INT (MAX_ARGS_IN_REGISTERS
2543 					* UNITS_PER_WORD),
2544 			       GT, const1_rtx, SImode, 0, lab_false);
2545 
2546       r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2547       if (r != array)
2548 	emit_move_insn (array, r);
2549 
2550       emit_jump_insn (gen_jump (lab_over));
2551       emit_barrier ();
2552       emit_label (lab_false);
2553     }
2554 
2555   /* ...otherwise, the argument is on the stack (never split between
2556      registers and the stack -- change __va_ndx if necessary):
2557 
2558      else
2559        {
2560 	 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2561 	     (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2562 	 __array = (AP).__va_stk;
2563        }
2564   */
2565 
2566   lab_false2 = gen_label_rtx ();
2567   emit_cmp_and_jump_insns (orig_ndx,
2568 			   GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2569 			   GE, const1_rtx, SImode, 0, lab_false2);
2570 
2571   tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2572 	       build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
2573   tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2574   TREE_SIDE_EFFECTS (tmp) = 1;
2575   expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2576 
2577   emit_label (lab_false2);
2578 
2579   r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2580   if (r != array)
2581     emit_move_insn (array, r);
2582 
2583   if (lab_over != NULL_RTX)
2584     emit_label (lab_over);
2585 
2586 
2587   /* Given the base array pointer (__array) and index to the subsequent
2588      argument (__va_ndx), find the address:
2589 
2590      __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2591 				? sizeof (TYPE)
2592 				: __va_size (TYPE))
2593 
2594      The results are endian-dependent because values smaller than one word
2595      are aligned differently.
2596   */
2597 
2598   size = gen_reg_rtx (SImode);
2599   emit_move_insn (size, va_size);
2600 
2601   if (BYTES_BIG_ENDIAN)
2602     {
2603       rtx lab_use_va_size = gen_label_rtx ();
2604 
2605       emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2606 					    EXPAND_NORMAL),
2607 			       GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2608 			       GE, const1_rtx, SImode, 0, lab_use_va_size);
2609 
2610       r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2611       if (r != size)
2612 	emit_move_insn (size, r);
2613 
2614       emit_label (lab_use_va_size);
2615     }
2616 
2617   addr_tree = build (PLUS_EXPR, ptr_type_node,
2618 		     make_tree (ptr_type_node, array),
2619 		     ndx);
2620   addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2621 		     make_tree (intSI_type_node, size));
2622   addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2623   addr = copy_to_reg (addr);
2624   return addr;
2625 }
2626 
2627 
2628 enum reg_class
2629 xtensa_preferred_reload_class (x, class, isoutput)
2630      rtx x;
2631      enum reg_class class;
2632      int isoutput;
2633 {
2634   if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2635     return NO_REGS;
2636 
2637   /* Don't use the stack pointer or hard frame pointer for reloads!
2638      The hard frame pointer would normally be OK except that it may
2639      briefly hold an incoming argument in the prologue, and reload
2640      won't know that it is live because the hard frame pointer is
2641      treated specially.  */
2642 
2643   if (class == AR_REGS || class == GR_REGS)
2644     return RL_REGS;
2645 
2646   return class;
2647 }
2648 
2649 
2650 enum reg_class
2651 xtensa_secondary_reload_class (class, mode, x, isoutput)
2652      enum reg_class class;
2653      enum machine_mode mode ATTRIBUTE_UNUSED;
2654      rtx x;
2655      int isoutput;
2656 {
2657   int regno;
2658 
2659   if (GET_CODE (x) == SIGN_EXTEND)
2660     x = XEXP (x, 0);
2661   regno = xt_true_regnum (x);
2662 
2663   if (!isoutput)
2664     {
2665       if (class == FP_REGS && constantpool_mem_p (x))
2666 	return RL_REGS;
2667     }
2668 
2669   if (ACC_REG_P (regno))
2670     return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2671   if (class == ACC_REG)
2672     return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
2673 
2674   return NO_REGS;
2675 }
2676 
2677 
2678 void
2679 order_regs_for_local_alloc ()
2680 {
2681   if (!leaf_function_p ())
2682     {
2683       memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2684 	      FIRST_PSEUDO_REGISTER * sizeof (int));
2685     }
2686   else
2687     {
2688       int i, num_arg_regs;
2689       int nxt = 0;
2690 
2691       /* use the AR registers in increasing order (skipping a0 and a1)
2692 	 but save the incoming argument registers for a last resort */
2693       num_arg_regs = current_function_args_info.arg_words;
2694       if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2695 	num_arg_regs = MAX_ARGS_IN_REGISTERS;
2696       for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2697 	reg_alloc_order[nxt++] = i + num_arg_regs;
2698       for (i = 0; i < num_arg_regs; i++)
2699 	reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
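      /* For instance (illustration only), with two incoming argument words
	 the order built so far is a4..a15 followed by a2, a3.  */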
2700 
2701       /* list the coprocessor registers in order */
2702       for (i = 0; i < BR_REG_NUM; i++)
2703 	reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2704 
2705       /* list the FP registers in order for now */
2706       for (i = 0; i < 16; i++)
2707 	reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2708 
2709       /* GCC requires that we list *all* the registers.... */
2710       reg_alloc_order[nxt++] = 0;	/* a0 = return address */
2711       reg_alloc_order[nxt++] = 1;	/* a1 = stack pointer */
2712       reg_alloc_order[nxt++] = 16;	/* pseudo frame pointer */
2713       reg_alloc_order[nxt++] = 17;	/* pseudo arg pointer */
2714 
2715       reg_alloc_order[nxt++] = ACC_REG_FIRST;	/* MAC16 accumulator */
2716     }
2717 }
2718 
2719 
2720 /* Some Xtensa targets support multiple bss sections.  If the section
2721    name ends with ".bss", add SECTION_BSS to the flags.  */
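/* For example (hypothetical section name), a variable declared with
   __attribute__ ((section (".sram.bss"))) and no initializer lands in a
   section whose name ends in ".bss" and so gets the SECTION_BSS (nobits)
   flag; an initialized variable in the same section draws the warning
   below instead.  */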
2722 
2723 static unsigned int
2724 xtensa_multibss_section_type_flags (decl, name, reloc)
2725      tree decl;
2726      const char *name;
2727      int reloc;
2728 {
2729   unsigned int flags = default_section_type_flags (decl, name, reloc);
2730   const char *suffix;
2731 
2732   suffix = strrchr (name, '.');
2733   if (suffix && strcmp (suffix, ".bss") == 0)
2734     {
2735       if (!decl || (TREE_CODE (decl) == VAR_DECL
2736 		    && DECL_INITIAL (decl) == NULL_TREE))
2737 	flags |= SECTION_BSS;  /* @nobits */
2738       else
2739 	warning ("only uninitialized variables can be placed in a "
2740 		 ".bss section");
2741     }
2742 
2743   return flags;
2744 }
2745 
2746 
2747 /* The literal pool stays with the function.  */
2748 
2749 static void
2750 xtensa_select_rtx_section (mode, x, align)
2751      enum machine_mode mode ATTRIBUTE_UNUSED;
2752      rtx x ATTRIBUTE_UNUSED;
2753      unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
2754 {
2755   function_section (current_function_decl);
2756 }
2757 
2758 /* If we are referencing a function that is static, make the SYMBOL_REF
2759    special so that we can generate direct calls to it even with -fpic.  */
2760 
2761 static void
2762 xtensa_encode_section_info (decl, first)
2763      tree decl;
2764      int first ATTRIBUTE_UNUSED;
2765 {
2766   if (TREE_CODE (decl) == FUNCTION_DECL && ! TREE_PUBLIC (decl))
2767     SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
2768 }
2769 
2770 #include "gt-xtensa.h"
2771