1 /* Subroutines for insn-output.c for ATMEL AVR microcontrollers
2    Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3    2009, 2010 Free Software Foundation, Inc.
4    Contributed by Denis Chertykov (chertykov@gmail.com)
5 
6    This file is part of GCC.
7 
8    GCC is free software; you can redistribute it and/or modify
9    it under the terms of the GNU General Public License as published by
10    the Free Software Foundation; either version 3, or (at your option)
11    any later version.
12 
13    GCC is distributed in the hope that it will be useful,
14    but WITHOUT ANY WARRANTY; without even the implied warranty of
15    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16    GNU General Public License for more details.
17 
18    You should have received a copy of the GNU General Public License
19    along with GCC; see the file COPYING3.  If not see
20    <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "ggc.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "params.h"
47 #include "df.h"
48 
49 /* Maximum allowed offset for an address in the LD command.  */
50 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
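
/* Illustrative values implied by the definition above: QImode (1 byte) -> 63,
   HImode (2 bytes) -> 62, SFmode (4 bytes) -> 60.  The limit shrinks with the
   access size so that the last byte accessed still lies within the 0..63
   displacement range of LDD/STD.  */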
51 
52 static int avr_naked_function_p (tree);
53 static int interrupt_function_p (tree);
54 static int signal_function_p (tree);
55 static int avr_OS_task_function_p (tree);
56 static int avr_OS_main_function_p (tree);
57 static int avr_regs_to_save (HARD_REG_SET *);
58 static int get_sequence_length (rtx insns);
59 static int sequent_regs_live (void);
60 static const char *ptrreg_to_str (int);
61 static const char *cond_string (enum rtx_code);
62 static int avr_num_arg_regs (enum machine_mode, tree);
63 
64 static RTX_CODE compare_condition (rtx insn);
65 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
66 static int compare_sign_p (rtx insn);
67 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
68 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
69 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
70 static bool avr_assemble_integer (rtx, unsigned int, int);
71 static void avr_file_start (void);
72 static void avr_file_end (void);
73 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
74 static void avr_asm_function_end_prologue (FILE *);
75 static void avr_asm_function_begin_epilogue (FILE *);
76 static bool avr_cannot_modify_jumps_p (void);
77 static rtx avr_function_value (const_tree, const_tree, bool);
78 static void avr_insert_attributes (tree, tree *);
79 static void avr_asm_init_sections (void);
80 static unsigned int avr_section_type_flags (tree, const char *, int);
81 
82 static void avr_reorg (void);
83 static void avr_asm_out_ctor (rtx, int);
84 static void avr_asm_out_dtor (rtx, int);
85 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
86 static bool avr_rtx_costs (rtx, int, int, int *, bool);
87 static int avr_address_cost (rtx, bool);
88 static bool avr_return_in_memory (const_tree, const_tree);
89 static struct machine_function * avr_init_machine_status (void);
90 static rtx avr_builtin_setjmp_frame_value (void);
91 static bool avr_hard_regno_scratch_ok (unsigned int);
92 static unsigned int avr_case_values_threshold (void);
93 static bool avr_frame_pointer_required_p (void);
94 static bool avr_can_eliminate (const int, const int);
95 static void avr_help (void);
96 
97 /* Allocate registers from r25 down to r8 for function call parameters.  */
98 #define FIRST_CUM_REG 26
99 
100 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
101 static GTY(()) rtx tmp_reg_rtx;
102 
103 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
104 static GTY(()) rtx zero_reg_rtx;
105 
106 /* AVR register names {"r0", "r1", ..., "r31"} */
107 static const char *const avr_regnames[] = REGISTER_NAMES;
108 
109 /* Preprocessor macros to define depending on MCU type.  */
110 const char *avr_extra_arch_macro;
111 
112 /* Current architecture.  */
113 const struct base_arch_s *avr_current_arch;
114 
115 /* Current device.  */
116 const struct mcu_type_s *avr_current_device;
117 
118 section *progmem_section;
119 
120 /* AVR attributes.  */
121 static const struct attribute_spec avr_attribute_table[] =
122 {
123   /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
124   { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute },
125   { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute },
126   { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute },
127   { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute },
128   { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute },
129   { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute },
130   { NULL,        0, 0, false, false, false, NULL }
131 };
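
/* Illustrative user-level use of these attributes (hypothetical declarations,
   not taken from this file):

     void __attribute__ ((signal)) SOME_vect (void);      - ISR, interrupts stay disabled
     void __attribute__ ((interrupt)) OTHER_vect (void);  - ISR, prologue re-enables interrupts
     const char table[] __attribute__ ((progmem)) = "x";  - object placed in program memory

   The handlers named in the table above validate and record such uses.  */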
132 
133 /* Initialize the GCC target structure.  */
134 #undef TARGET_ASM_ALIGNED_HI_OP
135 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
136 #undef TARGET_ASM_ALIGNED_SI_OP
137 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
138 #undef TARGET_ASM_UNALIGNED_HI_OP
139 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
140 #undef TARGET_ASM_UNALIGNED_SI_OP
141 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
142 #undef TARGET_ASM_INTEGER
143 #define TARGET_ASM_INTEGER avr_assemble_integer
144 #undef TARGET_ASM_FILE_START
145 #define TARGET_ASM_FILE_START avr_file_start
146 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
147 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
148 #undef TARGET_ASM_FILE_END
149 #define TARGET_ASM_FILE_END avr_file_end
150 
151 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
152 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
153 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
154 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
155 #undef TARGET_FUNCTION_VALUE
156 #define TARGET_FUNCTION_VALUE avr_function_value
157 #undef TARGET_ATTRIBUTE_TABLE
158 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
159 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
160 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
161 #undef TARGET_INSERT_ATTRIBUTES
162 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
163 #undef TARGET_SECTION_TYPE_FLAGS
164 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
165 #undef TARGET_RTX_COSTS
166 #define TARGET_RTX_COSTS avr_rtx_costs
167 #undef TARGET_ADDRESS_COST
168 #define TARGET_ADDRESS_COST avr_address_cost
169 #undef TARGET_MACHINE_DEPENDENT_REORG
170 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
171 
172 #undef TARGET_LEGITIMIZE_ADDRESS
173 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
174 
175 #undef TARGET_RETURN_IN_MEMORY
176 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
177 
178 #undef TARGET_STRICT_ARGUMENT_NAMING
179 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
180 
181 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
182 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
183 
184 #undef TARGET_HARD_REGNO_SCRATCH_OK
185 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
186 #undef TARGET_CASE_VALUES_THRESHOLD
187 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
188 
189 #undef TARGET_LEGITIMATE_ADDRESS_P
190 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
191 
192 #undef TARGET_FRAME_POINTER_REQUIRED
193 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
194 #undef TARGET_CAN_ELIMINATE
195 #define TARGET_CAN_ELIMINATE avr_can_eliminate
196 
197 #undef TARGET_HELP
198 #define TARGET_HELP avr_help
199 
200 #undef TARGET_CANNOT_MODIFY_JUMPS_P
201 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
202 
203 struct gcc_target targetm = TARGET_INITIALIZER;
204 
205 void
206 avr_override_options (void)
207 {
208   const struct mcu_type_s *t;
209 
210   flag_delete_null_pointer_checks = 0;
211 
212   for (t = avr_mcu_types; t->name; t++)
213     if (strcmp (t->name, avr_mcu_name) == 0)
214       break;
215 
216   if (!t->name)
217     {
218       error ("unrecognized argument to -mmcu= option: %qs", avr_mcu_name);
219       inform (input_location,  "See --target-help for supported MCUs");
220     }
221 
222   avr_current_device = t;
223   avr_current_arch = &avr_arch_types[avr_current_device->arch];
224   avr_extra_arch_macro = avr_current_device->macro;
225 
226   tmp_reg_rtx  = gen_rtx_REG (QImode, TMP_REGNO);
227   zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
228 
229   init_machine_status = avr_init_machine_status;
230 }
231 
232 /* Implement TARGET_HELP.
233    Report extra information for --target-help.  */
234 
235 static void
236 avr_help (void)
237 {
238   const struct mcu_type_s *t;
239   const char * const indent = "  ";
240   int len;
241 
242   /* Give a list of MCUs that are accepted by -mmcu=* .
243      Note that MCUs supported by the compiler might differ from
244      MCUs supported by binutils. */
245 
246   len = strlen (indent);
247   printf ("Known MCU names:\n%s", indent);
248 
249   /* Print a blank-separated list of all supported MCUs */
250 
251   for (t = avr_mcu_types; t->name; t++)
252     {
253       printf ("%s ", t->name);
254       len += 1 + strlen (t->name);
255 
256       /* Break long lines */
257 
258       if (len > 66 && (t+1)->name)
259         {
260           printf ("\n%s", indent);
261           len = strlen (indent);
262         }
263     }
264 
265   printf ("\n\n");
266 }
267 
268 /* Table mapping a hard register number to its register class.  */
269 
270 static const enum reg_class reg_class_tab[]={
271   GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
272   GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
273   GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
274   GENERAL_REGS, /* r0 - r15 */
275   LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
276   LD_REGS,                      /* r16 - 23 */
277   ADDW_REGS,ADDW_REGS,          /* r24,r25 */
278   POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
279   POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
280   POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
281   STACK_REG,STACK_REG           /* SPL,SPH */
282 };
283 
284 /* Function to set up the backend function structure.  */
285 
286 static struct machine_function *
287 avr_init_machine_status (void)
288 {
289   return ((struct machine_function *)
290           ggc_alloc_cleared (sizeof (struct machine_function)));
291 }
292 
293 /* Return register class for register R.  */
294 
295 enum reg_class
296 avr_regno_reg_class (int r)
297 {
298   if (r <= 33)
299     return reg_class_tab[r];
300   return ALL_REGS;
301 }
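
/* For example, per reg_class_tab above: register 24 maps to ADDW_REGS, 26 to
   POINTER_X_REGS and 32 to STACK_REG (SPL); any register number above 33
   falls back to ALL_REGS.  */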
302 
303 /* Return nonzero if FUNC is a naked function.  */
304 
305 static int
306 avr_naked_function_p (tree func)
307 {
308   tree a;
309 
310   gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
311 
312   a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
313   return a != NULL_TREE;
314 }
315 
316 /* Return nonzero if FUNC is an interrupt function as specified
317    by the "interrupt" attribute.  */
318 
319 static int
320 interrupt_function_p (tree func)
321 {
322   tree a;
323 
324   if (TREE_CODE (func) != FUNCTION_DECL)
325     return 0;
326 
327   a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
328   return a != NULL_TREE;
329 }
330 
331 /* Return nonzero if FUNC is a signal function as specified
332    by the "signal" attribute.  */
333 
334 static int
335 signal_function_p (tree func)
336 {
337   tree a;
338 
339   if (TREE_CODE (func) != FUNCTION_DECL)
340     return 0;
341 
342   a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
343   return a != NULL_TREE;
344 }
345 
346 /* Return nonzero if FUNC is an OS_task function.  */
347 
348 static int
349 avr_OS_task_function_p (tree func)
350 {
351   tree a;
352 
353   gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
354 
355   a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
356   return a != NULL_TREE;
357 }
358 
359 /* Return nonzero if FUNC is an OS_main function.  */
360 
361 static int
362 avr_OS_main_function_p (tree func)
363 {
364   tree a;
365 
366   gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
367 
368   a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
369   return a != NULL_TREE;
370 }
371 
372 /* Return the number of hard registers to push/pop in the prologue/epilogue
373    of the current function, and optionally store these registers in SET.  */
374 
375 static int
376 avr_regs_to_save (HARD_REG_SET *set)
377 {
378   int reg, count;
379   int int_or_sig_p = (interrupt_function_p (current_function_decl)
380 		      || signal_function_p (current_function_decl));
381 
382   if (set)
383     CLEAR_HARD_REG_SET (*set);
384   count = 0;
385 
386   /* No need to save any registers if the function never returns or
387      has the "OS_task" or "OS_main" attribute.  */
388   if (TREE_THIS_VOLATILE (current_function_decl)
389       || cfun->machine->is_OS_task
390       || cfun->machine->is_OS_main)
391     return 0;
392 
393   for (reg = 0; reg < 32; reg++)
394     {
395       /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
396 	 any global register variables.  */
397       if (fixed_regs[reg])
398 	continue;
399 
400       if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
401 	  || (df_regs_ever_live_p (reg)
402 	      && (int_or_sig_p || !call_used_regs[reg])
403 	      && !(frame_pointer_needed
404 		   && (reg == REG_Y || reg == (REG_Y+1)))))
405 	{
406 	  if (set)
407 	    SET_HARD_REG_BIT (*set, reg);
408 	  count++;
409 	}
410     }
411   return count;
412 }
413 
414 /* Return true if register FROM can be eliminated via register TO.  */
415 
416 bool
417 avr_can_eliminate (const int from, const int to)
418 {
419   return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
420 	  || ((from == FRAME_POINTER_REGNUM
421 	       || from == FRAME_POINTER_REGNUM + 1)
422 	      && !frame_pointer_needed));
423 }
424 
425 /* Compute offset between arg_pointer and frame_pointer.  */
426 
427 int
428 avr_initial_elimination_offset (int from, int to)
429 {
430   if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
431     return 0;
432   else
433     {
434       int offset = frame_pointer_needed ? 2 : 0;
435       int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
436 
437       offset += avr_regs_to_save (NULL);
438       return get_frame_size () + (avr_pc_size) + 1 + offset;
439     }
440 }
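
/* Worked example for the function above (illustrative numbers): eliminating
   the arg pointer into the frame pointer for a function with a 4-byte frame,
   two call-saved registers to push, frame_pointer_needed set and a 2-byte PC
   yields 4 (frame) + 2 (PC) + 1 + 2 (frame pointer save) + 2 (registers) = 11.  */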
441 
442 /* The actual start of the frame is virtual_stack_vars_rtx, which is offset
443    from the frame pointer by +STARTING_FRAME_OFFSET.
444    Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
445    avoids creating an add/sub of the offset in nonlocal goto and setjmp.  */
446 
447 rtx avr_builtin_setjmp_frame_value (void)
448 {
449   return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
450 			 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
451 }
452 
453 /* Return the contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
454    This is the return address of the function.  */
455 rtx
456 avr_return_addr_rtx (int count, const_rtx tem)
457 {
458   rtx r;
459 
460   /* We can only return this function's return address.  Other frames are not supported.  */
461   if (count)
462      return NULL;
463 
464   if (AVR_3_BYTE_PC)
465     {
466       r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
467       warning (0, "'builtin_return_address' contains only 2 bytes of address");
468     }
469   else
470     r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
471 
472   r = gen_rtx_PLUS (Pmode, tem, r);
473   r = gen_frame_mem (Pmode, memory_address (Pmode, r));
474   r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
475   return  r;
476 }
477 
478 /* Return 1 if the function epilogue is just a single "ret".  */
479 
480 int
481 avr_simple_epilogue (void)
482 {
483   return (! frame_pointer_needed
484 	  && get_frame_size () == 0
485 	  && avr_regs_to_save (NULL) == 0
486 	  && ! interrupt_function_p (current_function_decl)
487 	  && ! signal_function_p (current_function_decl)
488 	  && ! avr_naked_function_p (current_function_decl)
489 	  && ! TREE_THIS_VOLATILE (current_function_decl));
490 }
491 
492 /* Return the length of the contiguous sequence of live call-saved registers, or 0 if the live registers are not contiguous.  */
493 
494 static int
495 sequent_regs_live (void)
496 {
497   int reg;
498   int live_seq=0;
499   int cur_seq=0;
500 
501   for (reg = 0; reg < 18; ++reg)
502     {
503       if (!call_used_regs[reg])
504 	{
505 	  if (df_regs_ever_live_p (reg))
506 	    {
507 	      ++live_seq;
508 	      ++cur_seq;
509 	    }
510 	  else
511 	    cur_seq = 0;
512 	}
513     }
514 
515   if (!frame_pointer_needed)
516     {
517       if (df_regs_ever_live_p (REG_Y))
518 	{
519 	  ++live_seq;
520 	  ++cur_seq;
521 	}
522       else
523 	cur_seq = 0;
524 
525       if (df_regs_ever_live_p (REG_Y+1))
526 	{
527 	  ++live_seq;
528 	  ++cur_seq;
529 	}
530       else
531 	cur_seq = 0;
532     }
533   else
534     {
535       cur_seq += 2;
536       live_seq += 2;
537     }
538   return (cur_seq == live_seq) ? live_seq : 0;
539 }
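
/* Example for sequent_regs_live above: if r16 and r17 are the only live
   call-saved registers and the frame pointer is needed, both counters end up
   at 2 + 2 = 4 and 4 is returned; if the live registers are not one
   contiguous run, cur_seq ends up smaller than live_seq and 0 is returned.  */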
540 
541 /* Return the total length of the sequence of insns INSNS.  */
542 
543 int
544 get_sequence_length (rtx insns)
545 {
546   rtx insn;
547   int length;
548 
549   for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
550     length += get_attr_length (insn);
551 
552   return length;
553 }
554 
555 /*  Output RTL prologue.  */
556 
557 void
558 expand_prologue (void)
559 {
560   int live_seq;
561   HARD_REG_SET set;
562   int minimize;
563   HOST_WIDE_INT size = get_frame_size();
564   /* Define templates for push instructions.  */
565   rtx pushbyte = gen_rtx_MEM (QImode,
566                   gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
567   rtx pushword = gen_rtx_MEM (HImode,
568                   gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
569   rtx insn;
570 
571   /* Init cfun->machine.  */
572   cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
573   cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
574   cfun->machine->is_signal = signal_function_p (current_function_decl);
575   cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
576   cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
577   cfun->machine->stack_usage = 0;
578 
579   /* Prologue: naked.  */
580   if (cfun->machine->is_naked)
581     {
582       return;
583     }
584 
585   avr_regs_to_save (&set);
586   live_seq = sequent_regs_live ();
587   minimize = (TARGET_CALL_PROLOGUES
588 	      && !cfun->machine->is_interrupt
589 	      && !cfun->machine->is_signal
590 	      && !cfun->machine->is_OS_task
591 	      && !cfun->machine->is_OS_main
592 	      && live_seq);
593 
594   if (cfun->machine->is_interrupt || cfun->machine->is_signal)
595     {
596       if (cfun->machine->is_interrupt)
597         {
598           /* Enable interrupts.  */
599           insn = emit_insn (gen_enable_interrupt ());
600           RTX_FRAME_RELATED_P (insn) = 1;
601         }
602 
603       /* Push zero reg.  */
604       insn = emit_move_insn (pushbyte, zero_reg_rtx);
605       RTX_FRAME_RELATED_P (insn) = 1;
606       cfun->machine->stack_usage++;
607 
608       /* Push tmp reg.  */
609       insn = emit_move_insn (pushbyte, tmp_reg_rtx);
610       RTX_FRAME_RELATED_P (insn) = 1;
611       cfun->machine->stack_usage++;
612 
613       /* Push SREG.  */
614       insn = emit_move_insn (tmp_reg_rtx,
615                              gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
616       RTX_FRAME_RELATED_P (insn) = 1;
617       insn = emit_move_insn (pushbyte, tmp_reg_rtx);
618       RTX_FRAME_RELATED_P (insn) = 1;
619       cfun->machine->stack_usage++;
620 
621       /* Push RAMPZ.  */
622       if(AVR_HAVE_RAMPZ
623          && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
624         {
625           insn = emit_move_insn (tmp_reg_rtx,
626                                  gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
627           RTX_FRAME_RELATED_P (insn) = 1;
628           insn = emit_move_insn (pushbyte, tmp_reg_rtx);
629           RTX_FRAME_RELATED_P (insn) = 1;
630           cfun->machine->stack_usage++;
631         }
632 
633       /* Clear zero reg.  */
634       insn = emit_move_insn (zero_reg_rtx, const0_rtx);
635       RTX_FRAME_RELATED_P (insn) = 1;
636 
637       /* Prevent any attempt to delete the setting of ZERO_REG!  */
638       emit_use (zero_reg_rtx);
639     }
640   if (minimize && (frame_pointer_needed
641 		   || (AVR_2_BYTE_PC && live_seq > 6)
642 		   || live_seq > 7))
643     {
644       insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
645                              gen_int_mode (size, HImode));
646       RTX_FRAME_RELATED_P (insn) = 1;
647 
648       insn =
649         emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
650 					    gen_int_mode (size + live_seq, HImode)));
651       RTX_FRAME_RELATED_P (insn) = 1;
652       cfun->machine->stack_usage += size + live_seq;
653     }
654   else
655     {
656       int reg;
657       for (reg = 0; reg < 32; ++reg)
658         {
659           if (TEST_HARD_REG_BIT (set, reg))
660             {
661               /* Emit push of register to save.  */
662               insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
663               RTX_FRAME_RELATED_P (insn) = 1;
664               cfun->machine->stack_usage++;
665             }
666         }
667       if (frame_pointer_needed)
668         {
669 	  if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
670 	    {
671               /* Push frame pointer.  */
672 	      insn = emit_move_insn (pushword, frame_pointer_rtx);
673               RTX_FRAME_RELATED_P (insn) = 1;
674 	      cfun->machine->stack_usage += 2;
675 	    }
676 
677           if (!size)
678             {
679               insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
680               RTX_FRAME_RELATED_P (insn) = 1;
681             }
682           else
683             {
684               /*  Creating a frame can be done by direct manipulation of the
685                   stack or via the frame pointer. These two methods are:
686                     fp=sp
687                     fp-=size
688                     sp=fp
689                 OR
690                     sp-=size
691                     fp=sp
692               the optimum method depends on the function type and on the stack
693               and frame size.  To avoid complex logic, both methods are tested
694               and the shortest is selected.  */
695               rtx myfp;
696 	      rtx fp_plus_insns;
697 	      rtx sp_plus_insns = NULL_RTX;
698 
699               if (AVR_HAVE_8BIT_SP)
700                 {
701                   /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
702                      over 'sbiw' (2 cycles, same size).  */
703                   myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
704                 }
705               else
706                 {
707                   /*  Normal sized addition.  */
708                   myfp = frame_pointer_rtx;
709                 }
710 
711 	      /* Method 1: Adjust the frame pointer.  */
712 	      start_sequence ();
713 
714               insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
715               RTX_FRAME_RELATED_P (insn) = 1;
716 
717               insn =
718 	        emit_move_insn (myfp,
719 				gen_rtx_PLUS (GET_MODE(myfp), myfp,
720 					      gen_int_mode (-size,
721 							    GET_MODE(myfp))));
722               RTX_FRAME_RELATED_P (insn) = 1;
723 
724 	      /* Copy to stack pointer.  */
725 	      if (AVR_HAVE_8BIT_SP)
726 		{
727 		  insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
728 		  RTX_FRAME_RELATED_P (insn) = 1;
729 		}
730 	      else if (TARGET_NO_INTERRUPTS
731 		       || cfun->machine->is_signal
732 		       || cfun->machine->is_OS_main)
733 		{
734 		  insn =
735 		    emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
736 						       frame_pointer_rtx));
737 		  RTX_FRAME_RELATED_P (insn) = 1;
738 		}
739 	      else if (cfun->machine->is_interrupt)
740 		{
741 		  insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
742 							   frame_pointer_rtx));
743 		  RTX_FRAME_RELATED_P (insn) = 1;
744 		}
745 	      else
746 		{
747 		  insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
748 		  RTX_FRAME_RELATED_P (insn) = 1;
749 		}
750 
751 	      fp_plus_insns = get_insns ();
752 	      end_sequence ();
753 
754 	      /* Method 2: Adjust the stack pointer.  */
755               if (size <= 6)
756                 {
757 		  start_sequence ();
758 
759 		  insn =
760 		    emit_move_insn (stack_pointer_rtx,
761 				    gen_rtx_PLUS (HImode,
762 						  stack_pointer_rtx,
763 						  gen_int_mode (-size,
764 								HImode)));
765 		  RTX_FRAME_RELATED_P (insn) = 1;
766 
767 		  insn =
768 		    emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
769 		  RTX_FRAME_RELATED_P (insn) = 1;
770 
771 		  sp_plus_insns = get_insns ();
772 		  end_sequence ();
773                 }
774 
775               /* Use shortest method.  */
776               if (size <= 6 && (get_sequence_length (sp_plus_insns)
777 				 < get_sequence_length (fp_plus_insns)))
778 		emit_insn (sp_plus_insns);
779               else
780 		emit_insn (fp_plus_insns);
781 	      cfun->machine->stack_usage += size;
782             }
783         }
784     }
785 }
786 
787 /* Output summary at end of function prologue.  */
788 
789 static void
790 avr_asm_function_end_prologue (FILE *file)
791 {
792   if (cfun->machine->is_naked)
793     {
794       fputs ("/* prologue: naked */\n", file);
795     }
796   else
797     {
798       if (cfun->machine->is_interrupt)
799         {
800           fputs ("/* prologue: Interrupt */\n", file);
801         }
802       else if (cfun->machine->is_signal)
803         {
804           fputs ("/* prologue: Signal */\n", file);
805         }
806       else
807         fputs ("/* prologue: function */\n", file);
808     }
809   fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
810                  get_frame_size());
811   fprintf (file, "/* stack size = %d */\n",
812                  cfun->machine->stack_usage);
813   /* Create the .L__stack_usage symbol here so that all functions have it;
814      avr_return_addr_rtx references it when computing the return address.  */
815   fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
816 }
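
/* For a plain (non-naked, non-ISR) function the summary emitted above looks
   roughly like this in the assembly output (numbers are illustrative):
       prologue: function
       frame size = 2
       stack size = 4
     .L__stack_usage = 4
   where the first three lines appear inside assembler comments.  */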
817 
818 
819 /* Implement EPILOGUE_USES.  */
820 
821 int
822 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
823 {
824   if (reload_completed
825       && cfun->machine
826       && (cfun->machine->is_interrupt || cfun->machine->is_signal))
827     return 1;
828   return 0;
829 }
830 
831 /*  Output RTL epilogue.  */
832 
833 void
834 expand_epilogue (void)
835 {
836   int reg;
837   int live_seq;
838   HARD_REG_SET set;
839   int minimize;
840   HOST_WIDE_INT size = get_frame_size();
841 
842   /* epilogue: naked  */
843   if (cfun->machine->is_naked)
844     {
845       emit_jump_insn (gen_return ());
846       return;
847     }
848 
849   avr_regs_to_save (&set);
850   live_seq = sequent_regs_live ();
851   minimize = (TARGET_CALL_PROLOGUES
852 	      && !cfun->machine->is_interrupt
853 	      && !cfun->machine->is_signal
854 	      && !cfun->machine->is_OS_task
855 	      && !cfun->machine->is_OS_main
856 	      && live_seq);
857 
858   if (minimize && (frame_pointer_needed || live_seq > 4))
859     {
860       if (frame_pointer_needed)
861 	{
862           /*  Get rid of frame.  */
863 	  emit_move_insn(frame_pointer_rtx,
864                          gen_rtx_PLUS (HImode, frame_pointer_rtx,
865                                        gen_int_mode (size, HImode)));
866 	}
867       else
868 	{
869           emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
870 	}
871 
872       emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
873     }
874   else
875     {
876       if (frame_pointer_needed)
877 	{
878 	  if (size)
879 	    {
880               /* Try two methods to adjust stack and select shortest.  */
881 	      rtx myfp;
882 	      rtx fp_plus_insns;
883 	      rtx sp_plus_insns = NULL_RTX;
884 
885 	      if (AVR_HAVE_8BIT_SP)
886                 {
887                   /* The high byte (r29) doesn't change - prefer 'subi'
888                      (1 cycle) over 'sbiw' (2 cycles, same size).  */
889                   myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
890                 }
891               else
892                 {
893                   /* Normal sized addition.  */
894                   myfp = frame_pointer_rtx;
895                 }
896 
897               /* Method 1: Adjust the frame pointer.  */
898 	      start_sequence ();
899 
900 	      emit_move_insn (myfp,
901 			      gen_rtx_PLUS (GET_MODE (myfp), myfp,
902 					    gen_int_mode (size,
903 							  GET_MODE(myfp))));
904 
905 	      /* Copy to stack pointer.  */
906 	      if (AVR_HAVE_8BIT_SP)
907 		{
908 		  emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
909 		}
910 	      else if (TARGET_NO_INTERRUPTS
911 		       || cfun->machine->is_signal)
912 		{
913 		  emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
914 						     frame_pointer_rtx));
915 		}
916 	      else if (cfun->machine->is_interrupt)
917 		{
918 		  emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
919 						    frame_pointer_rtx));
920 		}
921 	      else
922 		{
923 		  emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
924 		}
925 
926 	      fp_plus_insns = get_insns ();
927 	      end_sequence ();
928 
929               /* Method 2: Adjust the stack pointer.  */
930               if (size <= 5)
931                 {
932 		  start_sequence ();
933 
934 		  emit_move_insn (stack_pointer_rtx,
935 				  gen_rtx_PLUS (HImode, stack_pointer_rtx,
936 						gen_int_mode (size,
937 							      HImode)));
938 
939 		  sp_plus_insns = get_insns ();
940 		  end_sequence ();
941                 }
942 
943               /* Use shortest method.  */
944               if (size <= 5 && (get_sequence_length (sp_plus_insns)
945 				 < get_sequence_length (fp_plus_insns)))
946 	      	emit_insn (sp_plus_insns);
947               else
948 		emit_insn (fp_plus_insns);
949             }
950 	  if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
951 	    {
952               /* Restore previous frame_pointer.  */
953 	      emit_insn (gen_pophi (frame_pointer_rtx));
954 	    }
955 	}
956       /* Restore used registers.  */
957       for (reg = 31; reg >= 0; --reg)
958         {
959           if (TEST_HARD_REG_BIT (set, reg))
960               emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
961         }
962       if (cfun->machine->is_interrupt || cfun->machine->is_signal)
963         {
964           /* Restore RAMPZ using tmp reg as scratch.  */
965 	  if(AVR_HAVE_RAMPZ
966              && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
967             {
968 	      emit_insn (gen_popqi (tmp_reg_rtx));
969 	      emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
970 			      tmp_reg_rtx);
971 	    }
972 
973           /* Restore SREG using tmp reg as scratch.  */
974           emit_insn (gen_popqi (tmp_reg_rtx));
975 
976           emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
977 			  tmp_reg_rtx);
978 
979           /* Restore tmp REG.  */
980           emit_insn (gen_popqi (tmp_reg_rtx));
981 
982           /* Restore zero REG.  */
983           emit_insn (gen_popqi (zero_reg_rtx));
984         }
985 
986       emit_jump_insn (gen_return ());
987     }
988 }
989 
990 /* Output summary messages at beginning of function epilogue.  */
991 
992 static void
993 avr_asm_function_begin_epilogue (FILE *file)
994 {
995   fprintf (file, "/* epilogue start */\n");
996 }
997 
998 
999 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P.  */
1000 
1001 static bool
1002 avr_cannot_modify_jumps_p (void)
1003 {
1004 
1005   /* Naked functions must not have any instructions after
1006      their epilogue; see PR42240.  */
1007 
1008   if (reload_completed
1009       && cfun->machine
1010       && cfun->machine->is_naked)
1011     {
1012       return true;
1013     }
1014 
1015   return false;
1016 }
1017 
1018 
1019 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1020    machine for a memory operand of mode MODE.  */
1021 
1022 bool
1023 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1024 {
1025   enum reg_class r = NO_REGS;
1026 
1027   if (TARGET_ALL_DEBUG)
1028     {
1029       fprintf (stderr, "mode: (%s) %s %s %s %s:",
1030 	       GET_MODE_NAME(mode),
1031 	       strict ? "(strict)": "",
1032 	       reload_completed ? "(reload_completed)": "",
1033 	       reload_in_progress ? "(reload_in_progress)": "",
1034 	       reg_renumber ? "(reg_renumber)" : "");
1035       if (GET_CODE (x) == PLUS
1036 	  && REG_P (XEXP (x, 0))
1037 	  && GET_CODE (XEXP (x, 1)) == CONST_INT
1038 	  && INTVAL (XEXP (x, 1)) >= 0
1039 	  && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1040 	  && reg_renumber
1041 	  )
1042 	fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1043 		 true_regnum (XEXP (x, 0)));
1044       debug_rtx (x);
1045     }
1046 
1047   if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1048                     : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1049     r = POINTER_REGS;
1050   else if (CONSTANT_ADDRESS_P (x))
1051     r = ALL_REGS;
1052   else if (GET_CODE (x) == PLUS
1053            && REG_P (XEXP (x, 0))
1054 	   && GET_CODE (XEXP (x, 1)) == CONST_INT
1055 	   && INTVAL (XEXP (x, 1)) >= 0)
1056     {
1057       int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1058       if (fit)
1059 	{
1060 	  if (! strict
1061 	      || REGNO (XEXP (x,0)) == REG_X
1062 	      || REGNO (XEXP (x,0)) == REG_Y
1063 	      || REGNO (XEXP (x,0)) == REG_Z)
1064 	    r = BASE_POINTER_REGS;
1065 	  if (XEXP (x,0) == frame_pointer_rtx
1066 	      || XEXP (x,0) == arg_pointer_rtx)
1067 	    r = BASE_POINTER_REGS;
1068 	}
1069       else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1070 	r = POINTER_Y_REGS;
1071     }
1072   else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1073            && REG_P (XEXP (x, 0))
1074            && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1075                : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1076     {
1077       r = POINTER_REGS;
1078     }
1079   if (TARGET_ALL_DEBUG)
1080     {
1081       fprintf (stderr, "   ret = %c\n", r + '0');
1082     }
1083   return r == NO_REGS ? 0 : (int)r;
1084 }
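
/* Sketch of the address forms accepted above (modes and offsets are
   illustrative): a bare pointer register such as (reg:HI 30), a base plus a
   small constant such as (plus (reg:HI 28) (const_int 10)) while the offset
   stays within MAX_LD_OFFSET, a constant address, and the pre-decrement /
   post-increment forms such as (post_inc (reg:HI 26)).  */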
1085 
1086 /* Attempt to replace X with a valid
1087    memory address for an operand of mode MODE.  */
1088 
1089 rtx
1090 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1091 {
1092   x = oldx;
1093   if (TARGET_ALL_DEBUG)
1094     {
1095       fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1096       debug_rtx (oldx);
1097     }
1098 
1099   if (GET_CODE (oldx) == PLUS
1100       && REG_P (XEXP (oldx,0)))
1101     {
1102       if (REG_P (XEXP (oldx,1)))
1103 	x = force_reg (GET_MODE (oldx), oldx);
1104       else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1105 	{
1106 	  int offs = INTVAL (XEXP (oldx,1));
1107 	  if (frame_pointer_rtx != XEXP (oldx,0))
1108 	    if (offs > MAX_LD_OFFSET (mode))
1109 	      {
1110 		if (TARGET_ALL_DEBUG)
1111 		  fprintf (stderr, "force_reg (big offset)\n");
1112 		x = force_reg (GET_MODE (oldx), oldx);
1113 	      }
1114 	}
1115     }
1116   return x;
1117 }
1118 
1119 
1120 /* Return a pointer register name as a string.  */
1121 
1122 static const char *
1123 ptrreg_to_str (int regno)
1124 {
1125   switch (regno)
1126     {
1127     case REG_X: return "X";
1128     case REG_Y: return "Y";
1129     case REG_Z: return "Z";
1130     default:
1131       output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1132     }
1133   return NULL;
1134 }
1135 
1136 /* Return the condition name as a string.
1137    Used when constructing conditional jumps.  */
1138 
1139 static const char *
1140 cond_string (enum rtx_code code)
1141 {
1142   switch (code)
1143     {
1144     case NE:
1145       return "ne";
1146     case EQ:
1147       return "eq";
1148     case GE:
1149       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1150 	return "pl";
1151       else
1152 	return "ge";
1153     case LT:
1154       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1155 	return "mi";
1156       else
1157 	return "lt";
1158     case GEU:
1159       return "sh";
1160     case LTU:
1161       return "lo";
1162     default:
1163       gcc_unreachable ();
1164     }
1165 }
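
/* These strings become the suffix of a branch mnemonic, e.g. "ne" -> brne,
   "sh" -> brsh and "lo" -> brlo; for GE/LT the "pl"/"mi" forms are chosen
   when only the sign flag is reliable (CC_OVERFLOW_UNUSABLE).  */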
1166 
1167 /* Output ADDR to FILE as address.  */
1168 
1169 void
1170 print_operand_address (FILE *file, rtx addr)
1171 {
1172   switch (GET_CODE (addr))
1173     {
1174     case REG:
1175       fprintf (file, ptrreg_to_str (REGNO (addr)));
1176       break;
1177 
1178     case PRE_DEC:
1179       fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1180       break;
1181 
1182     case POST_INC:
1183       fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1184       break;
1185 
1186     default:
1187       if (CONSTANT_ADDRESS_P (addr)
1188 	  && text_segment_operand (addr, VOIDmode))
1189 	{
1190 	  rtx x = XEXP (addr,0);
1191 	  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1192 	    {
1193 	      /* The assembler's gs() operator emits a word address.  Make the
1194 		 offset a byte offset inside gs() because the more natural
1195 		 (constant+gs(sym)) form is not accepted by gas.  For devices
1196 		 with 128K of flash or less this is fine; for larger devices
1197 		 it will create a trampoline for the offset from the symbol,
1198 		 which may not be what the user really wanted.  */
1199 	      fprintf (file, "gs(");
1200 	      output_addr_const (file, XEXP (x,0));
1201 	      fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1202 	      if (AVR_3_BYTE_PC)
1203 	        if (warning (0, "pointer offset from symbol may be incorrect"))
1204 		  {
1205 		    output_addr_const (stderr, addr);
1206 		    fprintf(stderr,"\n");
1207 		  }
1208 	    }
1209 	  else
1210 	    {
1211 	      fprintf (file, "gs(");
1212 	      output_addr_const (file, addr);
1213 	      fprintf (file, ")");
1214 	    }
1215 	}
1216       else
1217 	output_addr_const (file, addr);
1218     }
1219 }
1220 
1221 
1222 /* Output X as assembler operand to file FILE.  */
1223 
1224 void
1225 print_operand (FILE *file, rtx x, int code)
1226 {
1227   int abcd = 0;
1228 
1229   if (code >= 'A' && code <= 'D')
1230     abcd = code - 'A';
1231 
1232   if (code == '~')
1233     {
1234       if (!AVR_HAVE_JMP_CALL)
1235 	fputc ('r', file);
1236     }
1237   else if (code == '!')
1238     {
1239       if (AVR_HAVE_EIJMP_EICALL)
1240 	fputc ('e', file);
1241     }
1242   else if (REG_P (x))
1243     {
1244       if (x == zero_reg_rtx)
1245 	fprintf (file, "__zero_reg__");
1246       else
1247 	fprintf (file, reg_names[true_regnum (x) + abcd]);
1248     }
1249   else if (GET_CODE (x) == CONST_INT)
1250     fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1251   else if (GET_CODE (x) == MEM)
1252     {
1253       rtx addr = XEXP (x,0);
1254       if (code == 'm')
1255 	{
1256 	  if (!CONSTANT_P (addr))
1257 	    fatal_insn ("bad address, not a constant:", addr);
1258 	  /* Assembler templates with the 'm' code expect data, not progmem, addresses.  */
1259 	  if (text_segment_operand (addr, VOIDmode))
1260 	    if (warning ( 0, "accessing data memory with program memory address"))
1261 	      {
1262 		output_addr_const (stderr, addr);
1263 		fprintf(stderr,"\n");
1264 	      }
1265 	  output_addr_const (file, addr);
1266 	}
1267       else if (code == 'o')
1268 	{
1269 	  if (GET_CODE (addr) != PLUS)
1270 	    fatal_insn ("bad address, not (reg+disp):", addr);
1271 
1272 	  print_operand (file, XEXP (addr, 1), 0);
1273 	}
1274       else if (code == 'p' || code == 'r')
1275         {
1276           if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1277             fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1278 
1279           if (code == 'p')
1280             print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
1281           else
1282             print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
1283         }
1284       else if (GET_CODE (addr) == PLUS)
1285 	{
1286 	  print_operand_address (file, XEXP (addr,0));
1287 	  if (REGNO (XEXP (addr, 0)) == REG_X)
1288 	    fatal_insn ("internal compiler error.  Bad address:"
1289 			,addr);
1290 	  fputc ('+', file);
1291 	  print_operand (file, XEXP (addr,1), code);
1292 	}
1293       else
1294 	print_operand_address (file, addr);
1295     }
1296   else if (code == 'x')
1297     {
1298       /* Constant progmem address, as used in jmp or call.  */
1299       if (0 == text_segment_operand (x, VOIDmode))
1300 	if (warning (0, "accessing program memory with data memory address"))
1301 	  {
1302 	    output_addr_const (stderr, x);
1303 	    fprintf(stderr,"\n");
1304 	  }
1305       /* Use a normal symbol for the direct address; no linker trampoline is needed.  */
1306       output_addr_const (file, x);
1307     }
1308   else if (GET_CODE (x) == CONST_DOUBLE)
1309     {
1310       long val;
1311       REAL_VALUE_TYPE rv;
1312       if (GET_MODE (x) != SFmode)
1313 	fatal_insn ("internal compiler error.  Unknown mode:", x);
1314       REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1315       REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1316       fprintf (file, "0x%lx", val);
1317     }
1318   else if (code == 'j')
1319     fputs (cond_string (GET_CODE (x)), file);
1320   else if (code == 'k')
1321     fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1322   else
1323     print_operand_address (file, x);
1324 }
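
/* Summary of the operand codes handled above: 'A'..'D' select successive
   bytes of a register or constant, '~' prints an "r" prefix (rjmp/rcall) on
   devices without JMP/CALL, '!' prints an "e" prefix (eijmp/eicall) when
   available, 'm', 'o', 'p' and 'r' pick apart memory addresses, 'x' prints a
   program-memory address, and 'j'/'k' print a (reversed) condition string.  */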
1325 
1326 /* Update the condition code in the INSN.  */
1327 
1328 void
1329 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1330 {
1331   rtx set;
1332 
1333   switch (get_attr_cc (insn))
1334     {
1335     case CC_NONE:
1336       /* Insn does not affect CC at all.  */
1337       break;
1338 
1339     case CC_SET_N:
1340       CC_STATUS_INIT;
1341       break;
1342 
1343     case CC_SET_ZN:
1344       set = single_set (insn);
1345       CC_STATUS_INIT;
1346       if (set)
1347 	{
1348 	  cc_status.flags |= CC_NO_OVERFLOW;
1349 	  cc_status.value1 = SET_DEST (set);
1350 	}
1351       break;
1352 
1353     case CC_SET_CZN:
1354       /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1355          The V flag may or may not be known but that's ok because
1356          alter_cond will change tests to use EQ/NE.  */
1357       set = single_set (insn);
1358       CC_STATUS_INIT;
1359       if (set)
1360 	{
1361 	  cc_status.value1 = SET_DEST (set);
1362 	  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1363 	}
1364       break;
1365 
1366     case CC_COMPARE:
1367       set = single_set (insn);
1368       CC_STATUS_INIT;
1369       if (set)
1370 	cc_status.value1 = SET_SRC (set);
1371       break;
1372 
1373     case CC_CLOBBER:
1374       /* Insn doesn't leave CC in a usable state.  */
1375       CC_STATUS_INIT;
1376 
1377       /* Correct CC for an ashrqi3 insn whose shift count is a CONST_INT in the range 1..5.  */
1378       set = single_set (insn);
1379       if (set)
1380 	{
1381 	  rtx src = SET_SRC (set);
1382 
1383 	  if (GET_CODE (src) == ASHIFTRT
1384 	      && GET_MODE (src) == QImode)
1385 	    {
1386 	      rtx x = XEXP (src, 1);
1387 
1388 	      if (CONST_INT_P (x)
1389 		  && IN_RANGE (INTVAL (x), 1, 5))
1390 		{
1391 		  cc_status.value1 = SET_DEST (set);
1392 		  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1393 		}
1394 	    }
1395 	}
1396       break;
1397     }
1398 }
1399 
1400 /* Return maximum number of consecutive registers of
1401    class CLASS needed to hold a value of mode MODE.  */
1402 
1403 int
1404 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1405 {
1406   return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1407 }
1408 
1409 /* Choose mode for jump insn:
1410    1 - relative jump in range -63 <= x <= 62 ;
1411    2 - relative jump in range -2046 <= x <= 2045 ;
1412    3 - absolute jump (only for ATmega[16]03).  */
1413 
1414 int
1415 avr_jump_mode (rtx x, rtx insn)
1416 {
1417   int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1418 					    ? XEXP (x, 0) : x));
1419   int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1420   int jump_distance = cur_addr - dest_addr;
1421 
1422   if (-63 <= jump_distance && jump_distance <= 62)
1423     return 1;
1424   else if (-2046 <= jump_distance && jump_distance <= 2045)
1425     return 2;
1426   else if (AVR_HAVE_JMP_CALL)
1427     return 3;
1428 
1429   return 2;
1430 }
1431 
1432 /* Return an AVR conditional jump command.
1433    X is a comparison RTX.
1434    LEN is a number returned by the avr_jump_mode function.
1435    If REVERSE is nonzero, the condition code in X must be reversed.  */
1436 
1437 const char *
1438 ret_cond_branch (rtx x, int len, int reverse)
1439 {
1440   RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1441 
1442   switch (cond)
1443     {
1444     case GT:
1445       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1446 	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1447 			    AS1 (brpl,%0)) :
1448 		len == 2 ? (AS1 (breq,.+4) CR_TAB
1449 			    AS1 (brmi,.+2) CR_TAB
1450 			    AS1 (rjmp,%0)) :
1451 		(AS1 (breq,.+6) CR_TAB
1452 		 AS1 (brmi,.+4) CR_TAB
1453 		 AS1 (jmp,%0)));
1454 
1455       else
1456 	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1457 			    AS1 (brge,%0)) :
1458 		len == 2 ? (AS1 (breq,.+4) CR_TAB
1459 			    AS1 (brlt,.+2) CR_TAB
1460 			    AS1 (rjmp,%0)) :
1461 		(AS1 (breq,.+6) CR_TAB
1462 		 AS1 (brlt,.+4) CR_TAB
1463 		 AS1 (jmp,%0)));
1464     case GTU:
1465       return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1466                           AS1 (brsh,%0)) :
1467               len == 2 ? (AS1 (breq,.+4) CR_TAB
1468                           AS1 (brlo,.+2) CR_TAB
1469                           AS1 (rjmp,%0)) :
1470               (AS1 (breq,.+6) CR_TAB
1471                AS1 (brlo,.+4) CR_TAB
1472                AS1 (jmp,%0)));
1473     case LE:
1474       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1475 	return (len == 1 ? (AS1 (breq,%0) CR_TAB
1476 			    AS1 (brmi,%0)) :
1477 		len == 2 ? (AS1 (breq,.+2) CR_TAB
1478 			    AS1 (brpl,.+2) CR_TAB
1479 			    AS1 (rjmp,%0)) :
1480 		(AS1 (breq,.+2) CR_TAB
1481 		 AS1 (brpl,.+4) CR_TAB
1482 		 AS1 (jmp,%0)));
1483       else
1484 	return (len == 1 ? (AS1 (breq,%0) CR_TAB
1485 			    AS1 (brlt,%0)) :
1486 		len == 2 ? (AS1 (breq,.+2) CR_TAB
1487 			    AS1 (brge,.+2) CR_TAB
1488 			    AS1 (rjmp,%0)) :
1489 		(AS1 (breq,.+2) CR_TAB
1490 		 AS1 (brge,.+4) CR_TAB
1491 		 AS1 (jmp,%0)));
1492     case LEU:
1493       return (len == 1 ? (AS1 (breq,%0) CR_TAB
1494                           AS1 (brlo,%0)) :
1495               len == 2 ? (AS1 (breq,.+2) CR_TAB
1496                           AS1 (brsh,.+2) CR_TAB
1497 			  AS1 (rjmp,%0)) :
1498               (AS1 (breq,.+2) CR_TAB
1499                AS1 (brsh,.+4) CR_TAB
1500 	       AS1 (jmp,%0)));
1501     default:
1502       if (reverse)
1503 	{
1504 	  switch (len)
1505 	    {
1506 	    case 1:
1507 	      return AS1 (br%k1,%0);
1508 	    case 2:
1509 	      return (AS1 (br%j1,.+2) CR_TAB
1510 		      AS1 (rjmp,%0));
1511 	    default:
1512 	      return (AS1 (br%j1,.+4) CR_TAB
1513 		      AS1 (jmp,%0));
1514 	    }
1515 	}
1516 	else
1517 	  {
1518 	    switch (len)
1519 	      {
1520 	      case 1:
1521 		return AS1 (br%j1,%0);
1522 	      case 2:
1523 		return (AS1 (br%k1,.+2) CR_TAB
1524 			AS1 (rjmp,%0));
1525 	      default:
1526 		return (AS1 (br%k1,.+4) CR_TAB
1527 			AS1 (jmp,%0));
1528 	      }
1529 	  }
1530     }
1531   return "";
1532 }
1533 
1534 /* Predicate function for an immediate operand that fits in a byte (8 bits).  */
1535 
1536 int
1537 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1538 {
1539   return (GET_CODE (op) == CONST_INT
1540           && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1541 }
1542 
1543 /* Output the cost of the insn that is about to be output.  */
1544 
1545 void
1546 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1547 		    int num_operands ATTRIBUTE_UNUSED)
1548 {
1549   if (TARGET_ALL_DEBUG)
1550     {
1551       fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
1552 	       rtx_cost (PATTERN (insn), INSN, !optimize_size));
1553     }
1554 }
1555 
1556 /* Return 0 if undefined, 1 if always true or always false.  */
1557 
1558 int
1559 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1560 {
1561   unsigned int max = (mode == QImode ? 0xff :
1562                       mode == HImode ? 0xffff :
1563                       mode == SImode ? 0xffffffff : 0);
1564   if (max && op && GET_CODE (x) == CONST_INT)
1565     {
1566       if (unsigned_condition (op) != op)
1567 	max >>= 1;
1568 
1569       if (max != (INTVAL (x) & max)
1570 	  && INTVAL (x) != 0xff)
1571 	return 1;
1572     }
1573   return 0;
1574 }
1575 
1576 
1577 /* Returns nonzero if REGNO is the number of a hard
1578    register in which function arguments are sometimes passed.  */
1579 
1580 int
1581 function_arg_regno_p(int r)
1582 {
1583   return (r >= 8 && r <= 25);
1584 }
1585 
1586 /* Initialize the variable CUM to the state at the beginning
1587    of the argument list.  */
1588 
1589 void
1590 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1591 		      tree fndecl ATTRIBUTE_UNUSED)
1592 {
1593   cum->nregs = 18;
1594   cum->regno = FIRST_CUM_REG;
1595   if (!libname && fntype)
1596     {
1597       int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1598                     && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1599                         != void_type_node));
1600       if (stdarg)
1601         cum->nregs = 0;
1602     }
1603 }
1604 
1605 /* Returns the number of registers to allocate for a function argument.  */
1606 
1607 static int
1608 avr_num_arg_regs (enum machine_mode mode, tree type)
1609 {
1610   int size;
1611 
1612   if (mode == BLKmode)
1613     size = int_size_in_bytes (type);
1614   else
1615     size = GET_MODE_SIZE (mode);
1616 
1617   /* Align all function arguments to start in even-numbered registers.
1618      Odd-sized arguments leave holes above them.  */
1619 
1620   return (size + 1) & ~1;
1621 }
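
/* For example, a 1-byte argument is still charged two registers and a
   3-byte argument four, because (size + 1) & ~1 rounds the size up to the
   next even number.  */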
1622 
1623 /* Controls whether a function argument is passed
1624    in a register, and which register.  */
1625 
1626 rtx
1627 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1628 	      int named ATTRIBUTE_UNUSED)
1629 {
1630   int bytes = avr_num_arg_regs (mode, type);
1631 
1632   if (cum->nregs && bytes <= cum->nregs)
1633     return gen_rtx_REG (mode, cum->regno - bytes);
1634 
1635   return NULL_RTX;
1636 }
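
/* Illustrative case for function_arg above: with cum->regno starting at
   FIRST_CUM_REG (26), the first 2-byte argument comes back as (reg:HI 24),
   i.e. it is passed in r24/r25, and subsequent arguments move downward
   from there.  */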
1637 
1638 /* Update the summarizer variable CUM to advance past an argument
1639    in the argument list.  */
1640 
1641 void
1642 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1643 		      int named ATTRIBUTE_UNUSED)
1644 {
1645   int bytes = avr_num_arg_regs (mode, type);
1646 
1647   cum->nregs -= bytes;
1648   cum->regno -= bytes;
1649 
1650   if (cum->nregs <= 0)
1651     {
1652       cum->nregs = 0;
1653       cum->regno = FIRST_CUM_REG;
1654     }
1655 }
1656 
1657 /***********************************************************************
1658   Functions for outputting various mov's for various modes
1659 ************************************************************************/
1660 const char *
1661 output_movqi (rtx insn, rtx operands[], int *l)
1662 {
1663   int dummy;
1664   rtx dest = operands[0];
1665   rtx src = operands[1];
1666   int *real_l = l;
1667 
1668   if (!l)
1669     l = &dummy;
1670 
1671   *l = 1;
1672 
1673   if (register_operand (dest, QImode))
1674     {
1675       if (register_operand (src, QImode)) /* mov r,r */
1676 	{
1677 	  if (test_hard_reg_class (STACK_REG, dest))
1678 	    return AS2 (out,%0,%1);
1679 	  else if (test_hard_reg_class (STACK_REG, src))
1680 	    return AS2 (in,%0,%1);
1681 
1682 	  return AS2 (mov,%0,%1);
1683 	}
1684       else if (CONSTANT_P (src))
1685 	{
1686 	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1687 	    return AS2 (ldi,%0,lo8(%1));
1688 
1689 	  if (GET_CODE (src) == CONST_INT)
1690 	    {
1691 	      if (src == const0_rtx) /* mov r,L */
1692 		return AS1 (clr,%0);
1693 	      else if (src == const1_rtx)
1694 		{
1695 		  *l = 2;
1696 		  return (AS1 (clr,%0) CR_TAB
1697 			  AS1 (inc,%0));
1698 		}
1699 	      else if (src == constm1_rtx)
1700 		{
1701 		  /* Load the immediate constant -1 into any register.  */
1702 		  *l = 2;
1703 		  return (AS1 (clr,%0) CR_TAB
1704 			  AS1 (dec,%0));
1705 		}
1706 	      else
1707 		{
1708 		  int bit_nr = exact_log2 (INTVAL (src));
1709 
1710 		  if (bit_nr >= 0)
1711 		    {
1712 		      *l = 3;
1713 		      if (!real_l)
1714 			output_asm_insn ((AS1 (clr,%0) CR_TAB
1715 					  "set"), operands);
1716 		      if (!real_l)
1717 			avr_output_bld (operands, bit_nr);
1718 
1719 		      return "";
1720 		    }
1721 		}
1722 	    }
1723 
1724 	  /* Last resort, larger than loading from memory.  */
1725 	  *l = 4;
1726 	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1727 		  AS2 (ldi,r31,lo8(%1))     CR_TAB
1728 		  AS2 (mov,%0,r31)          CR_TAB
1729 		  AS2 (mov,r31,__tmp_reg__));
1730 	}
1731       else if (GET_CODE (src) == MEM)
1732 	return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1733     }
1734   else if (GET_CODE (dest) == MEM)
1735     {
1736       const char *templ;
1737 
1738       if (src == const0_rtx)
1739 	operands[1] = zero_reg_rtx;
1740 
1741       templ = out_movqi_mr_r (insn, operands, real_l);
1742 
1743       if (!real_l)
1744 	output_asm_insn (templ, operands);
1745 
1746       operands[1] = src;
1747     }
1748   return "";
1749 }
1750 
1751 
1752 const char *
1753 output_movhi (rtx insn, rtx operands[], int *l)
1754 {
1755   int dummy;
1756   rtx dest = operands[0];
1757   rtx src = operands[1];
1758   int *real_l = l;
1759 
1760   if (!l)
1761     l = &dummy;
1762 
1763   if (register_operand (dest, HImode))
1764     {
1765       if (register_operand (src, HImode)) /* mov r,r */
1766 	{
1767 	  if (test_hard_reg_class (STACK_REG, dest))
1768 	    {
1769 	      if (AVR_HAVE_8BIT_SP)
1770 		return *l = 1, AS2 (out,__SP_L__,%A1);
1771               /* Use simple load of stack pointer if no interrupts are
1772 		 used.  */
1773 	      else if (TARGET_NO_INTERRUPTS)
1774 		return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1775 				AS2 (out,__SP_L__,%A1));
1776 	      *l = 5;
1777 	      return (AS2 (in,__tmp_reg__,__SREG__)  CR_TAB
1778 		      "cli"                          CR_TAB
1779 		      AS2 (out,__SP_H__,%B1)         CR_TAB
1780 		      AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1781 		      AS2 (out,__SP_L__,%A1));
1782 	    }
1783 	  else if (test_hard_reg_class (STACK_REG, src))
1784 	    {
1785 	      *l = 2;
1786 	      return (AS2 (in,%A0,__SP_L__) CR_TAB
1787 		      AS2 (in,%B0,__SP_H__));
1788 	    }
1789 
1790 	  if (AVR_HAVE_MOVW)
1791 	    {
1792 	      *l = 1;
1793 	      return (AS2 (movw,%0,%1));
1794 	    }
1795 	  else
1796 	    {
1797 	      *l = 2;
1798 	      return (AS2 (mov,%A0,%A1) CR_TAB
1799 		      AS2 (mov,%B0,%B1));
1800 	    }
1801 	}
1802       else if (CONSTANT_P (src))
1803 	{
1804 	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1805 	    {
1806 	      *l = 2;
1807 	      return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1808 		      AS2 (ldi,%B0,hi8(%1)));
1809 	    }
1810 
1811 	  if (GET_CODE (src) == CONST_INT)
1812 	    {
1813 	      if (src == const0_rtx) /* mov r,L */
1814 		{
1815 		  *l = 2;
1816 		  return (AS1 (clr,%A0) CR_TAB
1817 			  AS1 (clr,%B0));
1818 		}
1819 	      else if (src == const1_rtx)
1820 		{
1821 		  *l = 3;
1822 		  return (AS1 (clr,%A0) CR_TAB
1823 			  AS1 (clr,%B0) CR_TAB
1824 			  AS1 (inc,%A0));
1825 		}
1826 	      else if (src == constm1_rtx)
1827 		{
1828 		  /* The immediate constant -1 can be loaded into any register.  */
1829 		  *l = 3;
1830 		  return (AS1 (clr,%0)  CR_TAB
1831 			  AS1 (dec,%A0) CR_TAB
1832 			  AS2 (mov,%B0,%A0));
1833 		}
1834 	      else
1835 		{
1836 		  int bit_nr = exact_log2 (INTVAL (src));
1837 
1838 		  if (bit_nr >= 0)
1839 		    {
1840 		      *l = 4;
1841 		      if (!real_l)
1842 			output_asm_insn ((AS1 (clr,%A0) CR_TAB
1843 					  AS1 (clr,%B0) CR_TAB
1844 					  "set"), operands);
1845 		      if (!real_l)
1846 			avr_output_bld (operands, bit_nr);
1847 
1848 		      return "";
1849 		    }
1850 		}
1851 
1852 	      if ((INTVAL (src) & 0xff) == 0)
1853 		{
1854 		  *l = 5;
1855 		  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1856 			  AS1 (clr,%A0)             CR_TAB
1857 			  AS2 (ldi,r31,hi8(%1))     CR_TAB
1858 			  AS2 (mov,%B0,r31)         CR_TAB
1859 			  AS2 (mov,r31,__tmp_reg__));
1860 		}
1861 	      else if ((INTVAL (src) & 0xff00) == 0)
1862 		{
1863 		  *l = 5;
1864 		  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1865 			  AS2 (ldi,r31,lo8(%1))     CR_TAB
1866 			  AS2 (mov,%A0,r31)         CR_TAB
1867 			  AS1 (clr,%B0)             CR_TAB
1868 			  AS2 (mov,r31,__tmp_reg__));
1869 		}
1870 	    }
1871 
1872 	  /* Last resort, equal to loading from memory.  */
1873 	  *l = 6;
1874 	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1875 		  AS2 (ldi,r31,lo8(%1))     CR_TAB
1876 		  AS2 (mov,%A0,r31)         CR_TAB
1877 		  AS2 (ldi,r31,hi8(%1))     CR_TAB
1878 		  AS2 (mov,%B0,r31)         CR_TAB
1879 		  AS2 (mov,r31,__tmp_reg__));
1880 	}
1881       else if (GET_CODE (src) == MEM)
1882 	return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1883     }
1884   else if (GET_CODE (dest) == MEM)
1885     {
1886       const char *templ;
1887 
1888       if (src == const0_rtx)
1889 	operands[1] = zero_reg_rtx;
1890 
1891       templ = out_movhi_mr_r (insn, operands, real_l);
1892 
1893       if (!real_l)
1894 	output_asm_insn (templ, operands);
1895 
1896       operands[1] = src;
1897       return "";
1898     }
1899   fatal_insn ("invalid insn:", insn);
1900   return "";
1901 }
1902 
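/* Output a QImode load from memory: OP[0] is the destination register,
   OP[1] the memory source operand, and *L (if non-null) receives the
   length of the emitted sequence in words.

   For a constant address the one-word IN form is preferred when the
   location lies in I/O space: on the classic cores an I/O register at
   data address A is also reachable as I/O address A - 0x20, hence the
   "%m1-0x20" in the templates below.  Other constant addresses use the
   two-word LDS, and reg+displacement addresses use LD/LDD, adjusting
   the base register first when the displacement is out of range.  */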
1903 const char *
1904 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1905 {
1906   rtx dest = op[0];
1907   rtx src = op[1];
1908   rtx x = XEXP (src, 0);
1909   int dummy;
1910 
1911   if (!l)
1912     l = &dummy;
1913 
1914   if (CONSTANT_ADDRESS_P (x))
1915     {
1916       if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1917 	{
1918 	  *l = 1;
1919 	  return AS2 (in,%0,__SREG__);
1920 	}
1921       if (optimize > 0 && io_address_operand (x, QImode))
1922 	{
1923 	  *l = 1;
1924 	  return AS2 (in,%0,%m1-0x20);
1925 	}
1926       *l = 2;
1927       return AS2 (lds,%0,%m1);
1928     }
1929   /* Memory access by reg+disp.  */
1930   else if (GET_CODE (x) == PLUS
1931       && REG_P (XEXP (x,0))
1932       && GET_CODE (XEXP (x,1)) == CONST_INT)
1933     {
1934       if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1935 	{
1936 	  int disp = INTVAL (XEXP (x,1));
1937 	  if (REGNO (XEXP (x,0)) != REG_Y)
1938 	    fatal_insn ("incorrect insn:",insn);
1939 
1940 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1941 	    return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1942 			    AS2 (ldd,%0,Y+63)     CR_TAB
1943 			    AS2 (sbiw,r28,%o1-63));
1944 
1945 	  return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1946 			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1947 			  AS2 (ld,%0,Y)            CR_TAB
1948 			  AS2 (subi,r28,lo8(%o1))  CR_TAB
1949 			  AS2 (sbci,r29,hi8(%o1)));
1950 	}
1951       else if (REGNO (XEXP (x,0)) == REG_X)
1952 	{
1953 	  /* This is a paranoid case: LEGITIMIZE_RELOAD_ADDRESS is supposed to
1954 	     exclude it, but it still shows up with extreme optimization options.  */
1955 	  if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1956 	      || reg_unused_after (insn, XEXP (x,0)))
1957 	    return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1958 			    AS2 (ld,%0,X));
1959 
1960 	  return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1961 			  AS2 (ld,%0,X)      CR_TAB
1962 			  AS2 (sbiw,r26,%o1));
1963 	}
1964       *l = 1;
1965       return AS2 (ldd,%0,%1);
1966     }
1967   *l = 1;
1968   return AS2 (ld,%0,%1);
1969 }
1970 
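/* Output an HImode load from memory: OP[0] is the destination register
   pair, OP[1] the memory source, *L the length in words.

   The low-byte-first rule for volatile operands (see mem_volatile_p
   below) matches the access protocol of the 16-bit I/O registers: on a
   typical AVR timer, for example, reading the low byte of TCNT1 latches
   the high byte into the hardware TEMP register, so the pair must be
   read low byte first to get a consistent value; TCNT1 is just one
   example of such a register.  */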
1971 const char *
1972 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1973 {
1974   rtx dest = op[0];
1975   rtx src = op[1];
1976   rtx base = XEXP (src, 0);
1977   int reg_dest = true_regnum (dest);
1978   int reg_base = true_regnum (base);
1979   /* "volatile" forces reading low byte first, even if less efficient,
1980      for correct operation with 16-bit I/O registers.  */
1981   int mem_volatile_p = MEM_VOLATILE_P (src);
1982   int tmp;
1983 
1984   if (!l)
1985     l = &tmp;
1986 
1987   if (reg_base > 0)
1988     {
1989       if (reg_dest == reg_base)         /* R = (R) */
1990 	{
1991 	  *l = 3;
1992 	  return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1993 		  AS2 (ld,%B0,%1) CR_TAB
1994 		  AS2 (mov,%A0,__tmp_reg__));
1995 	}
1996       else if (reg_base == REG_X)        /* (R26) */
1997         {
1998           if (reg_unused_after (insn, base))
1999 	    {
2000 	      *l = 2;
2001 	      return (AS2 (ld,%A0,X+) CR_TAB
2002 		      AS2 (ld,%B0,X));
2003 	    }
2004 	  *l  = 3;
2005 	  return (AS2 (ld,%A0,X+) CR_TAB
2006 		  AS2 (ld,%B0,X) CR_TAB
2007 		  AS2 (sbiw,r26,1));
2008         }
2009       else                      /* (R)  */
2010 	{
2011 	  *l = 2;
2012 	  return (AS2 (ld,%A0,%1)    CR_TAB
2013 		  AS2 (ldd,%B0,%1+1));
2014 	}
2015     }
2016   else if (GET_CODE (base) == PLUS) /* (R + i) */
2017     {
2018       int disp = INTVAL (XEXP (base, 1));
2019       int reg_base = true_regnum (XEXP (base, 0));
2020 
2021       if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2022 	{
2023 	  if (REGNO (XEXP (base, 0)) != REG_Y)
2024 	    fatal_insn ("incorrect insn:",insn);
2025 
2026 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2027 	    return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2028 			    AS2 (ldd,%A0,Y+62)    CR_TAB
2029 			    AS2 (ldd,%B0,Y+63)    CR_TAB
2030 			    AS2 (sbiw,r28,%o1-62));
2031 
2032 	  return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2033 			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2034 			  AS2 (ld,%A0,Y)           CR_TAB
2035 			  AS2 (ldd,%B0,Y+1)        CR_TAB
2036 			  AS2 (subi,r28,lo8(%o1))  CR_TAB
2037 			  AS2 (sbci,r29,hi8(%o1)));
2038 	}
2039       if (reg_base == REG_X)
2040 	{
2041 	  /* This is a paranoid case: LEGITIMIZE_RELOAD_ADDRESS is supposed to
2042 	     exclude it, but it still shows up with extreme
2043 	     optimization options.  */
2044 
2045 	  *l = 4;
2046 	  if (reg_base == reg_dest)
2047 	    return (AS2 (adiw,r26,%o1)      CR_TAB
2048 		    AS2 (ld,__tmp_reg__,X+) CR_TAB
2049 		    AS2 (ld,%B0,X)          CR_TAB
2050 		    AS2 (mov,%A0,__tmp_reg__));
2051 
2052 	  return (AS2 (adiw,r26,%o1) CR_TAB
2053 		  AS2 (ld,%A0,X+)    CR_TAB
2054 		  AS2 (ld,%B0,X)     CR_TAB
2055 		  AS2 (sbiw,r26,%o1+1));
2056 	}
2057 
2058       if (reg_base == reg_dest)
2059 	{
2060 	  *l = 3;
2061 	  return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2062 		  AS2 (ldd,%B0,%B1)         CR_TAB
2063 		  AS2 (mov,%A0,__tmp_reg__));
2064 	}
2065 
2066       *l = 2;
2067       return (AS2 (ldd,%A0,%A1) CR_TAB
2068 	      AS2 (ldd,%B0,%B1));
2069     }
2070   else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2071     {
2072       if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2073 	fatal_insn ("incorrect insn:", insn);
2074 
2075       if (mem_volatile_p)
2076         {
2077           if (REGNO (XEXP (base, 0)) == REG_X)
2078             {
2079               *l = 4;
2080               return (AS2 (sbiw,r26,2)  CR_TAB
2081                       AS2 (ld,%A0,X+)   CR_TAB
2082                       AS2 (ld,%B0,X)    CR_TAB
2083                       AS2 (sbiw,r26,1));
2084             }
2085           else
2086             {
2087               *l = 3;
2088               return (AS2 (sbiw,%r1,2)   CR_TAB
2089                       AS2 (ld,%A0,%p1)  CR_TAB
2090                       AS2 (ldd,%B0,%p1+1));
2091             }
2092         }
2093 
2094       *l = 2;
2095       return (AS2 (ld,%B0,%1) CR_TAB
2096 	      AS2 (ld,%A0,%1));
2097     }
2098   else if (GET_CODE (base) == POST_INC) /* (R++) */
2099     {
2100       if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2101 	fatal_insn ("incorrect insn:", insn);
2102 
2103       *l = 2;
2104       return (AS2 (ld,%A0,%1)  CR_TAB
2105 	      AS2 (ld,%B0,%1));
2106     }
2107   else if (CONSTANT_ADDRESS_P (base))
2108     {
2109       if (optimize > 0 && io_address_operand (base, HImode))
2110 	{
2111 	  *l = 2;
2112 	  return (AS2 (in,%A0,%m1-0x20) CR_TAB
2113 		  AS2 (in,%B0,%m1+1-0x20));
2114 	}
2115       *l = 4;
2116       return (AS2 (lds,%A0,%m1) CR_TAB
2117 	      AS2 (lds,%B0,%m1+1));
2118     }
2119 
2120   fatal_insn ("unknown move insn:",insn);
2121   return "";
2122 }
2123 
2124 const char *
2125 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2126 {
2127   rtx dest = op[0];
2128   rtx src = op[1];
2129   rtx base = XEXP (src, 0);
2130   int reg_dest = true_regnum (dest);
2131   int reg_base = true_regnum (base);
2132   int tmp;
2133 
2134   if (!l)
2135     l = &tmp;
2136 
2137   if (reg_base > 0)
2138     {
2139       if (reg_base == REG_X)        /* (R26) */
2140         {
2141           if (reg_dest == REG_X)
2142 	    /* "ld r26,-X" is undefined */
2143 	    return *l=7, (AS2 (adiw,r26,3)        CR_TAB
2144 			  AS2 (ld,r29,X)          CR_TAB
2145 			  AS2 (ld,r28,-X)         CR_TAB
2146 			  AS2 (ld,__tmp_reg__,-X) CR_TAB
2147 			  AS2 (sbiw,r26,1)        CR_TAB
2148 			  AS2 (ld,r26,X)          CR_TAB
2149 			  AS2 (mov,r27,__tmp_reg__));
2150           else if (reg_dest == REG_X - 2)
2151             return *l=5, (AS2 (ld,%A0,X+)  CR_TAB
2152                           AS2 (ld,%B0,X+) CR_TAB
2153                           AS2 (ld,__tmp_reg__,X+)  CR_TAB
2154                           AS2 (ld,%D0,X)  CR_TAB
2155                           AS2 (mov,%C0,__tmp_reg__));
2156           else if (reg_unused_after (insn, base))
2157             return  *l=4, (AS2 (ld,%A0,X+)  CR_TAB
2158                            AS2 (ld,%B0,X+) CR_TAB
2159                            AS2 (ld,%C0,X+) CR_TAB
2160                            AS2 (ld,%D0,X));
2161           else
2162             return  *l=5, (AS2 (ld,%A0,X+)  CR_TAB
2163                            AS2 (ld,%B0,X+) CR_TAB
2164                            AS2 (ld,%C0,X+) CR_TAB
2165                            AS2 (ld,%D0,X)  CR_TAB
2166                            AS2 (sbiw,r26,3));
2167         }
2168       else
2169         {
2170           if (reg_dest == reg_base)
2171             return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2172                           AS2 (ldd,%C0,%1+2) CR_TAB
2173                           AS2 (ldd,__tmp_reg__,%1+1)  CR_TAB
2174                           AS2 (ld,%A0,%1)  CR_TAB
2175                           AS2 (mov,%B0,__tmp_reg__));
2176           else if (reg_base == reg_dest + 2)
2177             return *l=5, (AS2 (ld ,%A0,%1)    CR_TAB
2178                           AS2 (ldd,%B0,%1+1) CR_TAB
2179                           AS2 (ldd,__tmp_reg__,%1+2)  CR_TAB
2180                           AS2 (ldd,%D0,%1+3) CR_TAB
2181                           AS2 (mov,%C0,__tmp_reg__));
2182           else
2183             return *l=4, (AS2 (ld ,%A0,%1)   CR_TAB
2184                           AS2 (ldd,%B0,%1+1) CR_TAB
2185                           AS2 (ldd,%C0,%1+2) CR_TAB
2186                           AS2 (ldd,%D0,%1+3));
2187         }
2188     }
2189   else if (GET_CODE (base) == PLUS) /* (R + i) */
2190     {
2191       int disp = INTVAL (XEXP (base, 1));
2192 
2193       if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2194 	{
2195 	  if (REGNO (XEXP (base, 0)) != REG_Y)
2196 	    fatal_insn ("incorrect insn:",insn);
2197 
2198 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2199 	    return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2200 			    AS2 (ldd,%A0,Y+60)    CR_TAB
2201 			    AS2 (ldd,%B0,Y+61)    CR_TAB
2202 			    AS2 (ldd,%C0,Y+62)    CR_TAB
2203 			    AS2 (ldd,%D0,Y+63)    CR_TAB
2204 			    AS2 (sbiw,r28,%o1-60));
2205 
2206 	  return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2207 			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2208 			  AS2 (ld,%A0,Y)           CR_TAB
2209 			  AS2 (ldd,%B0,Y+1)        CR_TAB
2210 			  AS2 (ldd,%C0,Y+2)        CR_TAB
2211 			  AS2 (ldd,%D0,Y+3)        CR_TAB
2212 			  AS2 (subi,r28,lo8(%o1))  CR_TAB
2213 			  AS2 (sbci,r29,hi8(%o1)));
2214 	}
2215 
2216       reg_base = true_regnum (XEXP (base, 0));
2217       if (reg_base == REG_X)
2218 	{
2219 	  /* R = (X + d) */
2220 	  if (reg_dest == REG_X)
2221 	    {
2222 	      *l = 7;
2223 	      /* "ld r26,-X" is undefined */
2224 	      return (AS2 (adiw,r26,%o1+3)    CR_TAB
2225 		      AS2 (ld,r29,X)          CR_TAB
2226 		      AS2 (ld,r28,-X)         CR_TAB
2227 		      AS2 (ld,__tmp_reg__,-X) CR_TAB
2228 		      AS2 (sbiw,r26,1)        CR_TAB
2229 		      AS2 (ld,r26,X)          CR_TAB
2230 		      AS2 (mov,r27,__tmp_reg__));
2231 	    }
2232 	  *l = 6;
2233 	  if (reg_dest == REG_X - 2)
2234 	    return (AS2 (adiw,r26,%o1)      CR_TAB
2235 		    AS2 (ld,r24,X+)         CR_TAB
2236 		    AS2 (ld,r25,X+)         CR_TAB
2237 		    AS2 (ld,__tmp_reg__,X+) CR_TAB
2238 		    AS2 (ld,r27,X)          CR_TAB
2239 		    AS2 (mov,r26,__tmp_reg__));
2240 
2241 	  return (AS2 (adiw,r26,%o1) CR_TAB
2242 		  AS2 (ld,%A0,X+)    CR_TAB
2243 		  AS2 (ld,%B0,X+)    CR_TAB
2244 		  AS2 (ld,%C0,X+)    CR_TAB
2245 		  AS2 (ld,%D0,X)     CR_TAB
2246 		  AS2 (sbiw,r26,%o1+3));
2247 	}
2248       if (reg_dest == reg_base)
2249         return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2250                       AS2 (ldd,%C0,%C1) CR_TAB
2251                       AS2 (ldd,__tmp_reg__,%B1)  CR_TAB
2252                       AS2 (ldd,%A0,%A1) CR_TAB
2253                       AS2 (mov,%B0,__tmp_reg__));
2254       else if (reg_dest == reg_base - 2)
2255         return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2256                       AS2 (ldd,%B0,%B1) CR_TAB
2257                       AS2 (ldd,__tmp_reg__,%C1)  CR_TAB
2258                       AS2 (ldd,%D0,%D1) CR_TAB
2259                       AS2 (mov,%C0,__tmp_reg__));
2260       return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2261                     AS2 (ldd,%B0,%B1) CR_TAB
2262                     AS2 (ldd,%C0,%C1) CR_TAB
2263                     AS2 (ldd,%D0,%D1));
2264     }
2265   else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2266     return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2267 		  AS2 (ld,%C0,%1) CR_TAB
2268 		  AS2 (ld,%B0,%1) CR_TAB
2269 		  AS2 (ld,%A0,%1));
2270   else if (GET_CODE (base) == POST_INC) /* (R++) */
2271     return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2272 		  AS2 (ld,%B0,%1) CR_TAB
2273 		  AS2 (ld,%C0,%1) CR_TAB
2274 		  AS2 (ld,%D0,%1));
2275   else if (CONSTANT_ADDRESS_P (base))
2276       return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2277 		    AS2 (lds,%B0,%m1+1) CR_TAB
2278 		    AS2 (lds,%C0,%m1+2) CR_TAB
2279 		    AS2 (lds,%D0,%m1+3));
2280 
2281   fatal_insn ("unknown move insn:",insn);
2282   return "";
2283 }
2284 
2285 const char *
2286 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2287 {
2288   rtx dest = op[0];
2289   rtx src = op[1];
2290   rtx base = XEXP (dest, 0);
2291   int reg_base = true_regnum (base);
2292   int reg_src = true_regnum (src);
2293   int tmp;
2294 
2295   if (!l)
2296     l = &tmp;
2297 
2298   if (CONSTANT_ADDRESS_P (base))
2299     return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2300 		 AS2 (sts,%m0+1,%B1) CR_TAB
2301 		 AS2 (sts,%m0+2,%C1) CR_TAB
2302 		 AS2 (sts,%m0+3,%D1));
2303   if (reg_base > 0)                 /* (r) */
2304     {
2305       if (reg_base == REG_X)                /* (R26) */
2306         {
2307           if (reg_src == REG_X)
2308             {
2309 	      /* "st X+,r26" is undefined */
2310               if (reg_unused_after (insn, base))
2311 		return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2312 			      AS2 (st,X,r26)            CR_TAB
2313 			      AS2 (adiw,r26,1)          CR_TAB
2314 			      AS2 (st,X+,__tmp_reg__)   CR_TAB
2315 			      AS2 (st,X+,r28)           CR_TAB
2316 			      AS2 (st,X,r29));
2317               else
2318                 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2319 			      AS2 (st,X,r26)            CR_TAB
2320 			      AS2 (adiw,r26,1)          CR_TAB
2321 			      AS2 (st,X+,__tmp_reg__)   CR_TAB
2322 			      AS2 (st,X+,r28)           CR_TAB
2323 			      AS2 (st,X,r29)            CR_TAB
2324 			      AS2 (sbiw,r26,3));
2325             }
2326           else if (reg_base == reg_src + 2)
2327             {
2328               if (reg_unused_after (insn, base))
2329                 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2330                               AS2 (mov,__tmp_reg__,%D1) CR_TAB
2331                               AS2 (st,%0+,%A1) CR_TAB
2332                               AS2 (st,%0+,%B1) CR_TAB
2333                               AS2 (st,%0+,__zero_reg__)  CR_TAB
2334                               AS2 (st,%0,__tmp_reg__)   CR_TAB
2335                               AS1 (clr,__zero_reg__));
2336               else
2337                 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2338                               AS2 (mov,__tmp_reg__,%D1) CR_TAB
2339                               AS2 (st,%0+,%A1) CR_TAB
2340                               AS2 (st,%0+,%B1) CR_TAB
2341                               AS2 (st,%0+,__zero_reg__)  CR_TAB
2342                               AS2 (st,%0,__tmp_reg__)   CR_TAB
2343                               AS1 (clr,__zero_reg__)     CR_TAB
2344                               AS2 (sbiw,r26,3));
2345             }
2346           return *l=5, (AS2 (st,%0+,%A1)  CR_TAB
2347                         AS2 (st,%0+,%B1) CR_TAB
2348                         AS2 (st,%0+,%C1) CR_TAB
2349                         AS2 (st,%0,%D1)  CR_TAB
2350                         AS2 (sbiw,r26,3));
2351         }
2352       else
2353         return *l=4, (AS2 (st,%0,%A1)    CR_TAB
2354 		      AS2 (std,%0+1,%B1) CR_TAB
2355 		      AS2 (std,%0+2,%C1) CR_TAB
2356 		      AS2 (std,%0+3,%D1));
2357     }
2358   else if (GET_CODE (base) == PLUS) /* (R + i) */
2359     {
2360       int disp = INTVAL (XEXP (base, 1));
2361       reg_base = REGNO (XEXP (base, 0));
2362       if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2363 	{
2364 	  if (reg_base != REG_Y)
2365 	    fatal_insn ("incorrect insn:",insn);
2366 
2367 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2368 	    return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2369 			    AS2 (std,Y+60,%A1)    CR_TAB
2370 			    AS2 (std,Y+61,%B1)    CR_TAB
2371 			    AS2 (std,Y+62,%C1)    CR_TAB
2372 			    AS2 (std,Y+63,%D1)    CR_TAB
2373 			    AS2 (sbiw,r28,%o0-60));
2374 
2375 	  return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2376 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2377 			  AS2 (st,Y,%A1)           CR_TAB
2378 			  AS2 (std,Y+1,%B1)        CR_TAB
2379 			  AS2 (std,Y+2,%C1)        CR_TAB
2380 			  AS2 (std,Y+3,%D1)        CR_TAB
2381 			  AS2 (subi,r28,lo8(%o0))  CR_TAB
2382 			  AS2 (sbci,r29,hi8(%o0)));
2383 	}
2384       if (reg_base == REG_X)
2385 	{
2386 	  /* (X + d) = R */
2387 	  if (reg_src == REG_X)
2388 	    {
2389 	      *l = 9;
2390 	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2391 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2392 		      AS2 (adiw,r26,%o0)         CR_TAB
2393 		      AS2 (st,X+,__tmp_reg__)    CR_TAB
2394 		      AS2 (st,X+,__zero_reg__)   CR_TAB
2395 		      AS2 (st,X+,r28)            CR_TAB
2396 		      AS2 (st,X,r29)             CR_TAB
2397 		      AS1 (clr,__zero_reg__)     CR_TAB
2398 		      AS2 (sbiw,r26,%o0+3));
2399 	    }
2400 	  else if (reg_src == REG_X - 2)
2401 	    {
2402 	      *l = 9;
2403 	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2404 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2405 		      AS2 (adiw,r26,%o0)         CR_TAB
2406 		      AS2 (st,X+,r24)            CR_TAB
2407 		      AS2 (st,X+,r25)            CR_TAB
2408 		      AS2 (st,X+,__tmp_reg__)    CR_TAB
2409 		      AS2 (st,X,__zero_reg__)    CR_TAB
2410 		      AS1 (clr,__zero_reg__)     CR_TAB
2411 		      AS2 (sbiw,r26,%o0+3));
2412 	    }
2413 	  *l = 6;
2414 	  return (AS2 (adiw,r26,%o0) CR_TAB
2415 		  AS2 (st,X+,%A1)    CR_TAB
2416 		  AS2 (st,X+,%B1)    CR_TAB
2417 		  AS2 (st,X+,%C1)    CR_TAB
2418 		  AS2 (st,X,%D1)     CR_TAB
2419 		  AS2 (sbiw,r26,%o0+3));
2420 	}
2421       return *l=4, (AS2 (std,%A0,%A1)    CR_TAB
2422 		    AS2 (std,%B0,%B1) CR_TAB
2423 		    AS2 (std,%C0,%C1) CR_TAB
2424 		    AS2 (std,%D0,%D1));
2425     }
2426   else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2427     return *l=4, (AS2 (st,%0,%D1) CR_TAB
2428 		  AS2 (st,%0,%C1) CR_TAB
2429 		  AS2 (st,%0,%B1) CR_TAB
2430 		  AS2 (st,%0,%A1));
2431   else if (GET_CODE (base) == POST_INC) /* (R++) */
2432     return *l=4, (AS2 (st,%0,%A1)  CR_TAB
2433 		  AS2 (st,%0,%B1) CR_TAB
2434 		  AS2 (st,%0,%C1) CR_TAB
2435 		  AS2 (st,%0,%D1));
2436   fatal_insn ("unknown move insn:",insn);
2437   return "";
2438 }
2439 
2440 const char *
2441 output_movsisf(rtx insn, rtx operands[], int *l)
2442 {
2443   int dummy;
2444   rtx dest = operands[0];
2445   rtx src = operands[1];
2446   int *real_l = l;
2447 
2448   if (!l)
2449     l = &dummy;
2450 
2451   if (register_operand (dest, VOIDmode))
2452     {
2453       if (register_operand (src, VOIDmode)) /* mov r,r */
2454 	{
2455 	  if (true_regnum (dest) > true_regnum (src))
2456 	    {
2457 	      if (AVR_HAVE_MOVW)
2458 		{
2459 		  *l = 2;
2460 		  return (AS2 (movw,%C0,%C1) CR_TAB
2461 			  AS2 (movw,%A0,%A1));
2462 		}
2463 	      *l = 4;
2464 	      return (AS2 (mov,%D0,%D1) CR_TAB
2465 		      AS2 (mov,%C0,%C1) CR_TAB
2466 		      AS2 (mov,%B0,%B1) CR_TAB
2467 		      AS2 (mov,%A0,%A1));
2468 	    }
2469 	  else
2470 	    {
2471 	      if (AVR_HAVE_MOVW)
2472 		{
2473 		  *l = 2;
2474 		  return (AS2 (movw,%A0,%A1) CR_TAB
2475 			  AS2 (movw,%C0,%C1));
2476 		}
2477 	      *l = 4;
2478 	      return (AS2 (mov,%A0,%A1) CR_TAB
2479 		      AS2 (mov,%B0,%B1) CR_TAB
2480 		      AS2 (mov,%C0,%C1) CR_TAB
2481 		      AS2 (mov,%D0,%D1));
2482 	    }
2483 	}
2484       else if (CONSTANT_P (src))
2485 	{
2486 	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2487 	    {
2488 	      *l = 4;
2489 	      return (AS2 (ldi,%A0,lo8(%1))  CR_TAB
2490 		      AS2 (ldi,%B0,hi8(%1))  CR_TAB
2491 		      AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2492 		      AS2 (ldi,%D0,hhi8(%1)));
2493 	    }
2494 
2495 	  if (GET_CODE (src) == CONST_INT)
2496 	    {
2497 	      const char *const clr_op0 =
2498 		AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2499 				AS1 (clr,%B0) CR_TAB
2500 				AS2 (movw,%C0,%A0))
2501 			     : (AS1 (clr,%A0) CR_TAB
2502 				AS1 (clr,%B0) CR_TAB
2503 				AS1 (clr,%C0) CR_TAB
2504 				AS1 (clr,%D0));
2505 
2506 	      if (src == const0_rtx) /* mov r,L */
2507 		{
2508 		  *l = AVR_HAVE_MOVW ? 3 : 4;
2509 		  return clr_op0;
2510 		}
2511 	      else if (src == const1_rtx)
2512 		{
2513 		  if (!real_l)
2514 		    output_asm_insn (clr_op0, operands);
2515 		  *l = AVR_HAVE_MOVW ? 4 : 5;
2516 		  return AS1 (inc,%A0);
2517 		}
2518 	      else if (src == constm1_rtx)
2519 		{
2520 		  /* The immediate constant -1 can be loaded into any register.  */
2521 		  if (AVR_HAVE_MOVW)
2522 		    {
2523 		      *l = 4;
2524 		      return (AS1 (clr,%A0)     CR_TAB
2525 			      AS1 (dec,%A0)     CR_TAB
2526 			      AS2 (mov,%B0,%A0) CR_TAB
2527 			      AS2 (movw,%C0,%A0));
2528 		    }
2529 		  *l = 5;
2530 		  return (AS1 (clr,%A0)     CR_TAB
2531 			  AS1 (dec,%A0)     CR_TAB
2532 			  AS2 (mov,%B0,%A0) CR_TAB
2533 			  AS2 (mov,%C0,%A0) CR_TAB
2534 			  AS2 (mov,%D0,%A0));
2535 		}
2536 	      else
2537 		{
2538 		  int bit_nr = exact_log2 (INTVAL (src));
2539 
2540 		  if (bit_nr >= 0)
2541 		    {
2542 		      *l = AVR_HAVE_MOVW ? 5 : 6;
2543 		      if (!real_l)
2544 			{
2545 			  output_asm_insn (clr_op0, operands);
2546 			  output_asm_insn ("set", operands);
2547 			}
2548 		      if (!real_l)
2549 			avr_output_bld (operands, bit_nr);
2550 
2551 		      return "";
2552 		    }
2553 		}
2554 	    }
2555 
2556 	  /* Last resort, better than loading from memory.  */
2557 	  *l = 10;
2558 	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2559 		  AS2 (ldi,r31,lo8(%1))     CR_TAB
2560 		  AS2 (mov,%A0,r31)         CR_TAB
2561 		  AS2 (ldi,r31,hi8(%1))     CR_TAB
2562 		  AS2 (mov,%B0,r31)         CR_TAB
2563 		  AS2 (ldi,r31,hlo8(%1))    CR_TAB
2564 		  AS2 (mov,%C0,r31)         CR_TAB
2565 		  AS2 (ldi,r31,hhi8(%1))    CR_TAB
2566 		  AS2 (mov,%D0,r31)         CR_TAB
2567 		  AS2 (mov,r31,__tmp_reg__));
2568 	}
2569       else if (GET_CODE (src) == MEM)
2570 	return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2571     }
2572   else if (GET_CODE (dest) == MEM)
2573     {
2574       const char *templ;
2575 
2576       if (src == const0_rtx)
2577 	  operands[1] = zero_reg_rtx;
2578 
2579       templ = out_movsi_mr_r (insn, operands, real_l);
2580 
2581       if (!real_l)
2582 	output_asm_insn (templ, operands);
2583 
2584       operands[1] = src;
2585       return "";
2586     }
2587   fatal_insn ("invalid insn:", insn);
2588   return "";
2589 }
2590 
2591 const char *
2592 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2593 {
2594   rtx dest = op[0];
2595   rtx src = op[1];
2596   rtx x = XEXP (dest, 0);
2597   int dummy;
2598 
2599   if (!l)
2600     l = &dummy;
2601 
2602   if (CONSTANT_ADDRESS_P (x))
2603     {
2604       if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2605 	{
2606 	  *l = 1;
2607 	  return AS2 (out,__SREG__,%1);
2608 	}
2609       if (optimize > 0 && io_address_operand (x, QImode))
2610 	{
2611 	  *l = 1;
2612 	  return AS2 (out,%m0-0x20,%1);
2613 	}
2614       *l = 2;
2615       return AS2 (sts,%m0,%1);
2616     }
2617   /* Memory access by reg+disp.  */
2618   else if (GET_CODE (x) == PLUS
2619       && REG_P (XEXP (x,0))
2620       && GET_CODE (XEXP (x,1)) == CONST_INT)
2621     {
2622       if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2623 	{
2624 	  int disp = INTVAL (XEXP (x,1));
2625 	  if (REGNO (XEXP (x,0)) != REG_Y)
2626 	    fatal_insn ("incorrect insn:",insn);
2627 
2628 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2629 	    return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2630 			    AS2 (std,Y+63,%1)     CR_TAB
2631 			    AS2 (sbiw,r28,%o0-63));
2632 
2633 	  return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2634 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2635 			  AS2 (st,Y,%1)            CR_TAB
2636 			  AS2 (subi,r28,lo8(%o0))  CR_TAB
2637 			  AS2 (sbci,r29,hi8(%o0)));
2638 	}
2639       else if (REGNO (XEXP (x,0)) == REG_X)
2640 	{
2641 	  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2642 	    {
2643 	      if (reg_unused_after (insn, XEXP (x,0)))
2644 		return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2645 				AS2 (adiw,r26,%o0)       CR_TAB
2646 				AS2 (st,X,__tmp_reg__));
2647 
2648 	      return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2649 			      AS2 (adiw,r26,%o0)       CR_TAB
2650 			      AS2 (st,X,__tmp_reg__)   CR_TAB
2651 			      AS2 (sbiw,r26,%o0));
2652 	    }
2653 	  else
2654 	    {
2655 	      if (reg_unused_after (insn, XEXP (x,0)))
2656 		return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2657 				AS2 (st,X,%1));
2658 
2659 	      return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2660 			      AS2 (st,X,%1)      CR_TAB
2661 			      AS2 (sbiw,r26,%o0));
2662 	    }
2663 	}
2664       *l = 1;
2665       return AS2 (std,%0,%1);
2666     }
2667   *l = 1;
2668   return AS2 (st,%0,%1);
2669 }
2670 
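/* Output an HImode store to memory: OP[0] is the memory destination,
   OP[1] the source register pair, *L the length in words.

   This mirrors out_movhi_r_mr: for volatile operands the high byte is
   written first, matching the 16-bit I/O register protocol where the
   high-byte write is buffered in the hardware TEMP register and the
   following low-byte write transfers both bytes at once (OCR1A on a
   typical timer is one example of such a register).  */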
2671 const char *
2672 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2673 {
2674   rtx dest = op[0];
2675   rtx src = op[1];
2676   rtx base = XEXP (dest, 0);
2677   int reg_base = true_regnum (base);
2678   int reg_src = true_regnum (src);
2679   /* "volatile" forces writing high byte first, even if less efficient,
2680      for correct operation with 16-bit I/O registers.  */
2681   int mem_volatile_p = MEM_VOLATILE_P (dest);
2682   int tmp;
2683 
2684   if (!l)
2685     l = &tmp;
2686   if (CONSTANT_ADDRESS_P (base))
2687     {
2688       if (optimize > 0 && io_address_operand (base, HImode))
2689 	{
2690 	  *l = 2;
2691 	  return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2692 		  AS2 (out,%m0-0x20,%A1));
2693 	}
2694       return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2695 		      AS2 (sts,%m0,%A1));
2696     }
2697   if (reg_base > 0)
2698     {
2699       if (reg_base == REG_X)
2700         {
2701           if (reg_src == REG_X)
2702             {
2703               /* "st X+,r26" and "st -X,r26" are undefined.  */
2704               if (!mem_volatile_p && reg_unused_after (insn, src))
2705 		return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2706 			      AS2 (st,X,r26)            CR_TAB
2707 			      AS2 (adiw,r26,1)          CR_TAB
2708 			      AS2 (st,X,__tmp_reg__));
2709               else
2710 		return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2711 			      AS2 (adiw,r26,1)          CR_TAB
2712 			      AS2 (st,X,__tmp_reg__)    CR_TAB
2713                               AS2 (sbiw,r26,1)          CR_TAB
2714                               AS2 (st,X,r26));
2715             }
2716           else
2717             {
2718               if (!mem_volatile_p && reg_unused_after (insn, base))
2719                 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2720                               AS2 (st,X,%B1));
2721               else
2722                 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2723                               AS2 (st,X,%B1)   CR_TAB
2724                               AS2 (st,-X,%A1));
2725             }
2726         }
2727       else
2728         return  *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2729                        AS2 (st,%0,%A1));
2730     }
2731   else if (GET_CODE (base) == PLUS)
2732     {
2733       int disp = INTVAL (XEXP (base, 1));
2734       reg_base = REGNO (XEXP (base, 0));
2735       if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2736 	{
2737 	  if (reg_base != REG_Y)
2738 	    fatal_insn ("incorrect insn:",insn);
2739 
2740 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2741 	    return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2742 			    AS2 (std,Y+63,%B1)    CR_TAB
2743 			    AS2 (std,Y+62,%A1)    CR_TAB
2744 			    AS2 (sbiw,r28,%o0-62));
2745 
2746 	  return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2747 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2748 			  AS2 (std,Y+1,%B1)        CR_TAB
2749 			  AS2 (st,Y,%A1)           CR_TAB
2750 			  AS2 (subi,r28,lo8(%o0))  CR_TAB
2751 			  AS2 (sbci,r29,hi8(%o0)));
2752 	}
2753       if (reg_base == REG_X)
2754 	{
2755 	  /* (X + d) = R */
2756 	  if (reg_src == REG_X)
2757             {
2758 	      *l = 7;
2759 	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2760 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2761                       AS2 (adiw,r26,%o0+1)       CR_TAB
2762 		      AS2 (st,X,__zero_reg__)    CR_TAB
2763 		      AS2 (st,-X,__tmp_reg__)    CR_TAB
2764 		      AS1 (clr,__zero_reg__)     CR_TAB
2765                       AS2 (sbiw,r26,%o0));
2766 	    }
2767 	  *l = 4;
2768           return (AS2 (adiw,r26,%o0+1) CR_TAB
2769                   AS2 (st,X,%B1)       CR_TAB
2770                   AS2 (st,-X,%A1)      CR_TAB
2771                   AS2 (sbiw,r26,%o0));
2772 	}
2773       return *l=2, (AS2 (std,%B0,%B1)    CR_TAB
2774                     AS2 (std,%A0,%A1));
2775     }
2776   else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2777     return *l=2, (AS2 (st,%0,%B1) CR_TAB
2778 		  AS2 (st,%0,%A1));
2779   else if (GET_CODE (base) == POST_INC) /* (R++) */
2780     {
2781       if (mem_volatile_p)
2782         {
2783           if (REGNO (XEXP (base, 0)) == REG_X)
2784             {
2785               *l = 4;
2786               return (AS2 (adiw,r26,1)  CR_TAB
2787                       AS2 (st,X,%B1)    CR_TAB
2788                       AS2 (st,-X,%A1)   CR_TAB
2789                       AS2 (adiw,r26,2));
2790             }
2791           else
2792             {
2793               *l = 3;
2794               return (AS2 (std,%p0+1,%B1) CR_TAB
2795                       AS2 (st,%p0,%A1)    CR_TAB
2796                       AS2 (adiw,%r0,2));
2797             }
2798         }
2799 
2800       *l = 2;
2801       return (AS2 (st,%0,%A1)  CR_TAB
2802             AS2 (st,%0,%B1));
2803     }
2804   fatal_insn ("unknown move insn:",insn);
2805   return "";
2806 }
2807 
2808 /* Return true if the current function requires a frame pointer.  */
2809 
2810 bool
2811 avr_frame_pointer_required_p (void)
2812 {
2813   return (cfun->calls_alloca
2814 	  || crtl->args.info.nregs == 0
2815   	  || get_frame_size () > 0);
2816 }
2817 
2818 /* Returns the condition of compare insn INSN, or UNKNOWN.  */
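/* The condition is read from the conditional branch that follows the
   compare; the next real insn is expected to be a JUMP_INSN whose
   pattern has roughly the shape

     (set (pc) (if_then_else (ge (cc0) (const_int 0)) ... ...))

   and the code of the comparison operator (GE here is only an example)
   is what gets returned.  */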
2819 
2820 static RTX_CODE
2821 compare_condition (rtx insn)
2822 {
2823   rtx next = next_real_insn (insn);
2824   RTX_CODE cond = UNKNOWN;
2825   if (next && GET_CODE (next) == JUMP_INSN)
2826     {
2827       rtx pat = PATTERN (next);
2828       rtx src = SET_SRC (pat);
2829       rtx t = XEXP (src, 0);
2830       cond = GET_CODE (t);
2831     }
2832   return cond;
2833 }
2834 
2835 /* Returns nonzero if INSN is a tst insn that only tests the sign.  */
2836 
2837 static int
2838 compare_sign_p (rtx insn)
2839 {
2840   RTX_CODE cond = compare_condition (insn);
2841   return (cond == GE || cond == LT);
2842 }
2843 
2844 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2845    that needs to be swapped (GT, GTU, LE, LEU).  */
2846 
2847 int
2848 compare_diff_p (rtx insn)
2849 {
2850   RTX_CODE cond = compare_condition (insn);
2851   return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2852 }
2853 
2854 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition.  */
2855 
2856 int
2857 compare_eq_p (rtx insn)
2858 {
2859   RTX_CODE cond = compare_condition (insn);
2860   return (cond == EQ || cond == NE);
2861 }
2862 
2863 
2864 /* Output test instruction for HImode.  */
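/* A note on the cheapest case below: when the flags are only tested
   for EQ/NE and the register may be clobbered, a single "or %A0,%B0"
   sets Z exactly when both bytes are zero, which is all an equality
   branch needs.  It would give wrong results for signed or unsigned
   ordering tests, which is why compare_eq_p is checked first.  */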
2865 
2866 const char *
2867 out_tsthi (rtx insn, rtx op, int *l)
2868 {
2869   if (compare_sign_p (insn))
2870     {
2871       if (l) *l = 1;
2872       return AS1 (tst,%B0);
2873     }
2874   if (reg_unused_after (insn, op)
2875       && compare_eq_p (insn))
2876     {
2877       /* Faster than sbiw if we can clobber the operand.  */
2878       if (l) *l = 1;
2879       return "or %A0,%B0";
2880     }
2881   if (test_hard_reg_class (ADDW_REGS, op))
2882     {
2883       if (l) *l = 1;
2884       return AS2 (sbiw,%0,0);
2885     }
2886   if (l) *l = 2;
2887   return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2888           AS2 (cpc,%B0,__zero_reg__));
2889 }
2890 
2891 
2892 /* Output test instruction for SImode.  */
2893 
2894 const char *
2895 out_tstsi (rtx insn, rtx op, int *l)
2896 {
2897   if (compare_sign_p (insn))
2898     {
2899       if (l) *l = 1;
2900       return AS1 (tst,%D0);
2901     }
2902   if (test_hard_reg_class (ADDW_REGS, op))
2903     {
2904       if (l) *l = 3;
2905       return (AS2 (sbiw,%A0,0) CR_TAB
2906               AS2 (cpc,%C0,__zero_reg__) CR_TAB
2907               AS2 (cpc,%D0,__zero_reg__));
2908     }
2909   if (l) *l = 4;
2910   return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2911           AS2 (cpc,%B0,__zero_reg__) CR_TAB
2912           AS2 (cpc,%C0,__zero_reg__) CR_TAB
2913           AS2 (cpc,%D0,__zero_reg__));
2914 }
2915 
2916 
2917 /* Generate asm equivalent for various shifts.
2918    Shift count is a CONST_INT, MEM or REG.
2919    This only handles cases that are not already
2920    carefully hand-optimized in ?sh??i3_out.  */
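/* For a shift count in a register, for instance, the emitted code has
   roughly this shape (%3 holds the count, TEMPLATE is the single-step
   shift supplied by the caller):

	rjmp 2f
   1:	TEMPLATE
   2:	dec %3
	brpl 1b

   The counter is decremented before the test, so the body runs exactly
   "count" times and a zero count falls straight through.  The CONST_INT
   paths below set the counter up differently (ldi into a scratch, a
   single bit in __zero_reg__ counted down with lsr, or an LD register
   saved through __tmp_reg__) and drop the rjmp, since the count is
   known to be positive.  */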
2921 
2922 void
2923 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2924 		    int *len, int t_len)
2925 {
2926   rtx op[10];
2927   char str[500];
2928   int second_label = 1;
2929   int saved_in_tmp = 0;
2930   int use_zero_reg = 0;
2931 
2932   op[0] = operands[0];
2933   op[1] = operands[1];
2934   op[2] = operands[2];
2935   op[3] = operands[3];
2936   str[0] = 0;
2937 
2938   if (len)
2939     *len = 1;
2940 
2941   if (GET_CODE (operands[2]) == CONST_INT)
2942     {
2943       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2944       int count = INTVAL (operands[2]);
2945       int max_len = 10;  /* If larger than this, always use a loop.  */
2946 
2947       if (count <= 0)
2948 	{
2949 	  if (len)
2950 	    *len = 0;
2951 	  return;
2952 	}
2953 
2954       if (count < 8 && !scratch)
2955 	use_zero_reg = 1;
2956 
2957       if (optimize_size)
2958 	max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2959 
2960       if (t_len * count <= max_len)
2961 	{
2962 	  /* Output shifts inline with no loop - faster.  */
2963 	  if (len)
2964 	    *len = t_len * count;
2965 	  else
2966 	    {
2967 	      while (count-- > 0)
2968 		output_asm_insn (templ, op);
2969 	    }
2970 
2971 	  return;
2972 	}
2973 
2974       if (scratch)
2975 	{
2976 	  if (!len)
2977 	    strcat (str, AS2 (ldi,%3,%2));
2978 	}
2979       else if (use_zero_reg)
2980 	{
2981 	  /* Hack to save one word: use __zero_reg__ as loop counter.
2982 	     Set one bit, then shift in a loop until it is 0 again.  */
2983 
2984 	  op[3] = zero_reg_rtx;
2985 	  if (len)
2986 	    *len = 2;
2987 	  else
2988 	    strcat (str, ("set" CR_TAB
2989 			  AS2 (bld,%3,%2-1)));
2990 	}
2991       else
2992 	{
2993 	  /* No scratch register available; use one from LD_REGS (saved in
2994 	     __tmp_reg__) that doesn't overlap the registers to shift.  */
2995 
2996 	  op[3] = gen_rtx_REG (QImode,
2997 			   ((true_regnum (operands[0]) - 1) & 15) + 16);
2998 	  op[4] = tmp_reg_rtx;
2999 	  saved_in_tmp = 1;
3000 
3001 	  if (len)
3002 	    *len = 3;  /* Includes "mov %3,%4" after the loop.  */
3003 	  else
3004 	    strcat (str, (AS2 (mov,%4,%3) CR_TAB
3005 			  AS2 (ldi,%3,%2)));
3006 	}
3007 
3008       second_label = 0;
3009     }
3010   else if (GET_CODE (operands[2]) == MEM)
3011     {
3012       rtx op_mov[10];
3013 
3014       op[3] = op_mov[0] = tmp_reg_rtx;
3015       op_mov[1] = op[2];
3016 
3017       if (len)
3018 	out_movqi_r_mr (insn, op_mov, len);
3019       else
3020 	output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3021     }
3022   else if (register_operand (operands[2], QImode))
3023     {
3024       if (reg_unused_after (insn, operands[2]))
3025 	op[3] = op[2];
3026       else
3027 	{
3028 	  op[3] = tmp_reg_rtx;
3029 	  if (!len)
3030 	    strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3031 	}
3032     }
3033   else
3034     fatal_insn ("bad shift insn:", insn);
3035 
3036   if (second_label)
3037     {
3038       if (len)
3039 	++*len;
3040       else
3041 	strcat (str, AS1 (rjmp,2f));
3042     }
3043 
3044   if (len)
3045     *len += t_len + 2;  /* template + dec + brXX */
3046   else
3047     {
3048       strcat (str, "\n1:\t");
3049       strcat (str, templ);
3050       strcat (str, second_label ? "\n2:\t" : "\n\t");
3051       strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3052       strcat (str, CR_TAB);
3053       strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3054       if (saved_in_tmp)
3055 	strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3056       output_asm_insn (str, op);
3057     }
3058 }
3059 
3060 
3061 /* 8bit shift left ((char)x << i)   */
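/* The constant cases below use single-instruction tricks instead of
   repeated LSL where that saves words: e.g. for a shift by 4 on an
   upper (LD_REGS) register, "swap %0" exchanges the two nibbles and
   "andi %0,0xf0" clears what used to be the high nibble, so 0x03
   becomes 0x30 in two instructions instead of four LSLs.  */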
3062 
3063 const char *
3064 ashlqi3_out (rtx insn, rtx operands[], int *len)
3065 {
3066   if (GET_CODE (operands[2]) == CONST_INT)
3067     {
3068       int k;
3069 
3070       if (!len)
3071 	len = &k;
3072 
3073       switch (INTVAL (operands[2]))
3074 	{
3075 	default:
3076 	  if (INTVAL (operands[2]) < 8)
3077 	    break;
3078 
3079 	  *len = 1;
3080 	  return AS1 (clr,%0);
3081 
3082 	case 1:
3083 	  *len = 1;
3084 	  return AS1 (lsl,%0);
3085 
3086 	case 2:
3087 	  *len = 2;
3088 	  return (AS1 (lsl,%0) CR_TAB
3089 		  AS1 (lsl,%0));
3090 
3091 	case 3:
3092 	  *len = 3;
3093 	  return (AS1 (lsl,%0) CR_TAB
3094 		  AS1 (lsl,%0) CR_TAB
3095 		  AS1 (lsl,%0));
3096 
3097 	case 4:
3098 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3099 	    {
3100 	      *len = 2;
3101 	      return (AS1 (swap,%0) CR_TAB
3102 		      AS2 (andi,%0,0xf0));
3103 	    }
3104 	  *len = 4;
3105 	  return (AS1 (lsl,%0) CR_TAB
3106 		  AS1 (lsl,%0) CR_TAB
3107 		  AS1 (lsl,%0) CR_TAB
3108 		  AS1 (lsl,%0));
3109 
3110 	case 5:
3111 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3112 	    {
3113 	      *len = 3;
3114 	      return (AS1 (swap,%0) CR_TAB
3115 		      AS1 (lsl,%0)  CR_TAB
3116 		      AS2 (andi,%0,0xe0));
3117 	    }
3118 	  *len = 5;
3119 	  return (AS1 (lsl,%0) CR_TAB
3120 		  AS1 (lsl,%0) CR_TAB
3121 		  AS1 (lsl,%0) CR_TAB
3122 		  AS1 (lsl,%0) CR_TAB
3123 		  AS1 (lsl,%0));
3124 
3125 	case 6:
3126 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3127 	    {
3128 	      *len = 4;
3129 	      return (AS1 (swap,%0) CR_TAB
3130 		      AS1 (lsl,%0)  CR_TAB
3131 		      AS1 (lsl,%0)  CR_TAB
3132 		      AS2 (andi,%0,0xc0));
3133 	    }
3134 	  *len = 6;
3135 	  return (AS1 (lsl,%0) CR_TAB
3136 		  AS1 (lsl,%0) CR_TAB
3137 		  AS1 (lsl,%0) CR_TAB
3138 		  AS1 (lsl,%0) CR_TAB
3139 		  AS1 (lsl,%0) CR_TAB
3140 		  AS1 (lsl,%0));
3141 
3142 	case 7:
3143 	  *len = 3;
3144 	  return (AS1 (ror,%0) CR_TAB
3145 		  AS1 (clr,%0) CR_TAB
3146 		  AS1 (ror,%0));
3147 	}
3148     }
3149   else if (CONSTANT_P (operands[2]))
3150     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3151 
3152   out_shift_with_cnt (AS1 (lsl,%0),
3153 		      insn, operands, len, 1);
3154   return "";
3155 }
3156 
3157 
3158 /* 16bit shift left ((short)x << i)   */
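/* As in the QImode case, small constant counts get hand-picked
   sequences and everything else falls back to the generic loop in
   out_shift_with_cnt.  The byte-aligned count is the simplest example:
   a shift by 8 is just "mov %B0,%A1" followed by "clr %A0", i.e. the
   old low byte becomes the high byte and the low byte is zeroed.  */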
3159 
3160 const char *
3161 ashlhi3_out (rtx insn, rtx operands[], int *len)
3162 {
3163   if (GET_CODE (operands[2]) == CONST_INT)
3164     {
3165       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3166       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3167       int k;
3168       int *t = len;
3169 
3170       if (!len)
3171 	len = &k;
3172 
3173       switch (INTVAL (operands[2]))
3174 	{
3175 	default:
3176 	  if (INTVAL (operands[2]) < 16)
3177 	    break;
3178 
3179 	  *len = 2;
3180 	  return (AS1 (clr,%B0) CR_TAB
3181 		  AS1 (clr,%A0));
3182 
3183 	case 4:
3184 	  if (optimize_size && scratch)
3185 	    break;  /* 5 */
3186 	  if (ldi_ok)
3187 	    {
3188 	      *len = 6;
3189 	      return (AS1 (swap,%A0)      CR_TAB
3190 		      AS1 (swap,%B0)      CR_TAB
3191 		      AS2 (andi,%B0,0xf0) CR_TAB
3192 		      AS2 (eor,%B0,%A0)   CR_TAB
3193 		      AS2 (andi,%A0,0xf0) CR_TAB
3194 		      AS2 (eor,%B0,%A0));
3195 	    }
3196 	  if (scratch)
3197 	    {
3198 	      *len = 7;
3199 	      return (AS1 (swap,%A0)    CR_TAB
3200 		      AS1 (swap,%B0)    CR_TAB
3201 		      AS2 (ldi,%3,0xf0) CR_TAB
3202 		      "and %B0,%3"      CR_TAB
3203 		      AS2 (eor,%B0,%A0) CR_TAB
3204 		      "and %A0,%3"      CR_TAB
3205 		      AS2 (eor,%B0,%A0));
3206 	    }
3207 	  break;  /* optimize_size ? 6 : 8 */
3208 
3209 	case 5:
3210 	  if (optimize_size)
3211 	    break;  /* scratch ? 5 : 6 */
3212 	  if (ldi_ok)
3213 	    {
3214 	      *len = 8;
3215 	      return (AS1 (lsl,%A0)       CR_TAB
3216 		      AS1 (rol,%B0)       CR_TAB
3217 		      AS1 (swap,%A0)      CR_TAB
3218 		      AS1 (swap,%B0)      CR_TAB
3219 		      AS2 (andi,%B0,0xf0) CR_TAB
3220 		      AS2 (eor,%B0,%A0)   CR_TAB
3221 		      AS2 (andi,%A0,0xf0) CR_TAB
3222 		      AS2 (eor,%B0,%A0));
3223 	    }
3224 	  if (scratch)
3225 	    {
3226 	      *len = 9;
3227 	      return (AS1 (lsl,%A0)     CR_TAB
3228 		      AS1 (rol,%B0)     CR_TAB
3229 		      AS1 (swap,%A0)    CR_TAB
3230 		      AS1 (swap,%B0)    CR_TAB
3231 		      AS2 (ldi,%3,0xf0) CR_TAB
3232 		      "and %B0,%3"      CR_TAB
3233 		      AS2 (eor,%B0,%A0) CR_TAB
3234 		      "and %A0,%3"      CR_TAB
3235 		      AS2 (eor,%B0,%A0));
3236 	    }
3237 	  break;  /* 10 */
3238 
3239 	case 6:
3240 	  if (optimize_size)
3241 	    break;  /* scratch ? 5 : 6 */
3242 	  *len = 9;
3243 	  return (AS1 (clr,__tmp_reg__) CR_TAB
3244 		  AS1 (lsr,%B0)         CR_TAB
3245 		  AS1 (ror,%A0)         CR_TAB
3246 		  AS1 (ror,__tmp_reg__) CR_TAB
3247 		  AS1 (lsr,%B0)         CR_TAB
3248 		  AS1 (ror,%A0)         CR_TAB
3249 		  AS1 (ror,__tmp_reg__) CR_TAB
3250 		  AS2 (mov,%B0,%A0)     CR_TAB
3251 		  AS2 (mov,%A0,__tmp_reg__));
3252 
3253 	case 7:
3254 	  *len = 5;
3255 	  return (AS1 (lsr,%B0)     CR_TAB
3256 		  AS2 (mov,%B0,%A0) CR_TAB
3257 		  AS1 (clr,%A0)     CR_TAB
3258 		  AS1 (ror,%B0)     CR_TAB
3259 		  AS1 (ror,%A0));
3260 
3261 	case 8:
3262 	  return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3263 			    AS1 (clr,%A0));
3264 
3265 	case 9:
3266 	  *len = 3;
3267 	  return (AS2 (mov,%B0,%A0) CR_TAB
3268 		  AS1 (clr,%A0)     CR_TAB
3269 		  AS1 (lsl,%B0));
3270 
3271 	case 10:
3272 	  *len = 4;
3273 	  return (AS2 (mov,%B0,%A0) CR_TAB
3274 		  AS1 (clr,%A0)     CR_TAB
3275 		  AS1 (lsl,%B0)     CR_TAB
3276 		  AS1 (lsl,%B0));
3277 
3278 	case 11:
3279 	  *len = 5;
3280 	  return (AS2 (mov,%B0,%A0) CR_TAB
3281 		  AS1 (clr,%A0)     CR_TAB
3282 		  AS1 (lsl,%B0)     CR_TAB
3283 		  AS1 (lsl,%B0)     CR_TAB
3284 		  AS1 (lsl,%B0));
3285 
3286 	case 12:
3287 	  if (ldi_ok)
3288 	    {
3289 	      *len = 4;
3290 	      return (AS2 (mov,%B0,%A0) CR_TAB
3291 		      AS1 (clr,%A0)     CR_TAB
3292 		      AS1 (swap,%B0)    CR_TAB
3293 		      AS2 (andi,%B0,0xf0));
3294 	    }
3295 	  if (scratch)
3296 	    {
3297 	      *len = 5;
3298 	      return (AS2 (mov,%B0,%A0) CR_TAB
3299 		      AS1 (clr,%A0)     CR_TAB
3300 		      AS1 (swap,%B0)    CR_TAB
3301 		      AS2 (ldi,%3,0xf0) CR_TAB
3302 		      "and %B0,%3");
3303 	    }
3304 	  *len = 6;
3305 	  return (AS2 (mov,%B0,%A0) CR_TAB
3306 		  AS1 (clr,%A0)     CR_TAB
3307 		  AS1 (lsl,%B0)     CR_TAB
3308 		  AS1 (lsl,%B0)     CR_TAB
3309 		  AS1 (lsl,%B0)     CR_TAB
3310 		  AS1 (lsl,%B0));
3311 
3312 	case 13:
3313 	  if (ldi_ok)
3314 	    {
3315 	      *len = 5;
3316 	      return (AS2 (mov,%B0,%A0) CR_TAB
3317 		      AS1 (clr,%A0)     CR_TAB
3318 		      AS1 (swap,%B0)    CR_TAB
3319 		      AS1 (lsl,%B0)     CR_TAB
3320 		      AS2 (andi,%B0,0xe0));
3321 	    }
3322 	  if (AVR_HAVE_MUL && scratch)
3323 	    {
3324 	      *len = 5;
3325 	      return (AS2 (ldi,%3,0x20) CR_TAB
3326 		      AS2 (mul,%A0,%3)  CR_TAB
3327 		      AS2 (mov,%B0,r0)  CR_TAB
3328 		      AS1 (clr,%A0)     CR_TAB
3329 		      AS1 (clr,__zero_reg__));
3330 	    }
3331 	  if (optimize_size && scratch)
3332 	    break;  /* 5 */
3333 	  if (scratch)
3334 	    {
3335 	      *len = 6;
3336 	      return (AS2 (mov,%B0,%A0) CR_TAB
3337 		      AS1 (clr,%A0)     CR_TAB
3338 		      AS1 (swap,%B0)    CR_TAB
3339 		      AS1 (lsl,%B0)     CR_TAB
3340 		      AS2 (ldi,%3,0xe0) CR_TAB
3341 		      "and %B0,%3");
3342 	    }
3343 	  if (AVR_HAVE_MUL)
3344 	    {
3345 	      *len = 6;
3346 	      return ("set"            CR_TAB
3347 		      AS2 (bld,r1,5)   CR_TAB
3348 		      AS2 (mul,%A0,r1) CR_TAB
3349 		      AS2 (mov,%B0,r0) CR_TAB
3350 		      AS1 (clr,%A0)    CR_TAB
3351 		      AS1 (clr,__zero_reg__));
3352 	    }
3353 	  *len = 7;
3354 	  return (AS2 (mov,%B0,%A0) CR_TAB
3355 		  AS1 (clr,%A0)     CR_TAB
3356 		  AS1 (lsl,%B0)     CR_TAB
3357 		  AS1 (lsl,%B0)     CR_TAB
3358 		  AS1 (lsl,%B0)     CR_TAB
3359 		  AS1 (lsl,%B0)     CR_TAB
3360 		  AS1 (lsl,%B0));
3361 
3362 	case 14:
3363 	  if (AVR_HAVE_MUL && ldi_ok)
3364 	    {
3365 	      *len = 5;
3366 	      return (AS2 (ldi,%B0,0x40) CR_TAB
3367 		      AS2 (mul,%A0,%B0)  CR_TAB
3368 		      AS2 (mov,%B0,r0)   CR_TAB
3369 		      AS1 (clr,%A0)      CR_TAB
3370 		      AS1 (clr,__zero_reg__));
3371 	    }
3372 	  if (AVR_HAVE_MUL && scratch)
3373 	    {
3374 	      *len = 5;
3375 	      return (AS2 (ldi,%3,0x40) CR_TAB
3376 		      AS2 (mul,%A0,%3)  CR_TAB
3377 		      AS2 (mov,%B0,r0)  CR_TAB
3378 		      AS1 (clr,%A0)     CR_TAB
3379 		      AS1 (clr,__zero_reg__));
3380 	    }
3381 	  if (optimize_size && ldi_ok)
3382 	    {
3383 	      *len = 5;
3384 	      return (AS2 (mov,%B0,%A0) CR_TAB
3385 		      AS2 (ldi,%A0,6) "\n1:\t"
3386 		      AS1 (lsl,%B0)     CR_TAB
3387 		      AS1 (dec,%A0)     CR_TAB
3388 		      AS1 (brne,1b));
3389 	    }
3390 	  if (optimize_size && scratch)
3391 	    break;  /* 5 */
3392 	  *len = 6;
3393 	  return (AS1 (clr,%B0) CR_TAB
3394 		  AS1 (lsr,%A0) CR_TAB
3395 		  AS1 (ror,%B0) CR_TAB
3396 		  AS1 (lsr,%A0) CR_TAB
3397 		  AS1 (ror,%B0) CR_TAB
3398 		  AS1 (clr,%A0));
3399 
3400 	case 15:
3401 	  *len = 4;
3402 	  return (AS1 (clr,%B0) CR_TAB
3403 		  AS1 (lsr,%A0) CR_TAB
3404 		  AS1 (ror,%B0) CR_TAB
3405 		  AS1 (clr,%A0));
3406 	}
3407       len = t;
3408     }
3409   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3410 		       AS1 (rol,%B0)),
3411 		       insn, operands, len, 2);
3412   return "";
3413 }
3414 
3415 
3416 /* 32bit shift left ((long)x << i)   */
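/* Counts that are multiples of 8 again reduce to byte moves plus
   clears, and MOVW halves the cost where it is available: a shift by
   16, for instance, becomes "movw %C0,%A1" plus clearing the two low
   bytes, versus two MOVs and two CLRs on cores without MOVW.  */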
3417 
3418 const char *
3419 ashlsi3_out (rtx insn, rtx operands[], int *len)
3420 {
3421   if (GET_CODE (operands[2]) == CONST_INT)
3422     {
3423       int k;
3424       int *t = len;
3425 
3426       if (!len)
3427 	len = &k;
3428 
3429       switch (INTVAL (operands[2]))
3430 	{
3431 	default:
3432 	  if (INTVAL (operands[2]) < 32)
3433 	    break;
3434 
3435 	  if (AVR_HAVE_MOVW)
3436 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
3437 			      AS1 (clr,%C0) CR_TAB
3438 			      AS2 (movw,%A0,%C0));
3439 	  *len = 4;
3440 	  return (AS1 (clr,%D0) CR_TAB
3441 		  AS1 (clr,%C0) CR_TAB
3442 		  AS1 (clr,%B0) CR_TAB
3443 		  AS1 (clr,%A0));
3444 
3445 	case 8:
3446 	  {
3447 	    int reg0 = true_regnum (operands[0]);
3448 	    int reg1 = true_regnum (operands[1]);
3449 	    *len = 4;
3450 	    if (reg0 >= reg1)
3451 	      return (AS2 (mov,%D0,%C1)  CR_TAB
3452 		      AS2 (mov,%C0,%B1)  CR_TAB
3453 		      AS2 (mov,%B0,%A1)  CR_TAB
3454 		      AS1 (clr,%A0));
3455 	    else
3456 	      return (AS1 (clr,%A0)      CR_TAB
3457 		      AS2 (mov,%B0,%A1)  CR_TAB
3458 		      AS2 (mov,%C0,%B1)  CR_TAB
3459 		      AS2 (mov,%D0,%C1));
3460 	  }
3461 
3462 	case 16:
3463 	  {
3464 	    int reg0 = true_regnum (operands[0]);
3465 	    int reg1 = true_regnum (operands[1]);
3466 	    if (reg0 + 2 == reg1)
3467 	      return *len = 2, (AS1 (clr,%B0)      CR_TAB
3468 				AS1 (clr,%A0));
3469 	    if (AVR_HAVE_MOVW)
3470 	      return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3471 				AS1 (clr,%B0)      CR_TAB
3472 				AS1 (clr,%A0));
3473 	    else
3474 	      return *len = 4, (AS2 (mov,%C0,%A1)  CR_TAB
3475 				AS2 (mov,%D0,%B1)  CR_TAB
3476 				AS1 (clr,%B0)      CR_TAB
3477 				AS1 (clr,%A0));
3478 	  }
3479 
3480 	case 24:
3481 	  *len = 4;
3482 	  return (AS2 (mov,%D0,%A1)  CR_TAB
3483 		  AS1 (clr,%C0)      CR_TAB
3484 		  AS1 (clr,%B0)      CR_TAB
3485 		  AS1 (clr,%A0));
3486 
3487 	case 31:
3488 	  *len = 6;
3489 	  return (AS1 (clr,%D0) CR_TAB
3490 		  AS1 (lsr,%A0) CR_TAB
3491 		  AS1 (ror,%D0) CR_TAB
3492 		  AS1 (clr,%C0) CR_TAB
3493 		  AS1 (clr,%B0) CR_TAB
3494 		  AS1 (clr,%A0));
3495 	}
3496       len = t;
3497     }
3498   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3499 		       AS1 (rol,%B0) CR_TAB
3500 		       AS1 (rol,%C0) CR_TAB
3501 		       AS1 (rol,%D0)),
3502 		       insn, operands, len, 4);
3503   return "";
3504 }
3505 
3506 /* 8bit arithmetic shift right  ((signed char)x >> i) */
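/* The >> 7 case below is the classic sign-fill idiom: "lsl %0" moves
   the sign bit into the carry flag and "sbc %0,%0" then yields 0x00 or
   0xff depending on that carry, which is exactly (signed char) x >> 7;
   e.g. 0x80 gives 0xff (-1) and 0x7f gives 0x00.  */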
3507 
3508 const char *
3509 ashrqi3_out (rtx insn, rtx operands[], int *len)
3510 {
3511   if (GET_CODE (operands[2]) == CONST_INT)
3512     {
3513       int k;
3514 
3515       if (!len)
3516 	len = &k;
3517 
3518       switch (INTVAL (operands[2]))
3519 	{
3520 	case 1:
3521 	  *len = 1;
3522 	  return AS1 (asr,%0);
3523 
3524 	case 2:
3525 	  *len = 2;
3526 	  return (AS1 (asr,%0) CR_TAB
3527 		  AS1 (asr,%0));
3528 
3529 	case 3:
3530 	  *len = 3;
3531 	  return (AS1 (asr,%0) CR_TAB
3532 		  AS1 (asr,%0) CR_TAB
3533 		  AS1 (asr,%0));
3534 
3535 	case 4:
3536 	  *len = 4;
3537 	  return (AS1 (asr,%0) CR_TAB
3538 		  AS1 (asr,%0) CR_TAB
3539 		  AS1 (asr,%0) CR_TAB
3540 		  AS1 (asr,%0));
3541 
3542 	case 5:
3543 	  *len = 5;
3544 	  return (AS1 (asr,%0) CR_TAB
3545 		  AS1 (asr,%0) CR_TAB
3546 		  AS1 (asr,%0) CR_TAB
3547 		  AS1 (asr,%0) CR_TAB
3548 		  AS1 (asr,%0));
3549 
3550 	case 6:
3551 	  *len = 4;
3552 	  return (AS2 (bst,%0,6)  CR_TAB
3553 		  AS1 (lsl,%0)    CR_TAB
3554 		  AS2 (sbc,%0,%0) CR_TAB
3555 		  AS2 (bld,%0,0));
3556 
3557 	default:
3558 	  if (INTVAL (operands[2]) < 8)
3559 	    break;
3560 
3561 	  /* fall through */
3562 
3563 	case 7:
3564 	  *len = 2;
3565 	  return (AS1 (lsl,%0) CR_TAB
3566 		  AS2 (sbc,%0,%0));
3567 	}
3568     }
3569   else if (CONSTANT_P (operands[2]))
3570     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3571 
3572   out_shift_with_cnt (AS1 (asr,%0),
3573 		      insn, operands, len, 1);
3574   return "";
3575 }
3576 
3577 
3578 /* 16bit arithmetic shift right  ((signed short)x >> i) */
3579 
3580 const char *
3581 ashrhi3_out (rtx insn, rtx operands[], int *len)
3582 {
3583   if (GET_CODE (operands[2]) == CONST_INT)
3584     {
3585       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3586       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3587       int k;
3588       int *t = len;
3589 
3590       if (!len)
3591 	len = &k;
3592 
3593       switch (INTVAL (operands[2]))
3594 	{
3595 	case 4:
3596 	case 5:
3597 	  /* XXX try to optimize this too? */
3598 	  break;
3599 
3600 	case 6:
3601 	  if (optimize_size)
3602 	    break;  /* scratch ? 5 : 6 */
3603 	  *len = 8;
3604 	  return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3605 		  AS2 (mov,%A0,%B0)         CR_TAB
3606 		  AS1 (lsl,__tmp_reg__)     CR_TAB
3607 		  AS1 (rol,%A0)             CR_TAB
3608 		  AS2 (sbc,%B0,%B0)         CR_TAB
3609 		  AS1 (lsl,__tmp_reg__)     CR_TAB
3610 		  AS1 (rol,%A0)             CR_TAB
3611 		  AS1 (rol,%B0));
3612 
3613 	case 7:
3614 	  *len = 4;
3615 	  return (AS1 (lsl,%A0)     CR_TAB
3616 		  AS2 (mov,%A0,%B0) CR_TAB
3617 		  AS1 (rol,%A0)     CR_TAB
3618 		  AS2 (sbc,%B0,%B0));
3619 
3620 	case 8:
3621 	  {
3622 	    int reg0 = true_regnum (operands[0]);
3623 	    int reg1 = true_regnum (operands[1]);
3624 
3625 	    if (reg0 == reg1)
3626 	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3627 				AS1 (lsl,%B0)     CR_TAB
3628 				AS2 (sbc,%B0,%B0));
3629 	    else
3630 	      return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3631 			        AS1 (clr,%B0)     CR_TAB
3632 			        AS2 (sbrc,%A0,7)  CR_TAB
3633 			        AS1 (dec,%B0));
3634 	  }
3635 
3636 	case 9:
3637 	  *len = 4;
3638 	  return (AS2 (mov,%A0,%B0) CR_TAB
3639 		  AS1 (lsl,%B0)      CR_TAB
3640 		  AS2 (sbc,%B0,%B0) CR_TAB
3641 		  AS1 (asr,%A0));
3642 
3643 	case 10:
3644 	  *len = 5;
3645 	  return (AS2 (mov,%A0,%B0) CR_TAB
3646 		  AS1 (lsl,%B0)     CR_TAB
3647 		  AS2 (sbc,%B0,%B0) CR_TAB
3648 		  AS1 (asr,%A0)     CR_TAB
3649 		  AS1 (asr,%A0));
3650 
3651 	case 11:
3652 	  if (AVR_HAVE_MUL && ldi_ok)
3653 	    {
3654 	      *len = 5;
3655 	      return (AS2 (ldi,%A0,0x20) CR_TAB
3656 		      AS2 (muls,%B0,%A0) CR_TAB
3657 		      AS2 (mov,%A0,r1)   CR_TAB
3658 		      AS2 (sbc,%B0,%B0)  CR_TAB
3659 		      AS1 (clr,__zero_reg__));
3660 	    }
3661 	  if (optimize_size && scratch)
3662 	    break;  /* 5 */
3663 	  *len = 6;
3664 	  return (AS2 (mov,%A0,%B0) CR_TAB
3665 		  AS1 (lsl,%B0)     CR_TAB
3666 		  AS2 (sbc,%B0,%B0) CR_TAB
3667 		  AS1 (asr,%A0)     CR_TAB
3668 		  AS1 (asr,%A0)     CR_TAB
3669 		  AS1 (asr,%A0));
3670 
3671 	case 12:
3672 	  if (AVR_HAVE_MUL && ldi_ok)
3673 	    {
3674 	      *len = 5;
3675 	      return (AS2 (ldi,%A0,0x10) CR_TAB
3676 		      AS2 (muls,%B0,%A0) CR_TAB
3677 		      AS2 (mov,%A0,r1)   CR_TAB
3678 		      AS2 (sbc,%B0,%B0)  CR_TAB
3679 		      AS1 (clr,__zero_reg__));
3680 	    }
3681 	  if (optimize_size && scratch)
3682 	    break;  /* 5 */
3683 	  *len = 7;
3684 	  return (AS2 (mov,%A0,%B0) CR_TAB
3685 		  AS1 (lsl,%B0)     CR_TAB
3686 		  AS2 (sbc,%B0,%B0) CR_TAB
3687 		  AS1 (asr,%A0)     CR_TAB
3688 		  AS1 (asr,%A0)     CR_TAB
3689 		  AS1 (asr,%A0)     CR_TAB
3690 		  AS1 (asr,%A0));
3691 
3692 	case 13:
3693 	  if (AVR_HAVE_MUL && ldi_ok)
3694 	    {
3695 	      *len = 5;
3696 	      return (AS2 (ldi,%A0,0x08) CR_TAB
3697 		      AS2 (muls,%B0,%A0) CR_TAB
3698 		      AS2 (mov,%A0,r1)   CR_TAB
3699 		      AS2 (sbc,%B0,%B0)  CR_TAB
3700 		      AS1 (clr,__zero_reg__));
3701 	    }
3702 	  if (optimize_size)
3703 	    break;  /* scratch ? 5 : 7 */
3704 	  *len = 8;
3705 	  return (AS2 (mov,%A0,%B0) CR_TAB
3706 		  AS1 (lsl,%B0)     CR_TAB
3707 		  AS2 (sbc,%B0,%B0) CR_TAB
3708 		  AS1 (asr,%A0)     CR_TAB
3709 		  AS1 (asr,%A0)     CR_TAB
3710 		  AS1 (asr,%A0)     CR_TAB
3711 		  AS1 (asr,%A0)     CR_TAB
3712 		  AS1 (asr,%A0));
3713 
3714 	case 14:
3715 	  *len = 5;
3716 	  return (AS1 (lsl,%B0)     CR_TAB
3717 		  AS2 (sbc,%A0,%A0) CR_TAB
3718 		  AS1 (lsl,%B0)     CR_TAB
3719 		  AS2 (mov,%B0,%A0) CR_TAB
3720 		  AS1 (rol,%A0));
3721 
3722 	default:
3723 	  if (INTVAL (operands[2]) < 16)
3724 	    break;
3725 
3726 	  /* fall through */
3727 
3728 	case 15:
3729 	  return *len = 3, (AS1 (lsl,%B0)     CR_TAB
3730 			    AS2 (sbc,%A0,%A0) CR_TAB
3731 			    AS2 (mov,%B0,%A0));
3732 	}
3733       len = t;
3734     }
3735   out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3736 		       AS1 (ror,%A0)),
3737 		       insn, operands, len, 2);
3738   return "";
3739 }
3740 
3741 
3742 /* 32bit arithmetic shift right  ((signed long)x >> i) */
3743 
3744 const char *
3745 ashrsi3_out (rtx insn, rtx operands[], int *len)
3746 {
3747   if (GET_CODE (operands[2]) == CONST_INT)
3748     {
3749       int k;
3750       int *t = len;
3751 
3752       if (!len)
3753 	len = &k;
3754 
3755       switch (INTVAL (operands[2]))
3756 	{
3757 	case 8:
3758 	  {
3759 	    int reg0 = true_regnum (operands[0]);
3760 	    int reg1 = true_regnum (operands[1]);
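	    /* Shift by 8 is a byte move plus sign extension; pick a copy order
	       that does not overwrite source bytes that are still needed when
	       the two registers overlap.  */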
3761 	    *len = 6;
3762 	    if (reg0 <= reg1)
3763 	      return (AS2 (mov,%A0,%B1) CR_TAB
3764 		      AS2 (mov,%B0,%C1) CR_TAB
3765 		      AS2 (mov,%C0,%D1) CR_TAB
3766 		      AS1 (clr,%D0)     CR_TAB
3767 		      AS2 (sbrc,%C0,7)  CR_TAB
3768 		      AS1 (dec,%D0));
3769 	    else
3770 	      return (AS1 (clr,%D0)     CR_TAB
3771 		      AS2 (sbrc,%D1,7)  CR_TAB
3772 		      AS1 (dec,%D0)     CR_TAB
3773 		      AS2 (mov,%C0,%D1) CR_TAB
3774 		      AS2 (mov,%B0,%C1) CR_TAB
3775 		      AS2 (mov,%A0,%B1));
3776 	  }
3777 
3778 	case 16:
3779 	  {
3780 	    int reg0 = true_regnum (operands[0]);
3781 	    int reg1 = true_regnum (operands[1]);
3782 
3783 	    if (reg0 == reg1 + 2)
3784 	      return *len = 4, (AS1 (clr,%D0)     CR_TAB
3785 				AS2 (sbrc,%B0,7)  CR_TAB
3786 				AS1 (com,%D0)     CR_TAB
3787 				AS2 (mov,%C0,%D0));
3788 	    if (AVR_HAVE_MOVW)
3789 	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3790 				AS1 (clr,%D0)      CR_TAB
3791 				AS2 (sbrc,%B0,7)   CR_TAB
3792 				AS1 (com,%D0)      CR_TAB
3793 				AS2 (mov,%C0,%D0));
3794 	    else
3795 	      return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3796 				AS2 (mov,%A0,%C1) CR_TAB
3797 				AS1 (clr,%D0)     CR_TAB
3798 				AS2 (sbrc,%B0,7)  CR_TAB
3799 				AS1 (com,%D0)     CR_TAB
3800 				AS2 (mov,%C0,%D0));
3801 	  }
3802 
3803 	case 24:
3804 	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3805 			    AS1 (clr,%D0)     CR_TAB
3806 			    AS2 (sbrc,%A0,7)  CR_TAB
3807 			    AS1 (com,%D0)     CR_TAB
3808 			    AS2 (mov,%B0,%D0) CR_TAB
3809 			    AS2 (mov,%C0,%D0));
3810 
3811 	default:
3812 	  if (INTVAL (operands[2]) < 32)
3813 	    break;
3814 
3815 	  /* fall through */
3816 
3817 	case 31:
3818 	  if (AVR_HAVE_MOVW)
3819 	    return *len = 4, (AS1 (lsl,%D0)     CR_TAB
3820 			      AS2 (sbc,%A0,%A0) CR_TAB
3821 			      AS2 (mov,%B0,%A0) CR_TAB
3822 			      AS2 (movw,%C0,%A0));
3823 	  else
3824 	    return *len = 5, (AS1 (lsl,%D0)     CR_TAB
3825 			      AS2 (sbc,%A0,%A0) CR_TAB
3826 			      AS2 (mov,%B0,%A0) CR_TAB
3827 			      AS2 (mov,%C0,%A0) CR_TAB
3828 			      AS2 (mov,%D0,%A0));
3829 	}
3830       len = t;
3831     }
3832   out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3833 		       AS1 (ror,%C0) CR_TAB
3834 		       AS1 (ror,%B0) CR_TAB
3835 		       AS1 (ror,%A0)),
3836 		       insn, operands, len, 4);
3837   return "";
3838 }
3839 
3840 /* 8bit logic shift right ((unsigned char)x >> i) */
3841 
3842 const char *
3843 lshrqi3_out (rtx insn, rtx operands[], int *len)
3844 {
3845   if (GET_CODE (operands[2]) == CONST_INT)
3846     {
3847       int k;
3848 
3849       if (!len)
3850 	len = &k;
3851 
3852       switch (INTVAL (operands[2]))
3853 	{
3854 	default:
3855 	  if (INTVAL (operands[2]) < 8)
3856 	    break;
3857 
3858 	  *len = 1;
3859 	  return AS1 (clr,%0);
3860 
3861 	case 1:
3862 	  *len = 1;
3863 	  return AS1 (lsr,%0);
3864 
3865 	case 2:
3866 	  *len = 2;
3867 	  return (AS1 (lsr,%0) CR_TAB
3868 		  AS1 (lsr,%0));
3869 	case 3:
3870 	  *len = 3;
3871 	  return (AS1 (lsr,%0) CR_TAB
3872 		  AS1 (lsr,%0) CR_TAB
3873 		  AS1 (lsr,%0));
3874 
3875 	case 4:
3876 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3877 	    {
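	      /* SWAP exchanges the two nibbles; masking with 0x0f then yields
	         x >> 4 in two instructions.  ANDI only works on the upper
	         registers, hence the LD_REGS test.  */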
3878 	      *len = 2;
3879 	      return (AS1 (swap,%0) CR_TAB
3880 		      AS2 (andi,%0,0x0f));
3881 	    }
3882 	  *len = 4;
3883 	  return (AS1 (lsr,%0) CR_TAB
3884 		  AS1 (lsr,%0) CR_TAB
3885 		  AS1 (lsr,%0) CR_TAB
3886 		  AS1 (lsr,%0));
3887 
3888 	case 5:
3889 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3890 	    {
3891 	      *len = 3;
3892 	      return (AS1 (swap,%0) CR_TAB
3893 		      AS1 (lsr,%0)  CR_TAB
3894 		      AS2 (andi,%0,0x7));
3895 	    }
3896 	  *len = 5;
3897 	  return (AS1 (lsr,%0) CR_TAB
3898 		  AS1 (lsr,%0) CR_TAB
3899 		  AS1 (lsr,%0) CR_TAB
3900 		  AS1 (lsr,%0) CR_TAB
3901 		  AS1 (lsr,%0));
3902 
3903 	case 6:
3904 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3905 	    {
3906 	      *len = 4;
3907 	      return (AS1 (swap,%0) CR_TAB
3908 		      AS1 (lsr,%0)  CR_TAB
3909 		      AS1 (lsr,%0)  CR_TAB
3910 		      AS2 (andi,%0,0x3));
3911 	    }
3912 	  *len = 6;
3913 	  return (AS1 (lsr,%0) CR_TAB
3914 		  AS1 (lsr,%0) CR_TAB
3915 		  AS1 (lsr,%0) CR_TAB
3916 		  AS1 (lsr,%0) CR_TAB
3917 		  AS1 (lsr,%0) CR_TAB
3918 		  AS1 (lsr,%0));
3919 
3920 	case 7:
3921 	  *len = 3;
3922 	  return (AS1 (rol,%0) CR_TAB
3923 		  AS1 (clr,%0) CR_TAB
3924 		  AS1 (rol,%0));
3925 	}
3926     }
3927   else if (CONSTANT_P (operands[2]))
3928     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3929 
3930   out_shift_with_cnt (AS1 (lsr,%0),
3931 		      insn, operands, len, 1);
3932   return "";
3933 }
3934 
3935 /* 16bit logic shift right ((unsigned short)x >> i) */
3936 
3937 const char *
3938 lshrhi3_out (rtx insn, rtx operands[], int *len)
3939 {
3940   if (GET_CODE (operands[2]) == CONST_INT)
3941     {
3942       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3943       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3944       int k;
3945       int *t = len;
3946 
3947       if (!len)
3948 	len = &k;
3949 
3950       switch (INTVAL (operands[2]))
3951 	{
3952 	default:
3953 	  if (INTVAL (operands[2]) < 16)
3954 	    break;
3955 
3956 	  *len = 2;
3957 	  return (AS1 (clr,%B0) CR_TAB
3958 		  AS1 (clr,%A0));
3959 
3960 	case 4:
3961 	  if (optimize_size && scratch)
3962 	    break;  /* 5 */
3963 	  if (ldi_ok)
3964 	    {
3965 	      *len = 6;
3966 	      return (AS1 (swap,%B0)      CR_TAB
3967 		      AS1 (swap,%A0)      CR_TAB
3968 		      AS2 (andi,%A0,0x0f) CR_TAB
3969 		      AS2 (eor,%A0,%B0)   CR_TAB
3970 		      AS2 (andi,%B0,0x0f) CR_TAB
3971 		      AS2 (eor,%A0,%B0));
3972 	    }
3973 	  if (scratch)
3974 	    {
3975 	      *len = 7;
3976 	      return (AS1 (swap,%B0)    CR_TAB
3977 		      AS1 (swap,%A0)    CR_TAB
3978 		      AS2 (ldi,%3,0x0f) CR_TAB
3979 		      "and %A0,%3"      CR_TAB
3980 		      AS2 (eor,%A0,%B0) CR_TAB
3981 		      "and %B0,%3"      CR_TAB
3982 		      AS2 (eor,%A0,%B0));
3983 	    }
3984 	  break;  /* optimize_size ? 6 : 8 */
3985 
3986 	case 5:
3987 	  if (optimize_size)
3988 	    break;  /* scratch ? 5 : 6 */
3989 	  if (ldi_ok)
3990 	    {
3991 	      *len = 8;
3992 	      return (AS1 (lsr,%B0)       CR_TAB
3993 		      AS1 (ror,%A0)       CR_TAB
3994 		      AS1 (swap,%B0)      CR_TAB
3995 		      AS1 (swap,%A0)      CR_TAB
3996 		      AS2 (andi,%A0,0x0f) CR_TAB
3997 		      AS2 (eor,%A0,%B0)   CR_TAB
3998 		      AS2 (andi,%B0,0x0f) CR_TAB
3999 		      AS2 (eor,%A0,%B0));
4000 	    }
4001 	  if (scratch)
4002 	    {
4003 	      *len = 9;
4004 	      return (AS1 (lsr,%B0)     CR_TAB
4005 		      AS1 (ror,%A0)     CR_TAB
4006 		      AS1 (swap,%B0)    CR_TAB
4007 		      AS1 (swap,%A0)    CR_TAB
4008 		      AS2 (ldi,%3,0x0f) CR_TAB
4009 		      "and %A0,%3"      CR_TAB
4010 		      AS2 (eor,%A0,%B0) CR_TAB
4011 		      "and %B0,%3"      CR_TAB
4012 		      AS2 (eor,%A0,%B0));
4013 	    }
4014 	  break;  /* 10 */
4015 
4016 	case 6:
4017 	  if (optimize_size)
4018 	    break;  /* scratch ? 5 : 6 */
4019 	  *len = 9;
4020 	  return (AS1 (clr,__tmp_reg__) CR_TAB
4021 		  AS1 (lsl,%A0)         CR_TAB
4022 		  AS1 (rol,%B0)         CR_TAB
4023 		  AS1 (rol,__tmp_reg__) CR_TAB
4024 		  AS1 (lsl,%A0)         CR_TAB
4025 		  AS1 (rol,%B0)         CR_TAB
4026 		  AS1 (rol,__tmp_reg__) CR_TAB
4027 		  AS2 (mov,%A0,%B0)     CR_TAB
4028 		  AS2 (mov,%B0,__tmp_reg__));
4029 
4030 	case 7:
4031 	  *len = 5;
4032 	  return (AS1 (lsl,%A0)     CR_TAB
4033 		  AS2 (mov,%A0,%B0) CR_TAB
4034 		  AS1 (rol,%A0)     CR_TAB
4035 		  AS2 (sbc,%B0,%B0) CR_TAB
4036 		  AS1 (neg,%B0));
4037 
4038 	case 8:
4039 	  return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4040 			    AS1 (clr,%B0));
4041 
4042 	case 9:
4043 	  *len = 3;
4044 	  return (AS2 (mov,%A0,%B0) CR_TAB
4045 		  AS1 (clr,%B0)     CR_TAB
4046 		  AS1 (lsr,%A0));
4047 
4048 	case 10:
4049 	  *len = 4;
4050 	  return (AS2 (mov,%A0,%B0) CR_TAB
4051 		  AS1 (clr,%B0)     CR_TAB
4052 		  AS1 (lsr,%A0)     CR_TAB
4053 		  AS1 (lsr,%A0));
4054 
4055 	case 11:
4056 	  *len = 5;
4057 	  return (AS2 (mov,%A0,%B0) CR_TAB
4058 		  AS1 (clr,%B0)     CR_TAB
4059 		  AS1 (lsr,%A0)     CR_TAB
4060 		  AS1 (lsr,%A0)     CR_TAB
4061 		  AS1 (lsr,%A0));
4062 
4063 	case 12:
4064 	  if (ldi_ok)
4065 	    {
4066 	      *len = 4;
4067 	      return (AS2 (mov,%A0,%B0) CR_TAB
4068 		      AS1 (clr,%B0)     CR_TAB
4069 		      AS1 (swap,%A0)    CR_TAB
4070 		      AS2 (andi,%A0,0x0f));
4071 	    }
4072 	  if (scratch)
4073 	    {
4074 	      *len = 5;
4075 	      return (AS2 (mov,%A0,%B0) CR_TAB
4076 		      AS1 (clr,%B0)     CR_TAB
4077 		      AS1 (swap,%A0)    CR_TAB
4078 		      AS2 (ldi,%3,0x0f) CR_TAB
4079 		      "and %A0,%3");
4080 	    }
4081 	  *len = 6;
4082 	  return (AS2 (mov,%A0,%B0) CR_TAB
4083 		  AS1 (clr,%B0)     CR_TAB
4084 		  AS1 (lsr,%A0)     CR_TAB
4085 		  AS1 (lsr,%A0)     CR_TAB
4086 		  AS1 (lsr,%A0)     CR_TAB
4087 		  AS1 (lsr,%A0));
4088 
4089 	case 13:
4090 	  if (ldi_ok)
4091 	    {
4092 	      *len = 5;
4093 	      return (AS2 (mov,%A0,%B0) CR_TAB
4094 		      AS1 (clr,%B0)     CR_TAB
4095 		      AS1 (swap,%A0)    CR_TAB
4096 		      AS1 (lsr,%A0)     CR_TAB
4097 		      AS2 (andi,%A0,0x07));
4098 	    }
4099 	  if (AVR_HAVE_MUL && scratch)
4100 	    {
4101 	      *len = 5;
4102 	      return (AS2 (ldi,%3,0x08) CR_TAB
4103 		      AS2 (mul,%B0,%3)  CR_TAB
4104 		      AS2 (mov,%A0,r1)  CR_TAB
4105 		      AS1 (clr,%B0)     CR_TAB
4106 		      AS1 (clr,__zero_reg__));
4107 	    }
4108 	  if (optimize_size && scratch)
4109 	    break;  /* 5 */
4110 	  if (scratch)
4111 	    {
4112 	      *len = 6;
4113 	      return (AS2 (mov,%A0,%B0) CR_TAB
4114 		      AS1 (clr,%B0)     CR_TAB
4115 		      AS1 (swap,%A0)    CR_TAB
4116 		      AS1 (lsr,%A0)     CR_TAB
4117 		      AS2 (ldi,%3,0x07) CR_TAB
4118 		      "and %A0,%3");
4119 	    }
4120 	  if (AVR_HAVE_MUL)
4121 	    {
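	      /* Build the constant 1<<3 in __zero_reg__ (known to be zero)
	         with SET/BLD, multiply by it, and clear the register again;
	         this avoids needing an upper register for LDI.  */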
4122 	      *len = 6;
4123 	      return ("set"            CR_TAB
4124 		      AS2 (bld,r1,3)   CR_TAB
4125 		      AS2 (mul,%B0,r1) CR_TAB
4126 		      AS2 (mov,%A0,r1) CR_TAB
4127 		      AS1 (clr,%B0)    CR_TAB
4128 		      AS1 (clr,__zero_reg__));
4129 	    }
4130 	  *len = 7;
4131 	  return (AS2 (mov,%A0,%B0) CR_TAB
4132 		  AS1 (clr,%B0)     CR_TAB
4133 		  AS1 (lsr,%A0)     CR_TAB
4134 		  AS1 (lsr,%A0)     CR_TAB
4135 		  AS1 (lsr,%A0)     CR_TAB
4136 		  AS1 (lsr,%A0)     CR_TAB
4137 		  AS1 (lsr,%A0));
4138 
4139 	case 14:
4140 	  if (AVR_HAVE_MUL && ldi_ok)
4141 	    {
4142 	      *len = 5;
4143 	      return (AS2 (ldi,%A0,0x04) CR_TAB
4144 		      AS2 (mul,%B0,%A0)  CR_TAB
4145 		      AS2 (mov,%A0,r1)   CR_TAB
4146 		      AS1 (clr,%B0)      CR_TAB
4147 		      AS1 (clr,__zero_reg__));
4148 	    }
4149 	  if (AVR_HAVE_MUL && scratch)
4150 	    {
4151 	      *len = 5;
4152 	      return (AS2 (ldi,%3,0x04) CR_TAB
4153 		      AS2 (mul,%B0,%3)  CR_TAB
4154 		      AS2 (mov,%A0,r1)  CR_TAB
4155 		      AS1 (clr,%B0)     CR_TAB
4156 		      AS1 (clr,__zero_reg__));
4157 	    }
4158 	  if (optimize_size && ldi_ok)
4159 	    {
4160 	      *len = 5;
4161 	      return (AS2 (mov,%A0,%B0) CR_TAB
4162 		      AS2 (ldi,%B0,6) "\n1:\t"
4163 		      AS1 (lsr,%A0)     CR_TAB
4164 		      AS1 (dec,%B0)     CR_TAB
4165 		      AS1 (brne,1b));
4166 	    }
4167 	  if (optimize_size && scratch)
4168 	    break;  /* 5 */
4169 	  *len = 6;
4170 	  return (AS1 (clr,%A0) CR_TAB
4171 		  AS1 (lsl,%B0) CR_TAB
4172 		  AS1 (rol,%A0) CR_TAB
4173 		  AS1 (lsl,%B0) CR_TAB
4174 		  AS1 (rol,%A0) CR_TAB
4175 		  AS1 (clr,%B0));
4176 
4177 	case 15:
4178 	  *len = 4;
4179 	  return (AS1 (clr,%A0) CR_TAB
4180 		  AS1 (lsl,%B0) CR_TAB
4181 		  AS1 (rol,%A0) CR_TAB
4182 		  AS1 (clr,%B0));
4183 	}
4184       len = t;
4185     }
4186   out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4187 		       AS1 (ror,%A0)),
4188 		       insn, operands, len, 2);
4189   return "";
4190 }
4191 
4192 /* 32bit logic shift right ((unsigned long)x >> i) */
4193 
4194 const char *
4195 lshrsi3_out (rtx insn, rtx operands[], int *len)
4196 {
4197   if (GET_CODE (operands[2]) == CONST_INT)
4198     {
4199       int k;
4200       int *t = len;
4201 
4202       if (!len)
4203 	len = &k;
4204 
4205       switch (INTVAL (operands[2]))
4206 	{
4207 	default:
4208 	  if (INTVAL (operands[2]) < 32)
4209 	    break;
4210 
4211 	  if (AVR_HAVE_MOVW)
4212 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
4213 			      AS1 (clr,%C0) CR_TAB
4214 			      AS2 (movw,%A0,%C0));
4215 	  *len = 4;
4216 	  return (AS1 (clr,%D0) CR_TAB
4217 		  AS1 (clr,%C0) CR_TAB
4218 		  AS1 (clr,%B0) CR_TAB
4219 		  AS1 (clr,%A0));
4220 
4221 	case 8:
4222 	  {
4223 	    int reg0 = true_regnum (operands[0]);
4224 	    int reg1 = true_regnum (operands[1]);
4225 	    *len = 4;
4226 	    if (reg0 <= reg1)
4227 	      return (AS2 (mov,%A0,%B1) CR_TAB
4228 		      AS2 (mov,%B0,%C1) CR_TAB
4229 		      AS2 (mov,%C0,%D1) CR_TAB
4230 		      AS1 (clr,%D0));
4231 	    else
4232 	      return (AS1 (clr,%D0)     CR_TAB
4233 		      AS2 (mov,%C0,%D1) CR_TAB
4234 		      AS2 (mov,%B0,%C1) CR_TAB
4235 		      AS2 (mov,%A0,%B1));
4236 	  }
4237 
4238 	case 16:
4239 	  {
4240 	    int reg0 = true_regnum (operands[0]);
4241 	    int reg1 = true_regnum (operands[1]);
4242 
4243 	    if (reg0 == reg1 + 2)
4244 	      return *len = 2, (AS1 (clr,%C0)     CR_TAB
4245 				AS1 (clr,%D0));
4246 	    if (AVR_HAVE_MOVW)
4247 	      return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4248 				AS1 (clr,%C0)      CR_TAB
4249 				AS1 (clr,%D0));
4250 	    else
4251 	      return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4252 				AS2 (mov,%A0,%C1) CR_TAB
4253 				AS1 (clr,%C0)     CR_TAB
4254 				AS1 (clr,%D0));
4255 	  }
4256 
4257 	case 24:
4258 	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4259 			    AS1 (clr,%B0)     CR_TAB
4260 			    AS1 (clr,%C0)     CR_TAB
4261 			    AS1 (clr,%D0));
4262 
4263 	case 31:
4264 	  *len = 6;
4265 	  return (AS1 (clr,%A0)    CR_TAB
4266 		  AS2 (sbrc,%D0,7) CR_TAB
4267 		  AS1 (inc,%A0)    CR_TAB
4268 		  AS1 (clr,%B0)    CR_TAB
4269 		  AS1 (clr,%C0)    CR_TAB
4270 		  AS1 (clr,%D0));
4271 	}
4272       len = t;
4273     }
4274   out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4275 		       AS1 (ror,%C0) CR_TAB
4276 		       AS1 (ror,%B0) CR_TAB
4277 		       AS1 (ror,%A0)),
4278 		      insn, operands, len, 4);
4279   return "";
4280 }
4281 
4282 /* Create RTL split patterns for byte-sized rotate expressions.  This
4283    produces a series of move instructions and considers overlap situations.
4284    Overlapping non-HImode operands need a scratch register.  */
4285 
4286 bool
4287 avr_rotate_bytes (rtx operands[])
4288 {
4289     int i, j;
4290     enum machine_mode mode = GET_MODE (operands[0]);
4291     bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4292     bool same_reg = rtx_equal_p (operands[0], operands[1]);
4293     int num = INTVAL (operands[2]);
4294     rtx scratch = operands[3];
4295     /* Work out whether a byte or word move is needed.  Odd byte rotates need QImode.
4296        Use a word move if no scratch is needed; otherwise use the size of the scratch.  */
4297     enum machine_mode move_mode = QImode;
4298     if (num & 0xf)
4299       move_mode = QImode;
4300     else if ((mode == SImode && !same_reg) || !overlapped)
4301       move_mode = HImode;
4302     else
4303       move_mode = GET_MODE (scratch);
4304 
4305     /* Force DI rotate to use QI moves, since other DI moves are currently split
4306        into QI moves, so that forward propagation works better.  */
4307     if (mode == DImode)
4308       move_mode = QImode;
4309     /* Make scratch smaller if needed.  */
4310     if (GET_MODE (scratch) == HImode && move_mode == QImode)
4311       scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4312 
4313     int move_size = GET_MODE_SIZE (move_mode);
4314     /* Number of bytes/words to rotate.  */
4315     int offset = (num  >> 3) / move_size;
4316     /* Number of moves needed.  */
4317     int size = GET_MODE_SIZE (mode) / move_size;
4318     /* HImode byte swap is a special case that avoids a scratch register.  */
4319     if (mode == HImode && same_reg)
4320       {
4321 	/* HImode byte swap, using xor.  This is as quick as using scratch.  */
4322 	rtx src, dst;
4323 	src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4324 	dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4325 	if (!rtx_equal_p (dst, src))
4326 	  {
4327 	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4328 	     emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4329 	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4330 	  }
4331       }
4332     else
4333       {
4334 	/* Create linked list of moves to determine move order.  */
4335 	struct {
4336 	  rtx src, dst;
4337 	  int links;
4338 	} move[size + 8];
4339 
4340 	/* Generate list of subreg moves.  */
4341 	for (i = 0; i < size; i++)
4342 	  {
4343 	    int from = i;
4344 	    int to = (from + offset) % size;
4345 	    move[i].src = simplify_gen_subreg (move_mode, operands[1],
4346 						mode, from * move_size);
4347 	    move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4348 						mode, to   * move_size);
4349 	    move[i].links = -1;
4350 	   }
4351 	/* Mark dependence where a dst of one move is the src of another move.
4352 	   The first move is a conflict, as it must wait until the second is
4353 	   performed.  We ignore moves to self; we catch those later.  */
4354 	if (overlapped)
4355 	  for (i = 0; i < size; i++)
4356 	    if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4357 	      for (j = 0; j < size; j++)
4358 		if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4359 		  {
4360 		    /* The dst of move i is the src of move j.  */
4361 		    move[i].links = j;
4362 		    break;
4363 		  }
4364 
4365 	int blocked = -1;
4366 	int moves = 0;
4367 	/* Go through move list and perform non-conflicting moves.  As each
4368 	   non-overlapping move is made, it may remove other conflicts
4369 	   so the process is repeated until no conflicts remain.  */
4370 	do
4371 	  {
4372 	    blocked = -1;
4373 	    moves = 0;
4374 	    /* Emit move where dst is not also a src or we have used that
4375 	       src already.  */
4376 	    for (i = 0; i < size; i++)
4377 	      if (move[i].src != NULL_RTX)
4378 		if  (move[i].links == -1 || move[move[i].links].src == NULL_RTX)
4379 		  {
4380 		    moves++;
4381 		    /* Ignore NOP moves to self.  */
4382 		    if (!rtx_equal_p (move[i].dst, move[i].src))
4383 		      emit_move_insn (move[i].dst, move[i].src);
4384 
4385 		    /* Remove the conflict from the list.  */
4386 		    move[i].src = NULL_RTX;
4387 		  }
4388 		else
4389 		  blocked = i;
4390 
4391 	    /* Check for deadlock. This is when no moves occurred and we have
4392 	       at least one blocked move.  */
4393 	    if (moves == 0 && blocked != -1)
4394 	      {
4395 		/* Need to use the scratch register to break the deadlock.
4396 		   Add a move to put the dst of the blocked move into scratch.
4397 		   When this move occurs, it will break the chain deadlock.
4398 		   The scratch register is substituted for the real move.  */
4399 
4400 		move[size].src = move[blocked].dst;
4401 		move[size].dst = scratch;
4402 		/* Scratch move is never blocked.  */
4403 		move[size].links = -1;
4404 		/* Make sure we have a valid link.  */
4405 		gcc_assert (move[blocked].links != -1);
4406 		/* Replace the src of the blocking move with the scratch reg.  */
4407 		move[move[blocked].links].src = scratch;
4408 		/* Make it dependent on the scratch move occurring.  */
4409 		move[blocked].links = size;
4410 		size++;
4411 	      }
4412 	  }
4413 	while (blocked != -1);
4414       }
4415     return true;
4416 }
4417 
4418 /* Modifies the length assigned to instruction INSN.
4419    LEN is the initially computed length of the insn.  */
4420 
4421 int
4422 adjust_insn_length (rtx insn, int len)
4423 {
4424   rtx patt = PATTERN (insn);
4425   rtx set;
4426 
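  /* The output routines called below are used here only to recompute the
     instruction count through their LEN argument; the assembler template
     they return is discarded.  */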
4427   if (GET_CODE (patt) == SET)
4428     {
4429       rtx op[10];
4430       op[1] = SET_SRC (patt);
4431       op[0] = SET_DEST (patt);
4432       if (general_operand (op[1], VOIDmode)
4433 	  && general_operand (op[0], VOIDmode))
4434 	{
4435 	  switch (GET_MODE (op[0]))
4436 	    {
4437 	    case QImode:
4438 	      output_movqi (insn, op, &len);
4439 	      break;
4440 	    case HImode:
4441 	      output_movhi (insn, op, &len);
4442 	      break;
4443 	    case SImode:
4444 	    case SFmode:
4445 	      output_movsisf (insn, op, &len);
4446 	      break;
4447 	    default:
4448 	      break;
4449 	    }
4450 	}
4451       else if (op[0] == cc0_rtx && REG_P (op[1]))
4452 	{
4453 	  switch (GET_MODE (op[1]))
4454 	    {
4455 	    case HImode: out_tsthi (insn, op[1], &len); break;
4456 	    case SImode: out_tstsi (insn, op[1], &len); break;
4457 	    default: break;
4458 	    }
4459 	}
4460       else if (GET_CODE (op[1]) == AND)
4461 	{
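	  /* One ANDI/ORI (or equivalent) is needed per byte whose mask is not
	     the identity value: 0xff here for AND, 0x00 in the IOR case below.  */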
4462 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4463 	    {
4464 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4465 	      if (GET_MODE (op[1]) == SImode)
4466 		len = (((mask & 0xff) != 0xff)
4467 		       + ((mask & 0xff00) != 0xff00)
4468 		       + ((mask & 0xff0000L) != 0xff0000L)
4469 		       + ((mask & 0xff000000L) != 0xff000000L));
4470 	      else if (GET_MODE (op[1]) == HImode)
4471 		len = (((mask & 0xff) != 0xff)
4472 		       + ((mask & 0xff00) != 0xff00));
4473 	    }
4474 	}
4475       else if (GET_CODE (op[1]) == IOR)
4476 	{
4477 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4478 	    {
4479 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4480 	      if (GET_MODE (op[1]) == SImode)
4481 		len = (((mask & 0xff) != 0)
4482 		       + ((mask & 0xff00) != 0)
4483 		       + ((mask & 0xff0000L) != 0)
4484 		       + ((mask & 0xff000000L) != 0));
4485 	      else if (GET_MODE (op[1]) == HImode)
4486 		len = (((mask & 0xff) != 0)
4487 		       + ((mask & 0xff00) != 0));
4488 	    }
4489 	}
4490     }
4491   set = single_set (insn);
4492   if (set)
4493     {
4494       rtx op[10];
4495 
4496       op[1] = SET_SRC (set);
4497       op[0] = SET_DEST (set);
4498 
4499       if (GET_CODE (patt) == PARALLEL
4500 	  && general_operand (op[1], VOIDmode)
4501 	  && general_operand (op[0], VOIDmode))
4502 	{
4503 	  if (XVECLEN (patt, 0) == 2)
4504 	    op[2] = XVECEXP (patt, 0, 1);
4505 
4506 	  switch (GET_MODE (op[0]))
4507 	    {
4508 	    case QImode:
4509 	      len = 2;
4510 	      break;
4511 	    case HImode:
4512 	      output_reload_inhi (insn, op, &len);
4513 	      break;
4514 	    case SImode:
4515 	    case SFmode:
4516 	      output_reload_insisf (insn, op, &len);
4517 	      break;
4518 	    default:
4519 	      break;
4520 	    }
4521 	}
4522       else if (GET_CODE (op[1]) == ASHIFT
4523 	  || GET_CODE (op[1]) == ASHIFTRT
4524 	  || GET_CODE (op[1]) == LSHIFTRT)
4525 	{
4526 	  rtx ops[10];
4527 	  ops[0] = op[0];
4528 	  ops[1] = XEXP (op[1],0);
4529 	  ops[2] = XEXP (op[1],1);
4530 	  switch (GET_CODE (op[1]))
4531 	    {
4532 	    case ASHIFT:
4533 	      switch (GET_MODE (op[0]))
4534 		{
4535 		case QImode: ashlqi3_out (insn,ops,&len); break;
4536 		case HImode: ashlhi3_out (insn,ops,&len); break;
4537 		case SImode: ashlsi3_out (insn,ops,&len); break;
4538 		default: break;
4539 		}
4540 	      break;
4541 	    case ASHIFTRT:
4542 	      switch (GET_MODE (op[0]))
4543 		{
4544 		case QImode: ashrqi3_out (insn,ops,&len); break;
4545 		case HImode: ashrhi3_out (insn,ops,&len); break;
4546 		case SImode: ashrsi3_out (insn,ops,&len); break;
4547 		default: break;
4548 		}
4549 	      break;
4550 	    case LSHIFTRT:
4551 	      switch (GET_MODE (op[0]))
4552 		{
4553 		case QImode: lshrqi3_out (insn,ops,&len); break;
4554 		case HImode: lshrhi3_out (insn,ops,&len); break;
4555 		case SImode: lshrsi3_out (insn,ops,&len); break;
4556 		default: break;
4557 		}
4558 	      break;
4559 	    default:
4560 	      break;
4561 	    }
4562 	}
4563     }
4564   return len;
4565 }
4566 
4567 /* Return nonzero if register REG is dead after INSN.  */
4568 
4569 int
4570 reg_unused_after (rtx insn, rtx reg)
4571 {
4572   return (dead_or_set_p (insn, reg)
4573 	  || (REG_P(reg) && _reg_unused_after (insn, reg)));
4574 }
4575 
4576 /* Return nonzero if REG is not used after INSN.
4577    We assume REG is a reload reg, and therefore does
4578    not live past labels.  It may live past calls or jumps though.  */
4579 
4580 int
4581 _reg_unused_after (rtx insn, rtx reg)
4582 {
4583   enum rtx_code code;
4584   rtx set;
4585 
4586   /* If the reg is set by this instruction, then it is safe for our
4587      case.  Disregard the case where this is a store to memory, since
4588      we are checking a register used in the store address.  */
4589   set = single_set (insn);
4590   if (set && GET_CODE (SET_DEST (set)) != MEM
4591       && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4592     return 1;
4593 
4594   while ((insn = NEXT_INSN (insn)))
4595     {
4596       rtx set;
4597       code = GET_CODE (insn);
4598 
4599 #if 0
4600       /* If this is a label that existed before reload, then the register
4601 	 is dead here.  However, if this is a label added by reorg, then
4602 	 the register may still be live here.  We can't tell the difference,
4603 	 so we just ignore labels completely.  */
4604       if (code == CODE_LABEL)
4605 	return 1;
4606       /* else */
4607 #endif
4608 
4609       if (!INSN_P (insn))
4610 	continue;
4611 
4612       if (code == JUMP_INSN)
4613 	return 0;
4614 
4615       /* If this is a sequence, we must handle them all at once.
4616 	 We could have for instance a call that sets the target register,
4617 	 and an insn in a delay slot that uses the register.  In this case,
4618 	 we must return 0.  */
4619       else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4620 	{
4621 	  int i;
4622 	  int retval = 0;
4623 
4624 	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4625 	    {
4626 	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4627 	      rtx set = single_set (this_insn);
4628 
4629 	      if (GET_CODE (this_insn) == CALL_INSN)
4630 		code = CALL_INSN;
4631 	      else if (GET_CODE (this_insn) == JUMP_INSN)
4632 		{
4633 		  if (INSN_ANNULLED_BRANCH_P (this_insn))
4634 		    return 0;
4635 		  code = JUMP_INSN;
4636 		}
4637 
4638 	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4639 		return 0;
4640 	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4641 		{
4642 		  if (GET_CODE (SET_DEST (set)) != MEM)
4643 		    retval = 1;
4644 		  else
4645 		    return 0;
4646 		}
4647 	      if (set == 0
4648 		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4649 		return 0;
4650 	    }
4651 	  if (retval == 1)
4652 	    return 1;
4653 	  else if (code == JUMP_INSN)
4654 	    return 0;
4655 	}
4656 
4657       if (code == CALL_INSN)
4658 	{
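	  /* If REG is a call-used (caller-saved) register and the call does
	     not explicitly list it in CALL_INSN_FUNCTION_USAGE, it cannot be
	     live across the call, so it is dead here.  */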
4659 	  rtx tem;
4660 	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4661 	    if (GET_CODE (XEXP (tem, 0)) == USE
4662 		&& REG_P (XEXP (XEXP (tem, 0), 0))
4663 		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4664 	      return 0;
4665 	  if (call_used_regs[REGNO (reg)])
4666 	    return 1;
4667 	}
4668 
4669       set = single_set (insn);
4670 
4671       if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4672 	return 0;
4673       if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4674 	return GET_CODE (SET_DEST (set)) != MEM;
4675       if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4676 	return 0;
4677     }
4678   return 1;
4679 }
4680 
4681 /* Target hook for assembling integer objects.  The AVR version needs
4682    special handling for references to certain labels.  */
4683 
4684 static bool
4685 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4686 {
4687   if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4688       && text_segment_operand (x, VOIDmode) )
4689     {
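      /* Code addresses are emitted through the assembler's gs() operator,
	 which yields the program-memory word address of the label and lets
	 the linker insert a jump stub if needed on large devices.  */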
4690       fputs ("\t.word\tgs(", asm_out_file);
4691       output_addr_const (asm_out_file, x);
4692       fputs (")\n", asm_out_file);
4693       return true;
4694     }
4695   return default_assemble_integer (x, size, aligned_p);
4696 }
4697 
4698 /* Worker function for ASM_DECLARE_FUNCTION_NAME.  */
4699 
4700 void
4701 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4702 {
4703 
4704   /* If the function has the 'signal' or 'interrupt' attribute, test to
4705      make sure that the name of the function is "__vector_NN" so as to
4706      catch when the user misspells the interrupt vector name.  */
4707 
4708   if (cfun->machine->is_interrupt)
4709     {
4710       if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4711         {
4712           warning_at (DECL_SOURCE_LOCATION (decl), 0,
4713                       "%qs appears to be a misspelled interrupt handler",
4714                       name);
4715         }
4716     }
4717   else if (cfun->machine->is_signal)
4718     {
4719       if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4720         {
4721            warning_at (DECL_SOURCE_LOCATION (decl), 0,
4722                        "%qs appears to be a misspelled signal handler",
4723                        name);
4724         }
4725     }
4726 
4727   ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4728   ASM_OUTPUT_LABEL (file, name);
4729 }
4730 
4731 /* The routine used to output NUL terminated strings.  We use a special
4732    version of this for most svr4 targets because doing so makes the
4733    generated assembly code more compact (and thus faster to assemble)
4734    as well as more readable, especially for targets like the i386
4735    (where the only alternative is to output character sequences as
4736    comma separated lists of numbers).  */
4737 
4738 void
4739 gas_output_limited_string(FILE *file, const char *str)
4740 {
4741   const unsigned char *_limited_str = (const unsigned char *) str;
4742   unsigned ch;
4743   fprintf (file, "%s\"", STRING_ASM_OP);
4744   for (; (ch = *_limited_str); _limited_str++)
4745     {
4746       int escape;
4747       switch (escape = ESCAPES[ch])
4748 	{
4749 	case 0:
4750 	  putc (ch, file);
4751 	  break;
4752 	case 1:
4753 	  fprintf (file, "\\%03o", ch);
4754 	  break;
4755 	default:
4756 	  putc ('\\', file);
4757 	  putc (escape, file);
4758 	  break;
4759 	}
4760     }
4761   fprintf (file, "\"\n");
4762 }
4763 
4764 /* The routine used to output sequences of byte values.  We use a special
4765    version of this for most svr4 targets because doing so makes the
4766    generated assembly code more compact (and thus faster to assemble)
4767    as well as more readable.  Note that if we find subparts of the
4768    character sequence which end with NUL (and which are shorter than
4769    STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING.  */
4770 
4771 void
4772 gas_output_ascii(FILE *file, const char *str, size_t length)
4773 {
4774   const unsigned char *_ascii_bytes = (const unsigned char *) str;
4775   const unsigned char *limit = _ascii_bytes + length;
4776   unsigned bytes_in_chunk = 0;
4777   for (; _ascii_bytes < limit; _ascii_bytes++)
4778     {
4779       const unsigned char *p;
4780       if (bytes_in_chunk >= 60)
4781 	{
4782 	  fprintf (file, "\"\n");
4783 	  bytes_in_chunk = 0;
4784 	}
4785       for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4786 	continue;
4787       if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4788 	{
4789 	  if (bytes_in_chunk > 0)
4790 	    {
4791 	      fprintf (file, "\"\n");
4792 	      bytes_in_chunk = 0;
4793 	    }
4794 	  gas_output_limited_string (file, (const char*)_ascii_bytes);
4795 	  _ascii_bytes = p;
4796 	}
4797       else
4798 	{
4799 	  int escape;
4800 	  unsigned ch;
4801 	  if (bytes_in_chunk == 0)
4802 	    fprintf (file, "\t.ascii\t\"");
4803 	  switch (escape = ESCAPES[ch = *_ascii_bytes])
4804 	    {
4805 	    case 0:
4806 	      putc (ch, file);
4807 	      bytes_in_chunk++;
4808 	      break;
4809 	    case 1:
4810 	      fprintf (file, "\\%03o", ch);
4811 	      bytes_in_chunk += 4;
4812 	      break;
4813 	    default:
4814 	      putc ('\\', file);
4815 	      putc (escape, file);
4816 	      bytes_in_chunk += 2;
4817 	      break;
4818 	    }
4819 	}
4820     }
4821   if (bytes_in_chunk > 0)
4822     fprintf (file, "\"\n");
4823 }
4824 
4825 /* Return value is nonzero if pseudos that have been
4826    assigned to registers of class CLASS would likely be spilled
4827    because registers of CLASS are needed for spill registers.  */
4828 
4829 bool
4830 class_likely_spilled_p (int c)
4831 {
4832   return (c != ALL_REGS && c != ADDW_REGS);
4833 }
4834 
4835 /* Valid attributes:
4836    progmem   - put data into program memory;
4837    signal    - make the function a hardware interrupt handler; interrupts
4838                remain disabled after the function prologue;
4839    interrupt - make the function a hardware interrupt handler; interrupts
4840                are enabled after the function prologue;
4841    naked     - don't generate a function prologue/epilogue or a `ret' command.
4842 
4843    Only the `progmem' attribute is valid for a type.  */
4844 
4845 /* Handle a "progmem" attribute; arguments as in
4846    struct attribute_spec.handler.  */
4847 static tree
4848 avr_handle_progmem_attribute (tree *node, tree name,
4849 			      tree args ATTRIBUTE_UNUSED,
4850 			      int flags ATTRIBUTE_UNUSED,
4851 			      bool *no_add_attrs)
4852 {
4853   if (DECL_P (*node))
4854     {
4855       if (TREE_CODE (*node) == TYPE_DECL)
4856 	{
4857 	  /* This is really a decl attribute, not a type attribute,
4858 	     but try to handle it for GCC 3.0 backwards compatibility.  */
4859 
4860 	  tree type = TREE_TYPE (*node);
4861 	  tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4862 	  tree newtype = build_type_attribute_variant (type, attr);
4863 
4864 	  TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4865 	  TREE_TYPE (*node) = newtype;
4866 	  *no_add_attrs = true;
4867 	}
4868       else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4869 	{
4870 	  if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4871 	    {
4872 	      warning (0, "only initialized variables can be placed into "
4873 		       "program memory area");
4874 	      *no_add_attrs = true;
4875 	    }
4876 	}
4877       else
4878 	{
4879 	  warning (OPT_Wattributes, "%qE attribute ignored",
4880 		   name);
4881 	  *no_add_attrs = true;
4882 	}
4883     }
4884 
4885   return NULL_TREE;
4886 }
4887 
4888 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4889    struct attribute_spec.handler.  */
4890 
4891 static tree
4892 avr_handle_fndecl_attribute (tree *node, tree name,
4893 			     tree args ATTRIBUTE_UNUSED,
4894 			     int flags ATTRIBUTE_UNUSED,
4895 			     bool *no_add_attrs)
4896 {
4897   if (TREE_CODE (*node) != FUNCTION_DECL)
4898     {
4899       warning (OPT_Wattributes, "%qE attribute only applies to functions",
4900 	       name);
4901       *no_add_attrs = true;
4902     }
4903 
4904   return NULL_TREE;
4905 }
4906 
4907 static tree
4908 avr_handle_fntype_attribute (tree *node, tree name,
4909                              tree args ATTRIBUTE_UNUSED,
4910                              int flags ATTRIBUTE_UNUSED,
4911                              bool *no_add_attrs)
4912 {
4913   if (TREE_CODE (*node) != FUNCTION_TYPE)
4914     {
4915       warning (OPT_Wattributes, "%qE attribute only applies to functions",
4916 	       name);
4917       *no_add_attrs = true;
4918     }
4919 
4920   return NULL_TREE;
4921 }
4922 
4923 /* Look for the attribute `progmem' in DECL;
4924    if found, return 1, otherwise 0.  */
4925 
4926 int
4927 avr_progmem_p (tree decl, tree attributes)
4928 {
4929   tree a;
4930 
4931   if (TREE_CODE (decl) != VAR_DECL)
4932     return 0;
4933 
4934   if (NULL_TREE
4935       != lookup_attribute ("progmem", attributes))
4936     return 1;
4937 
4938   a = decl;
4939   do
4940     a = TREE_TYPE (a);
4941   while (TREE_CODE (a) == ARRAY_TYPE);
4942 
4943   if (a == error_mark_node)
4944     return 0;
4945 
4946   if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4947     return 1;
4948 
4949   return 0;
4950 }
4951 
4952 /* Add the section attribute if the variable is in progmem.  */
4953 
4954 static void
4955 avr_insert_attributes (tree node, tree *attributes)
4956 {
4957   if (TREE_CODE (node) == VAR_DECL
4958       && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4959       && avr_progmem_p (node, *attributes))
4960     {
4961       static const char dsec[] = ".progmem.data";
4962       *attributes = tree_cons (get_identifier ("section"),
4963 		build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4964 		*attributes);
4965 
4966       /* ??? This seems sketchy.  Why can't the user declare the
4967 	 thing const in the first place?  */
4968       TREE_READONLY (node) = 1;
4969     }
4970 }
4971 
4972 /* A get_unnamed_section callback for switching to progmem_section.  */
4973 
4974 static void
4975 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4976 {
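  /* With JMP/CALL the switch tables placed here are plain data ("a");
     without it they are executed directly and the section must also be
     executable ("ax").  avr_asm_init_sections marks the section with
     SECTION_CODE in the same case.  */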
4977   fprintf (asm_out_file,
4978 	   "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4979 	   AVR_HAVE_JMP_CALL ? "a" : "ax");
4980   /* Should already be aligned; this is just to be safe if it isn't.  */
4981   fprintf (asm_out_file, "\t.p2align 1\n");
4982 }
4983 
4984 /* Implement TARGET_ASM_INIT_SECTIONS.  */
4985 
4986 static void
4987 avr_asm_init_sections (void)
4988 {
4989   progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4990 					 avr_output_progmem_section_asm_op,
4991 					 NULL);
4992   readonly_data_section = data_section;
4993 }
4994 
4995 static unsigned int
4996 avr_section_type_flags (tree decl, const char *name, int reloc)
4997 {
4998   unsigned int flags = default_section_type_flags (decl, name, reloc);
4999 
5000   if (strncmp (name, ".noinit", 7) == 0)
5001     {
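      /* .noinit behaves like .bss (it occupies no space in the object file)
	 but is not cleared by the startup code, so only uninitialized
	 variables belong there.  */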
5002       if (decl && TREE_CODE (decl) == VAR_DECL
5003 	  && DECL_INITIAL (decl) == NULL_TREE)
5004 	flags |= SECTION_BSS;  /* @nobits */
5005       else
5006 	warning (0, "only uninitialized variables can be placed in the "
5007 		 ".noinit section");
5008     }
5009 
5010   return flags;
5011 }
5012 
5013 /* Outputs some appropriate text to go at the start of an assembler
5014    file.  */
5015 
5016 static void
5017 avr_file_start (void)
5018 {
5019   if (avr_current_arch->asm_only)
5020     error ("MCU %qs supported for assembler only", avr_mcu_name);
5021 
5022   default_file_start ();
5023 
5024 /*  fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
5025   fputs ("__SREG__ = 0x3f\n"
5026 	 "__SP_H__ = 0x3e\n"
5027 	 "__SP_L__ = 0x3d\n", asm_out_file);
5028 
5029   fputs ("__tmp_reg__ = 0\n"
5030          "__zero_reg__ = 1\n", asm_out_file);
5031 
5032   /* FIXME: output these only if there is anything in the .data / .bss
5033      sections - some code size could be saved by not linking in the
5034      initialization code from libgcc if one or both sections are empty.  */
5035   fputs ("\t.global __do_copy_data\n", asm_out_file);
5036   fputs ("\t.global __do_clear_bss\n", asm_out_file);
5037 }
5038 
5039 /* Outputs to the stdio stream FILE some
5040    appropriate text to go at the end of an assembler file.  */
5041 
5042 static void
5043 avr_file_end (void)
5044 {
5045 }
5046 
5047 /* Choose the order in which to allocate hard registers for
5048    pseudo-registers local to a basic block.
5049 
5050    Store the desired register order in the array `reg_alloc_order'.
5051    Element 0 should be the register to allocate first; element 1, the
5052    next register; and so on.  */
5053 
5054 void
5055 order_regs_for_local_alloc (void)
5056 {
5057   unsigned int i;
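  /* All three orders try the upper registers r18..r25 and the pointer
     registers first and put the fixed registers r0/r1 (__tmp_reg__ and
     __zero_reg__) and registers 32..35 last; they differ only in the
     order of the r18..r25 pairs.  */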
5058   static const int order_0[] = {
5059     24,25,
5060     18,19,
5061     20,21,
5062     22,23,
5063     30,31,
5064     26,27,
5065     28,29,
5066     17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5067     0,1,
5068     32,33,34,35
5069   };
5070   static const int order_1[] = {
5071     18,19,
5072     20,21,
5073     22,23,
5074     24,25,
5075     30,31,
5076     26,27,
5077     28,29,
5078     17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5079     0,1,
5080     32,33,34,35
5081   };
5082   static const int order_2[] = {
5083     25,24,
5084     23,22,
5085     21,20,
5086     19,18,
5087     30,31,
5088     26,27,
5089     28,29,
5090     17,16,
5091     15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5092     1,0,
5093     32,33,34,35
5094   };
5095 
5096   const int *order = (TARGET_ORDER_1 ? order_1 :
5097 		      TARGET_ORDER_2 ? order_2 :
5098 		      order_0);
5099   for (i=0; i < ARRAY_SIZE (order_0); ++i)
5100       reg_alloc_order[i] = order[i];
5101 }
5102 
5103 
5104 /* Mutually recursive subroutine of avr_rtx_costs for calculating the
5105    cost of an RTX operand given its context.  X is the rtx of the
5106    operand, MODE is its mode, and OUTER is the rtx_code of this
5107    operand's parent operator.  */
5108 
5109 static int
5110 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5111 		      bool speed)
5112 {
5113   enum rtx_code code = GET_CODE (x);
5114   int total;
5115 
5116   switch (code)
5117     {
5118     case REG:
5119     case SUBREG:
5120       return 0;
5121 
5122     case CONST_INT:
5123     case CONST_DOUBLE:
5124       return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5125 
5126     default:
5127       break;
5128     }
5129 
5130   total = 0;
5131   avr_rtx_costs (x, code, outer, &total, speed);
5132   return total;
5133 }
5134 
5135 /* The AVR backend's rtx_cost function.  X is the rtx expression whose cost
5136    is to be calculated.  Return true if the complete cost has been
5137    computed, and false if subexpressions should be scanned.  In either
5138    case, *TOTAL contains the cost result.  */
5139 
5140 static bool
5141 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5142 	       bool speed)
5143 {
5144   enum rtx_code code = (enum rtx_code) codearg;
5145   enum machine_mode mode = GET_MODE (x);
5146   HOST_WIDE_INT val;
5147 
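  /* Costs are expressed as instruction counts via COSTS_N_INSNS.  The
     !speed (size) figures roughly track the code size of the emitted
     sequence or library call, while the speed figures also account for
     looping shift and multiply sequences.  */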
5148   switch (code)
5149     {
5150     case CONST_INT:
5151     case CONST_DOUBLE:
5152       /* Immediate constants are as cheap as registers.  */
5153       *total = 0;
5154       return true;
5155 
5156     case MEM:
5157     case CONST:
5158     case LABEL_REF:
5159     case SYMBOL_REF:
5160       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5161       return true;
5162 
5163     case NEG:
5164       switch (mode)
5165 	{
5166 	case QImode:
5167 	case SFmode:
5168 	  *total = COSTS_N_INSNS (1);
5169 	  break;
5170 
5171 	case HImode:
5172 	  *total = COSTS_N_INSNS (3);
5173 	  break;
5174 
5175 	case SImode:
5176 	  *total = COSTS_N_INSNS (7);
5177 	  break;
5178 
5179 	default:
5180 	  return false;
5181 	}
5182       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5183       return true;
5184 
5185     case ABS:
5186       switch (mode)
5187 	{
5188 	case QImode:
5189 	case SFmode:
5190 	  *total = COSTS_N_INSNS (1);
5191 	  break;
5192 
5193 	default:
5194 	  return false;
5195 	}
5196       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5197       return true;
5198 
5199     case NOT:
5200       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5201       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5202       return true;
5203 
5204     case ZERO_EXTEND:
5205       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5206 			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5207       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5208       return true;
5209 
5210     case SIGN_EXTEND:
5211       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5212 			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5213       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5214       return true;
5215 
5216     case PLUS:
5217       switch (mode)
5218 	{
5219 	case QImode:
5220 	  *total = COSTS_N_INSNS (1);
5221 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5222 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5223 	  break;
5224 
5225 	case HImode:
5226 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5227 	    {
5228 	      *total = COSTS_N_INSNS (2);
5229 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5230 	    }
5231 	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5232 	    *total = COSTS_N_INSNS (1);
5233 	  else
5234 	    *total = COSTS_N_INSNS (2);
5235 	  break;
5236 
5237 	case SImode:
5238 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5239 	    {
5240 	      *total = COSTS_N_INSNS (4);
5241 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5242 	    }
5243 	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5244 	    *total = COSTS_N_INSNS (1);
5245 	  else
5246 	    *total = COSTS_N_INSNS (4);
5247 	  break;
5248 
5249 	default:
5250 	  return false;
5251 	}
5252       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5253       return true;
5254 
5255     case MINUS:
5256     case AND:
5257     case IOR:
5258       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5259       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5260       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5261           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5262       return true;
5263 
5264     case XOR:
5265       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5266       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5267       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5268       return true;
5269 
5270     case MULT:
5271       switch (mode)
5272 	{
5273 	case QImode:
5274 	  if (AVR_HAVE_MUL)
5275 	    *total = COSTS_N_INSNS (!speed ? 3 : 4);
5276 	  else if (!speed)
5277 	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5278 	  else
5279 	    return false;
5280 	  break;
5281 
5282 	case HImode:
5283 	  if (AVR_HAVE_MUL)
5284 	    *total = COSTS_N_INSNS (!speed ? 7 : 10);
5285 	  else if (!speed)
5286 	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5287 	  else
5288 	    return false;
5289 	  break;
5290 
5291 	default:
5292 	  return false;
5293 	}
5294       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5295       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5296       return true;
5297 
5298     case DIV:
5299     case MOD:
5300     case UDIV:
5301     case UMOD:
5302       if (!speed)
5303 	*total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5304       else
5305 	return false;
5306       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5307       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5308       return true;
5309 
5310     case ROTATE:
5311       switch (mode)
5312 	{
5313 	case QImode:
5314 	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5315 	    *total = COSTS_N_INSNS (1);
5316 
5317 	  break;
5318 
5319 	case HImode:
5320 	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5321 	    *total = COSTS_N_INSNS (3);
5322 
5323 	  break;
5324 
5325 	case SImode:
5326 	  if (CONST_INT_P (XEXP (x, 1)))
5327 	    switch (INTVAL (XEXP (x, 1)))
5328 	      {
5329 	      case 8:
5330 	      case 24:
5331 		*total = COSTS_N_INSNS (5);
5332 		break;
5333 	      case 16:
5334 		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5335 		break;
5336 	      }
5337 	  break;
5338 
5339 	default:
5340 	  return false;
5341 	}
5342       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5343       return true;
5344 
5345     case ASHIFT:
5346       switch (mode)
5347 	{
5348 	case QImode:
5349 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5350 	    {
5351 	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
5352 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5353 	    }
5354 	  else
5355 	    {
5356 	      val = INTVAL (XEXP (x, 1));
5357 	      if (val == 7)
5358 		*total = COSTS_N_INSNS (3);
5359 	      else if (val >= 0 && val <= 7)
5360 		*total = COSTS_N_INSNS (val);
5361 	      else
5362 		*total = COSTS_N_INSNS (1);
5363 	    }
5364 	  break;
5365 
5366 	case HImode:
5367 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5368 	    {
5369 	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
5370 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5371 	    }
5372 	  else
5373 	    switch (INTVAL (XEXP (x, 1)))
5374 	      {
5375 	      case 0:
5376 		*total = 0;
5377 		break;
5378 	      case 1:
5379 	      case 8:
5380 		*total = COSTS_N_INSNS (2);
5381 		break;
5382 	      case 9:
5383 		*total = COSTS_N_INSNS (3);
5384 		break;
5385 	      case 2:
5386 	      case 3:
5387 	      case 10:
5388 	      case 15:
5389 		*total = COSTS_N_INSNS (4);
5390 		break;
5391 	      case 7:
5392 	      case 11:
5393 	      case 12:
5394 		*total = COSTS_N_INSNS (5);
5395 		break;
5396 	      case 4:
5397 		*total = COSTS_N_INSNS (!speed ? 5 : 8);
5398 		break;
5399 	      case 6:
5400 		*total = COSTS_N_INSNS (!speed ? 5 : 9);
5401 		break;
5402 	      case 5:
5403 		*total = COSTS_N_INSNS (!speed ? 5 : 10);
5404 		break;
5405 	      default:
5406 	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
5407 	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5408 	      }
5409 	  break;
5410 
5411 	case SImode:
5412 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5413 	    {
5414 	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
5415 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5416 	    }
5417 	  else
5418 	    switch (INTVAL (XEXP (x, 1)))
5419 	      {
5420 	      case 0:
5421 		*total = 0;
5422 		break;
5423 	      case 24:
5424 		*total = COSTS_N_INSNS (3);
5425 		break;
5426 	      case 1:
5427 	      case 8:
5428 	      case 16:
5429 		*total = COSTS_N_INSNS (4);
5430 		break;
5431 	      case 31:
5432 		*total = COSTS_N_INSNS (6);
5433 		break;
5434 	      case 2:
5435 		*total = COSTS_N_INSNS (!speed ? 7 : 8);
5436 		break;
5437 	      default:
5438 		*total = COSTS_N_INSNS (!speed ? 7 : 113);
5439 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5440 	      }
5441 	  break;
5442 
5443 	default:
5444 	  return false;
5445 	}
5446       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5447       return true;
5448 
5449     case ASHIFTRT:
5450       switch (mode)
5451 	{
5452 	case QImode:
5453 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5454 	    {
5455 	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
5456 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5457 	    }
5458 	  else
5459 	    {
5460 	      val = INTVAL (XEXP (x, 1));
5461 	      if (val == 6)
5462 		*total = COSTS_N_INSNS (4);
5463 	      else if (val == 7)
5464 		*total = COSTS_N_INSNS (2);
5465 	      else if (val >= 0 && val <= 7)
5466 		*total = COSTS_N_INSNS (val);
5467 	      else
5468 		*total = COSTS_N_INSNS (1);
5469 	    }
5470 	  break;
5471 
5472 	case HImode:
5473 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5474 	    {
5475 	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
5476 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5477 	    }
5478 	  else
5479 	    switch (INTVAL (XEXP (x, 1)))
5480 	      {
5481 	      case 0:
5482 		*total = 0;
5483 		break;
5484 	      case 1:
5485 		*total = COSTS_N_INSNS (2);
5486 		break;
5487 	      case 15:
5488 		*total = COSTS_N_INSNS (3);
5489 		break;
5490 	      case 2:
5491 	      case 7:
5492               case 8:
5493               case 9:
5494 		*total = COSTS_N_INSNS (4);
5495 		break;
5496               case 10:
5497 	      case 14:
5498 		*total = COSTS_N_INSNS (5);
5499 		break;
5500               case 11:
5501                 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5502 		break;
5503               case 12:
5504                 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5505 		break;
5506               case 6:
5507 	      case 13:
5508                 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5509 		break;
5510 	      default:
5511 	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
5512 	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5513 	      }
5514 	  break;
5515 
5516 	case SImode:
5517 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5518 	    {
5519 	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
5520 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5521 	    }
5522 	  else
5523 	    switch (INTVAL (XEXP (x, 1)))
5524 	      {
5525 	      case 0:
5526 		*total = 0;
5527 		break;
5528 	      case 1:
5529 		*total = COSTS_N_INSNS (4);
5530 		break;
5531 	      case 8:
5532 	      case 16:
5533 	      case 24:
5534 		*total = COSTS_N_INSNS (6);
5535 		break;
5536 	      case 2:
5537 		*total = COSTS_N_INSNS (!speed ? 7 : 8);
5538 		break;
5539 	      case 31:
5540 		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5541 		break;
5542 	      default:
5543 		*total = COSTS_N_INSNS (!speed ? 7 : 113);
5544 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5545 	      }
5546 	  break;
5547 
5548 	default:
5549 	  return false;
5550 	}
5551       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5552       return true;
5553 
5554     case LSHIFTRT:
5555       switch (mode)
5556 	{
5557 	case QImode:
5558 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5559 	    {
5560 	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
5561 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5562 	    }
5563 	  else
5564 	    {
5565 	      val = INTVAL (XEXP (x, 1));
5566 	      if (val == 7)
5567 		*total = COSTS_N_INSNS (3);
5568 	      else if (val >= 0 && val <= 7)
5569 		*total = COSTS_N_INSNS (val);
5570 	      else
5571 		*total = COSTS_N_INSNS (1);
5572 	    }
5573 	  break;
5574 
5575 	case HImode:
5576 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5577 	    {
5578 	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
5579 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5580 	    }
5581 	  else
5582 	    switch (INTVAL (XEXP (x, 1)))
5583 	      {
5584 	      case 0:
5585 		*total = 0;
5586 		break;
5587 	      case 1:
5588 	      case 8:
5589 		*total = COSTS_N_INSNS (2);
5590 		break;
5591 	      case 9:
5592 		*total = COSTS_N_INSNS (3);
5593 		break;
5594 	      case 2:
5595 	      case 10:
5596 	      case 15:
5597 		*total = COSTS_N_INSNS (4);
5598 		break;
5599 	      case 7:
5600               case 11:
5601 		*total = COSTS_N_INSNS (5);
5602 		break;
5603 	      case 3:
5604 	      case 12:
5605 	      case 13:
5606 	      case 14:
5607 		*total = COSTS_N_INSNS (!speed ? 5 : 6);
5608 		break;
5609 	      case 4:
5610 		*total = COSTS_N_INSNS (!speed ? 5 : 7);
5611 		break;
5612 	      case 5:
5613 	      case 6:
5614 		*total = COSTS_N_INSNS (!speed ? 5 : 9);
5615 		break;
5616 	      default:
5617 	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
5618 	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5619 	      }
5620 	  break;
5621 
5622 	case SImode:
5623 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5624 	    {
5625 	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
5626 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5627 	    }
5628 	  else
5629 	    switch (INTVAL (XEXP (x, 1)))
5630 	      {
5631 	      case 0:
5632 		*total = 0;
5633 		break;
5634 	      case 1:
5635 		*total = COSTS_N_INSNS (4);
5636 		break;
5637 	      case 2:
5638 		*total = COSTS_N_INSNS (!speed ? 7 : 8);
5639 		break;
5640 	      case 8:
5641 	      case 16:
5642 	      case 24:
5643 		*total = COSTS_N_INSNS (4);
5644 		break;
5645 	      case 31:
5646 		*total = COSTS_N_INSNS (6);
5647 		break;
5648 	      default:
5649 		*total = COSTS_N_INSNS (!speed ? 7 : 113);
5650 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5651 	      }
5652 	  break;
5653 
5654 	default:
5655 	  return false;
5656 	}
5657       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5658       return true;
5659 
5660     case COMPARE:
5661       switch (GET_MODE (XEXP (x, 0)))
5662 	{
5663 	case QImode:
5664 	  *total = COSTS_N_INSNS (1);
5665 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5666 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5667 	  break;
5668 
5669         case HImode:
5670 	  *total = COSTS_N_INSNS (2);
5671 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5672             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5673 	  else if (INTVAL (XEXP (x, 1)) != 0)
5674 	    *total += COSTS_N_INSNS (1);
5675           break;
5676 
5677         case SImode:
5678           *total = COSTS_N_INSNS (4);
5679           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5680             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5681 	  else if (INTVAL (XEXP (x, 1)) != 0)
5682 	    *total += COSTS_N_INSNS (3);
5683           break;
5684 
5685 	default:
5686 	  return false;
5687 	}
5688       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5689       return true;
5690 
5691     default:
5692       break;
5693     }
5694   return false;
5695 }
5696 
5697 /* Calculate the cost of a memory address.  */
5698 
5699 static int
5700 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5701 {
5702   if (GET_CODE (x) == PLUS
5703       && GET_CODE (XEXP (x,1)) == CONST_INT
5704       && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5705       && INTVAL (XEXP (x,1)) >= 61)
5706     return 18;
5707   if (CONSTANT_ADDRESS_P (x))
5708     {
5709       if (optimize > 0 && io_address_operand (x, QImode))
5710 	return 2;
5711       return 4;
5712     }
5713   return 4;
5714 }
5715 
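/* Editorial sketch, not part of the original port and guarded by #if 0 so it
   is never compiled: how the cost cases above apply to two concrete
   addresses.  REG_Y (r28) comes from avr.h, as used elsewhere in this file.  */
#if 0
static void
avr_address_cost_examples (void)
{
  /* Y-based address with a displacement of 62 (>= 61): costed at 18.  */
  rtx far_addr = gen_rtx_PLUS (HImode, gen_rtx_REG (HImode, REG_Y),
			       GEN_INT (62));
  /* Y-based address with a small displacement: falls through to the
     default cost of 4.  */
  rtx near_addr = gen_rtx_PLUS (HImode, gen_rtx_REG (HImode, REG_Y),
				GEN_INT (10));

  gcc_assert (avr_address_cost (far_addr, false) == 18);
  gcc_assert (avr_address_cost (near_addr, false) == 4);
}
#endif
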
5716 /* Test for extra memory constraint 'Q'.
5717    It's a memory address based on the Y or Z pointer with a valid displacement.  */
5718 
5719 int
5720 extra_constraint_Q (rtx x)
5721 {
5722   if (GET_CODE (XEXP (x,0)) == PLUS
5723       && REG_P (XEXP (XEXP (x,0), 0))
5724       && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5725       && (INTVAL (XEXP (XEXP (x,0), 1))
5726 	  <= MAX_LD_OFFSET (GET_MODE (x))))
5727     {
5728       rtx xx = XEXP (XEXP (x,0), 0);
5729       int regno = REGNO (xx);
5730       if (TARGET_ALL_DEBUG)
5731 	{
5732 	  fprintf (stderr, ("extra_constraint:\n"
5733 			    "reload_completed: %d\n"
5734 			    "reload_in_progress: %d\n"),
5735 		   reload_completed, reload_in_progress);
5736 	  debug_rtx (x);
5737 	}
5738       if (regno >= FIRST_PSEUDO_REGISTER)
5739 	return 1;		/* allocate pseudos */
5740       else if (regno == REG_Z || regno == REG_Y)
5741 	return 1;		/* strictly check */
5742       else if (xx == frame_pointer_rtx
5743 	       || xx == arg_pointer_rtx)
5744 	return 1;		/* XXX frame & arg pointer checks */
5745     }
5746   return 0;
5747 }
5748 
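/* Editorial sketch (guarded by #if 0, never compiled): a MEM that satisfies
   the 'Q' constraint, i.e. a Y-based address whose displacement does not
   exceed MAX_LD_OFFSET (QImode) == 63.  */
#if 0
static void
extra_constraint_Q_example (void)
{
  rtx addr = gen_rtx_PLUS (HImode, gen_rtx_REG (HImode, REG_Y), GEN_INT (10));
  rtx mem = gen_rtx_MEM (QImode, addr);

  gcc_assert (extra_constraint_Q (mem) == 1);
}
#endif
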
5749 /* Convert condition code CONDITION into one the AVR can branch on directly.  */
5750 
5751 RTX_CODE
5752 avr_normalize_condition (RTX_CODE condition)
5753 {
5754   switch (condition)
5755     {
5756     case GT:
5757       return GE;
5758     case GTU:
5759       return GEU;
5760     case LE:
5761       return LT;
5762     case LEU:
5763       return LTU;
5764     default:
5765       gcc_unreachable ();
5766     }
5767 }
5768 
5769 /* This function optimizes conditional jumps by rewriting the compare insns they depend on.  */
5770 
5771 static void
5772 avr_reorg (void)
5773 {
5774   rtx insn, pattern;
5775 
5776   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5777     {
5778       if (! (GET_CODE (insn) == INSN
5779 	     || GET_CODE (insn) == CALL_INSN
5780 	     || GET_CODE (insn) == JUMP_INSN)
5781 	  || !single_set (insn))
5782 	continue;
5783 
5784       pattern = PATTERN (insn);
5785 
5786       if (GET_CODE (pattern) == PARALLEL)
5787 	pattern = XVECEXP (pattern, 0, 0);
5788       if (GET_CODE (pattern) == SET
5789 	  && SET_DEST (pattern) == cc0_rtx
5790 	  && compare_diff_p (insn))
5791 	{
5792 	  if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5793 	    {
5794 	      /* We are now working on a compare insn.  */
5795 
5796 	      pattern = SET_SRC (pattern);
5797 	      if (true_regnum (XEXP (pattern,0)) >= 0
5798 		  && true_regnum (XEXP (pattern,1)) >= 0 )
5799 		{
5800 		  rtx x = XEXP (pattern,0);
5801 		  rtx next = next_real_insn (insn);
5802 		  rtx pat = PATTERN (next);
5803 		  rtx src = SET_SRC (pat);
5804 		  rtx t = XEXP (src,0);
5805 		  PUT_CODE (t, swap_condition (GET_CODE (t)));
5806 		  XEXP (pattern,0) = XEXP (pattern,1);
5807 		  XEXP (pattern,1) = x;
5808 		  INSN_CODE (next) = -1;
5809 		}
5810 	      else if (true_regnum (XEXP (pattern, 0)) >= 0
5811 		       && XEXP (pattern, 1) == const0_rtx)
5812 	        {
5813 	          /* This is a tst insn; we can reverse it.  */
5814 	          rtx next = next_real_insn (insn);
5815 	          rtx pat = PATTERN (next);
5816 	          rtx src = SET_SRC (pat);
5817 	          rtx t = XEXP (src,0);
5818 
5819 	          PUT_CODE (t, swap_condition (GET_CODE (t)));
5820 	          XEXP (pattern, 1) = XEXP (pattern, 0);
5821 	          XEXP (pattern, 0) = const0_rtx;
5822 	          INSN_CODE (next) = -1;
5823 	          INSN_CODE (insn) = -1;
5824 	        }
5825 	      else if (true_regnum (XEXP (pattern,0)) >= 0
5826 		       && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5827 		{
5828 		  rtx x = XEXP (pattern,1);
5829 		  rtx next = next_real_insn (insn);
5830 		  rtx pat = PATTERN (next);
5831 		  rtx src = SET_SRC (pat);
5832 		  rtx t = XEXP (src,0);
5833 		  enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5834 
5835 		  if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5836 		    {
5837 		      XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5838 		      PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5839 		      INSN_CODE (next) = -1;
5840 		      INSN_CODE (insn) = -1;
5841 		    }
5842 		}
5843 	    }
5844 	}
5845     }
5846 }
5847 
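/* Editorial note with a small sketch (guarded by #if 0, never compiled): the
   effect of the pass above on a compare against a constant.  A branch on
   "r24 > 5" has no direct AVR encoding, so the compare is rewritten to test
   "r24 >= 6": the constant is bumped with gen_int_mode and the condition is
   mapped by avr_normalize_condition.  */
#if 0
static void
avr_reorg_example (void)
{
  gcc_assert (avr_normalize_condition (GT) == GE);
  gcc_assert (INTVAL (gen_int_mode (5 + 1, QImode)) == 6);
}
#endif
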
5848 /* Returns the register number used for a function return value.  */
5849 
5850 int
5851 avr_ret_register (void)
5852 {
5853   return 24;
5854 }
5855 
5856 /* Create an RTX representing the place where a
5857    library function returns a value of mode MODE.  */
5858 
5859 rtx
5860 avr_libcall_value (enum machine_mode mode)
5861 {
5862   int offs = GET_MODE_SIZE (mode);
5863   if (offs < 2)
5864     offs = 2;
5865   return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5866 }
5867 
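/* Editorial sketch (guarded by #if 0, never compiled): which registers the
   rule above picks.  It assumes RET_REGISTER is 24, matching
   avr_ret_register above.  */
#if 0
static void
avr_libcall_value_examples (void)
{
  /* 1-byte values are padded to 2 bytes and returned in r24.  */
  gcc_assert (REGNO (avr_libcall_value (QImode)) == 24);
  /* 4-byte values occupy r22..r25, so the RTX is (reg:SI 22).  */
  gcc_assert (REGNO (avr_libcall_value (SImode)) == 22);
}
#endif
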
5868 /* Create an RTX representing the place where a
5869    function returns a value of data type VALTYPE.  */
5870 
5871 rtx
5872 avr_function_value (const_tree type,
5873 		    const_tree func ATTRIBUTE_UNUSED,
5874 		    bool outgoing ATTRIBUTE_UNUSED)
5875 {
5876   unsigned int offs;
5877 
5878   if (TYPE_MODE (type) != BLKmode)
5879     return avr_libcall_value (TYPE_MODE (type));
5880 
5881   offs = int_size_in_bytes (type);
5882   if (offs < 2)
5883     offs = 2;
5884   if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5885     offs = GET_MODE_SIZE (SImode);
5886   else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5887     offs = GET_MODE_SIZE (DImode);
5888 
5889   return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5890 }
5891 
5892 /* Places additional restrictions on the register class to
5893    use when it is necessary to copy value X into a register
5894    in class CLASS.  */
5895 
5896 enum reg_class
5897 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
5898 {
5899   return rclass;
5900 }
5901 
5902 int
5903 test_hard_reg_class (enum reg_class rclass, rtx x)
5904 {
5905   int regno = true_regnum (x);
5906   if (regno < 0)
5907     return 0;
5908 
5909   if (TEST_HARD_REG_CLASS (rclass, regno))
5910     return 1;
5911 
5912   return 0;
5913 }
5914 
5915 
5916 int
5917 jump_over_one_insn_p (rtx insn, rtx dest)
5918 {
5919   int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5920 		      ? XEXP (dest, 0)
5921 		      : dest);
5922   int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5923   int dest_addr = INSN_ADDRESSES (uid);
5924   return dest_addr - jump_addr == get_attr_length (insn) + 1;
5925 }
5926 
5927 /* Returns 1 if a value of mode MODE can be stored starting with hard
5928    register number REGNO.  On the enhanced core, anything larger than
5929    1 byte must start in an even-numbered register for "movw" to work
5930    (this way we don't have to check for odd registers everywhere).  */
5931 
5932 int
5933 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5934 {
5935   /* NOTE: 8-bit values must not be disallowed for R28 or R29.
5936         Disallowing QI et al. in these regs might lead to code like
5937             (set (subreg:QI (reg:HI 28) n) ...)
5938         which will result in wrong code because reload does not
5939         handle SUBREGs of hard registers like this, see PR46779.
5940         This could be fixed in reload.  However, it appears
5941         that fixing reload is not wanted by the reload maintainers.  */
5942 
5943   /* Any GENERAL_REGS register can hold 8-bit values.  */
5944 
5945   if (GET_MODE_SIZE (mode) == 1)
5946     return 1;
5947 
5948   /* FIXME: Ideally, the following test is not needed.
5949         However, it turned out that it can reduce the number
5950         of spill failures.  The AVR, with its poor endowment of
5951         address registers, is an extreme stress test for reload.  */
5952 
5953   if (GET_MODE_SIZE (mode) >= 4
5954       && regno >= REG_X)
5955     return 0;
5956 
5957   /* All modes larger than 8 bits should start in an even register.  */
5958 
5959   return !(regno & 1);
5960 }
5961 
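/* Editorial sketch (guarded by #if 0, never compiled): a few (REGNO, MODE)
   pairs and what the predicate above says about them.  REG_X (r26) comes
   from avr.h.  */
#if 0
static void
avr_hard_regno_mode_ok_examples (void)
{
  gcc_assert (avr_hard_regno_mode_ok (25, QImode));     /* 8 bits: any reg  */
  gcc_assert (avr_hard_regno_mode_ok (24, HImode));     /* even start: ok   */
  gcc_assert (!avr_hard_regno_mode_ok (25, HImode));    /* odd start: no    */
  gcc_assert (!avr_hard_regno_mode_ok (REG_X, SImode)); /* 4 bytes in X: no */
}
#endif
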
5962 const char *
5963 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5964 {
5965   int tmp;
5966   if (!len)
5967     len = &tmp;
5968 
5969   if (GET_CODE (operands[1]) == CONST_INT)
5970     {
5971       int val = INTVAL (operands[1]);
5972       if ((val & 0xff) == 0)
5973 	{
5974 	  *len = 3;
5975 	  return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5976 		  AS2 (ldi,%2,hi8(%1))       CR_TAB
5977 		  AS2 (mov,%B0,%2));
5978 	}
5979       else if ((val & 0xff00) == 0)
5980 	{
5981 	  *len = 3;
5982 	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5983 		  AS2 (mov,%A0,%2)     CR_TAB
5984 		  AS2 (mov,%B0,__zero_reg__));
5985 	}
5986       else if ((val & 0xff) == ((val & 0xff00) >> 8))
5987 	{
5988 	  *len = 3;
5989 	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5990 		  AS2 (mov,%A0,%2)     CR_TAB
5991 		  AS2 (mov,%B0,%2));
5992 	}
5993     }
5994   *len = 4;
5995   return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5996 	  AS2 (mov,%A0,%2)     CR_TAB
5997 	  AS2 (ldi,%2,hi8(%1)) CR_TAB
5998 	  AS2 (mov,%B0,%2));
5999 }
6000 
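/* Editorial sketch (guarded by #if 0, never compiled): using the routine
   above to load the HImode constant 0x1200 into r25:r24 with r16 as the
   scratch register.  Because the low byte is zero, the first special case
   applies and only three insns are needed:
   mov %A0,__zero_reg__ / ldi %2,hi8(%1) / mov %B0,%2.  */
#if 0
static void
output_reload_inhi_example (void)
{
  rtx ops[3] = { gen_rtx_REG (HImode, 24),   /* %0: destination r25:r24  */
		 GEN_INT (0x1200),           /* %1: the constant         */
		 gen_rtx_REG (QImode, 16) }; /* %2: scratch register     */
  int len;

  output_reload_inhi (NULL_RTX, ops, &len);
  gcc_assert (len == 3);
}
#endif
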
6001 
6002 const char *
6003 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6004 {
6005   rtx src = operands[1];
6006   int cnst = (GET_CODE (src) == CONST_INT);
6007 
6008   if (len)
6009     {
6010       if (cnst)
6011 	*len = 4 + ((INTVAL (src) & 0xff) != 0)
6012 		+ ((INTVAL (src) & 0xff00) != 0)
6013 		+ ((INTVAL (src) & 0xff0000) != 0)
6014 		+ ((INTVAL (src) & 0xff000000) != 0);
6015       else
6016 	*len = 8;
6017 
6018       return "";
6019     }
6020 
6021   if (cnst && ((INTVAL (src) & 0xff) == 0))
6022     output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
6023   else
6024     {
6025       output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
6026       output_asm_insn (AS2 (mov, %A0, %2), operands);
6027     }
6028   if (cnst && ((INTVAL (src) & 0xff00) == 0))
6029     output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
6030   else
6031     {
6032       output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
6033       output_asm_insn (AS2 (mov, %B0, %2), operands);
6034     }
6035   if (cnst && ((INTVAL (src) & 0xff0000) == 0))
6036     output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
6037   else
6038     {
6039       output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
6040       output_asm_insn (AS2 (mov, %C0, %2), operands);
6041     }
6042   if (cnst && ((INTVAL (src) & 0xff000000) == 0))
6043     output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
6044   else
6045     {
6046       output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
6047       output_asm_insn (AS2 (mov, %D0, %2), operands);
6048     }
6049   return "";
6050 }
6051 
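/* Editorial sketch (guarded by #if 0, never compiled): the length computation
   of the routine above for the SImode constant 0x00ff0000.  Bytes A, B and D
   are zero and come from __zero_reg__; only byte C needs an ldi/mov pair, so
   the sequence is 4 + 1 = 5 insns long.  */
#if 0
static void
output_reload_insisf_example (void)
{
  rtx ops[3] = { gen_rtx_REG (SImode, 22),   /* %0: destination r22..r25  */
		 GEN_INT (0x00ff0000),       /* %1: the constant          */
		 gen_rtx_REG (QImode, 16) }; /* %2: scratch register      */
  int len;

  output_reload_insisf (NULL_RTX, ops, &len);
  gcc_assert (len == 5);
}
#endif
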
6052 void
6053 avr_output_bld (rtx operands[], int bit_nr)
6054 {
6055   static char s[] = "bld %A0,0";
6056 
6057   s[5] = 'A' + (bit_nr >> 3);
6058   s[8] = '0' + (bit_nr & 7);
6059   output_asm_insn (s, operands);
6060 }
6061 
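/* Editorial sketch (guarded by #if 0, never compiled): for BIT_NR == 11 the
   template above becomes "bld %B0,3" -- bit 11 sits in byte 'B'
   (bit_nr >> 3 == 1) at position 3 within that byte (bit_nr & 7).  With
   r25:r24 as operand 0 this prints "bld r25,3".  */
#if 0
static void
avr_output_bld_example (void)
{
  rtx ops[1] = { gen_rtx_REG (HImode, 24) };

  avr_output_bld (ops, 11);
}
#endif
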
6062 void
6063 avr_output_addr_vec_elt (FILE *stream, int value)
6064 {
6065   switch_to_section (progmem_section);
6066   if (AVR_HAVE_JMP_CALL)
6067     fprintf (stream, "\t.word gs(.L%d)\n", value);
6068   else
6069     fprintf (stream, "\trjmp .L%d\n", value);
6070 }
6071 
6072 /* Returns true if register REGNO is safe to be allocated as a scratch
6073    register (for a define_peephole2) in the current function.  */
6074 
6075 bool
6076 avr_hard_regno_scratch_ok (unsigned int regno)
6077 {
6078   /* Interrupt functions can only use registers that have already been saved
6079      by the prologue, even if they would normally be call-clobbered.  */
6080 
6081   if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6082       && !df_regs_ever_live_p (regno))
6083     return false;
6084 
6085   /* Don't allow hard registers that might be part of the frame pointer.
6086      Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
6087      and don't care for a frame pointer that spans more than one register.  */
6088 
6089   if ((!reload_completed || frame_pointer_needed)
6090       && (regno == REG_Y || regno == REG_Y + 1))
6091     {
6092       return false;
6093     }
6094 
6095   return true;
6096 }
6097 
6098 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */
6099 
6100 int
6101 avr_hard_regno_rename_ok (unsigned int old_reg,
6102 			  unsigned int new_reg)
6103 {
6104   /* Interrupt functions can only use registers that have already been
6105      saved by the prologue, even if they would normally be
6106      call-clobbered.  */
6107 
6108   if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6109       && !df_regs_ever_live_p (new_reg))
6110     return 0;
6111 
6112   /* Don't allow hard registers that might be part of the frame pointer.
6113      Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
6114      and don't care for a frame pointer that spans more than one register.  */
6115 
6116   if ((!reload_completed || frame_pointer_needed)
6117       && (old_reg == REG_Y || old_reg == REG_Y + 1
6118           || new_reg == REG_Y || new_reg == REG_Y + 1))
6119     {
6120       return 0;
6121     }
6122 
6123   return 1;
6124 }
6125 
6126 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6127    or a memory location in the I/O space (QImode only).
6128 
6129    Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6130    Operand 1: register operand to test, or CONST_INT memory address.
6131    Operand 2: bit number.
6132    Operand 3: label to jump to if the test is true.  */
6133 
6134 const char *
6135 avr_out_sbxx_branch (rtx insn, rtx operands[])
6136 {
6137   enum rtx_code comp = GET_CODE (operands[0]);
6138   int long_jump = (get_attr_length (insn) >= 4);
6139   int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6140 
6141   if (comp == GE)
6142     comp = EQ;
6143   else if (comp == LT)
6144     comp = NE;
6145 
6146   if (reverse)
6147     comp = reverse_condition (comp);
6148 
6149   if (GET_CODE (operands[1]) == CONST_INT)
6150     {
6151       if (INTVAL (operands[1]) < 0x40)
6152 	{
6153 	  if (comp == EQ)
6154 	    output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6155 	  else
6156 	    output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
6157 	}
6158       else
6159 	{
6160 	  output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6161 	  if (comp == EQ)
6162 	    output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6163 	  else
6164 	    output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6165 	}
6166     }
6167   else  /* GET_CODE (operands[1]) == REG */
6168     {
6169       if (GET_MODE (operands[1]) == QImode)
6170 	{
6171 	  if (comp == EQ)
6172 	    output_asm_insn (AS2 (sbrs,%1,%2), operands);
6173 	  else
6174 	    output_asm_insn (AS2 (sbrc,%1,%2), operands);
6175 	}
6176       else  /* HImode or SImode */
6177 	{
6178 	  static char buf[] = "sbrc %A1,0";
6179 	  int bit_nr = INTVAL (operands[2]);
6180 	  buf[3] = (comp == EQ) ? 's' : 'c';
6181 	  buf[6] = 'A' + (bit_nr >> 3);
6182 	  buf[9] = '0' + (bit_nr & 7);
6183 	  output_asm_insn (buf, operands);
6184 	}
6185     }
6186 
6187   if (long_jump)
6188     return (AS1 (rjmp,.+4) CR_TAB
6189 	    AS1 (jmp,%x3));
6190   if (!reverse)
6191     return AS1 (rjmp,%x3);
6192   return "";
6193 }
6194 
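/* Editorial example (output shown for illustration only): for a QImode
   register operand such as r24, bit number 3, a short forward branch to a
   hypothetical label .L5, and a condition of NE (branch taken when the bit
   is set), the routine above emits

	sbrc r24,3
	rjmp .L5

   With EQ the skip becomes "sbrs"; for an I/O address below 0x40 the
   sbis/sbic forms are used instead, and a long jump replaces the final
   "rjmp" with an "rjmp .+4" / "jmp" pair (with the skip condition
   reversed).  */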
6195 /* Worker function for TARGET_ASM_CONSTRUCTOR.  */
6196 
6197 static void
6198 avr_asm_out_ctor (rtx symbol, int priority)
6199 {
6200   fputs ("\t.global __do_global_ctors\n", asm_out_file);
6201   default_ctor_section_asm_out_constructor (symbol, priority);
6202 }
6203 
6204 /* Worker function for TARGET_ASM_DESTRUCTOR.  */
6205 
6206 static void
6207 avr_asm_out_dtor (rtx symbol, int priority)
6208 {
6209   fputs ("\t.global __do_global_dtors\n", asm_out_file);
6210   default_dtor_section_asm_out_destructor (symbol, priority);
6211 }
6212 
6213 /* Worker function for TARGET_RETURN_IN_MEMORY.  */
6214 
6215 static bool
6216 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6217 {
6218   if (TYPE_MODE (type) == BLKmode)
6219     {
6220       HOST_WIDE_INT size = int_size_in_bytes (type);
6221       return (size == -1 || size > 8);
6222     }
6223   else
6224     return false;
6225 }
6226 
6227 /* Worker function for CASE_VALUES_THRESHOLD.  */
6228 
6229 unsigned int avr_case_values_threshold (void)
6230 {
6231   return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6232 }
6233 
6234 #include "gt-avr.h"
6235