xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/config/avr/avr.c (revision d909946ca08dceb44d7d0f22ec9488679695d976)
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2    Copyright (C) 1998-2013 Free Software Foundation, Inc.
3    Contributed by Denis Chertykov (chertykov@gmail.com)
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify
8    it under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3, or (at your option)
10    any later version.
11 
12    GCC is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15    GNU General Public License for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING3.  If not see
19    <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
31 #include "insn-codes.h"
32 #include "flags.h"
33 #include "reload.h"
34 #include "tree.h"
35 #include "output.h"
36 #include "expr.h"
37 #include "c-family/c-common.h"
38 #include "diagnostic-core.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "optabs.h"
43 #include "ggc.h"
44 #include "langhooks.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "target-def.h"
48 #include "params.h"
49 #include "df.h"
50 
/* Maximal allowed offset for an address in the LD command.
   LD/LDD displacements must fit in 6 bits, i.e. 0..63, and the whole
   object of size GET_MODE_SIZE must still be addressable.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))

/* Return true if STR starts with PREFIX and false, otherwise.
   NOTE: PREFIX is evaluated twice; avoid arguments with side effects.  */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))

/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags.  */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
66 
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.

   Fix: the original macro bodies referenced the lower-case identifier
   `sym' instead of the macro parameter SYM, so they only compiled when
   the caller's argument happened to be a variable literally named
   `sym'.  Use the (parenthesized) parameters so any argument
   expression works.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
79 
/* Known address spaces.  The order must be the same as in the respective
   enum from avr.h (or designated initialized must be used).
   Per-row initializers appear to be: address-space id, located-in-flash
   flag, pointer size in bytes, keyword, flash segment number, section
   name -- NOTE(review): confirm field order against avr_addrspace_t in
   avr.h.  */
const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
{
  { ADDR_SPACE_RAM,  0, 2, "", 0, NULL },
  { ADDR_SPACE_FLASH,  1, 2, "__flash",   0, ".progmem.data" },
  { ADDR_SPACE_FLASH1, 1, 2, "__flash1",  1, ".progmem1.data" },
  { ADDR_SPACE_FLASH2, 1, 2, "__flash2",  2, ".progmem2.data" },
  { ADDR_SPACE_FLASH3, 1, 2, "__flash3",  3, ".progmem3.data" },
  { ADDR_SPACE_FLASH4, 1, 2, "__flash4",  4, ".progmem4.data" },
  { ADDR_SPACE_FLASH5, 1, 2, "__flash5",  5, ".progmem5.data" },
  { ADDR_SPACE_MEMX, 1, 3, "__memx",  0, ".progmemx.data" },
};
93 
94 
/* Holding RAM addresses of some SFRs used by the compiler and that
   are unique over all devices in an architecture like 'avr4'.
   All fields are RAM addresses; they are computed in avr_option_override
   as I/O address + avr_current_arch->sfr_offset.  */

typedef struct
{
  /* SREG: The processor status */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM */
  int rampz;

  /* SP: The stack pointer and its low and high byte */
  int sp_l;
  int sp_h;
} avr_addr_t;

/* Singleton instance, filled in by avr_option_override.  */
static avr_addr_t avr_addr;
118 
119 
/* Prototypes for local helper functions.  */

/* Output helpers for QI/HI/SI moves between registers and memory;
   the int* out-parameter receives the instruction length.  */
static const char* out_movqi_r_mr (rtx, rtx[], int*);
static const char* out_movhi_r_mr (rtx, rtx[], int*);
static const char* out_movsi_r_mr (rtx, rtx[], int*);
static const char* out_movqi_mr_r (rtx, rtx[], int*);
static const char* out_movhi_mr_r (rtx, rtx[], int*);
static const char* out_movsi_mr_r (rtx, rtx[], int*);

static int get_sequence_length (rtx insns);
static int sequent_regs_live (void);
static const char *ptrreg_to_str (int);
static const char *cond_string (enum rtx_code);
static int avr_num_arg_regs (enum machine_mode, const_tree);
static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
                                 int, bool);
static void output_reload_in_const (rtx*, rtx, int*, bool);
static struct machine_function * avr_init_machine_status (void);


/* Prototypes for hook implementors if needed before their implementation.  */

static bool avr_rtx_costs (rtx, int, int, int, int*, bool);
143 
144 
/* Allocate registers from r25 to r8 for parameters for function calls.
   NOTE(review): 26 is one past r25, presumably the first argument
   register is computed by subtracting from this value -- confirm in
   the argument-passing hooks.  */
#define FIRST_CUM_REG 26

/* The extern GTY(()) declarations below mark these rtx globals as
   roots for GCC's garbage collector; the plain definitions follow.  */

/* Implicit target register of LPM instruction (R0) */
extern GTY(()) rtx lpm_reg_rtx;
rtx lpm_reg_rtx;

/* (Implicit) address register of LPM instruction (R31:R30 = Z) */
extern GTY(()) rtx lpm_addr_reg_rtx;
rtx lpm_addr_reg_rtx;

/* Temporary register RTX (reg:QI TMP_REGNO) */
extern GTY(()) rtx tmp_reg_rtx;
rtx tmp_reg_rtx;

/* Zeroed register RTX (reg:QI ZERO_REGNO) */
extern GTY(()) rtx zero_reg_rtx;
rtx zero_reg_rtx;

/* RTXs for all general purpose registers as QImode */
extern GTY(()) rtx all_regs_rtx[32];
rtx all_regs_rtx[32];

/* SREG, the processor status */
extern GTY(()) rtx sreg_rtx;
rtx sreg_rtx;

/* RAMP* special function registers */
extern GTY(()) rtx rampd_rtx;
extern GTY(()) rtx rampx_rtx;
extern GTY(()) rtx rampy_rtx;
extern GTY(()) rtx rampz_rtx;
rtx rampd_rtx;
rtx rampx_rtx;
rtx rampy_rtx;
rtx rampz_rtx;

/* RTX containing the strings "" and "e", respectively */
static GTY(()) rtx xstring_empty;
static GTY(()) rtx xstring_e;

/* Current architecture.  */
const avr_arch_t *avr_current_arch;

/* Current device.  */
const avr_mcu_t *avr_current_device;

/* Section to put switch tables in.  */
static GTY(()) section *progmem_swtable_section;

/* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
   or to address space __flash* or __memx.  Only used as singletons inside
   avr_asm_select_section, but it must not be local there because of GTY.  */
static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];

/* Condition for insns/expanders from avr-dimode.md.  */
bool avr_have_dimode = true;

/* To track if code will use .bss and/or .data.  */
bool avr_need_clear_bss_p = false;
bool avr_need_copy_data_p = false;
206 
207 
/* Transform UP into lowercase and write the result to LO.
   You must provide enough space for LO, including room for the
   terminating '\0'.  Return LO.  */

static char*
avr_tolower (char *lo, const char *up)
{
  char *start = lo;

  while (*up)
    *lo++ = TOLOWER (*up++);

  *lo = '\0';

  return start;
}
223 
224 
/* Custom function to count number of set bits in VAL.  */

static inline int
avr_popcount (unsigned int val)
{
  int n_bits;

  /* Kernighan's trick: val &= val - 1 clears the lowest set bit,
     so the loop iterates exactly once per set bit.  */

  for (n_bits = 0; val != 0; n_bits++)
    val &= val - 1;

  return n_bits;
}
240 
241 
242 /* Constraint helper function.  XVAL is a CONST_INT or a CONST_DOUBLE.
243    Return true if the least significant N_BYTES bytes of XVAL all have a
244    popcount in POP_MASK and false, otherwise.  POP_MASK represents a subset
245    of integers which contains an integer N iff bit N of POP_MASK is set.  */
246 
247 bool
248 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
249 {
250   int i;
251 
252   enum machine_mode mode = GET_MODE (xval);
253 
254   if (VOIDmode == mode)
255     mode = SImode;
256 
257   for (i = 0; i < n_bytes; i++)
258     {
259       rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
260       unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
261 
262       if (0 == (pop_mask & (1 << avr_popcount (val8))))
263         return false;
264     }
265 
266   return true;
267 }
268 
269 
270 /* Access some RTX as INT_MODE.  If X is a CONST_FIXED we can get
271    the bit representation of X by "casting" it to CONST_INT.  */
272 
273 rtx
274 avr_to_int_mode (rtx x)
275 {
276   enum machine_mode mode = GET_MODE (x);
277 
278   return VOIDmode == mode
279     ? x
280     : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
281 }
282 
283 
/* Implement `TARGET_OPTION_OVERRIDE'.  */
/* Validate and adjust command-line options, pick the current device and
   architecture, and compute the RAM addresses of the SFRs the backend
   needs.  Runs once, before any function is compiled.  */

static void
avr_option_override (void)
{
  /* On AVR, address 0 is a valid RAM location, so null-pointer
     optimizations would be wrong.  */
  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries to save/restore them around calls
     in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no more able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  if (avr_strict_X)
    flag_caller_saves = 0;

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  avr_current_device = &avr_mcu_types[avr_mcu_index];
  avr_current_arch = &avr_arch_types[avr_current_device->arch];

  /* RAM addresses of some SFRs common to all devices in respective arch.
     The constants are I/O addresses; sfr_offset maps them into the RAM
     address space of the current architecture.  */

  /* SREG: Status Register containing flags like I (global IRQ) */
  avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;

  /* RAMPZ: Address' high part when loading via ELPM */
  avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;

  avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
  avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
  avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
  avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;

  /* SP: Stack Pointer (SP_H:SP_L) */
  avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
  avr_addr.sp_h = avr_addr.sp_l + 1;

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();
}
337 
/* Function to set up the backend function structure.  */
/* Allocate a zero-initialized machine_function for the function being
   compiled; installed as init_machine_status in avr_option_override.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
345 
346 
/* Implement `INIT_EXPANDERS'.  */
/* The function works like a singleton: it (re)creates the global rtx
   objects the backend uses -- per-register QImode REGs, the Z address
   register, MEMs for the SFRs whose addresses avr_option_override
   computed, and the two constant strings.  */

void
avr_init_expanders (void)
{
  int regno;

  for (regno = 0; regno < 32; regno ++)
    all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);

  /* Aliases into all_regs_rtx for the fixed helper registers.  */
  lpm_reg_rtx  = all_regs_rtx[LPM_REGNO];
  tmp_reg_rtx  = all_regs_rtx[TMP_REGNO];
  zero_reg_rtx = all_regs_rtx[ZERO_REGNO];

  lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);

  /* MEM rtxes for the special function registers.  */
  sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
  rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
  rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
  rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
  rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));

  xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
  xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
}
373 
374 
375 /* Implement `REGNO_REG_CLASS'.  */
376 /* Return register class for register R.  */
377 
378 enum reg_class
379 avr_regno_reg_class (int r)
380 {
381   static const enum reg_class reg_class_tab[] =
382     {
383       R0_REG,
384       /* r1 - r15 */
385       NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
386       NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
387       NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
388       NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
389       /* r16 - r23 */
390       SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
391       SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
392       /* r24, r25 */
393       ADDW_REGS, ADDW_REGS,
394       /* X: r26, 27 */
395       POINTER_X_REGS, POINTER_X_REGS,
396       /* Y: r28, r29 */
397       POINTER_Y_REGS, POINTER_Y_REGS,
398       /* Z: r30, r31 */
399       POINTER_Z_REGS, POINTER_Z_REGS,
400       /* SP: SPL, SPH */
401       STACK_REG, STACK_REG
402     };
403 
404   if (r <= 33)
405     return reg_class_tab[r];
406 
407   return ALL_REGS;
408 }
409 
410 
411 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'.  */
412 
413 static bool
414 avr_scalar_mode_supported_p (enum machine_mode mode)
415 {
416   if (ALL_FIXED_POINT_MODE_P (mode))
417     return true;
418 
419   if (PSImode == mode)
420     return true;
421 
422   return default_scalar_mode_supported_p (mode);
423 }
424 
425 
426 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise.  */
427 
428 static bool
429 avr_decl_flash_p (tree decl)
430 {
431   if (TREE_CODE (decl) != VAR_DECL
432       || TREE_TYPE (decl) == error_mark_node)
433     {
434       return false;
435     }
436 
437   return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
438 }
439 
440 
441 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
442    address space and FALSE, otherwise.  */
443 
444 static bool
445 avr_decl_memx_p (tree decl)
446 {
447   if (TREE_CODE (decl) != VAR_DECL
448       || TREE_TYPE (decl) == error_mark_node)
449     {
450       return false;
451     }
452 
453   return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
454 }
455 
456 
457 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise.  */
458 
459 bool
460 avr_mem_flash_p (rtx x)
461 {
462   return (MEM_P (x)
463           && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
464 }
465 
466 
467 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
468    address space and FALSE, otherwise.  */
469 
470 bool
471 avr_mem_memx_p (rtx x)
472 {
473   return (MEM_P (x)
474           && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
475 }
476 
477 
478 /* A helper for the subsequent function attribute used to dig for
479    attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
480 
481 static inline int
482 avr_lookup_function_attribute1 (const_tree func, const char *name)
483 {
484   if (FUNCTION_DECL == TREE_CODE (func))
485     {
486       if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
487         {
488           return true;
489         }
490 
491       func = TREE_TYPE (func);
492     }
493 
494   gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
495               || TREE_CODE (func) == METHOD_TYPE);
496 
497   return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
498 }
499 
/* Return nonzero if FUNC is a naked function, i.e. carries the "naked"
   attribute on its decl or type.  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}

/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
avr_interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}

/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
avr_signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}

/* Return nonzero if FUNC is an OS_task function.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}

/* Return nonzero if FUNC is an OS_main function.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}
541 
542 
/* Implement `TARGET_SET_CURRENT_FUNCTION'.  */
/* Sanity checking for the above function attributes: cache them in
   cfun->machine and diagnose conflicting or suspicious combinations.
   Diagnostics are emitted at most once per function (see
   attributes_checked_p).  */

static void
avr_set_current_function (tree decl)
{
  location_t loc;
  const char *isr;

  if (decl == NULL_TREE
      || current_function_decl == NULL_TREE
      || current_function_decl == error_mark_node
      || ! cfun->machine
      || cfun->machine->attributes_checked_p)
    return;

  loc = DECL_SOURCE_LOCATION (decl);

  cfun->machine->is_naked = avr_naked_function_p (decl);
  cfun->machine->is_signal = avr_signal_function_p (decl);
  cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (decl);

  isr = cfun->machine->is_interrupt ? "interrupt" : "signal";

  /* Too many attributes make no sense as they request conflicting
     features.  */

  if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
      + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
    error_at (loc, "function attributes %qs, %qs and %qs are mutually"
               " exclusive", "OS_task", "OS_main", isr);

  /* 'naked' will hide effects of 'OS_task' and 'OS_main'.  */

  if (cfun->machine->is_naked
      && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
                " no effect on %qs function", "OS_task", "OS_main", "naked");

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
      tree ret = TREE_TYPE (TREE_TYPE (decl));
      const char *name;

      name = DECL_ASSEMBLER_NAME_SET_P (decl)
        ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
        : IDENTIFIER_POINTER (DECL_NAME (decl));

      /* Skip a leading '*' that might still prefix the assembler name,
         e.g. in non-LTO runs.  */

      name = default_strip_name_encoding (name);

      /* Silently ignore 'signal' if 'interrupt' is present.  AVR-LibC started
         using this when it switched from SIGNAL and INTERRUPT to ISR.  */

      if (cfun->machine->is_interrupt)
        cfun->machine->is_signal = 0;

      /* Interrupt handlers must be  void __vector (void)  functions.  */

      if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
        error_at (loc, "%qs function cannot have arguments", isr);

      if (TREE_CODE (ret) != VOID_TYPE)
        error_at (loc, "%qs function cannot return a value", isr);

      /* If the function has the 'signal' or 'interrupt' attribute, ensure
         that the name of the function is "__vector_NN" so as to catch
         when the user misspells the vector name.  */

      if (!STR_PREFIX_P (name, "__vector"))
        warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
                    name, isr);
    }

  /* Don't print the above diagnostics more than once.  */

  cfun->machine->attributes_checked_p = 1;
}
625 
626 
627 /* Implement `ACCUMULATE_OUTGOING_ARGS'.  */
628 
629 int
630 avr_accumulate_outgoing_args (void)
631 {
632   if (!cfun)
633     return TARGET_ACCUMULATE_OUTGOING_ARGS;
634 
635   /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
636         what offset is correct.  In some cases it is relative to
637         virtual_outgoing_args_rtx and in others it is relative to
638         virtual_stack_vars_rtx.  For example code see
639             gcc.c-torture/execute/built-in-setjmp.c
640             gcc.c-torture/execute/builtins/sprintf-chk.c   */
641 
642   return (TARGET_ACCUMULATE_OUTGOING_ARGS
643           && !(cfun->calls_setjmp
644                || cfun->has_nonlocal_label));
645 }
646 
647 
648 /* Report contribution of accumulated outgoing arguments to stack size.  */
649 
650 static inline int
651 avr_outgoing_args_size (void)
652 {
653   return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
654 }
655 
656 
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  The +1 skips the byte at
   the frame pointer itself; accumulated outgoing arguments sit below the
   locals and are added on top.  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
666 
667 
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.
   SET may be NULL if only the count is wanted.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */

  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */

      if (fixed_regs[reg])
        continue;

      /* Save a register if it is used, or if this is a non-leaf
         interrupt/signal handler and the register is call-clobbered
         (a callee could clobber it behind our back).  */

      if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* Don't record frame pointer registers here.  They are treated
                 individually in prologue.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
712 
713 
/* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS'.  */
/* Naked functions get no prologue at all, so the compiler must not
   allocate argument slots for them either.  */

static bool
avr_allocate_stack_slots_for_args (void)
{
  return !cfun->machine->is_naked;
}
721 
722 
723 /* Return true if register FROM can be eliminated via register TO.  */
724 
725 static bool
726 avr_can_eliminate (const int from, const int to)
727 {
728   return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
729           || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
730           || ((from == FRAME_POINTER_REGNUM
731                || from == FRAME_POINTER_REGNUM + 1)
732               && !frame_pointer_needed));
733 }
734 
735 
/* Implement `TARGET_WARN_FUNC_RETURN'.  */
/* Return true if missing-return warnings should apply to DECL.  */

static bool
avr_warn_func_return (tree decl)
{
  /* Naked functions are implemented entirely in assembly, including the
     return sequence, so suppress warnings about this.  */

  return !avr_naked_function_p (decl);
}
746 
747 /* Compute offset between arg_pointer and frame_pointer.  */
748 
749 int
750 avr_initial_elimination_offset (int from, int to)
751 {
752   if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
753     return 0;
754   else
755     {
756       int offset = frame_pointer_needed ? 2 : 0;
757       int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
758 
759       offset += avr_regs_to_save (NULL);
760       return (get_frame_size () + avr_outgoing_args_size()
761               + avr_pc_size + 1 + offset);
762     }
763 }
764 
765 
/* Helper for the function below.  */
/* Build a fresh fixed-point type NODE for MODE.  SAT_P selects the
   saturating variant.  Signedness, integral/fractional bit counts and
   precision are all derived from MODE; alignment is forced to 8 bits
   as on AVR everything is byte-aligned.  */

static void
avr_adjust_type_node (tree *node, enum machine_mode mode, int sat_p)
{
  *node = make_node (FIXED_POINT_TYPE);
  TYPE_SATURATING (*node) = sat_p;
  TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
  TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
  TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
  TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
  TYPE_ALIGN (*node) = 8;
  SET_TYPE_MODE (*node, mode);

  layout_type (*node);
}
782 
783 
/* Implement `TARGET_BUILD_BUILTIN_VA_LIST'.  */
/* Abused as an early hook to rebuild the long long accum type nodes
   before any built-in fixed-point macros are defined; the va_list
   itself is the standard one.  */

static tree
avr_build_builtin_va_list (void)
{
  /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
     This is more appropriate for the 8-bit machine AVR than 128-bit modes.
     The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
     which is auto-generated by genmodes, but the compiler assigns [U]DAmode
     to the long long accum modes instead of the desired [U]TAmode.

     Fix this now, right after node setup in tree.c:build_common_tree_nodes().
     This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
     which built-in defines macros like __ULLACCUM_FBIT__ that are used by
     libgcc to detect IBIT and FBIT.  */

  avr_adjust_type_node (&ta_type_node, TAmode, 0);
  avr_adjust_type_node (&uta_type_node, UTAmode, 0);
  avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
  avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);

  unsigned_long_long_accum_type_node = uta_type_node;
  long_long_accum_type_node = ta_type_node;
  sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
  sat_long_long_accum_type_node = sat_ta_type_node;

  /* Dispatch to the default handler.  */

  return std_build_builtin_va_list ();
}
814 
815 
/* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'.  */
/* Actual start of frame is virtual_stack_vars_rtx this is offset from
   frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating add/sub of offset in nonlocal goto and setjmp.  */

static rtx
avr_builtin_setjmp_frame_value (void)
{
  /* Emit the subtraction explicitly (gen_subhi3) instead of returning a
     MINUS expression, so the value is computed into a fresh register.  */
  rtx xval = gen_reg_rtx (Pmode);
  emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
                         gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
  return xval;
}
830 
831 
/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
   This is return address of function.  COUNT must be 0; TEM is the frame
   base the address is computed against.  */

rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address. Others not supported.  */
  if (count)
     return NULL;

  if (AVR_3_BYTE_PC)
    {
      /* Only the low 2 of the 3 PC bytes are retrievable here.  */
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "%<builtin_return_address%> contains only 2 bytes"
               " of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  /* Rotate by 8 swaps the two bytes of the HImode value -- presumably
     because the return address is stored byte-swapped on the stack;
     NOTE(review): confirm against the call/return conventions.  */
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return  r;
}
858 
859 /* Return 1 if the function epilogue is just a single "ret".  */
860 
861 int
862 avr_simple_epilogue (void)
863 {
864   return (! frame_pointer_needed
865           && get_frame_size () == 0
866           && avr_outgoing_args_size() == 0
867           && avr_regs_to_save (NULL) == 0
868           && ! cfun->machine->is_interrupt
869           && ! cfun->machine->is_signal
870           && ! cfun->machine->is_naked
871           && ! TREE_THIS_VOLATILE (current_function_decl));
872 }
873 
/* This function checks sequence of live registers.
   Returns the length of the run of live call-saved registers usable by
   the __prologue_saves__ helper, or 0 if the live registers do not form
   one contiguous sequence.  live_seq counts all live registers seen;
   cur_seq counts the current unbroken run -- they match only when all
   live registers are consecutive.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq = 0;
  int cur_seq = 0;

  for (reg = 0; reg < 18; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;
        }
    }

  /* Y (r28/r29) participates in the sequence: either both bytes if they
     are live, or unconditionally when used as frame pointer.  */

  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      cur_seq += 2;
      live_seq += 2;
    }
  return (cur_seq == live_seq) ? live_seq : 0;
}
933 
934 /* Obtain the length sequence of insns.  */
935 
936 int
937 get_sequence_length (rtx insns)
938 {
939   rtx insn;
940   int length;
941 
942   for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
943     length += get_attr_length (insn);
944 
945   return length;
946 }
947 
948 
/*  Implement `INCOMING_RETURN_ADDR_RTX'.  */

rtx
avr_incoming_return_addr_rtx (void)
{
  /* The return address is at the top of the stack.  Note that the push
     was via post-decrement, which means the actual address is off by one.  */
  return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
}
958 
959 /*  Helper for expand_prologue.  Emit a push of a byte register.  */
960 
961 static void
962 emit_push_byte (unsigned regno, bool frame_related_p)
963 {
964   rtx mem, reg, insn;
965 
966   mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
967   mem = gen_frame_mem (QImode, mem);
968   reg = gen_rtx_REG (QImode, regno);
969 
970   insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
971   if (frame_related_p)
972     RTX_FRAME_RELATED_P (insn) = 1;
973 
974   cfun->machine->stack_usage++;
975 }
976 
977 
/*  Helper for expand_prologue.  Emit a push of a SFR via tmp_reg.
    SFR is a MEM representing the memory location of the SFR.
    If CLR_P then clear the SFR after the push using zero_reg.
    Mark the emitted insns frame-related when FRAME_RELATED_P.  */

static void
emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
{
  rtx insn;

  gcc_assert (MEM_P (sfr));

  /* IN __tmp_reg__, IO(SFR) */
  insn = emit_move_insn (tmp_reg_rtx, sfr);
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  /* PUSH __tmp_reg__ */
  emit_push_byte (TMP_REGNO, frame_related_p);

  if (clr_p)
    {
      /* OUT IO(SFR), __zero_reg__ */
      insn = emit_move_insn (sfr, const0_rtx);
      if (frame_related_p)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
}
1005 
/* Helper for avr_expand_prologue:  Save the registers in SET and set up
   a frame of SIZE bytes, either by means of the call_prologue_saves
   sequence (with -mcall-prologues) or by explicit pushes plus a stack
   pointer adjustment.  Updates cfun->machine->stack_usage.  */

static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  HOST_WIDE_INT size_max
    = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);

  bool minimize = (TARGET_CALL_PROLOGUES
                   && size < size_max
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET (VOIDmode, (frame_pointer_needed
                                            ? frame_pointer_rtx
                                            : stack_pointer_rtx),
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = 18 - (live_seq - 2);

      /* Emit a REG_CFA_OFFSET note for every byte register saved by
         the prologue_saves sequence, starting at r29 and skipping from
         r28 down to r17.  */
      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? 17 : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
                                                  offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      int reg;

      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /*  Creating a frame can be done by direct manipulation of the
              stack or via the frame pointer.  These two methods are:
                  fp =  sp
                  fp -= size
                  sp =  fp
              or
                  sp -= size
                  fp =  sp    (*)
              The optimum method depends on function type, stack and
              frame size.  To avoid complex logic, both methods are
              tested and the shortest one is selected.

              There is also the case where SIZE != 0 and no frame pointer is
              needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
              In that case, insn (*) is not needed.
              We use the X register as scratch.  This is safe because X
              is call-clobbered.
                 In an interrupt routine, the case of SIZE != 0 together with
              !frame_pointer_needed can only occur if the function is not a
              leaf function and thus X has already been saved.  */

          int irq_state = -1;
          HOST_WIDE_INT size_cfa = size, neg_size;
          rtx fp_plus_insns, fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !crtl->is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /* Cut down size and avoid size = 0 so that we don't run
             into ICE like PR52488 in the remainder.  */

          if (size > size_max)
            {
              /* Don't error so that insane code from newlib still compiles
                 and does not break building newlib.  As PR51345 is implemented
                 now, there are multilib variants with -msp8.

                 If user wants sanity checks he can use -Wstack-usage=
                 or similar options.

                 For CFA we emit the original, non-saturated size so that
                 the generic machinery is aware of the real stack usage and
                 will print the above diagnostic as expected.  */

              size = size_max;
            }

          size = trunc_int_for_mode (size, GET_MODE (my_fp));
          neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
                                                       my_fp, neg_size));

          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp,
                                         plus_constant (Pmode, fp,
                                                        -size_cfa)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
            {
              rtx sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (Pmode, stack_pointer_rtx,
                                                    -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************ Use shortest method  ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          /* For the CFA and stack-usage accounting use the original,
             non-saturated size, see comment above.  */
          cfun->machine->stack_usage += size_cfa;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
1272 
1273 
1274 /*  Output function prologue.  */
1275 
void
avr_expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  /* Overall frame: local frame plus space for outgoing arguments.  */
  size = get_frame_size() + avr_outgoing_args_size();

  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    {
      return;
    }

  avr_regs_to_save (&set);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_push_sfr (sreg_rtx, false, false /* clr */);

      /* Clear zero reg.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);

      /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.
         The epilogue pops them under the same conditions, so the two
         must be kept in sync.  */

      if (AVR_HAVE_RAMPD)
        emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          /* NOTE(review): RAMPZ is only cleared after the push when RAMPD
             is also present -- confirm the rationale (devices without
             RAMPD presumably expect RAMPZ to keep its value).  */
          emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
        }
    }  /* is_interrupt is_signal */

  avr_prologue_setup_frame (size, set);

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
}
1350 
1351 
1352 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'.  */
1353 /* Output summary at end of function prologue.  */
1354 
static void
avr_asm_function_end_prologue (FILE *file)
{
  if (cfun->machine->is_naked)
    {
      fputs ("/* prologue: naked */\n", file);
    }
  else
    {
      if (cfun->machine->is_interrupt)
        {
          fputs ("/* prologue: Interrupt */\n", file);
        }
      else if (cfun->machine->is_signal)
        {
          fputs ("/* prologue: Signal */\n", file);
        }
      else
        fputs ("/* prologue: function */\n", file);
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    fprintf (file, "/* outgoing args size = %d */\n",
             avr_outgoing_args_size());

  fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
                 get_frame_size());
  fprintf (file, "/* stack size = %d */\n",
                 cfun->machine->stack_usage);
  /* Define the .L__stack_usage symbol so that every function provides it.
     NOTE(review): an earlier comment here referred to ".L__stack_offset"
     and adding 1 to the stack usage, but the code emits the raw stack
     usage under the name .L__stack_usage -- the old text did not match
     the code.  */
  fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
}
1388 
1389 
1390 /* Implement `EPILOGUE_USES'.  */
1391 
1392 int
1393 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1394 {
1395   if (reload_completed
1396       && cfun->machine
1397       && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1398     return 1;
1399   return 0;
1400 }
1401 
1402 /*  Helper for avr_expand_epilogue.  Emit a pop of a byte register.  */
1403 
1404 static void
1405 emit_pop_byte (unsigned regno)
1406 {
1407   rtx mem, reg;
1408 
1409   mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1410   mem = gen_frame_mem (QImode, mem);
1411   reg = gen_rtx_REG (QImode, regno);
1412 
1413   emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1414 }
1415 
1416 /*  Output RTL epilogue.  */
1417 
void
avr_expand_epilogue (bool sibcall_p)
{
  /* SIBCALL_P is true when this epilogue precedes a sibling call, in
     which case no return insn is emitted at the end.  */

  int reg;
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  size = get_frame_size() + avr_outgoing_args_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();

  minimize = (TARGET_CALL_PROLOGUES
              && live_seq
              && !isr_p
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main);

  if (minimize
      && (live_seq > 4
          || frame_pointer_needed
          || size))
    {
      /*  Get rid of frame.  */

      if (!frame_pointer_needed)
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      if (size)
        {
          emit_move_insn (frame_pointer_rtx,
                          plus_constant (Pmode, frame_pointer_rtx, size));
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (size)
    {
      /* Try two methods to adjust stack and select shortest.  */

      int irq_state = -1;
      rtx fp, my_fp;
      rtx fp_plus_insns;
      HOST_WIDE_INT size_max;

      gcc_assert (frame_pointer_needed
                  || !isr_p
                  || !crtl->is_leaf);

      fp = my_fp = (frame_pointer_needed
                    ? frame_pointer_rtx
                    : gen_rtx_REG (Pmode, REG_X));

      if (AVR_HAVE_8BIT_SP)
        {
          /* The high byte (r29) does not change:
             Prefer SUBI (1 cycle) over SBIW (2 cycles).  */

          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
        }

      /* For rationale see comment in prologue generation.  */

      size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
      if (size > size_max)
        size = size_max;
      size = trunc_int_for_mode (size, GET_MODE (my_fp));

      /********** Method 1: Adjust fp register  **********/

      start_sequence ();

      if (!frame_pointer_needed)
        emit_move_insn (fp, stack_pointer_rtx);

      emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));

      /* Copy to stack pointer.  */

      if (TARGET_NO_INTERRUPTS)
        irq_state = 0;

      if (AVR_HAVE_8BIT_SP)
        irq_state = 2;

      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
                                 GEN_INT (irq_state)));

      fp_plus_insns = get_insns ();
      end_sequence ();

      /********** Method 2: Adjust Stack pointer  **********/

      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
        {
          rtx sp_plus_insns;

          start_sequence ();

          emit_move_insn (stack_pointer_rtx,
                          plus_constant (Pmode, stack_pointer_rtx, size));

          sp_plus_insns = get_insns ();
          end_sequence ();

          /************ Use shortest method  ************/

          emit_insn (get_sequence_length (sp_plus_insns)
                     < get_sequence_length (fp_plus_insns)
                     ? sp_plus_insns
                     : fp_plus_insns);
        }
      else
        emit_insn (fp_plus_insns);
    } /* size != 0 */

  if (frame_pointer_needed
      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    {
      /* Restore previous frame_pointer.  See avr_expand_prologue for
         rationale for not using pophi.  */

      emit_pop_byte (REG_Y + 1);
      emit_pop_byte (REG_Y);
    }

  /* Restore used registers.  */

  for (reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (isr_p)
    {
      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
         The conditions to restore them must be the same as in prologue,
         and the pops happen in the reverse order of the pushes.  */

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampz_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampy_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampx_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPD)
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampd_rtx, tmp_reg_rtx);
        }

      /* Restore SREG using tmp_reg as scratch.  */

      emit_pop_byte (TMP_REGNO);
      emit_move_insn (sreg_rtx, tmp_reg_rtx);

      /* Restore tmp REG.  */
      emit_pop_byte (TMP_REGNO);

      /* Restore zero REG.  */
      emit_pop_byte (ZERO_REGNO);
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}
1617 
1618 
1619 /* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */
1620 
static void
avr_asm_function_begin_epilogue (FILE *file)
{
  /* Just mark the spot in the assembler output.  */
  fputs ("/* epilogue start */\n", file);
}
1626 
1627 
/* Implement `TARGET_CANNOT_MODIFY_JUMPS_P'.  */
1629 
1630 static bool
1631 avr_cannot_modify_jumps_p (void)
1632 {
1633 
1634   /* Naked Functions must not have any instructions after
1635      their epilogue, see PR42240 */
1636 
1637   if (reload_completed
1638       && cfun->machine
1639       && cfun->machine->is_naked)
1640     {
1641       return true;
1642     }
1643 
1644   return false;
1645 }
1646 
1647 
1648 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'.  */
1649 
static bool
avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
{
  /* FIXME:  Non-generic addresses are not mode-dependent in themselves.
       This hook just serves to hack around PR rtl-optimization/52543 by
       claiming that non-generic addresses were mode-dependent so that
       lower-subreg.c will skip these addresses.  lower-subreg.c sets up fake
       RTXes to probe SET and MEM costs and assumes that MEM is always in the
       generic address space which is not true.  */

  /* The address itself (ADDR) is deliberately ignored; only the
     address space decides the answer.  */
  return !ADDR_SPACE_GENERIC_P (as);
}
1662 
1663 
1664 /* Helper function for `avr_legitimate_address_p'.  */
1665 
1666 static inline bool
1667 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1668                        RTX_CODE outer_code, bool strict)
1669 {
1670   return (REG_P (reg)
1671           && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1672                                                  as, outer_code, UNKNOWN)
1673               || (!strict
1674                   && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1675 }
1676 
1677 
1678 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1679    machine for a memory operand of mode MODE.  */
1680 
static bool
avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  /* Constant addresses are always fine; everything else starts out
     rejected and must prove itself below.  */
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      /* NOTE(review): X is rejected as base for accesses wider than
         4 bytes under strict checking -- presumably because X has no
         displacement addressing for the higher bytes; confirm.  */
      if (strict
          && GET_MODE_SIZE (mode) > 4
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        /* Only "register + non-negative constant" can qualify.  */
        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            /* The offset must be reachable by the LD/LDD family.  */
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                /* Out-of-range frame-pointer offsets are accepted here;
                   NOTE(review): presumably LEGITIMIZE_RELOAD_ADDRESS
                   fixes them up later -- confirm.  */
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  /* Optional -mlog= debug dump of the decision.  */
  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
1764 
1765 
1766 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1767    now only a helper for avr_addr_space_legitimize_address.  */
1768 /* Attempts to replace X with a valid
1769    memory address for an operand of mode MODE  */
1770 
1771 static rtx
1772 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1773 {
1774   bool big_offset_p = false;
1775 
1776   x = oldx;
1777 
1778   if (GET_CODE (oldx) == PLUS
1779       && REG_P (XEXP (oldx, 0)))
1780     {
1781       if (REG_P (XEXP (oldx, 1)))
1782         x = force_reg (GET_MODE (oldx), oldx);
1783       else if (CONST_INT_P (XEXP (oldx, 1)))
1784         {
1785           int offs = INTVAL (XEXP (oldx, 1));
1786           if (frame_pointer_rtx != XEXP (oldx, 0)
1787               && offs > MAX_LD_OFFSET (mode))
1788             {
1789               big_offset_p = true;
1790               x = force_reg (GET_MODE (oldx), oldx);
1791             }
1792         }
1793     }
1794 
1795   if (avr_log.legitimize_address)
1796     {
1797       avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1798 
1799       if (x != oldx)
1800         avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1801     }
1802 
1803   return x;
1804 }
1805 
1806 
1807 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'.  */
1808 /* This will allow register R26/27 to be used where it is no worse than normal
1809    base pointers R28/29 or R30/31.  For example, if base offset is greater
1810    than 63 bytes or for R++ or --R addressing.  */
1811 
1812 rtx
1813 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1814                                int opnum, int type, int addr_type,
1815                                int ind_levels ATTRIBUTE_UNUSED,
1816                                rtx (*mk_memloc)(rtx,int))
1817 {
1818   rtx x = *px;
1819 
1820   if (avr_log.legitimize_reload_address)
1821     avr_edump ("\n%?:%m %r\n", mode, x);
1822 
1823   if (1 && (GET_CODE (x) == POST_INC
1824             || GET_CODE (x) == PRE_DEC))
1825     {
1826       push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1827                    POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1828                    opnum, RELOAD_OTHER);
1829 
1830       if (avr_log.legitimize_reload_address)
1831         avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1832                    POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1833 
1834       return x;
1835     }
1836 
1837   if (GET_CODE (x) == PLUS
1838       && REG_P (XEXP (x, 0))
1839       && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1840       && CONST_INT_P (XEXP (x, 1))
1841       && INTVAL (XEXP (x, 1)) >= 1)
1842     {
1843       bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1844 
1845       if (fit)
1846         {
1847           if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1848             {
1849               int regno = REGNO (XEXP (x, 0));
1850               rtx mem = mk_memloc (x, regno);
1851 
1852               push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1853                            POINTER_REGS, Pmode, VOIDmode, 0, 0,
1854                            1, (enum reload_type) addr_type);
1855 
1856               if (avr_log.legitimize_reload_address)
1857                 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1858                            POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1859 
1860               push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1861                            BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1862                            opnum, (enum reload_type) type);
1863 
1864               if (avr_log.legitimize_reload_address)
1865                 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1866                            BASE_POINTER_REGS, mem, NULL_RTX);
1867 
1868               return x;
1869             }
1870         }
1871       else if (! (frame_pointer_needed
1872                   && XEXP (x, 0) == frame_pointer_rtx))
1873         {
1874           push_reload (x, NULL_RTX, px, NULL,
1875                        POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1876                        opnum, (enum reload_type) type);
1877 
1878           if (avr_log.legitimize_reload_address)
1879             avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1880                        POINTER_REGS, x, NULL_RTX);
1881 
1882           return x;
1883         }
1884     }
1885 
1886   return NULL_RTX;
1887 }
1888 
1889 
1890 /* Implement `TARGET_SECONDARY_RELOAD' */
1891 
static reg_class_t
avr_secondary_reload (bool in_p, rtx x,
                      reg_class_t reload_class ATTRIBUTE_UNUSED,
                      enum machine_mode mode, secondary_reload_info *sri)
{
  /* Only input reloads from non-generic, non-MEMX address spaces need
     special treatment; select the mode-specific reload_in<mode>
     pattern for them.  */
  if (in_p
      && MEM_P (x)
      && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
      && ADDR_SPACE_MEMX != MEM_ADDR_SPACE (x))
    {
      /* For the non-generic 16-bit spaces we need a d-class scratch.  */

      switch (mode)
        {
        default:
          gcc_unreachable();

        case QImode:  sri->icode = CODE_FOR_reload_inqi; break;
        case QQmode:  sri->icode = CODE_FOR_reload_inqq; break;
        case UQQmode: sri->icode = CODE_FOR_reload_inuqq; break;

        case HImode:  sri->icode = CODE_FOR_reload_inhi; break;
        case HQmode:  sri->icode = CODE_FOR_reload_inhq; break;
        case HAmode:  sri->icode = CODE_FOR_reload_inha; break;
        case UHQmode: sri->icode = CODE_FOR_reload_inuhq; break;
        case UHAmode: sri->icode = CODE_FOR_reload_inuha; break;

        case PSImode: sri->icode = CODE_FOR_reload_inpsi; break;

        case SImode:  sri->icode = CODE_FOR_reload_insi; break;
        case SFmode:  sri->icode = CODE_FOR_reload_insf; break;
        case SQmode:  sri->icode = CODE_FOR_reload_insq; break;
        case SAmode:  sri->icode = CODE_FOR_reload_insa; break;
        case USQmode: sri->icode = CODE_FOR_reload_inusq; break;
        case USAmode: sri->icode = CODE_FOR_reload_inusa; break;
        }
    }

  /* No secondary reload register class is ever required directly.  */
  return NO_REGS;
}
1932 
1933 
1934 /* Helper function to print assembler resp. track instruction
1935    sequence lengths.  Always return "".
1936 
1937    If PLEN == NULL:
1938        Output assembler code from template TPL with operands supplied
1939        by OPERANDS.  This is just forwarding to output_asm_insn.
1940 
1941    If PLEN != NULL:
1942        If N_WORDS >= 0  Add N_WORDS to *PLEN.
1943        If N_WORDS < 0   Set *PLEN to -N_WORDS.
1944        Don't output anything.
1945 */
1946 
1947 static const char*
1948 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1949 {
1950   if (NULL == plen)
1951     {
1952       output_asm_insn (tpl, operands);
1953     }
1954   else
1955     {
1956       if (n_words < 0)
1957         *plen = -n_words;
1958       else
1959         *plen += n_words;
1960     }
1961 
1962   return "";
1963 }
1964 
1965 
1966 /* Return a pointer register name as a string.  */
1967 
1968 static const char*
1969 ptrreg_to_str (int regno)
1970 {
1971   switch (regno)
1972     {
1973     case REG_X: return "X";
1974     case REG_Y: return "Y";
1975     case REG_Z: return "Z";
1976     default:
1977       output_operand_lossage ("address operand requires constraint for"
1978                               " X, Y, or Z register");
1979     }
1980   return NULL;
1981 }
1982 
1983 /* Return the condition name as a string.
1984    Used in conditional jump constructing  */
1985 
1986 static const char*
1987 cond_string (enum rtx_code code)
1988 {
1989   switch (code)
1990     {
1991     case NE:
1992       return "ne";
1993     case EQ:
1994       return "eq";
1995     case GE:
1996       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1997         return "pl";
1998       else
1999         return "ge";
2000     case LT:
2001       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2002         return "mi";
2003       else
2004         return "lt";
2005     case GEU:
2006       return "sh";
2007     case LTU:
2008       return "lo";
2009     default:
2010       gcc_unreachable ();
2011     }
2012 
2013   return "";
2014 }
2015 
2016 
2017 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'.  */
2018 /* Output ADDR to FILE as address.  */
2019 
2020 static void
2021 avr_print_operand_address (FILE *file, rtx addr)
2022 {
2023   switch (GET_CODE (addr))
2024     {
2025     case REG:
2026       fprintf (file, ptrreg_to_str (REGNO (addr)));
2027       break;
2028 
2029     case PRE_DEC:
2030       fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2031       break;
2032 
2033     case POST_INC:
2034       fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2035       break;
2036 
2037     default:
2038       if (CONSTANT_ADDRESS_P (addr)
2039           && text_segment_operand (addr, VOIDmode))
2040         {
2041           rtx x = addr;
2042           if (GET_CODE (x) == CONST)
2043             x = XEXP (x, 0);
2044           if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
2045             {
2046               /* Assembler gs() will implant word address.  Make offset
2047                  a byte offset inside gs() for assembler.  This is
2048                  needed because the more logical (constant+gs(sym)) is not
2049                  accepted by gas.  For 128K and smaller devices this is ok.
2050                  For large devices it will create a trampoline to offset
2051                  from symbol which may not be what the user really wanted.  */
2052 
2053               fprintf (file, "gs(");
2054               output_addr_const (file, XEXP (x,0));
2055               fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
2056                        2 * INTVAL (XEXP (x, 1)));
2057               if (AVR_3_BYTE_PC)
2058                 if (warning (0, "pointer offset from symbol maybe incorrect"))
2059                   {
2060                     output_addr_const (stderr, addr);
2061                     fprintf(stderr,"\n");
2062                   }
2063             }
2064           else
2065             {
2066               fprintf (file, "gs(");
2067               output_addr_const (file, addr);
2068               fprintf (file, ")");
2069             }
2070         }
2071       else
2072         output_addr_const (file, addr);
2073     }
2074 }
2075 
2076 
2077 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */
2078 
static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  /* The only punctuation codes avr_print_operand understands are
     '~' (rjmp/rcall prefix) and '!' (eijmp/eicall prefix).  */
  switch (code)
    {
    case '~':
    case '!':
      return true;

    default:
      return false;
    }
}
2084 
2085 
2086 /* Implement `TARGET_PRINT_OPERAND'.  */
2087 /* Output X as assembler operand to file FILE.
2088    For a description of supported %-codes, see top of avr.md.  */
2089 
2090 static void
2091 avr_print_operand (FILE *file, rtx x, int code)
2092 {
2093   int abcd = 0;
2094 
2095   if (code >= 'A' && code <= 'D')
2096     abcd = code - 'A';
2097 
2098   if (code == '~')
2099     {
2100       if (!AVR_HAVE_JMP_CALL)
2101         fputc ('r', file);
2102     }
2103   else if (code == '!')
2104     {
2105       if (AVR_HAVE_EIJMP_EICALL)
2106         fputc ('e', file);
2107     }
2108   else if (code == 't'
2109            || code == 'T')
2110     {
2111       static int t_regno = -1;
2112       static int t_nbits = -1;
2113 
2114       if (REG_P (x) && t_regno < 0 && code == 'T')
2115         {
2116           t_regno = REGNO (x);
2117           t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
2118         }
2119       else if (CONST_INT_P (x) && t_regno >= 0
2120                && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
2121         {
2122           int bpos = INTVAL (x);
2123 
2124           fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
2125           if (code == 'T')
2126             fprintf (file, ",%d", bpos % 8);
2127 
2128           t_regno = -1;
2129         }
2130       else
2131         fatal_insn ("operands to %T/%t must be reg + const_int:", x);
2132     }
2133   else if (REG_P (x))
2134     {
2135       if (x == zero_reg_rtx)
2136         fprintf (file, "__zero_reg__");
2137       else if (code == 'r' && REGNO (x) < 32)
2138         fprintf (file, "%d", (int) REGNO (x));
2139       else
2140         fprintf (file, reg_names[REGNO (x) + abcd]);
2141     }
2142   else if (CONST_INT_P (x))
2143     {
2144       HOST_WIDE_INT ival = INTVAL (x);
2145 
2146       if ('i' != code)
2147         fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
2148       else if (low_io_address_operand (x, VOIDmode)
2149                || high_io_address_operand (x, VOIDmode))
2150         {
2151           if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
2152             fprintf (file, "__RAMPZ__");
2153           else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
2154             fprintf (file, "__RAMPY__");
2155           else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
2156             fprintf (file, "__RAMPX__");
2157           else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
2158             fprintf (file, "__RAMPD__");
2159           else if (AVR_XMEGA && ival == avr_addr.ccp)
2160             fprintf (file, "__CCP__");
2161           else if (ival == avr_addr.sreg)   fprintf (file, "__SREG__");
2162           else if (ival == avr_addr.sp_l)   fprintf (file, "__SP_L__");
2163           else if (ival == avr_addr.sp_h)   fprintf (file, "__SP_H__");
2164           else
2165             {
2166               fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2167                        ival - avr_current_arch->sfr_offset);
2168             }
2169         }
2170       else
2171         fatal_insn ("bad address, not an I/O address:", x);
2172     }
2173   else if (MEM_P (x))
2174     {
2175       rtx addr = XEXP (x, 0);
2176 
2177       if (code == 'm')
2178         {
2179           if (!CONSTANT_P (addr))
2180             fatal_insn ("bad address, not a constant:", addr);
2181           /* Assembler template with m-code is data - not progmem section */
2182           if (text_segment_operand (addr, VOIDmode))
2183             if (warning (0, "accessing data memory with"
2184                          " program memory address"))
2185               {
2186                 output_addr_const (stderr, addr);
2187                 fprintf(stderr,"\n");
2188               }
2189           output_addr_const (file, addr);
2190         }
2191       else if (code == 'i')
2192         {
2193           avr_print_operand (file, addr, 'i');
2194         }
2195       else if (code == 'o')
2196         {
2197           if (GET_CODE (addr) != PLUS)
2198             fatal_insn ("bad address, not (reg+disp):", addr);
2199 
2200           avr_print_operand (file, XEXP (addr, 1), 0);
2201         }
2202       else if (code == 'p' || code == 'r')
2203         {
2204           if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
2205             fatal_insn ("bad address, not post_inc or pre_dec:", addr);
2206 
2207           if (code == 'p')
2208             avr_print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
2209           else
2210             avr_print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
2211         }
2212       else if (GET_CODE (addr) == PLUS)
2213         {
2214           avr_print_operand_address (file, XEXP (addr,0));
2215           if (REGNO (XEXP (addr, 0)) == REG_X)
2216             fatal_insn ("internal compiler error.  Bad address:"
2217                         ,addr);
2218           fputc ('+', file);
2219           avr_print_operand (file, XEXP (addr,1), code);
2220         }
2221       else
2222         avr_print_operand_address (file, addr);
2223     }
2224   else if (code == 'i')
2225     {
2226       fatal_insn ("bad address, not an I/O address:", x);
2227     }
2228   else if (code == 'x')
2229     {
2230       /* Constant progmem address - like used in jmp or call */
2231       if (0 == text_segment_operand (x, VOIDmode))
2232         if (warning (0, "accessing program memory"
2233                      " with data memory address"))
2234           {
2235             output_addr_const (stderr, x);
2236             fprintf(stderr,"\n");
2237           }
2238       /* Use normal symbol for direct address no linker trampoline needed */
2239       output_addr_const (file, x);
2240     }
2241   else if (CONST_FIXED_P (x))
2242     {
2243       HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
2244       if (code != 0)
2245         output_operand_lossage ("Unsupported code '%c' for fixed-point:",
2246                                 code);
2247       fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
2248     }
2249   else if (GET_CODE (x) == CONST_DOUBLE)
2250     {
2251       long val;
2252       REAL_VALUE_TYPE rv;
2253       if (GET_MODE (x) != SFmode)
2254         fatal_insn ("internal compiler error.  Unknown mode:", x);
2255       REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2256       REAL_VALUE_TO_TARGET_SINGLE (rv, val);
2257       fprintf (file, "0x%lx", val);
2258     }
2259   else if (GET_CODE (x) == CONST_STRING)
2260     fputs (XSTR (x, 0), file);
2261   else if (code == 'j')
2262     fputs (cond_string (GET_CODE (x)), file);
2263   else if (code == 'k')
2264     fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2265   else
2266     avr_print_operand_address (file, x);
2267 }
2268 
2269 
2270 /* Worker function for `NOTICE_UPDATE_CC'.  */
2271 /* Update the condition code in the INSN.  */
2272 
void
avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
{
  rtx set;
  enum attr_cc cc = get_attr_cc (insn);

  /* First, map operand-dependent cc attribute values (CC_PLUS, CC_LDI)
     onto one of the plain CC_* values handled by the second switch.  */

  switch (cc)
    {
    default:
      break;

    case CC_PLUS:
    case CC_LDI:
      {
        rtx *op = recog_data.operand;
        int len_dummy, icc;

        /* Extract insn's operands.  */
        extract_constrain_insn_cached (insn);

        switch (cc)
          {
          default:
            gcc_unreachable();

          case CC_PLUS:
            /* avr_out_plus computes the effective cc as a side effect;
               the generated length/output is discarded here.  */
            avr_out_plus (insn, op, &len_dummy, &icc);
            cc = (enum attr_cc) icc;
            break;

          case CC_LDI:

            cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
                  && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
              /* Loading zero-reg with 0 uses CLR and thus clobbers cc0.  */
              ? CC_CLOBBER
              /* Any other "r,rL" combination does not alter cc0.  */
              : CC_NONE;

            break;
          } /* inner switch */

        break;
      }
    } /* outer switch */

  /* Now update cc_status according to the (mapped) cc attribute.  */

  switch (cc)
    {
    default:
      /* Special values like CC_OUT_PLUS from above have been
         mapped to "standard" CC_* values so we never come here.  */

      gcc_unreachable();
      break;

    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      /* Insn sets Z and N from the destination; V is known clear.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_VZN:
      /* Insn like INC, DEC, NEG that set Z,N,V.  We currently don't make use
         of this combination, cf. also PR61055.  */
      CC_STATUS_INIT;
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      /* A compare insn: remember the compared value, not a result.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;
    }
}
2378 
2379 /* Choose mode for jump insn:
2380    1 - relative jump in range -63 <= x <= 62 ;
2381    2 - relative jump in range -2046 <= x <= 2045 ;
2382    3 - absolute jump (only for ATmega[16]03).  */
2383 
2384 int
2385 avr_jump_mode (rtx x, rtx insn)
2386 {
2387   int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2388                                             ? XEXP (x, 0) : x));
2389   int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2390   int jump_distance = cur_addr - dest_addr;
2391 
2392   if (-63 <= jump_distance && jump_distance <= 62)
2393     return 1;
2394   else if (-2046 <= jump_distance && jump_distance <= 2045)
2395     return 2;
2396   else if (AVR_HAVE_JMP_CALL)
2397     return 3;
2398 
2399   return 2;
2400 }
2401 
2402 /* Return an AVR condition jump commands.
2403    X is a comparison RTX.
2404    LEN is a number returned by avr_jump_mode function.
2405    If REVERSE nonzero then condition code in X must be reversed.  */
2406 
const char*
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  switch (cond)
    {
    /* GT/GTU/LE/LEU have no single AVR branch instruction; they are
       synthesized from an equality test plus a GE/LT (resp. SH/LO)
       branch.  LEN selects short branch, rjmp, or jmp sequences.  */
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
	/* V flag unusable: test the sign flag (PL/MI) instead.  */
	return (len == 1 ? ("breq .+2" CR_TAB
			    "brpl %0") :
		len == 2 ? ("breq .+4" CR_TAB
			    "brmi .+2" CR_TAB
			    "rjmp %0") :
		("breq .+6" CR_TAB
		 "brmi .+4" CR_TAB
		 "jmp %0"));

      else
	return (len == 1 ? ("breq .+2" CR_TAB
			    "brge %0") :
		len == 2 ? ("breq .+4" CR_TAB
			    "brlt .+2" CR_TAB
			    "rjmp %0") :
		("breq .+6" CR_TAB
		 "brlt .+4" CR_TAB
		 "jmp %0"));
    case GTU:
      return (len == 1 ? ("breq .+2" CR_TAB
                          "brsh %0") :
              len == 2 ? ("breq .+4" CR_TAB
                          "brlo .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+6" CR_TAB
               "brlo .+4" CR_TAB
               "jmp %0"));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
	return (len == 1 ? ("breq %0" CR_TAB
			    "brmi %0") :
		len == 2 ? ("breq .+2" CR_TAB
			    "brpl .+2" CR_TAB
			    "rjmp %0") :
		("breq .+2" CR_TAB
		 "brpl .+4" CR_TAB
		 "jmp %0"));
      else
	return (len == 1 ? ("breq %0" CR_TAB
			    "brlt %0") :
		len == 2 ? ("breq .+2" CR_TAB
			    "brge .+2" CR_TAB
			    "rjmp %0") :
		("breq .+2" CR_TAB
		 "brge .+4" CR_TAB
		 "jmp %0"));
    case LEU:
      return (len == 1 ? ("breq %0" CR_TAB
                          "brlo %0") :
              len == 2 ? ("breq .+2" CR_TAB
                          "brsh .+2" CR_TAB
			  "rjmp %0") :
              ("breq .+2" CR_TAB
               "brsh .+4" CR_TAB
	       "jmp %0"));
    default:
      /* All remaining conditions map directly onto a single branch
         via the %j (condition) / %k (reversed condition) codes.  */
      if (reverse)
	{
	  switch (len)
	    {
	    case 1:
	      return "br%k1 %0";
	    case 2:
	      return ("br%j1 .+2" CR_TAB
		      "rjmp %0");
	    default:
	      return ("br%j1 .+4" CR_TAB
		      "jmp %0");
	    }
	}
      else
        {
          switch (len)
            {
            case 1:
              return "br%j1 %0";
            case 2:
              return ("br%k1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%k1 .+4" CR_TAB
                      "jmp %0");
            }
        }
    }
  return "";
}
2503 
2504 
2505 /* Worker function for `FINAL_PRESCAN_INSN'.  */
2506 /* Output insn cost for next insn.  */
2507 
2508 void
2509 avr_final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2510                         int num_operands ATTRIBUTE_UNUSED)
2511 {
2512   if (avr_log.rtx_costs)
2513     {
2514       rtx set = single_set (insn);
2515 
2516       if (set)
2517         fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
2518                  set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2519       else
2520         fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d.  */\n",
2521                  rtx_cost (PATTERN (insn), INSN, 0,
2522                            optimize_insn_for_speed_p()));
2523     }
2524 }
2525 
2526 /* Return 0 if undefined, 1 if always true or always false.  */
2527 
2528 int
2529 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2530 {
2531   unsigned int max = (mode == QImode ? 0xff :
2532                       mode == HImode ? 0xffff :
2533                       mode == PSImode ? 0xffffff :
2534                       mode == SImode ? 0xffffffff : 0);
2535   if (max && op && CONST_INT_P (x))
2536     {
2537       if (unsigned_condition (op) != op)
2538         max >>= 1;
2539 
2540       if (max != (INTVAL (x) & max)
2541           && INTVAL (x) != 0xff)
2542         return 1;
2543     }
2544   return 0;
2545 }
2546 
2547 
2548 /* Worker function for `FUNCTION_ARG_REGNO_P'.  */
2549 /* Returns nonzero if REGNO is the number of a hard
2550    register in which function arguments are sometimes passed.  */
2551 
int
avr_function_arg_regno_p(int r)
{
  /* Function arguments may be passed in r8 ... r25.  */
  if (r < 8 || r > 25)
    return 0;

  return 1;
}
2557 
2558 
2559 /* Worker function for `INIT_CUMULATIVE_ARGS'.  */
2560 /* Initializing the variable cum for the state at the beginning
2561    of the argument list.  */
2562 
void
avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                          tree fndecl ATTRIBUTE_UNUSED)
{
  /* Up to 18 bytes of arguments go in registers (cf.
     avr_function_arg_regno_p: r8 ... r25), allocated downwards
     from FIRST_CUM_REG.  */
  cum->nregs = 18;
  cum->regno = FIRST_CUM_REG;
  /* Variadic functions get all arguments on the stack; for libcalls
     (LIBNAME set, no fntype) registers are always used.  */
  if (!libname && stdarg_p (fntype))
    cum->nregs = 0;

  /* Assume the callee may be tail called */

  cfun->machine->sibcall_fails = 0;
}
2576 
2577 /* Returns the number of registers to allocate for a function argument.  */
2578 
2579 static int
2580 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2581 {
2582   int size;
2583 
2584   if (mode == BLKmode)
2585     size = int_size_in_bytes (type);
2586   else
2587     size = GET_MODE_SIZE (mode);
2588 
2589   /* Align all function arguments to start in even-numbered registers.
2590      Odd-sized arguments leave holes above them.  */
2591 
2592   return (size + 1) & ~1;
2593 }
2594 
2595 
2596 /* Implement `TARGET_FUNCTION_ARG'.  */
2597 /* Controls whether a function argument is passed
2598    in a register, and which register.  */
2599 
2600 static rtx
2601 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2602                   const_tree type, bool named ATTRIBUTE_UNUSED)
2603 {
2604   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2605   int bytes = avr_num_arg_regs (mode, type);
2606 
2607   if (cum->nregs && bytes <= cum->nregs)
2608     return gen_rtx_REG (mode, cum->regno - bytes);
2609 
2610   return NULL_RTX;
2611 }
2612 
2613 
2614 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'.  */
2615 /* Update the summarizer variable CUM to advance past an argument
2616    in the argument list.  */
2617 
static void
avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  /* Argument registers are allocated downwards from cum->regno.  */
  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register.  As the original
     contents of these regs has to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called.  */

  if (cum->regno >= 8
      && cum->nregs >= 0
      && !call_used_regs[cum->regno])
    {
      /* FIXME: We ship info on failing tail-call in struct machine_function.
         This uses internals of calls.c:expand_call() and the way args_so_far
         is used.  targetm.function_ok_for_sibcall() needs to be extended to
         pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
         dependent so that such an extension is not wanted.  */

      cfun->machine->sibcall_fails = 1;
    }

  /* Test if all registers needed by the ABI are actually available.  If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call will clobber that fixed register.  See PR45099 for an example.  */

  if (cum->regno >= 8
      && cum->nregs >= 0)
    {
      int regno;

      for (regno = cum->regno; regno < cum->regno + bytes; regno++)
        if (fixed_regs[regno])
          warning (0, "fixed register %s used to pass parameter to function",
                   reg_names[regno]);
    }

  /* Registers exhausted: all further arguments go on the stack; reset
     the counters so the above tests stay well-defined.  */

  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}
2667 
2668 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2669 /* Decide whether we can make a sibling call to a function.  DECL is the
2670    declaration of the function being targeted by the call and EXP is the
2671    CALL_EXPR representing the call.  */
2672 
static bool
avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
{
  tree fntype_callee;

  /* Tail-calling must fail if callee-saved regs are used to pass
     function args.  We must not tail-call when `epilogue_restores'
     is used.  Unfortunately, we cannot tell at this point if that
     actually will happen or not, and we cannot step back from
     tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */

  if (cfun->machine->sibcall_fails
      || TARGET_CALL_PROLOGUES)
    {
      return false;
    }

  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));

  if (decl_callee)
    {
      decl_callee = TREE_TYPE (decl_callee);
    }
  else
    {
      /* Indirect call: strip levels of indirection from the callee
         expression's type until the function/method type is reached.  */
      decl_callee = fntype_callee;

      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
             && METHOD_TYPE != TREE_CODE (decl_callee))
        {
          decl_callee = TREE_TYPE (decl_callee);
        }
    }

  /* Ensure that caller and callee have compatible epilogues */

  if (cfun->machine->is_interrupt
      || cfun->machine->is_signal
      || cfun->machine->is_naked
      || avr_naked_function_p (decl_callee)
      /* FIXME: For OS_task and OS_main, this might be over-conservative.  */
      || (avr_OS_task_function_p (decl_callee)
          != cfun->machine->is_OS_task)
      || (avr_OS_main_function_p (decl_callee)
          != cfun->machine->is_OS_main))
    {
      return false;
    }

  return true;
}
2724 
/***********************************************************************
  Functions for outputting various mov insns for various modes
************************************************************************/
2728 
2729 /* Return true if a value of mode MODE is read from flash by
2730    __load_* function from libgcc.  */
2731 
2732 bool
2733 avr_load_libgcc_p (rtx op)
2734 {
2735   enum machine_mode mode = GET_MODE (op);
2736   int n_bytes = GET_MODE_SIZE (mode);
2737 
2738   return (n_bytes > 2
2739           && !AVR_HAVE_LPMX
2740           && avr_mem_flash_p (op));
2741 }
2742 
2743 /* Return true if a value of mode MODE is read by __xload_* function.  */
2744 
2745 bool
2746 avr_xload_libgcc_p (enum machine_mode mode)
2747 {
2748   int n_bytes = GET_MODE_SIZE (mode);
2749 
2750   return (n_bytes > 1
2751           || avr_current_device->n_flash > 1);
2752 }
2753 
2754 
/* FIXME: This is a hack because secondary reloads don't work as expected.
2756 
2757    Find an unused d-register to be used as scratch in INSN.
2758    EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2759    is a register, skip all possible return values that overlap EXCLUDE.
2760    The policy for the returned register is similar to that of
2761    `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2762    of INSN.
2763 
2764    Return a QImode d-register or NULL_RTX if nothing found.  */
2765 
static rtx
avr_find_unused_d_reg (rtx insn, rtx exclude)
{
  int regno;
  bool isr_p = (avr_interrupt_function_p (current_function_decl)
                || avr_signal_function_p (current_function_decl));

  /* d-registers are r16 ... r31, hence the loop bounds.  */
  for (regno = 16; regno < 32; regno++)
    {
      rtx reg = all_regs_rtx[regno];

      /* Skip registers overlapping EXCLUDE and user-fixed registers.  */
      if ((exclude
           && reg_overlap_mentioned_p (exclude, reg))
          || fixed_regs[regno])
        {
          continue;
        }

      /* Try non-live register */

      if (!df_regs_ever_live_p (regno)
          && (TREE_THIS_VOLATILE (current_function_decl)
              || cfun->machine->is_OS_task
              || cfun->machine->is_OS_main
              || (!isr_p && call_used_regs[regno])))
        {
          return reg;
        }

      /* Any live register can be used if it is unused after.
         Prologue/epilogue will care for it as needed.  */

      if (df_regs_ever_live_p (regno)
          && reg_unused_after (insn, reg))
        {
          return reg;
        }
    }

  return NULL_RTX;
}
2807 
2808 
2809 /* Helper function for the next function in the case where only restricted
2810    version of LPM instruction is available.  */
2811 
static const char*
avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
{
  rtx dest = xop[0];
  rtx addr = xop[1];
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int regno_dest;

  regno_dest = REGNO (dest);

  /* The implicit target register of LPM.  */
  xop[3] = lpm_reg_rtx;

  /* Operand layout (set up by avr_out_lpm): %0 = dest, %1 = addr,
     %2 = Z register pair, %3 = LPM target register, %4 = "" or "e"
     (plain LPM vs. ELPM).  */

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          avr_asm_len ("%4lpm", xop, plen, 1);

          /* Move out of the implicit LPM register unless DEST is it.  */
          if (regno_dest != LPM_REGNO)
            avr_asm_len ("mov %0,%3", xop, plen, 1);

          return "";

        case 2:
          /* DEST overlaps Z: save the low byte on the stack while the
             high byte is fetched, so the address is not clobbered.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm"      CR_TAB
                                "push %3"    CR_TAB
                                "adiw %2,1"  CR_TAB
                                "%4lpm"      CR_TAB
                                "mov %B0,%3" CR_TAB
                                "pop %A0", xop, plen, 6);

          avr_asm_len ("%4lpm"      CR_TAB
                       "mov %A0,%3" CR_TAB
                       "adiw %2,1"  CR_TAB
                       "%4lpm"      CR_TAB
                       "mov %B0,%3", xop, plen, 5);

          /* Restore Z if its value is still needed afterwards.  */
          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,1", xop, plen, 1);

          break; /* 2 */
        }

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      /* Byte 0, then one LPM + ADIW per further byte of DEST.  */
      if (regno_dest == LPM_REGNO)
        avr_asm_len ("%4lpm"      CR_TAB
                     "adiw %2,1", xop, plen, 2);
      else
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %A0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 2)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %B0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 3)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %C0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 4)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %D0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  return "";
}
2904 
2905 
/* If PLEN == NULL: Output instructions to load a value from a memory location
2907    OP[1] in AS1 to register OP[0].
2908    If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2909    Return "".  */
2910 
const char*
avr_out_lpm (rtx insn, rtx *op, int *plen)
{
  /* Operands for avr_asm_len:
     %0 = destination register
     %1 = source address (REG or POST_INC)
     %2 = Z, the LPM address register
     %3 = free upper register for loading the segment value, if any
     %4 = empty string for LPM, switched for ELPM when a segment is set
     %5 = the fixed tmp register
     %6 = RAMPZ address  */
  rtx xop[7];
  rtx dest = op[0];
  rtx src = SET_SRC (single_set (insn));
  rtx addr;
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int segment;
  RTX_CODE code;
  addr_space_t as = MEM_ADDR_SPACE (src);

  if (plen)
    *plen = 0;

  /* Writing into flash is not possible; warn and emit nothing.  */
  if (MEM_P (dest))
    {
      warning (0, "writing to address space %qs not supported",
               avr_addrspace[MEM_ADDR_SPACE (dest)].name);

      return "";
    }

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  xop[0] = dest;
  xop[1] = addr;
  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;
  xop[6] = XEXP (rampz_rtx, 0);

  segment = avr_addrspace[as].segment;

  /* Set RAMPZ as needed.  */

  if (segment)
    {
      xop[4] = GEN_INT (segment);
      xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);

      if (xop[3] != NULL_RTX)
        {
          /* A free upper register can hold the segment for OUT.  */
          avr_asm_len ("ldi %3,%4" CR_TAB
                       "out %i6,%3", xop, plen, 2);
        }
      else if (segment == 1)
        {
          /* No free upper register, but segment 1 can be synthesized
             with CLR/INC in the tmp register.  */
          avr_asm_len ("clr %5" CR_TAB
                       "inc %5" CR_TAB
                       "out %i6,%5", xop, plen, 3);
        }
      else
        {
          /* Worst case: park a Z byte in tmp so LDI has a register,
             then restore it.  */
          avr_asm_len ("mov %5,%2"         CR_TAB
                       "ldi %2,%4"         CR_TAB
                       "out %i6,%2"  CR_TAB
                       "mov %2,%5", xop, plen, 4);
        }

      /* From here on, "%4lpm" expands to the extended (ELPM) form.  */
      xop[4] = xstring_e;

      if (!AVR_HAVE_ELPMX)
        return avr_out_lpm_no_lpmx (insn, xop, plen);
    }
  else if (!AVR_HAVE_LPMX)
    {
      return avr_out_lpm_no_lpmx (insn, xop, plen);
    }

  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);

        case 2:
          /* Destination is Z itself: buffer the low byte in tmp so the
             address survives until the second load.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %B0,%a2" CR_TAB
                                "mov %A0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                           "%4lpm %B0,%a2", xop, plen, 2);

              /* Restore Z if it is still needed after this insn.  */
              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,1", xop, plen, 1);
            }

          break; /* 2 */

        case 3:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+" CR_TAB
                       "%4lpm %C0,%a2", xop, plen, 3);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,2", xop, plen, 1);

          break; /* 3 */

        case 4:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+", xop, plen, 2);

          /* Destination R28..R31 overlaps Z in its upper half: buffer
             the third byte in tmp so Z survives until the last load.  */
          if (REGNO (dest) == REG_Z - 2)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %C0,%a2"          CR_TAB
                                "mov %D0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
                           "%4lpm %D0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,3", xop, plen, 1);
            }

          break; /* 4 */
        } /* n_bytes */

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      avr_asm_len                    ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2)  avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3)  avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4)  avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
    {
      /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM.  */

      xop[0] = zero_reg_rtx;
      avr_asm_len ("out %i6,%0", xop, plen, 1);
    }

  return "";
}
3078 
3079 
3080 /* Worker function for xload_8 insn.  */
3081 
3082 const char*
3083 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
3084 {
3085   rtx xop[4];
3086 
3087   xop[0] = op[0];
3088   xop[1] = op[1];
3089   xop[2] = lpm_addr_reg_rtx;
3090   xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
3091 
3092   if (plen)
3093     *plen = 0;
3094 
3095   avr_asm_len ("sbrc %1,7" CR_TAB
3096                "ld %3,%a2" CR_TAB
3097                "sbrs %1,7", xop, plen, 3);
3098 
3099   avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);
3100 
3101   if (REGNO (xop[0]) != REGNO (xop[3]))
3102     avr_asm_len ("mov %0,%3", xop, plen, 1);
3103 
3104   return "";
3105 }
3106 
3107 
3108 const char*
3109 output_movqi (rtx insn, rtx operands[], int *plen)
3110 {
3111   rtx dest = operands[0];
3112   rtx src = operands[1];
3113 
3114   if (avr_mem_flash_p (src)
3115       || avr_mem_flash_p (dest))
3116     {
3117       return avr_out_lpm (insn, operands, plen);
3118     }
3119 
3120   gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));
3121 
3122   if (REG_P (dest))
3123     {
3124       if (REG_P (src)) /* mov r,r */
3125         {
3126           if (test_hard_reg_class (STACK_REG, dest))
3127             return avr_asm_len ("out %0,%1", operands, plen, -1);
3128           else if (test_hard_reg_class (STACK_REG, src))
3129             return avr_asm_len ("in %0,%1", operands, plen, -1);
3130 
3131           return avr_asm_len ("mov %0,%1", operands, plen, -1);
3132         }
3133       else if (CONSTANT_P (src))
3134         {
3135           output_reload_in_const (operands, NULL_RTX, plen, false);
3136           return "";
3137         }
3138       else if (MEM_P (src))
3139         return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
3140     }
3141   else if (MEM_P (dest))
3142     {
3143       rtx xop[2];
3144 
3145       xop[0] = dest;
3146       xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3147 
3148       return out_movqi_mr_r (insn, xop, plen);
3149     }
3150 
3151   return "";
3152 }
3153 
3154 
3155 const char *
3156 output_movhi (rtx insn, rtx xop[], int *plen)
3157 {
3158   rtx dest = xop[0];
3159   rtx src = xop[1];
3160 
3161   gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
3162 
3163   if (avr_mem_flash_p (src)
3164       || avr_mem_flash_p (dest))
3165     {
3166       return avr_out_lpm (insn, xop, plen);
3167     }
3168 
3169   gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));
3170 
3171   if (REG_P (dest))
3172     {
3173       if (REG_P (src)) /* mov r,r */
3174         {
3175           if (test_hard_reg_class (STACK_REG, dest))
3176             {
3177               if (AVR_HAVE_8BIT_SP)
3178                 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
3179 
3180               if (AVR_XMEGA)
3181                 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
3182                                     "out __SP_H__,%B1", xop, plen, -2);
3183 
3184               /* Use simple load of SP if no interrupts are  used.  */
3185 
3186               return TARGET_NO_INTERRUPTS
3187                 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
3188                                "out __SP_L__,%A1", xop, plen, -2)
3189                 : avr_asm_len ("in __tmp_reg__,__SREG__"  CR_TAB
3190                                "cli"                      CR_TAB
3191                                "out __SP_H__,%B1"         CR_TAB
3192                                "out __SREG__,__tmp_reg__" CR_TAB
3193                                "out __SP_L__,%A1", xop, plen, -5);
3194             }
3195           else if (test_hard_reg_class (STACK_REG, src))
3196             {
3197               return !AVR_HAVE_SPH
3198                 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
3199                                "clr %B0", xop, plen, -2)
3200 
3201                 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
3202                                "in %B0,__SP_H__", xop, plen, -2);
3203             }
3204 
3205           return AVR_HAVE_MOVW
3206             ? avr_asm_len ("movw %0,%1", xop, plen, -1)
3207 
3208             : avr_asm_len ("mov %A0,%A1" CR_TAB
3209                            "mov %B0,%B1", xop, plen, -2);
3210         } /* REG_P (src) */
3211       else if (CONSTANT_P (src))
3212         {
3213           return output_reload_inhi (xop, NULL, plen);
3214         }
3215       else if (MEM_P (src))
3216         {
3217           return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
3218         }
3219     }
3220   else if (MEM_P (dest))
3221     {
3222       rtx xop[2];
3223 
3224       xop[0] = dest;
3225       xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3226 
3227       return out_movhi_mr_r (insn, xop, plen);
3228     }
3229 
3230   fatal_insn ("invalid insn:", insn);
3231 
3232   return "";
3233 }
3234 
/* Output instructions to load the byte at memory source OP[1] into
   register OP[0].  If PLEN is non-NULL, don't output anything; store
   the length in words in *PLEN.  Returns "" or a template string.  */

static const char*
out_movqi_r_mr (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* I/O addresses may use IN when optimizing; otherwise plain LDS.  */
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("in %0,%i1", op, plen, -1)
        : avr_asm_len ("lds %0,%m1", op, plen, -2);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      /* Displacement exceeds LDD range: only Y may be adjusted here.  */
      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
        {
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Within ADIW range: bump Y, LDD with max offset, restore Y.  */
          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
                                "ldd %0,Y+63"     CR_TAB
                                "sbiw r28,%o1-63", op, plen, -3);

          /* Otherwise add and subtract the full 16-bit displacement.  */
          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %0,Y"            CR_TAB
                              "subi r28,lo8(%o1)"  CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal optimizing options.  */

          /* X has no displacement addressing: adjust X, load, and
             restore X afterwards if it is still needed.  */
          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %0,X", op, plen, -2);

          if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
              && !reg_unused_after (insn, XEXP (x,0)))
            {
              avr_asm_len ("sbiw r26,%o1", op, plen, 1);
            }

          return "";
        }

      return avr_asm_len ("ldd %0,%1", op, plen, -1);
    }

  return avr_asm_len ("ld %0,%1", op, plen, -1);
}
3294 
/* Output instructions to load the 2-byte value at memory source OP[1]
   into register OP[0].  If PLEN is non-NULL, don't output anything;
   store the length in words in *PLEN.  Returns "" or a template.  */

static const char*
out_movhi_r_mr (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);

  if (reg_base > 0)
    {
      /* Destination overlaps the base register: buffer the low byte
         in tmp so the address survives the first load.  */
      if (reg_dest == reg_base)         /* R = (R) */
        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                            "ld %B0,%1"          CR_TAB
                            "mov %A0,__tmp_reg__", op, plen, -3);

      if (reg_base != REG_X)
        return avr_asm_len ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1", op, plen, -2);

      /* X has no LDD: post-increment, then restore X if still live.  */
      avr_asm_len ("ld %A0,X+" CR_TAB
                   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
        avr_asm_len ("sbiw r26,1", op, plen, 1);

      return "";
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      /* Displacement exceeds LDD range: only Y may be adjusted here.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Within ADIW range: bump Y and restore; otherwise add and
             subtract the full 16-bit displacement.  */
          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
                           "ldd %A0,Y+62"    CR_TAB
                           "ldd %B0,Y+63"    CR_TAB
                           "sbiw r28,%o1-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                           "sbci r29,hi8(-%o1)" CR_TAB
                           "ld %A0,Y"           CR_TAB
                           "ldd %B0,Y+1"        CR_TAB
                           "subi r28,lo8(%o1)"  CR_TAB
                           "sbci r29,hi8(%o1)", op, plen, -6);
        }

      /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
         it but I have this situation with extremal
         optimization options.  */

      if (reg_base == REG_X)
        return reg_base == reg_dest
          ? avr_asm_len ("adiw r26,%o1"      CR_TAB
                         "ld __tmp_reg__,X+" CR_TAB
                         "ld %B0,X"          CR_TAB
                         "mov %A0,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld %A0,X+"    CR_TAB
                         "ld %B0,X"     CR_TAB
                         "sbiw r26,%o1+1", op, plen, -4);

      /* Destination overlaps the base: go through tmp as above.  */
      return reg_base == reg_dest
        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
                       "ldd %B0,%B1"         CR_TAB
                       "mov %A0,__tmp_reg__", op, plen, -3)

        : avr_asm_len ("ldd %A0,%A1" CR_TAB
                       "ldd %B0,%B1", op, plen, -2);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      /* Non-volatile: the two pre-decrements read high byte first.  */
      if (!mem_volatile_p)
        return avr_asm_len ("ld %B0,%1" CR_TAB
                            "ld %A0,%1", op, plen, -2);

      /* Volatile: pre-adjust the pointer and read low byte first.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2"  CR_TAB
                       "ld %A0,X+"   CR_TAB
                       "ld %B0,X"    CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r1,2"  CR_TAB
                       "ld %A0,%p1"  CR_TAB
                       "ldd %B0,%p1+1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1"  CR_TAB
                          "ld %B0,%1", op, plen, -2);
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* I/O addresses may use IN when optimizing; otherwise LDS.  */
      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("in %A0,%i1" CR_TAB
                       "in %B0,%i1+1", op, plen, -2)

        : avr_asm_len ("lds %A0,%m1" CR_TAB
                       "lds %B0,%m1+1", op, plen, -4);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3414 
/* Output instructions to load the 4-byte value at memory source OP[1]
   into register OP[0].  If L is non-NULL, don't output anything; store
   the length in words in *L.  Returns the assembler template.  */

static const char*
out_movsi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
	    /* "ld r26,-X" is undefined */
	    return *l=7, ("adiw r26,3"        CR_TAB
			  "ld r29,X"          CR_TAB
			  "ld r28,-X"         CR_TAB
			  "ld __tmp_reg__,-X" CR_TAB
			  "sbiw r26,1"        CR_TAB
			  "ld r26,X"          CR_TAB
			  "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            /* Destination R24..R27 overlaps X in its upper half:
               buffer the third byte in tmp so X survives.  */
            return *l=5, ("ld %A0,X+"          CR_TAB
                          "ld %B0,X+"          CR_TAB
                          "ld __tmp_reg__,X+"  CR_TAB
                          "ld %D0,X"           CR_TAB
                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            return  *l=4, ("ld %A0,X+"  CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X+" CR_TAB
                           "ld %D0,X");
          else
            /* X is still needed afterwards: restore it.  */
            return  *l=5, ("ld %A0,X+"  CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X+" CR_TAB
                           "ld %D0,X"  CR_TAB
                           "sbiw r26,3");
        }
      else
        {
          if (reg_dest == reg_base)
            /* Destination equals base: load top-down, buffering the
               second byte in tmp so the base survives to the end.  */
            return *l=5, ("ldd %D0,%1+3" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd __tmp_reg__,%1+1"  CR_TAB
                          "ld %A0,%1"  CR_TAB
                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            /* Upper half of destination overlaps the base: buffer the
               third byte in tmp.  */
            return *l=5, ("ld %A0,%1"             CR_TAB
                          "ldd %B0,%1+1"          CR_TAB
                          "ldd __tmp_reg__,%1+2"  CR_TAB
                          "ldd %D0,%1+3"          CR_TAB
                          "mov %C0,__tmp_reg__");
          else
            return *l=4, ("ld %A0,%1"    CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd %D0,%1+3");
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      /* Displacement exceeds LDD range: only Y may be adjusted here.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
	{
	  if (REGNO (XEXP (base, 0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
	    return *l = 6, ("adiw r28,%o1-60" CR_TAB
			    "ldd %A0,Y+60"    CR_TAB
			    "ldd %B0,Y+61"    CR_TAB
			    "ldd %C0,Y+62"    CR_TAB
			    "ldd %D0,Y+63"    CR_TAB
			    "sbiw r28,%o1-60");

	  return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
			  "sbci r29,hi8(-%o1)" CR_TAB
			  "ld %A0,Y"           CR_TAB
			  "ldd %B0,Y+1"        CR_TAB
			  "ldd %C0,Y+2"        CR_TAB
			  "ldd %D0,Y+3"        CR_TAB
			  "subi r28,lo8(%o1)"  CR_TAB
			  "sbci r29,hi8(%o1)");
	}

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
	{
	  /* R = (X + d) */
	  if (reg_dest == REG_X)
	    {
	      *l = 7;
	      /* "ld r26,-X" is undefined */
	      return ("adiw r26,%o1+3"    CR_TAB
		      "ld r29,X"          CR_TAB
		      "ld r28,-X"         CR_TAB
		      "ld __tmp_reg__,-X" CR_TAB
		      "sbiw r26,1"        CR_TAB
		      "ld r26,X"          CR_TAB
		      "mov r27,__tmp_reg__");
	    }
	  *l = 6;
	  if (reg_dest == REG_X - 2)
	    return ("adiw r26,%o1"      CR_TAB
		    "ld r24,X+"         CR_TAB
		    "ld r25,X+"         CR_TAB
		    "ld __tmp_reg__,X+" CR_TAB
		    "ld r27,X"          CR_TAB
		    "mov r26,__tmp_reg__");

	  return ("adiw r26,%o1" CR_TAB
		  "ld %A0,X+"    CR_TAB
		  "ld %B0,X+"    CR_TAB
		  "ld %C0,X+"    CR_TAB
		  "ld %D0,X"     CR_TAB
		  "sbiw r26,%o1+3");
	}
      /* Overlap cases analogous to the plain (R) addressing above.  */
      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1"          CR_TAB
                      "ldd %C0,%C1"          CR_TAB
                      "ldd __tmp_reg__,%B1"  CR_TAB
                      "ldd %A0,%A1"          CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1"          CR_TAB
                      "ldd %B0,%B1"          CR_TAB
                      "ldd __tmp_reg__,%C1"  CR_TAB
                      "ldd %D0,%D1"          CR_TAB
                      "mov %C0,__tmp_reg__");
      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB
                    "ldd %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB
		  "ld %C0,%1" CR_TAB
		  "ld %B0,%1" CR_TAB
		  "ld %A0,%1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB
		  "ld %B0,%1" CR_TAB
		  "ld %C0,%1" CR_TAB
		  "ld %D0,%1");
  else if (CONSTANT_ADDRESS_P (base))
    return *l=8, ("lds %A0,%m1"   CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB
                  "lds %D0,%m1+3");

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3575 
/* Output instructions to store the 4-byte register OP[1] to memory
   destination OP[0].  If L is non-NULL, don't output anything; store
   the length in words in *L.  Returns the assembler template.  */

static const char*
out_movsi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    return *l=8,("sts %m0,%A1" CR_TAB
                 "sts %m0+1,%B1" CR_TAB
                 "sts %m0+2,%C1" CR_TAB
                 "sts %m0+3,%D1");
  if (reg_base > 0)                 /* (r) */
    {
      if (reg_base == REG_X)                /* (R26) */
        {
          if (reg_src == REG_X)
            {
	      /* "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
		return *l=6, ("mov __tmp_reg__,r27" CR_TAB
			      "st X,r26"            CR_TAB
			      "adiw r26,1"          CR_TAB
			      "st X+,__tmp_reg__"   CR_TAB
			      "st X+,r28"           CR_TAB
			      "st X,r29");
              else
                return *l=7, ("mov __tmp_reg__,r27" CR_TAB
			      "st X,r26"            CR_TAB
			      "adiw r26,1"          CR_TAB
			      "st X+,__tmp_reg__"   CR_TAB
			      "st X+,r28"           CR_TAB
			      "st X,r29"            CR_TAB
			      "sbiw r26,3");
            }
          else if (reg_base == reg_src + 2)
            {
              /* Upper half of the source overlaps X: park the C/D bytes
                 in the fixed registers first, then clear __zero_reg__
                 again afterwards.  */
              if (reg_unused_after (insn, base))
                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1"  CR_TAB
                              "st %0+,%A1"           CR_TAB
                              "st %0+,%B1"           CR_TAB
                              "st %0+,__zero_reg__"  CR_TAB
                              "st %0,__tmp_reg__"    CR_TAB
                              "clr __zero_reg__");
              else
                return *l=8, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1"  CR_TAB
                              "st %0+,%A1"           CR_TAB
                              "st %0+,%B1"           CR_TAB
                              "st %0+,__zero_reg__"  CR_TAB
                              "st %0,__tmp_reg__"    CR_TAB
                              "clr __zero_reg__"     CR_TAB
                              "sbiw r26,3");
            }
          return *l=5, ("st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,%C1" CR_TAB
                        "st %0,%D1"  CR_TAB
                        "sbiw r26,3");
        }
      else
        return *l=4, ("st %0,%A1"    CR_TAB
		      "std %0+1,%B1" CR_TAB
		      "std %0+2,%C1" CR_TAB
		      "std %0+3,%D1");
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      /* Displacement exceeds STD range: only Y may be adjusted here.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
	{
	  if (reg_base != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 6, ("adiw r28,%o0-60" CR_TAB
			    "std Y+60,%A1"    CR_TAB
			    "std Y+61,%B1"    CR_TAB
			    "std Y+62,%C1"    CR_TAB
			    "std Y+63,%D1"    CR_TAB
			    "sbiw r28,%o0-60");

	  return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
			  "sbci r29,hi8(-%o0)" CR_TAB
			  "st Y,%A1"           CR_TAB
			  "std Y+1,%B1"        CR_TAB
			  "std Y+2,%C1"        CR_TAB
			  "std Y+3,%D1"        CR_TAB
			  "subi r28,lo8(%o0)"  CR_TAB
			  "sbci r29,hi8(%o0)");
	}
      if (reg_base == REG_X)
	{
	  /* (X + d) = R */
	  if (reg_src == REG_X)
	    {
	      /* Source is X itself: park its bytes in tmp/zero first.  */
	      *l = 9;
	      return ("mov __tmp_reg__,r26"  CR_TAB
		      "mov __zero_reg__,r27" CR_TAB
		      "adiw r26,%o0"         CR_TAB
		      "st X+,__tmp_reg__"    CR_TAB
		      "st X+,__zero_reg__"   CR_TAB
		      "st X+,r28"            CR_TAB
		      "st X,r29"             CR_TAB
		      "clr __zero_reg__"     CR_TAB
		      "sbiw r26,%o0+3");
	    }
	  else if (reg_src == REG_X - 2)
	    {
	      /* Source R24..R27 overlaps X in its upper half.  */
	      *l = 9;
	      return ("mov __tmp_reg__,r26"  CR_TAB
		      "mov __zero_reg__,r27" CR_TAB
		      "adiw r26,%o0"         CR_TAB
		      "st X+,r24"            CR_TAB
		      "st X+,r25"            CR_TAB
		      "st X+,__tmp_reg__"    CR_TAB
		      "st X,__zero_reg__"    CR_TAB
		      "clr __zero_reg__"     CR_TAB
		      "sbiw r26,%o0+3");
	    }
	  *l = 6;
	  return ("adiw r26,%o0" CR_TAB
		  "st X+,%A1"    CR_TAB
		  "st X+,%B1"    CR_TAB
		  "st X+,%C1"    CR_TAB
		  "st X,%D1"     CR_TAB
		  "sbiw r26,%o0+3");
	}
      return *l=4, ("std %A0,%A1" CR_TAB
		    "std %B0,%B1" CR_TAB
		    "std %C0,%C1" CR_TAB
		    "std %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("st %0,%D1" CR_TAB
		  "st %0,%C1" CR_TAB
		  "st %0,%B1" CR_TAB
		  "st %0,%A1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("st %0,%A1" CR_TAB
		  "st %0,%B1" CR_TAB
		  "st %0,%C1" CR_TAB
		  "st %0,%D1");
  fatal_insn ("unknown move insn:",insn);
  return "";
}
3730 
/* Output instruction sequence for a 4-byte (SImode/SFmode) move and
   return "".  OPERANDS[0] is the destination, OPERANDS[1] the source.
   If L is non-NULL, don't output anything; store the length in words
   in *L.  */

const char *
output_movsisf (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  /* Any access to flash address space goes through the LPM worker.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, real_l);
    }

  if (!l)
    l = &dummy;

  gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));
  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
	{
	  /* Copy high-to-low when the destination register number is
	     greater, so overlapping registers are not clobbered.  */
	  if (true_regnum (dest) > true_regnum (src))
	    {
	      if (AVR_HAVE_MOVW)
		{
		  *l = 2;
		  return ("movw %C0,%C1" CR_TAB
			  "movw %A0,%A1");
		}
	      *l = 4;
	      return ("mov %D0,%D1" CR_TAB
		      "mov %C0,%C1" CR_TAB
		      "mov %B0,%B1" CR_TAB
		      "mov %A0,%A1");
	    }
	  else
	    {
	      if (AVR_HAVE_MOVW)
		{
		  *l = 2;
		  return ("movw %A0,%A1" CR_TAB
			  "movw %C0,%C1");
		}
	      *l = 4;
	      return ("mov %A0,%A1" CR_TAB
		      "mov %B0,%B1" CR_TAB
		      "mov %C0,%C1" CR_TAB
		      "mov %D0,%D1");
	    }
	}
      else if (CONSTANT_P (src))
	{
          return output_reload_insisf (operands, NULL_RTX, real_l);
        }
      else if (MEM_P (src))
	return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      const char *templ;

      /* Store constant 0 from the fixed zero register.  */
      if (src == CONST0_RTX (GET_MODE (dest)))
	  operands[1] = zero_reg_rtx;

      templ = out_movsi_mr_r (insn, operands, real_l);

      /* OPERANDS[1] was temporarily replaced above, so output here
	 (when code, not length, is wanted) before restoring it.  */
      if (!real_l)
	output_asm_insn (templ, operands);

      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
3807 
3808 
/* Handle loads of 24-bit types from memory to register.

   INSN is the load insn, OP[0] the register destination and OP[1] the
   memory source, whose address may be a plain register, reg+disp,
   pre-decrement, post-increment or a constant address.

   PLEN == NULL:  Output instructions.
   PLEN != NULL:  Set *PLEN to the length of the sequence (in words)
                  and don't output anything.  Return "".  */

static const char*
avr_out_load_psi (rtx insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base > 0)
    {
      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined: juggle the bytes through
               R28 and __tmp_reg__ so that R26/R27 are overwritten
               only after they are no longer needed as the address.  */
            return avr_asm_len ("adiw r26,2"        CR_TAB
                                "ld r28,X"          CR_TAB
                                "ld __tmp_reg__,-X" CR_TAB
                                "sbiw r26,1"        CR_TAB
                                "ld r26,X"          CR_TAB
                                "mov r27,__tmp_reg__", op, plen, -6);
          else
            {
              avr_asm_len ("ld %A0,X+" CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X", op, plen, -3);

              /* Restore X (post-incremented by 2 above) unless it is
                 unused after this insn or the destination overlaps X
                 (reg_dest == R24 means dest occupies R24..R26).  */
              if (reg_dest != REG_X - 2
                  && !reg_unused_after (insn, base))
                {
                  avr_asm_len ("sbiw r26,2", op, plen, 1);
                }

              return "";
            }
        }
      else /* reg_base != REG_X */
        {
          if (reg_dest == reg_base)
            /* Destination overlaps the base register: load downwards
               and park the middle byte in __tmp_reg__ until the base
               is no longer needed for addressing.  */
            return avr_asm_len ("ldd %C0,%1+2"          CR_TAB
                                "ldd __tmp_reg__,%1+1"  CR_TAB
                                "ld  %A0,%1"            CR_TAB
                                "mov %B0,__tmp_reg__", op, plen, -4);
          else
            return avr_asm_len ("ld  %A0,%1"    CR_TAB
                                "ldd %B0,%1+1"  CR_TAB
                                "ldd %C0,%1+2", op, plen, -3);
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      /* Displacement does not fit LDD's 0..63 offset range: adjust
         the base register (must be Y) around the access.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
                                "ldd %A0,Y+61"    CR_TAB
                                "ldd %B0,Y+62"    CR_TAB
                                "ldd %C0,Y+63"    CR_TAB
                                "sbiw r28,%o1-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld  %A0,Y"           CR_TAB
                              "ldd %B0,Y+1"        CR_TAB
                              "ldd %C0,Y+2"        CR_TAB
                              "subi r28,lo8(%o1)"  CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -7);
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) -- X has no displacement addressing, so
             adjust X around the access.  */
          if (reg_dest == REG_X)
            {
              /* "ld r26,-X" is undefined */
              return avr_asm_len ("adiw r26,%o1+2"     CR_TAB
                                  "ld  r28,X"          CR_TAB
                                  "ld  __tmp_reg__,-X" CR_TAB
                                  "sbiw r26,1"         CR_TAB
                                  "ld  r26,X"          CR_TAB
                                  "mov r27,__tmp_reg__", op, plen, -6);
            }

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %A0,X+"    CR_TAB
                       "ld %B0,X+"    CR_TAB
                       "ld %C0,X", op, plen, -4);

          /* Restore X unless it is unused after this insn or the
             destination (R24..R26 when reg_dest == REG_W) overlaps X.  */
          if (reg_dest != REG_W
              && !reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);

          return "";
        }

      if (reg_dest == reg_base)
        /* Destination overlaps the base: load downwards via tmp.  */
        return avr_asm_len ("ldd %C0,%C1" CR_TAB
                            "ldd __tmp_reg__,%B1"  CR_TAB
                            "ldd %A0,%A1" CR_TAB
                            "mov %B0,__tmp_reg__", op, plen, -4);

        return avr_asm_len ("ldd %A0,%A1" CR_TAB
                            "ldd %B0,%B1" CR_TAB
                            "ldd %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("ld %C0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("ld %A0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %C0,%1", op, plen, -3);

  else if (CONSTANT_ADDRESS_P (base))
    return avr_asm_len ("lds %A0,%m1" CR_TAB
                        "lds %B0,%m1+1" CR_TAB
                        "lds %C0,%m1+2", op, plen , -6);

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3939 
/* Handle store of 24-bit type from register or zero to memory.

   INSN is the store insn, OP[0] the memory destination and OP[1] the
   source register (the caller substitutes the zero register for a
   zero constant).

   PLEN == NULL:  Output instructions.
   PLEN != NULL:  Set *PLEN to the length of the sequence (in words)
                  and don't output anything.  Return "".  */

static const char*
avr_out_store_psi (rtx insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);

  if (CONSTANT_ADDRESS_P (base))
    return avr_asm_len ("sts %m0,%A1"   CR_TAB
                        "sts %m0+1,%B1" CR_TAB
                        "sts %m0+2,%C1", op, plen, -6);

  if (reg_base > 0)                 /* (r) */
    {
      if (reg_base == REG_X)        /* (R26) */
        {
          /* A source overlapping X is not handled here; the caller
             must not produce such insns.  */
          gcc_assert (!reg_overlap_mentioned_p (base, src));

          avr_asm_len ("st %0+,%A1"  CR_TAB
                       "st %0+,%B1" CR_TAB
                       "st %0,%C1", op, plen, -3);

          /* Restore X (post-incremented by 2 above) if still live.  */
          if (!reg_unused_after (insn, base))
            avr_asm_len ("sbiw r26,2", op, plen, 1);

          return "";
        }
      else
        return avr_asm_len ("st %0,%A1"    CR_TAB
                            "std %0+1,%B1" CR_TAB
                            "std %0+2,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));

      /* Displacement does not fit STD's 0..63 offset range: adjust
         the base register (must be Y) around the access.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-61" CR_TAB
                                "std Y+61,%A1"    CR_TAB
                                "std Y+62,%B1"    CR_TAB
                                "std Y+63,%C1"    CR_TAB
                                "sbiw r28,%o0-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%A1"           CR_TAB
                              "std Y+1,%B1"        CR_TAB
                              "std Y+2,%C1"        CR_TAB
                              "subi r28,lo8(%o0)"  CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -7);
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R -- X has no displacement addressing, so
             adjust X around the access.  */
          gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));

          avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1"    CR_TAB
                       "st X+,%B1"    CR_TAB
                       "st X,%C1", op, plen, -4);

          if (!reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %A0,%A1" CR_TAB
                          "std %B0,%B1" CR_TAB
                          "std %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("st %0,%C1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%A1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("st %0,%A1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%C1", op, plen, -3);

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4032 
4033 
4034 /* Move around 24-bit stuff.  */
4035 
4036 const char *
4037 avr_out_movpsi (rtx insn, rtx *op, int *plen)
4038 {
4039   rtx dest = op[0];
4040   rtx src = op[1];
4041 
4042   if (avr_mem_flash_p (src)
4043       || avr_mem_flash_p (dest))
4044     {
4045       return avr_out_lpm (insn, op, plen);
4046     }
4047 
4048   if (register_operand (dest, VOIDmode))
4049     {
4050       if (register_operand (src, VOIDmode)) /* mov r,r */
4051         {
4052           if (true_regnum (dest) > true_regnum (src))
4053             {
4054               avr_asm_len ("mov %C0,%C1", op, plen, -1);
4055 
4056               if (AVR_HAVE_MOVW)
4057                 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
4058               else
4059                 return avr_asm_len ("mov %B0,%B1"  CR_TAB
4060                                     "mov %A0,%A1", op, plen, 2);
4061             }
4062           else
4063             {
4064               if (AVR_HAVE_MOVW)
4065                 avr_asm_len ("movw %A0,%A1", op, plen, -1);
4066               else
4067                 avr_asm_len ("mov %A0,%A1"  CR_TAB
4068                              "mov %B0,%B1", op, plen, -2);
4069 
4070               return avr_asm_len ("mov %C0,%C1", op, plen, 1);
4071             }
4072         }
4073       else if (CONSTANT_P (src))
4074         {
4075           return avr_out_reload_inpsi (op, NULL_RTX, plen);
4076         }
4077       else if (MEM_P (src))
4078         return avr_out_load_psi (insn, op, plen); /* mov r,m */
4079     }
4080   else if (MEM_P (dest))
4081     {
4082       rtx xop[2];
4083 
4084       xop[0] = dest;
4085       xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
4086 
4087       return avr_out_store_psi (insn, xop, plen);
4088     }
4089 
4090   fatal_insn ("invalid insn:", insn);
4091   return "";
4092 }
4093 
4094 
/* Output instructions for a QImode store from register OP[1] to memory
   OP[0].  If PLEN == NULL output the instructions, else set *PLEN to
   the length of the sequence in words.  Return "".  */

static const char*
out_movqi_mr_r (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* OUT is one word shorter than STS when the address is in
         I/O space and we are optimizing.  */
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("out %i0,%1", op, plen, -1)
        : avr_asm_len ("sts %m0,%1", op, plen, -2);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      /* Displacement does not fit STD's 0..63 offset range: adjust
         the base register (must be Y) around the access.  */
      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
        {
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
                                "std Y+63,%1"     CR_TAB
                                "sbiw r28,%o0-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%1"            CR_TAB
                              "subi r28,lo8(%o0)"  CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -5);
        }
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          /* X has no displacement addressing: adjust X around the
             access.  If SRC overlaps X, save it in __tmp_reg__ first
             so the ADIW does not clobber the value to store.  */
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                           "adiw r26,%o0"       CR_TAB
                           "st X,__tmp_reg__", op, plen, -3);
            }
          else
            {
              avr_asm_len ("adiw r26,%o0" CR_TAB
                           "st X,%1", op, plen, -2);
            }

          /* Restore X if still live.  */
          if (!reg_unused_after (insn, XEXP (x,0)))
            avr_asm_len ("sbiw r26,%o0", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %0,%1", op, plen, -1);
    }

  return avr_asm_len ("st %0,%1", op, plen, -1);
}
4157 
4158 
/* Helper for the next function for XMEGA.  It does the same
   but with low byte first.

   OP[0] is the memory destination, OP[1] the HImode source register.
   PLEN == NULL:  Output instructions.
   PLEN != NULL:  Set *PLEN to the length of the sequence in words.
   Return "".  */

static const char*
avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0,%A1" CR_TAB
                     "out %i0+1,%B1", op, plen, -2)

      : avr_asm_len ("sts %m0,%A1" CR_TAB
                     "sts %m0+1,%B1", op, plen, -4);

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                     "st X,r26"            CR_TAB
                     "adiw r26,1"          CR_TAB
                     "st X,__tmp_reg__", op, plen, -4);
      else
        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      /* X was incremented by 1 in both branches above; restore it
         if still live.  */
      return reg_unused_after (insn, base)
        ? ""
        : avr_asm_len ("sbiw r26,1", op, plen, 1);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      /* Displacement does not fit STD's 0..63 offset range: adjust
         the base register (must be Y) around the access.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1"    CR_TAB
                           "std Y+63,%B1"    CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "st Y,%A1"           CR_TAB
                           "std Y+1,%B1"        CR_TAB
                           "subi r28,lo8(%o0)"  CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);
      /* (X + d) = R -- if the source is X itself, park it in
         __tmp_reg__/__zero_reg__ before adjusting X.  */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26"  CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0"         CR_TAB
                       "st X+,__tmp_reg__"    CR_TAB
                       "st X,__zero_reg__"    CR_TAB
                       "clr __zero_reg__"     CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1"    CR_TAB
                       "st X,%B1"     CR_TAB
                       "sbiw r26,%o0+1", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      /* Volatile: write the low byte first by decrementing up front.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2"  CR_TAB
                       "st X+,%A1"   CR_TAB
                       "st X,%B1"    CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r0,2"  CR_TAB
                       "st %p0,%A1"  CR_TAB
                       "std %p0+1,%B1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      return avr_asm_len ("st %0,%A1"  CR_TAB
                          "st %0,%B1", op, plen, -2);

    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
4269 
4270 
/* Output instructions for an HImode store from register OP[1] to
   memory OP[0].  If PLEN == NULL output the instructions, else set
   *PLEN to the length of the sequence in words.  Return "".  */

static const char*
out_movhi_mr_r (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p;

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like SP.  */

  if (AVR_XMEGA)
    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                     "out %i0,%A1", op, plen, -2)

      : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                     "sts %m0,%A1", op, plen, -4);

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,r26"            CR_TAB
                         "adiw r26,1"          CR_TAB
                         "st X,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "adiw r26,1"          CR_TAB
                         "st X,__tmp_reg__"    CR_TAB
                         "sbiw r26,1"          CR_TAB
                         "st X,r26", op, plen, -5);

      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1"   CR_TAB
                       "st -X,%A1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      /* Displacement does not fit STD's 0..63 offset range: adjust
         the base register (must be Y) around the access.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1"    CR_TAB
                           "std Y+62,%A1"    CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1"        CR_TAB
                           "st Y,%A1"           CR_TAB
                           "subi r28,lo8(%o0)"  CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);
      /* (X + d) = R -- if the source is X itself, park it in
         __tmp_reg__/__zero_reg__ before adjusting X.  */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26"  CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1"       CR_TAB
                       "st X,__zero_reg__"    CR_TAB
                       "st -X,__tmp_reg__"    CR_TAB
                       "clr __zero_reg__"     CR_TAB
                       "sbiw r26,%o0", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                       "st X,%B1"       CR_TAB
                       "st -X,%A1"      CR_TAB
                       "sbiw r26,%o0", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1"  CR_TAB
                            "st %0,%B1", op, plen, -2);

      /* Volatile: write the high byte first, then fix up the pointer
         to the post-incremented value.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1"  CR_TAB
                       "st X,%B1"    CR_TAB
                       "st -X,%A1"   CR_TAB
                       "adiw r26,2", op, plen, -4)

        : avr_asm_len ("std %p0+1,%B1" CR_TAB
                       "st %p0,%A1"    CR_TAB
                       "adiw %r0,2", op, plen, -3);
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
4390 
4391 /* Return 1 if frame pointer for current function required.  */
4392 
4393 static bool
4394 avr_frame_pointer_required_p (void)
4395 {
4396   return (cfun->calls_alloca
4397           || cfun->calls_setjmp
4398           || cfun->has_nonlocal_label
4399           || crtl->args.info.nregs == 0
4400           || get_frame_size () > 0);
4401 }
4402 
4403 /* Returns the condition of compare insn INSN, or UNKNOWN.  */
4404 
4405 static RTX_CODE
4406 compare_condition (rtx insn)
4407 {
4408   rtx next = next_real_insn (insn);
4409 
4410   if (next && JUMP_P (next))
4411     {
4412       rtx pat = PATTERN (next);
4413       rtx src = SET_SRC (pat);
4414 
4415       if (IF_THEN_ELSE == GET_CODE (src))
4416         return GET_CODE (XEXP (src, 0));
4417     }
4418 
4419   return UNKNOWN;
4420 }
4421 
4422 
4423 /* Returns true iff INSN is a tst insn that only tests the sign.  */
4424 
4425 static bool
4426 compare_sign_p (rtx insn)
4427 {
4428   RTX_CODE cond = compare_condition (insn);
4429   return (cond == GE || cond == LT);
4430 }
4431 
4432 
4433 /* Returns true iff the next insn is a JUMP_INSN with a condition
4434    that needs to be swapped (GT, GTU, LE, LEU).  */
4435 
4436 static bool
4437 compare_diff_p (rtx insn)
4438 {
4439   RTX_CODE cond = compare_condition (insn);
4440   return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4441 }
4442 
4443 /* Returns true iff INSN is a compare insn with the EQ or NE condition.  */
4444 
4445 static bool
4446 compare_eq_p (rtx insn)
4447 {
4448   RTX_CODE cond = compare_condition (insn);
4449   return (cond == EQ || cond == NE);
4450 }
4451 
4452 
4453 /* Output compare instruction
4454 
4455       compare (XOP[0], XOP[1])
4456 
4457    for a register XOP[0] and a compile-time constant XOP[1].  Return "".
4458    XOP[2] is an 8-bit scratch register as needed.
4459 
4460    PLEN == NULL:  Output instructions.
4461    PLEN != NULL:  Set *PLEN to the length (in words) of the sequence.
4462                   Don't output anything.  */
4463 
4464 const char*
4465 avr_out_compare (rtx insn, rtx *xop, int *plen)
4466 {
4467   /* Register to compare and value to compare against. */
4468   rtx xreg = xop[0];
4469   rtx xval = xop[1];
4470 
4471   /* MODE of the comparison.  */
4472   enum machine_mode mode;
4473 
4474   /* Number of bytes to operate on.  */
4475   int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));
4476 
4477   /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown.  */
4478   int clobber_val = -1;
4479 
4480   /* Map fixed mode operands to integer operands with the same binary
4481      representation.  They are easier to handle in the remainder.  */
4482 
4483   if (CONST_FIXED_P (xval))
4484     {
4485       xreg = avr_to_int_mode (xop[0]);
4486       xval = avr_to_int_mode (xop[1]);
4487     }
4488 
4489   mode = GET_MODE (xreg);
4490 
4491   gcc_assert (REG_P (xreg));
4492   gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
4493               || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
4494 
4495   if (plen)
4496     *plen = 0;
4497 
4498   /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4499      against 0 by ORing the bytes.  This is one instruction shorter.
4500      Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
4501      and therefore don't use this.  */
4502 
4503   if (!test_hard_reg_class (LD_REGS, xreg)
4504       && compare_eq_p (insn)
4505       && reg_unused_after (insn, xreg))
4506     {
4507       if (xval == const1_rtx)
4508         {
4509           avr_asm_len ("dec %A0" CR_TAB
4510                        "or %A0,%B0", xop, plen, 2);
4511 
4512           if (n_bytes >= 3)
4513             avr_asm_len ("or %A0,%C0", xop, plen, 1);
4514 
4515           if (n_bytes >= 4)
4516             avr_asm_len ("or %A0,%D0", xop, plen, 1);
4517 
4518           return "";
4519         }
4520       else if (xval == constm1_rtx)
4521         {
4522           if (n_bytes >= 4)
4523             avr_asm_len ("and %A0,%D0", xop, plen, 1);
4524 
4525           if (n_bytes >= 3)
4526             avr_asm_len ("and %A0,%C0", xop, plen, 1);
4527 
4528           return avr_asm_len ("and %A0,%B0" CR_TAB
4529                               "com %A0", xop, plen, 2);
4530         }
4531     }
4532 
4533   for (i = 0; i < n_bytes; i++)
4534     {
4535       /* We compare byte-wise.  */
4536       rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4537       rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4538 
4539       /* 8-bit value to compare with this byte.  */
4540       unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4541 
4542       /* Registers R16..R31 can operate with immediate.  */
4543       bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4544 
4545       xop[0] = reg8;
4546       xop[1] = gen_int_mode (val8, QImode);
4547 
4548       /* Word registers >= R24 can use SBIW/ADIW with 0..63.  */
4549 
4550       if (i == 0
4551           && test_hard_reg_class (ADDW_REGS, reg8))
4552         {
4553           int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4554 
4555           if (IN_RANGE (val16, 0, 63)
4556               && (val8 == 0
4557                   || reg_unused_after (insn, xreg)))
4558             {
4559               avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4560               i++;
4561               continue;
4562             }
4563 
4564           if (n_bytes == 2
4565               && IN_RANGE (val16, -63, -1)
4566               && compare_eq_p (insn)
4567               && reg_unused_after (insn, xreg))
4568             {
4569               return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4570             }
4571         }
4572 
4573       /* Comparing against 0 is easy.  */
4574 
4575       if (val8 == 0)
4576         {
4577           avr_asm_len (i == 0
4578                        ? "cp %0,__zero_reg__"
4579                        : "cpc %0,__zero_reg__", xop, plen, 1);
4580           continue;
4581         }
4582 
4583       /* Upper registers can compare and subtract-with-carry immediates.
4584          Notice that compare instructions do the same as respective subtract
4585          instruction; the only difference is that comparisons don't write
4586          the result back to the target register.  */
4587 
4588       if (ld_reg_p)
4589         {
4590           if (i == 0)
4591             {
4592               avr_asm_len ("cpi %0,%1", xop, plen, 1);
4593               continue;
4594             }
4595           else if (reg_unused_after (insn, xreg))
4596             {
4597               avr_asm_len ("sbci %0,%1", xop, plen, 1);
4598               continue;
4599             }
4600         }
4601 
4602       /* Must load the value into the scratch register.  */
4603 
4604       gcc_assert (REG_P (xop[2]));
4605 
4606       if (clobber_val != (int) val8)
4607         avr_asm_len ("ldi %2,%1", xop, plen, 1);
4608       clobber_val = (int) val8;
4609 
4610       avr_asm_len (i == 0
4611                    ? "cp %0,%2"
4612                    : "cpc %0,%2", xop, plen, 1);
4613     }
4614 
4615   return "";
4616 }
4617 
4618 
4619 /* Prepare operands of compare_const_di2 to be used with avr_out_compare.  */
4620 
4621 const char*
4622 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4623 {
4624   rtx xop[3];
4625 
4626   xop[0] = gen_rtx_REG (DImode, 18);
4627   xop[1] = op[0];
4628   xop[2] = op[1];
4629 
4630   return avr_out_compare (insn, xop, plen);
4631 }
4632 
4633 /* Output test instruction for HImode.  */
4634 
4635 const char*
4636 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4637 {
4638   if (compare_sign_p (insn))
4639     {
4640       avr_asm_len ("tst %B0", op, plen, -1);
4641     }
4642   else if (reg_unused_after (insn, op[0])
4643            && compare_eq_p (insn))
4644     {
4645       /* Faster than sbiw if we can clobber the operand.  */
4646       avr_asm_len ("or %A0,%B0", op, plen, -1);
4647     }
4648   else
4649     {
4650       avr_out_compare (insn, op, plen);
4651     }
4652 
4653   return "";
4654 }
4655 
4656 
4657 /* Output test instruction for PSImode.  */
4658 
4659 const char*
4660 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4661 {
4662   if (compare_sign_p (insn))
4663     {
4664       avr_asm_len ("tst %C0", op, plen, -1);
4665     }
4666   else if (reg_unused_after (insn, op[0])
4667            && compare_eq_p (insn))
4668     {
4669       /* Faster than sbiw if we can clobber the operand.  */
4670       avr_asm_len ("or %A0,%B0" CR_TAB
4671                    "or %A0,%C0", op, plen, -2);
4672     }
4673   else
4674     {
4675       avr_out_compare (insn, op, plen);
4676     }
4677 
4678   return "";
4679 }
4680 
4681 
4682 /* Output test instruction for SImode.  */
4683 
4684 const char*
4685 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4686 {
4687   if (compare_sign_p (insn))
4688     {
4689       avr_asm_len ("tst %D0", op, plen, -1);
4690     }
4691   else if (reg_unused_after (insn, op[0])
4692            && compare_eq_p (insn))
4693     {
4694       /* Faster than sbiw if we can clobber the operand.  */
4695       avr_asm_len ("or %A0,%B0" CR_TAB
4696                    "or %A0,%C0" CR_TAB
4697                    "or %A0,%D0", op, plen, -3);
4698     }
4699   else
4700     {
4701       avr_out_compare (insn, op, plen);
4702     }
4703 
4704   return "";
4705 }
4706 
4707 
4708 /* Generate asm equivalent for various shifts.  This only handles cases
4709    that are not already carefully hand-optimized in ?sh??i3_out.
4710 
4711    OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4712    OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4713    OPERANDS[3] is a QImode scratch register from LD regs if
4714                available and SCRATCH, otherwise (no scratch available)
4715 
4716    TEMPL is an assembler template that shifts by one position.
4717    T_LEN is the length of this template.  */
4718 
void
out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
		    int *plen, int t_len)
{
  /* second_label: a variable count may be zero, so we jump into the
     loop at its decrement ("2:") first; not needed for known counts.  */
  bool second_label = true;
  bool saved_in_tmp = false;
  bool use_zero_reg = false;
  rtx op[5];

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (plen)
    *plen = 0;

  if (CONST_INT_P (operands[2]))
    {
      /* Count known at compile time: either unroll TEMPL COUNT times
         or set up a down-counting loop using the best available counter.  */
      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
          return;

      if (count < 8 && !scratch)
        use_zero_reg = true;

      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster.  */

          while (count-- > 0)
            avr_asm_len (templ, op, plen, t_len);

          return;
        }

      if (scratch)
        {
          /* A dedicated LD-regs scratch is available: load the count.  */
          avr_asm_len ("ldi %3,%2", op, plen, 1);
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);
        }

      second_label = false;
    }
  else if (MEM_P (op[2]))
    {
      /* Count lives in memory: fetch it into __tmp_reg__.  */
      rtx op_mov[2];

      op_mov[0] = op[3] = tmp_reg_rtx;
      op_mov[1] = op[2];

      out_movqi_r_mr (insn, op_mov, plen);
    }
  else if (register_operand (op[2], QImode))
    {
      op[3] = op[2];

      /* Copy the count to __tmp_reg__ if the count register is still
         needed afterwards or overlaps the value being shifted.  */
      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  /* Variable count: enter the loop at the decrement so that a zero
     count shifts nothing.  */
  if (second_label)
      avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

  if (second_label)
    avr_asm_len ("2:", op, plen, 0);

  /* Marker-bit counter shifts right until zero (brpl), ordinary counter
     decrements until zero (brne).  */
  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

  if (saved_in_tmp)
    avr_asm_len ("mov %3,%4", op, plen, 1);
}
4829 
4830 
4831 /* 8bit shift left ((char)x << i)   */
4832 
const char *
ashlqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      /* LEN may be NULL when only asm text is wanted; point it at a
         dummy so the cases below can store lengths unconditionally.  */
      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* Shift >= 8 clears the byte entirely.  */
	  *len = 1;
	  return "clr %0";

	case 1:
	  *len = 1;
	  return "lsl %0";

	case 2:
	  *len = 2;
	  return ("lsl %0" CR_TAB
		  "lsl %0");

	case 3:
	  *len = 3;
	  return ("lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0");

	case 4:
	  /* SWAP exchanges nibbles; ANDI (LD_REGS only) masks the
	     nibble that wrapped around.  */
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 2;
	      return ("swap %0" CR_TAB
		      "andi %0,0xf0");
	    }
	  *len = 4;
	  return ("lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0");

	case 5:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 3;
	      return ("swap %0" CR_TAB
		      "lsl %0"  CR_TAB
		      "andi %0,0xe0");
	    }
	  *len = 5;
	  return ("lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0");

	case 6:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 4;
	      return ("swap %0" CR_TAB
		      "lsl %0"  CR_TAB
		      "lsl %0"  CR_TAB
		      "andi %0,0xc0");
	    }
	  *len = 6;
	  return ("lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0");

	case 7:
	  /* x << 7: rotate bit 0 into carry, clear, rotate carry into
	     bit 7.  */
	  *len = 3;
	  return ("ror %0" CR_TAB
		  "clr %0" CR_TAB
		  "ror %0");
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  /* Non-constant count: fall back to a generic shift loop.  */
  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);
  return "";
}
4926 
4927 
4928 /* 16bit shift left ((short)x << i)   */
4929 
const char *
ashlhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* scratch: the insn pattern carries a QImode scratch (operand 3).
         ldi_ok: destination is in LD_REGS, so ANDI/LDI may target it.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 16)
	    break;

	  /* Shift >= 16 clears the whole word.  */
	  *len = 2;
	  return ("clr %B0" CR_TAB
		  "clr %A0");

	case 4:
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (ldi_ok)
	    {
	      /* Nibble-swap both bytes, then use the EOR/AND/EOR trick
	         to move the high nibble of A into B.  */
	      *len = 6;
	      return ("swap %A0"      CR_TAB
		      "swap %B0"      CR_TAB
		      "andi %B0,0xf0" CR_TAB
		      "eor %B0,%A0"   CR_TAB
		      "andi %A0,0xf0" CR_TAB
		      "eor %B0,%A0");
	    }
	  if (scratch)
	    {
	      *len = 7;
	      return ("swap %A0"    CR_TAB
		      "swap %B0"    CR_TAB
		      "ldi %3,0xf0" CR_TAB
		      "and %B0,%3"      CR_TAB
		      "eor %B0,%A0" CR_TAB
		      "and %A0,%3"      CR_TAB
		      "eor %B0,%A0");
	    }
	  break;  /* optimize_size ? 6 : 8 */

	case 5:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  if (ldi_ok)
	    {
	      /* One plain shift, then the nibble-swap sequence for 4.  */
	      *len = 8;
	      return ("lsl %A0"       CR_TAB
		      "rol %B0"       CR_TAB
		      "swap %A0"      CR_TAB
		      "swap %B0"      CR_TAB
		      "andi %B0,0xf0" CR_TAB
		      "eor %B0,%A0"   CR_TAB
		      "andi %A0,0xf0" CR_TAB
		      "eor %B0,%A0");
	    }
	  if (scratch)
	    {
	      *len = 9;
	      return ("lsl %A0"     CR_TAB
		      "rol %B0"     CR_TAB
		      "swap %A0"    CR_TAB
		      "swap %B0"    CR_TAB
		      "ldi %3,0xf0" CR_TAB
		      "and %B0,%3"      CR_TAB
		      "eor %B0,%A0" CR_TAB
		      "and %A0,%3"      CR_TAB
		      "eor %B0,%A0");
	    }
	  break;  /* 10 */

	case 6:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  /* x << 6 computed as a byte move plus two right shifts
	     through __tmp_reg__ (i.e. << 8 then >> 2).  */
	  *len = 9;
	  return ("clr __tmp_reg__" CR_TAB
		  "lsr %B0"         CR_TAB
		  "ror %A0"         CR_TAB
		  "ror __tmp_reg__" CR_TAB
		  "lsr %B0"         CR_TAB
		  "ror %A0"         CR_TAB
		  "ror __tmp_reg__" CR_TAB
		  "mov %B0,%A0"     CR_TAB
		  "mov %A0,__tmp_reg__");

	case 7:
	  /* << 7 as (<< 8) >> 1: bit 7 of A is shifted into carry
	     first, then recombined after the byte move.  */
	  *len = 5;
	  return ("lsr %B0"     CR_TAB
		  "mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "ror %B0"     CR_TAB
		  "ror %A0");

	case 8:
	  /* Whole-byte shift: B := source's A, A := 0.  */
	  return *len = 2, ("mov %B0,%A1" CR_TAB
			    "clr %A0");

	case 9:
	  *len = 3;
	  return ("mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "lsl %B0");

	case 10:
	  *len = 4;
	  return ("mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0");

	case 11:
	  *len = 5;
	  return ("mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0");

	case 12:
	  if (ldi_ok)
	    {
	      *len = 4;
	      return ("mov %B0,%A0" CR_TAB
		      "clr %A0"     CR_TAB
		      "swap %B0"    CR_TAB
		      "andi %B0,0xf0");
	    }
	  if (scratch)
	    {
	      *len = 5;
	      return ("mov %B0,%A0" CR_TAB
		      "clr %A0"     CR_TAB
		      "swap %B0"    CR_TAB
		      "ldi %3,0xf0" CR_TAB
		      "and %B0,%3");
	    }
	  *len = 6;
	  return ("mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0");

	case 13:
	  if (ldi_ok)
	    {
	      *len = 5;
	      return ("mov %B0,%A0" CR_TAB
		      "clr %A0"     CR_TAB
		      "swap %B0"    CR_TAB
		      "lsl %B0"     CR_TAB
		      "andi %B0,0xe0");
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      /* Multiply by 0x20 puts bits 0..2 of A into bits 5..7 of
	         the product's high byte (r0 holds the low byte here).  */
	      *len = 5;
	      return ("ldi %3,0x20" CR_TAB
		      "mul %A0,%3"  CR_TAB
		      "mov %B0,r0"  CR_TAB
		      "clr %A0"     CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (scratch)
	    {
	      *len = 6;
	      return ("mov %B0,%A0" CR_TAB
		      "clr %A0"     CR_TAB
		      "swap %B0"    CR_TAB
		      "lsl %B0"     CR_TAB
		      "ldi %3,0xe0" CR_TAB
		      "and %B0,%3");
	    }
	  if (AVR_HAVE_MUL)
	    {
	      /* Build the 0x20 multiplier in __zero_reg__ (r1) via
	         SET/BLD; r1 is cleared again by the final mov/clr.  */
	      *len = 6;
	      return ("set"            CR_TAB
		      "bld r1,5"   CR_TAB
		      "mul %A0,r1" CR_TAB
		      "mov %B0,r0" CR_TAB
		      "clr %A0"    CR_TAB
		      "clr __zero_reg__");
	    }
	  *len = 7;
	  return ("mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0");

	case 14:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return ("ldi %B0,0x40" CR_TAB
		      "mul %A0,%B0"  CR_TAB
		      "mov %B0,r0"   CR_TAB
		      "clr %A0"      CR_TAB
		      "clr __zero_reg__");
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      *len = 5;
	      return ("ldi %3,0x40" CR_TAB
		      "mul %A0,%3"  CR_TAB
		      "mov %B0,r0"  CR_TAB
		      "clr %A0"     CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && ldi_ok)
	    {
	      /* Small loop: A serves as the counter once its value is
	         saved into B.  */
	      *len = 5;
	      return ("mov %B0,%A0" CR_TAB
		      "ldi %A0,6" "\n1:\t"
		      "lsl %B0"     CR_TAB
		      "dec %A0"     CR_TAB
		      "brne 1b");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  /* << 14 as >> 2 of A routed into B's top bits.  */
	  *len = 6;
	  return ("clr %B0" CR_TAB
		  "lsr %A0" CR_TAB
		  "ror %B0" CR_TAB
		  "lsr %A0" CR_TAB
		  "ror %B0" CR_TAB
		  "clr %A0");

	case 15:
	  /* Only bit 0 of A survives, landing in bit 15.  */
	  *len = 4;
	  return ("clr %B0" CR_TAB
		  "lsr %A0" CR_TAB
		  "ror %B0" CR_TAB
		  "clr %A0");
	}
      /* A case broke out: restore the caller's (possibly NULL) LEN so
         the generic loop emitter computes the length itself.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
  return "";
}
5183 
5184 
5185 /* 24-bit shift left */
5186 
const char*
avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
{
  if (plen)
    *plen = 0;

  if (CONST_INT_P (op[2]))
    {
      switch (INTVAL (op[2]))
        {
        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* Shift >= 24 clears all three bytes.  */
          return avr_asm_len ("clr %A0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %C0", op, plen, 3);

        case 8:
          {
            /* Whole-byte move; order the moves so an overlapping
               source is not clobbered before it is read.  */
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            if (reg0 >= reg1)
              return avr_asm_len ("mov %C0,%B1"  CR_TAB
                                  "mov %B0,%A1"  CR_TAB
                                  "clr %A0", op, plen, 3);
            else
              return avr_asm_len ("clr %A0"      CR_TAB
                                  "mov %B0,%A1"  CR_TAB
                                  "mov %C0,%B1", op, plen, 3);
          }

        case 16:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* If %C0 already aliases %A1 the move is redundant.  */
            if (reg0 + 2 != reg1)
              avr_asm_len ("mov %C0,%A0", op, plen, 1);

            return avr_asm_len ("clr %B0"  CR_TAB
                                "clr %A0", op, plen, 2);
          }

        case 23:
          /* Only bit 0 of A survives, landing in bit 23.  */
          return avr_asm_len ("clr %C0" CR_TAB
                              "lsr %A0" CR_TAB
                              "ror %C0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %A0", op, plen, 5);
        }
    }

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0", insn, op, plen, 3);
  return "";
}
5246 
5247 
5248 /* 32bit shift left ((long)x << i)   */
5249 
const char *
ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* Shift >= 32 clears all four bytes; MOVW halves the cost.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 3, ("clr %D0" CR_TAB
			      "clr %C0" CR_TAB
			      "movw %A0,%C0");
	  *len = 4;
	  return ("clr %D0" CR_TAB
		  "clr %C0" CR_TAB
		  "clr %B0" CR_TAB
		  "clr %A0");

	case 8:
	  {
	    /* Whole-byte move; direction depends on register overlap.  */
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len = 4;
	    if (reg0 >= reg1)
	      return ("mov %D0,%C1"  CR_TAB
		      "mov %C0,%B1"  CR_TAB
		      "mov %B0,%A1"  CR_TAB
		      "clr %A0");
	    else
	      return ("clr %A0"      CR_TAB
		      "mov %B0,%A1"  CR_TAB
		      "mov %C0,%B1"  CR_TAB
		      "mov %D0,%C1");
	  }

	case 16:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    /* Destination high word already aliases source low word.  */
	    if (reg0 + 2 == reg1)
	      return *len = 2, ("clr %B0"      CR_TAB
				"clr %A0");
	    if (AVR_HAVE_MOVW)
	      return *len = 3, ("movw %C0,%A1" CR_TAB
				"clr %B0"      CR_TAB
				"clr %A0");
	    else
	      return *len = 4, ("mov %C0,%A1"  CR_TAB
				"mov %D0,%B1"  CR_TAB
				"clr %B0"      CR_TAB
				"clr %A0");
	  }

	case 24:
	  *len = 4;
	  return ("mov %D0,%A1"  CR_TAB
		  "clr %C0"      CR_TAB
		  "clr %B0"      CR_TAB
		  "clr %A0");

	case 31:
	  /* Only bit 0 of A survives, landing in bit 31.  */
	  *len = 6;
	  return ("clr %D0" CR_TAB
		  "lsr %A0" CR_TAB
		  "ror %D0" CR_TAB
		  "clr %C0" CR_TAB
		  "clr %B0" CR_TAB
		  "clr %A0");
	}
      /* Restore the caller's (possibly NULL) LEN before the fallback.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
  return "";
}
5336 
5337 /* 8bit arithmetic shift right  ((signed char)x >> i) */
5338 
const char *
ashrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	case 1:
	  *len = 1;
	  return "asr %0";

	case 2:
	  *len = 2;
	  return ("asr %0" CR_TAB
		  "asr %0");

	case 3:
	  *len = 3;
	  return ("asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0");

	case 4:
	  *len = 4;
	  return ("asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0");

	case 5:
	  *len = 5;
	  return ("asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0");

	case 6:
	  /* >> 6: spread the sign via LSL+SBC, then patch bit 6 (saved
	     in T) back into bit 0.  */
	  *len = 4;
	  return ("bst %0,6"  CR_TAB
		  "lsl %0"    CR_TAB
		  "sbc %0,%0" CR_TAB
		  "bld %0,0");

	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* fall through */

	case 7:
	  /* >> 7 and beyond: every bit becomes a copy of the sign.
	     LSL moves the sign into carry, SBC yields 0x00 or 0xff.  */
	  *len = 2;
	  return ("lsl %0" CR_TAB
		  "sbc %0,%0");
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);
  return "";
}
5407 
5408 
5409 /* 16bit arithmetic shift right  ((signed short)x >> i) */
5410 
const char *
ashrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* scratch: insn carries a QImode scratch (operand 3).
         ldi_ok: destination is in LD_REGS.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	case 4:
	case 5:
	  /* XXX try to optimize this too? */
	  break;

	case 6:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  /* >> 6 as << 2 of the word shifted down one byte, with the
	     sign spread via SBC.  */
	  *len = 8;
	  return ("mov __tmp_reg__,%A0" CR_TAB
		  "mov %A0,%B0"         CR_TAB
		  "lsl __tmp_reg__"     CR_TAB
		  "rol %A0"             CR_TAB
		  "sbc %B0,%B0"         CR_TAB
		  "lsl __tmp_reg__"     CR_TAB
		  "rol %A0"             CR_TAB
		  "rol %B0");

	case 7:
	  *len = 4;
	  return ("lsl %A0"     CR_TAB
		  "mov %A0,%B0" CR_TAB
		  "rol %A0"     CR_TAB
		  "sbc %B0,%B0");

	case 8:
	  {
	    /* Whole-byte shift with sign extension into B.  */
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    if (reg0 == reg1)
	      return *len = 3, ("mov %A0,%B0" CR_TAB
				"lsl %B0"     CR_TAB
				"sbc %B0,%B0");
	    else
	      return *len = 4, ("mov %A0,%B1" CR_TAB
			        "clr %B0"     CR_TAB
			        "sbrc %A0,7"  CR_TAB
			        "dec %B0");
	  }

	case 9:
	  *len = 4;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0"      CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0");

	case 10:
	  *len = 5;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0"     CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0");

	case 11:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      /* Signed multiply by 0x20 == arithmetic shift by 11 when
	         taking the product's high byte (r1).  */
	      *len = 5;
	      return ("ldi %A0,0x20" CR_TAB
		      "muls %B0,%A0" CR_TAB
		      "mov %A0,r1"   CR_TAB
		      "sbc %B0,%B0"  CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 6;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0"     CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0");

	case 12:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return ("ldi %A0,0x10" CR_TAB
		      "muls %B0,%A0" CR_TAB
		      "mov %A0,r1"   CR_TAB
		      "sbc %B0,%B0"  CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 7;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0"     CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0");

	case 13:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return ("ldi %A0,0x08" CR_TAB
		      "muls %B0,%A0" CR_TAB
		      "mov %A0,r1"   CR_TAB
		      "sbc %B0,%B0"  CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size)
	    break;  /* scratch ? 5 : 7 */
	  *len = 8;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0"     CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0");

	case 14:
	  /* >> 14: sign fills A via SBC, then bit 14 is rotated into
	     bit 0 of the result.  */
	  *len = 5;
	  return ("lsl %B0"     CR_TAB
		  "sbc %A0,%A0" CR_TAB
		  "lsl %B0"     CR_TAB
		  "mov %B0,%A0" CR_TAB
		  "rol %A0");

	default:
	  if (INTVAL (operands[2]) < 16)
	    break;

	  /* fall through */

	case 15:
	  /* >> 15 and beyond: both bytes become the sign.  */
	  return *len = 3, ("lsl %B0"     CR_TAB
			    "sbc %A0,%A0" CR_TAB
			    "mov %B0,%A0");
	}
      /* Restore the caller's (possibly NULL) LEN before the fallback.  */
      len = t;
    }
  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
5570 
5571 
5572 /* 24-bit arithmetic shift right */
5573 
const char*
avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Whole-byte shift with sign extension into C; move order
             chosen so an overlapping source is read before clobbered.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0"     CR_TAB
                                "sbrc %B0,7"  CR_TAB
                                "dec %C0", op, plen, 5);
          else
            return avr_asm_len ("clr %C0"     CR_TAB
                                "sbrc %C1,7"  CR_TAB
                                "dec %C0"     CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 5);

        case 16:
          /* If %A0 already aliases %C1 the move is redundant.  */
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0"     CR_TAB
                              "sbrc %A0,7"  CR_TAB
                              "com %B0"     CR_TAB
                              "mov %C0,%B0", op, plen, 4);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* >> 23 and beyond: all three bytes become the sign.  */
          return avr_asm_len ("lsl %C0"     CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0", op, plen, 4);
        } /* switch */
    }

  out_shift_with_cnt ("asr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
5629 
5630 
5631 /* 32-bit arithmetic shift right  ((signed long)x >> i) */
5632 
const char *
ashrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	case 8:
	  {
	    /* Whole-byte shift with sign extension into D; move order
	       chosen so an overlapping source is read before clobbered.  */
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len=6;
	    if (reg0 <= reg1)
	      return ("mov %A0,%B1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "clr %D0"     CR_TAB
		      "sbrc %C0,7"  CR_TAB
		      "dec %D0");
	    else
	      return ("clr %D0"     CR_TAB
		      "sbrc %D1,7"  CR_TAB
		      "dec %D0"     CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %A0,%B1");
	  }

	case 16:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    /* Destination low word already aliases source high word.  */
	    if (reg0 == reg1 + 2)
	      return *len = 4, ("clr %D0"     CR_TAB
				"sbrc %B0,7"  CR_TAB
				"com %D0"     CR_TAB
				"mov %C0,%D0");
	    if (AVR_HAVE_MOVW)
	      return *len = 5, ("movw %A0,%C1" CR_TAB
				"clr %D0"      CR_TAB
				"sbrc %B0,7"   CR_TAB
				"com %D0"      CR_TAB
				"mov %C0,%D0");
	    else
	      return *len = 6, ("mov %B0,%D1" CR_TAB
				"mov %A0,%C1" CR_TAB
				"clr %D0"     CR_TAB
				"sbrc %B0,7"  CR_TAB
				"com %D0"     CR_TAB
				"mov %C0,%D0");
	  }

	case 24:
	  return *len = 6, ("mov %A0,%D1" CR_TAB
			    "clr %D0"     CR_TAB
			    "sbrc %A0,7"  CR_TAB
			    "com %D0"     CR_TAB
			    "mov %B0,%D0" CR_TAB
			    "mov %C0,%D0");

	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* fall through */

	case 31:
	  /* >> 31 and beyond: all four bytes become the sign.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 4, ("lsl %D0"     CR_TAB
			      "sbc %A0,%A0" CR_TAB
			      "mov %B0,%A0" CR_TAB
			      "movw %C0,%A0");
	  else
	    return *len = 5, ("lsl %D0"     CR_TAB
			      "sbc %A0,%A0" CR_TAB
			      "mov %B0,%A0" CR_TAB
			      "mov %C0,%A0" CR_TAB
			      "mov %D0,%A0");
	}
      /* Restore the caller's (possibly NULL) LEN before the fallback.  */
      len = t;
    }
  out_shift_with_cnt ("asr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
5727 
5728 /* 8-bit logic shift right ((unsigned char)x >> i) */
5729 
const char *
lshrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* Shift >= 8 clears the byte entirely.  */
	  *len = 1;
	  return "clr %0";

	case 1:
	  *len = 1;
	  return "lsr %0";

	case 2:
	  *len = 2;
	  return ("lsr %0" CR_TAB
		  "lsr %0");
	case 3:
	  *len = 3;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 4:
	  /* SWAP exchanges nibbles; ANDI (LD_REGS only) masks the
	     nibble that wrapped around.  */
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len=2;
	      return ("swap %0" CR_TAB
		      "andi %0,0x0f");
	    }
	  *len = 4;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 5:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 3;
	      return ("swap %0" CR_TAB
		      "lsr %0"  CR_TAB
		      "andi %0,0x7");
	    }
	  *len = 5;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 6:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 4;
	      return ("swap %0" CR_TAB
		      "lsr %0"  CR_TAB
		      "lsr %0"  CR_TAB
		      "andi %0,0x3");
	    }
	  *len = 6;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 7:
	  /* x >> 7: rotate bit 7 into carry, clear, rotate carry into
	     bit 0.  */
	  *len = 3;
	  return ("rol %0" CR_TAB
		  "clr %0" CR_TAB
		  "rol %0");
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt ("lsr %0",
                      insn, operands, len, 1);
  return "";
}
5822 
5823 /* 16-bit logic shift right ((unsigned short)x >> i) */
5824 
5825 const char *
5826 lshrhi3_out (rtx insn, rtx operands[], int *len)
5827 {
5828   if (GET_CODE (operands[2]) == CONST_INT)
5829     {
5830       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5831       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5832       int k;
5833       int *t = len;
5834 
5835       if (!len)
5836 	len = &k;
5837 
5838       switch (INTVAL (operands[2]))
5839 	{
5840 	default:
5841 	  if (INTVAL (operands[2]) < 16)
5842 	    break;
5843 
5844 	  *len = 2;
5845 	  return ("clr %B0" CR_TAB
5846 		  "clr %A0");
5847 
5848 	case 4:
5849 	  if (optimize_size && scratch)
5850 	    break;  /* 5 */
5851 	  if (ldi_ok)
5852 	    {
5853 	      *len = 6;
5854 	      return ("swap %B0"      CR_TAB
5855 		      "swap %A0"      CR_TAB
5856 		      "andi %A0,0x0f" CR_TAB
5857 		      "eor %A0,%B0"   CR_TAB
5858 		      "andi %B0,0x0f" CR_TAB
5859 		      "eor %A0,%B0");
5860 	    }
5861 	  if (scratch)
5862 	    {
5863 	      *len = 7;
5864 	      return ("swap %B0"    CR_TAB
5865 		      "swap %A0"    CR_TAB
5866 		      "ldi %3,0x0f" CR_TAB
5867 		      "and %A0,%3"      CR_TAB
5868 		      "eor %A0,%B0" CR_TAB
5869 		      "and %B0,%3"      CR_TAB
5870 		      "eor %A0,%B0");
5871 	    }
5872 	  break;  /* optimize_size ? 6 : 8 */
5873 
5874 	case 5:
5875 	  if (optimize_size)
5876 	    break;  /* scratch ? 5 : 6 */
5877 	  if (ldi_ok)
5878 	    {
5879 	      *len = 8;
5880 	      return ("lsr %B0"       CR_TAB
5881 		      "ror %A0"       CR_TAB
5882 		      "swap %B0"      CR_TAB
5883 		      "swap %A0"      CR_TAB
5884 		      "andi %A0,0x0f" CR_TAB
5885 		      "eor %A0,%B0"   CR_TAB
5886 		      "andi %B0,0x0f" CR_TAB
5887 		      "eor %A0,%B0");
5888 	    }
5889 	  if (scratch)
5890 	    {
5891 	      *len = 9;
5892 	      return ("lsr %B0"     CR_TAB
5893 		      "ror %A0"     CR_TAB
5894 		      "swap %B0"    CR_TAB
5895 		      "swap %A0"    CR_TAB
5896 		      "ldi %3,0x0f" CR_TAB
5897 		      "and %A0,%3"      CR_TAB
5898 		      "eor %A0,%B0" CR_TAB
5899 		      "and %B0,%3"      CR_TAB
5900 		      "eor %A0,%B0");
5901 	    }
5902 	  break;  /* 10 */
5903 
5904 	case 6:
5905 	  if (optimize_size)
5906 	    break;  /* scratch ? 5 : 6 */
5907 	  *len = 9;
5908 	  return ("clr __tmp_reg__" CR_TAB
5909 		  "lsl %A0"         CR_TAB
5910 		  "rol %B0"         CR_TAB
5911 		  "rol __tmp_reg__" CR_TAB
5912 		  "lsl %A0"         CR_TAB
5913 		  "rol %B0"         CR_TAB
5914 		  "rol __tmp_reg__" CR_TAB
5915 		  "mov %A0,%B0"     CR_TAB
5916 		  "mov %B0,__tmp_reg__");
5917 
5918 	case 7:
5919 	  *len = 5;
5920 	  return ("lsl %A0"     CR_TAB
5921 		  "mov %A0,%B0" CR_TAB
5922 		  "rol %A0"     CR_TAB
5923 		  "sbc %B0,%B0" CR_TAB
5924 		  "neg %B0");
5925 
5926 	case 8:
5927 	  return *len = 2, ("mov %A0,%B1" CR_TAB
5928 			    "clr %B0");
5929 
5930 	case 9:
5931 	  *len = 3;
5932 	  return ("mov %A0,%B0" CR_TAB
5933 		  "clr %B0"     CR_TAB
5934 		  "lsr %A0");
5935 
5936 	case 10:
5937 	  *len = 4;
5938 	  return ("mov %A0,%B0" CR_TAB
5939 		  "clr %B0"     CR_TAB
5940 		  "lsr %A0"     CR_TAB
5941 		  "lsr %A0");
5942 
5943 	case 11:
5944 	  *len = 5;
5945 	  return ("mov %A0,%B0" CR_TAB
5946 		  "clr %B0"     CR_TAB
5947 		  "lsr %A0"     CR_TAB
5948 		  "lsr %A0"     CR_TAB
5949 		  "lsr %A0");
5950 
5951 	case 12:
5952 	  if (ldi_ok)
5953 	    {
5954 	      *len = 4;
5955 	      return ("mov %A0,%B0" CR_TAB
5956 		      "clr %B0"     CR_TAB
5957 		      "swap %A0"    CR_TAB
5958 		      "andi %A0,0x0f");
5959 	    }
5960 	  if (scratch)
5961 	    {
5962 	      *len = 5;
5963 	      return ("mov %A0,%B0" CR_TAB
5964 		      "clr %B0"     CR_TAB
5965 		      "swap %A0"    CR_TAB
5966 		      "ldi %3,0x0f" CR_TAB
5967 		      "and %A0,%3");
5968 	    }
5969 	  *len = 6;
5970 	  return ("mov %A0,%B0" CR_TAB
5971 		  "clr %B0"     CR_TAB
5972 		  "lsr %A0"     CR_TAB
5973 		  "lsr %A0"     CR_TAB
5974 		  "lsr %A0"     CR_TAB
5975 		  "lsr %A0");
5976 
5977 	case 13:
5978 	  if (ldi_ok)
5979 	    {
5980 	      *len = 5;
5981 	      return ("mov %A0,%B0" CR_TAB
5982 		      "clr %B0"     CR_TAB
5983 		      "swap %A0"    CR_TAB
5984 		      "lsr %A0"     CR_TAB
5985 		      "andi %A0,0x07");
5986 	    }
5987 	  if (AVR_HAVE_MUL && scratch)
5988 	    {
5989 	      *len = 5;
5990 	      return ("ldi %3,0x08" CR_TAB
5991 		      "mul %B0,%3"  CR_TAB
5992 		      "mov %A0,r1"  CR_TAB
5993 		      "clr %B0"     CR_TAB
5994 		      "clr __zero_reg__");
5995 	    }
5996 	  if (optimize_size && scratch)
5997 	    break;  /* 5 */
5998 	  if (scratch)
5999 	    {
6000 	      *len = 6;
6001 	      return ("mov %A0,%B0" CR_TAB
6002 		      "clr %B0"     CR_TAB
6003 		      "swap %A0"    CR_TAB
6004 		      "lsr %A0"     CR_TAB
6005 		      "ldi %3,0x07" CR_TAB
6006 		      "and %A0,%3");
6007 	    }
6008 	  if (AVR_HAVE_MUL)
6009 	    {
6010 	      *len = 6;
6011 	      return ("set"            CR_TAB
6012 		      "bld r1,3"   CR_TAB
6013 		      "mul %B0,r1" CR_TAB
6014 		      "mov %A0,r1" CR_TAB
6015 		      "clr %B0"    CR_TAB
6016 		      "clr __zero_reg__");
6017 	    }
6018 	  *len = 7;
6019 	  return ("mov %A0,%B0" CR_TAB
6020 		  "clr %B0"     CR_TAB
6021 		  "lsr %A0"     CR_TAB
6022 		  "lsr %A0"     CR_TAB
6023 		  "lsr %A0"     CR_TAB
6024 		  "lsr %A0"     CR_TAB
6025 		  "lsr %A0");
6026 
6027 	case 14:
6028 	  if (AVR_HAVE_MUL && ldi_ok)
6029 	    {
6030 	      *len = 5;
6031 	      return ("ldi %A0,0x04" CR_TAB
6032 		      "mul %B0,%A0"  CR_TAB
6033 		      "mov %A0,r1"   CR_TAB
6034 		      "clr %B0"      CR_TAB
6035 		      "clr __zero_reg__");
6036 	    }
6037 	  if (AVR_HAVE_MUL && scratch)
6038 	    {
6039 	      *len = 5;
6040 	      return ("ldi %3,0x04" CR_TAB
6041 		      "mul %B0,%3"  CR_TAB
6042 		      "mov %A0,r1"  CR_TAB
6043 		      "clr %B0"     CR_TAB
6044 		      "clr __zero_reg__");
6045 	    }
6046 	  if (optimize_size && ldi_ok)
6047 	    {
6048 	      *len = 5;
6049 	      return ("mov %A0,%B0" CR_TAB
6050 		      "ldi %B0,6" "\n1:\t"
6051 		      "lsr %A0"     CR_TAB
6052 		      "dec %B0"     CR_TAB
6053 		      "brne 1b");
6054 	    }
6055 	  if (optimize_size && scratch)
6056 	    break;  /* 5 */
6057 	  *len = 6;
6058 	  return ("clr %A0" CR_TAB
6059 		  "lsl %B0" CR_TAB
6060 		  "rol %A0" CR_TAB
6061 		  "lsl %B0" CR_TAB
6062 		  "rol %A0" CR_TAB
6063 		  "clr %B0");
6064 
6065 	case 15:
6066 	  *len = 4;
6067 	  return ("clr %A0" CR_TAB
6068 		  "lsl %B0" CR_TAB
6069 		  "rol %A0" CR_TAB
6070 		  "clr %B0");
6071 	}
6072       len = t;
6073     }
6074   out_shift_with_cnt ("lsr %B0" CR_TAB
6075                       "ror %A0", insn, operands, len, 2);
6076   return "";
6077 }
6078 
6079 
6080 /* 24-bit logic shift right */
6081 
6082 const char*
6083 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
6084 {
6085   int dest = REGNO (op[0]);
6086   int src = REGNO (op[1]);
6087 
6088   if (CONST_INT_P (op[2]))
6089     {
6090       if (plen)
6091         *plen = 0;
6092 
6093       switch (INTVAL (op[2]))
6094         {
6095         case 8:
6096           if (dest <= src)
6097             return avr_asm_len ("mov %A0,%B1" CR_TAB
6098                                 "mov %B0,%C1" CR_TAB
6099                                 "clr %C0", op, plen, 3);
6100           else
6101             return avr_asm_len ("clr %C0"     CR_TAB
6102                                 "mov %B0,%C1" CR_TAB
6103                                 "mov %A0,%B1", op, plen, 3);
6104 
6105         case 16:
6106           if (dest != src + 2)
6107             avr_asm_len ("mov %A0,%C1", op, plen, 1);
6108 
6109           return avr_asm_len ("clr %B0"  CR_TAB
6110                               "clr %C0", op, plen, 2);
6111 
6112         default:
6113           if (INTVAL (op[2]) < 24)
6114             break;
6115 
6116           /* fall through */
6117 
6118         case 23:
6119           return avr_asm_len ("clr %A0"    CR_TAB
6120                               "sbrc %C0,7" CR_TAB
6121                               "inc %A0"    CR_TAB
6122                               "clr %B0"    CR_TAB
6123                               "clr %C0", op, plen, 5);
6124         } /* switch */
6125     }
6126 
6127   out_shift_with_cnt ("lsr %C0" CR_TAB
6128                       "ror %B0" CR_TAB
6129                       "ror %A0", insn, op, plen, 3);
6130   return "";
6131 }
6132 
6133 
6134 /* 32-bit logic shift right ((unsigned int)x >> i) */
6135 
const char *
lshrsi3_out (rtx insn, rtx operands[], int *len)
{
  /* Output a 32-bit logical shift right of OPERANDS[0] by OPERANDS[2].
     If LEN != NULL no code is printed; instead the length of the
     sequence (in words) is stored through LEN.  */

  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      /* Point LEN at a dummy so the cases below can store the length
	 unconditionally.  */
      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* Shift count >= 32: the result is zero.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 3, ("clr %D0" CR_TAB
			      "clr %C0" CR_TAB
			      "movw %A0,%C0");
	  *len = 4;
	  return ("clr %D0" CR_TAB
		  "clr %C0" CR_TAB
		  "clr %B0" CR_TAB
		  "clr %A0");

	case 8:
	  /* Shift by one byte: move each byte down one position.  The
	     copy direction avoids clobbering an overlapping source.  */
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len = 4;
	    if (reg0 <= reg1)
	      return ("mov %A0,%B1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "clr %D0");
	    else
	      return ("clr %D0"     CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %A0,%B1");
	  }

	case 16:
	  /* Shift by two bytes: move the high word down; the moves are
	     redundant when destination already aliases the high word.  */
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    if (reg0 == reg1 + 2)
	      return *len = 2, ("clr %C0"     CR_TAB
				"clr %D0");
	    if (AVR_HAVE_MOVW)
	      return *len = 3, ("movw %A0,%C1" CR_TAB
				"clr %C0"      CR_TAB
				"clr %D0");
	    else
	      return *len = 4, ("mov %B0,%D1" CR_TAB
				"mov %A0,%C1" CR_TAB
				"clr %C0"     CR_TAB
				"clr %D0");
	  }

	case 24:
	  /* Shift by three bytes: only the MSB byte survives.  */
	  return *len = 4, ("mov %A0,%D1" CR_TAB
			    "clr %B0"     CR_TAB
			    "clr %C0"     CR_TAB
			    "clr %D0");

	case 31:
	  /* Only the topmost bit survives: turn it into 0 or 1.  */
	  *len = 6;
	  return ("clr %A0"    CR_TAB
		  "sbrc %D0,7" CR_TAB
		  "inc %A0"    CR_TAB
		  "clr %B0"    CR_TAB
		  "clr %C0"    CR_TAB
		  "clr %D0");
	}
      len = t;
    }
  /* Generic case: loop shifting one bit at a time.  */
  out_shift_with_cnt ("lsr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
6222 
6223 
6224 /* Output addition of register XOP[0] and compile time constant XOP[2].
6225    CODE == PLUS:  perform addition by using ADD instructions or
6226    CODE == MINUS: perform addition by using SUB instructions:
6227 
6228       XOP[0] = XOP[0] + XOP[2]
6229 
6230    Or perform addition/subtraction with register XOP[2] depending on CODE:
6231 
6232       XOP[0] = XOP[0] +/- XOP[2]
6233 
6234    If PLEN == NULL, print assembler instructions to perform the operation;
6235    otherwise, set *PLEN to the length of the instruction sequence (in words)
6236    printed with PLEN == NULL.  XOP[3] is an 8-bit scratch register or NULL_RTX.
6237    Set *PCC to effect on cc0 according to respective CC_* insn attribute.
6238 
6239    CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
6240    CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
6241    If  CODE_SAT != UNKNOWN  then SIGN contains the sign of the summand resp.
6242    the subtrahend in the original insn, provided it is a compile time constant.
6243    In all other cases, SIGN is 0.
6244 
6245    If OUT_LABEL is true, print the final 0: label which is needed for
6246    saturated addition / subtraction.  The only case where OUT_LABEL = false
6247    is useful is for saturated addition / subtraction performed during
6248    fixed-point rounding, cf. `avr_out_round'.  */
6249 
static void
avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
                enum rtx_code code_sat, int sign, bool out_label)
{
  /* MODE of the operation.  */
  enum machine_mode mode = GET_MODE (xop[0]);

  /* INT_MODE of the same size.  */
  enum machine_mode imode = int_mode_for_mode (mode);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit scratch register */
  rtx op[3];

  /* Started the operation?  Before starting the operation we may skip
     adding 0.  This is no more true after the operation started because
     carry must be taken into account.  */
  bool started = false;

  /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */
  rtx xval = xop[2];

  /* Output a BRVC instruction.  Only needed with saturation.  */
  bool out_brvc = true;

  if (plen)
    *plen = 0;

  /* Register summand/subtrahend: add/subtract byte-wise, then go
     straight to the saturation code.  */
  if (REG_P (xop[2]))
    {
      *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_CLOBBER;

      for (i = 0; i < n_bytes; i++)
        {
          /* We operate byte-wise on the destination.  */
          op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
          op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);

          if (i == 0)
            avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
                         op, plen, 1);
          else
            avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
                         op, plen, 1);
        }

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));

          if (MINUS == code)
            return;
        }

      goto saturate;
    }

  /* Except in the case of ADIW with 16-bit register (see below)
     addition does not set cc0 in a usable way.  */

  *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;

  /* Work on the integer image of a fixed-point constant.  */
  if (CONST_FIXED_P (xval))
    xval = avr_to_int_mode (xval);

  /* Adding/Subtracting zero is a no-op.  */

  if (xval == const0_rtx)
    {
      *pcc = CC_NONE;
      return;
    }

  if (MINUS == code)
    xval = simplify_unary_operation (NEG, imode, xval, imode);

  op[2] = xop[3];

  if (SS_PLUS == code_sat && MINUS == code
      && sign < 0
      && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
                  & GET_MODE_MASK (QImode)))
    {
      /* We compute x + 0x80 by means of SUB instructions.  We negated the
         constant subtrahend above and are left with  x - (-128)  so that we
         need something like SUBI r,128 which does not exist because SUBI sets
         V according to the sign of the subtrahend.  Notice the only case
         where this must be done is when NEG overflowed in case [2s] because
         the V computation needs the right sign of the subtrahend.  */

      rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);

      avr_asm_len ("subi %0,128" CR_TAB
                   "brmi 0f", &msb, plen, 2);
      out_brvc = false;

      goto saturate;
    }

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = gen_int_mode (val8, QImode);

      /* To get usable cc0 no low-bytes must have been skipped.  */

      if (i && !started)
        *pcc = CC_CLOBBER;

      if (!started
          && i % 2 == 0
          && i + 2 <= n_bytes
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
          unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);

          /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
             i.e. operate word-wise.  */

          if (val16 < 64)
            {
              if (val16 != 0)
                {
                  started = true;
                  avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
                               op, plen, 1);

                  if (n_bytes == 2 && PLUS == code)
                    *pcc = CC_SET_CZN;
                }

              i++;
              continue;
            }
        }

      if (val8 == 0)
        {
          /* Zero byte: only carry propagation is needed, and only once
             the operation has started.  */
          if (started)
            avr_asm_len (code == PLUS
                         ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
                         op, plen, 1);
          continue;
        }
      else if ((val8 == 1 || val8 == 0xff)
               && UNKNOWN == code_sat
               && !started
               && i == n_bytes - 1)
        {
          /* Adding/subtracting +-1 to/from the final byte needs no
             carry handling: a single INC/DEC will do.  */
          avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
                       op, plen, 1);
          *pcc = CC_CLOBBER;
          break;
        }

      switch (code)
        {
        case PLUS:

          gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));

          if (plen != NULL && UNKNOWN != code_sat)
            {
              /* This belongs to the x + 0x80 corner case.  The code with
                 ADD instruction is not smaller, thus make this case
                 expensive so that the caller won't pick it.  */

              *plen += 10;
              break;
            }

          /* Load the constant byte into the scratch register unless it
             is already there from a previous iteration.  */
          if (clobber_val != (int) val8)
            avr_asm_len ("ldi %2,%1", op, plen, 1);
          clobber_val = (int) val8;

          avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);

          break; /* PLUS */

        case MINUS:

          if (ld_reg_p)
            avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
          else
            {
              gcc_assert (plen != NULL || REG_P (op[2]));

              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
            }

          break; /* MINUS */

        default:
          /* Unknown code */
          gcc_unreachable();
        }

      started = true;

    } /* for all sub-bytes */

 saturate:

  if (UNKNOWN == code_sat)
    return;

  *pcc = (int) CC_CLOBBER;

  /* Vanilla addition/subtraction is done.  We are left with saturation.

     We have to compute  A = A <op> B  where  A  is a register and
     B is a register or a non-zero compile time constant CONST.
     A is register class "r" if unsigned && B is REG.  Otherwise, A is in "d".
     B stands for the original operand $2 in INSN.  In the case of B = CONST,
     SIGN in { -1, 1 } is the sign of B.  Otherwise, SIGN is 0.

     CODE is the instruction flavor we use in the asm sequence to perform <op>.


     unsigned
     operation        |  code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  C == 1  |  const, reg  | u+ = 0xff |  [1u]
     +  as  a - (-b)  |  sub  |  C == 0  |  const       | u+ = 0xff |  [2u]
     -  as  a - b     |  sub  |  C == 1  |  const, reg  | u- = 0    |  [3u]
     -  as  a + (-b)  |  add  |  C == 0  |  const       | u- = 0    |  [4u]


     signed
     operation        |  code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  V == 1  |  const, reg  | s+        |  [1s]
     +  as  a - (-b)  |  sub  |  V == 1  |  const       | s+        |  [2s]
     -  as  a - b     |  sub  |  V == 1  |  const, reg  | s-        |  [3s]
     -  as  a + (-b)  |  add  |  V == 1  |  const       | s-        |  [4s]

     s+  =  b < 0  ?  -0x80 :  0x7f
     s-  =  b < 0  ?   0x7f : -0x80

     The cases a - b actually perform  a - (-(-b))  if B is CONST.
  */

  /* op[0] is the MSB, op[1] the second-highest byte (or NULL for
     single-byte modes).  */
  op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
  op[1] = n_bytes > 1
    ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
    : NULL_RTX;

  bool need_copy = true;
  int len_call = 1 + AVR_HAVE_JMP_CALL;

  switch (code_sat)
    {
    default:
      gcc_unreachable();

    case SS_PLUS:
    case SS_MINUS:

      /* No signed overflow means no saturation: skip to label 0.  */
      if (out_brvc)
        avr_asm_len ("brvc 0f", op, plen, 1);

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          /* [1s,reg] */

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 2);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "ldi %1,0xff" CR_TAB
                         "adc %1,__zero_reg__" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 4);
        }
      else if (sign == 0 && PLUS == code)
        {
          /* [1s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "sbrs %2,7"   CR_TAB
                         "dec %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "cp %2,%0"    CR_TAB
                         "sbc %1,%1"   CR_TAB
                         "sbci %0,0", op, plen, 4);
        }
      else if (sign == 0 && MINUS == code)
        {
          /* [3s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "sbrs %2,7"   CR_TAB
                         "inc %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "cp %0,%2"    CR_TAB
                         "sbc %1,%1"   CR_TAB
                         "sbci %0,-1", op, plen, 4);
        }
      else if ((sign < 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B < 0] [2s,B < 0] */
          /* [3s,const,B > 0] [4s,B > 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("%~call __clr_8", op, plen, len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x80", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("clr %1", op, plen, 1);
        }
      else if ((sign > 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B > 0] [2s,B > 0] */
          /* [3s,const,B < 0] [4s,B < 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("sec" CR_TAB
                           "%~call __sbc_8", op, plen, 1 + len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x7f", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("ldi %1,0xff", op, plen, 1);
        }
      else
        gcc_unreachable();

      break;

    case US_PLUS:
      /* [1u] : [2u] */

      avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          if (MINUS == code)
            avr_asm_len ("sec", op, plen, 1);
          avr_asm_len ("%~call __sbc_8", op, plen, len_call);

          need_copy = false;
        }
      else
        {
          if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
            avr_asm_len ("sec" CR_TAB "sbc %0,%0", op, plen, 2);
          else
            avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
                         op, plen, 1);
        }
      break; /* US_PLUS */

    case US_MINUS:
      /* [4u] : [3u] */

      avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          avr_asm_len ("%~call __clr_8", op, plen, len_call);
          need_copy = false;
        }
      else
        avr_asm_len ("clr %0", op, plen, 1);

      break;
    }

  /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
     Now copy the right value to the LSBs.  */

  if (need_copy && n_bytes > 1)
    {
      if (US_MINUS == code_sat || US_PLUS == code_sat)
        {
          avr_asm_len ("mov %1,%0", op, plen, 1);

          if (n_bytes > 2)
            {
              op[0] = xop[0];
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %0,%1", op, plen, 1);
              else
                avr_asm_len ("mov %A0,%1" CR_TAB
                             "mov %B0,%1", op, plen, 2);
            }
        }
      else if (n_bytes > 2)
        {
          op[0] = xop[0];
          avr_asm_len ("mov %A0,%1" CR_TAB
                       "mov %B0,%1", op, plen, 2);
        }
    }

  if (need_copy && n_bytes == 8)
    {
      if (AVR_HAVE_MOVW)
        avr_asm_len ("movw %r0+2,%0" CR_TAB
                     "movw %r0+4,%0", xop, plen, 2);
      else
        avr_asm_len ("mov %r0+2,%0" CR_TAB
                     "mov %r0+3,%0" CR_TAB
                     "mov %r0+4,%0" CR_TAB
                     "mov %r0+5,%0", xop, plen, 4);
    }

  /* The no-saturation paths branch here, cf. BRVC / BRCC / BRCS above.  */
  if (out_label)
    avr_asm_len ("0:", op, plen, 0);
}
6695 
6696 
6697 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
   is not a compile-time constant:
6699 
6700       XOP[0] = XOP[0] +/- XOP[2]
6701 
6702    This is a helper for the function below.  The only insns that need this
6703    are additions/subtraction for pointer modes, i.e. HImode and PSImode.  */
6704 
6705 static const char*
6706 avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
6707 {
6708   enum machine_mode mode = GET_MODE (xop[0]);
6709 
6710   /* Only pointer modes want to add symbols.  */
6711 
6712   gcc_assert (mode == HImode || mode == PSImode);
6713 
6714   *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
6715 
6716   avr_asm_len (PLUS == code
6717                ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
6718                : "subi %A0,lo8(%2)"    CR_TAB "sbci %B0,hi8(%2)",
6719                xop, plen, -2);
6720 
6721   if (PSImode == mode)
6722     avr_asm_len (PLUS == code
6723                  ? "sbci %C0,hlo8(-(%2))"
6724                  : "sbci %C0,hlo8(%2)", xop, plen, 1);
6725   return "";
6726 }
6727 
6728 
6729 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
6730 
6731    INSN is a single_set insn or an insn pattern with a binary operation as
6732    SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
6733 
6734    XOP are the operands of INSN.  In the case of 64-bit operations with
6735    constant XOP[] has just one element:  The summand/subtrahend in XOP[0].
6736    The non-saturating insns up to 32 bits may or may not supply a "d" class
6737    scratch as XOP[3].
6738 
6739    If PLEN == NULL output the instructions.
6740    If PLEN != NULL set *PLEN to the length of the sequence in words.
6741 
6742    PCC is a pointer to store the instructions' effect on cc0.
6743    PCC may be NULL.
6744 
6745    PLEN and PCC default to NULL.
6746 
6747    OUT_LABEL defaults to TRUE.  For a description, see AVR_OUT_PLUS_1.
6748 
6749    Return ""  */
6750 
const char*
avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)
{
  int cc_plus, cc_minus, cc_dummy;
  int len_plus, len_minus;
  rtx op[4];
  rtx xpattern = INSN_P (insn) ? single_set (insn) : insn;
  rtx xdest = SET_DEST (xpattern);
  enum machine_mode mode = GET_MODE (xdest);
  enum machine_mode imode = int_mode_for_mode (mode);
  int n_bytes = GET_MODE_SIZE (mode);
  enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
  enum rtx_code code
    = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
       ? PLUS : MINUS);

  if (!pcc)
    pcc = &cc_dummy;

  /* PLUS and MINUS don't saturate:  Use modular wrap-around.  */

  if (PLUS == code_sat || MINUS == code_sat)
    code_sat = UNKNOWN;

  /* Register operand up to 32 bits: nothing to choose between, emit
     the operation directly.  */
  if (n_bytes <= 4 && REG_P (xop[2]))
    {
      avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);
      return "";
    }

  /* 64-bit operations work on the fixed accumulator registers and
     carry the summand/subtrahend in XOP[0].  */
  if (8 == n_bytes)
    {
      op[0] = gen_rtx_REG (DImode, ACC_A);
      op[1] = gen_rtx_REG (DImode, ACC_A);
      op[2] = avr_to_int_mode (xop[0]);
    }
  else
    {
      /* A non-register, non-constant operand must be a symbol.  */
      if (!REG_P (xop[2])
          && !CONST_INT_P (xop[2])
          && !CONST_FIXED_P (xop[2]))
        {
          return avr_out_plus_symbol (xop, code, plen, pcc);
        }

      op[0] = avr_to_int_mode (xop[0]);
      op[1] = avr_to_int_mode (xop[1]);
      op[2] = avr_to_int_mode (xop[2]);
    }

  /* Saturations and 64-bit operations don't have a clobber operand.
     For the other cases, the caller will provide a proper XOP[3].  */

  xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
  op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;

  /* Saturation will need the sign of the original operand.  */

  rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
  int sign = INTVAL (xmsb) < 0 ? -1 : 1;

  /* If we subtract and the subtrahend is a constant, then negate it
     so that avr_out_plus_1 can be used.  */

  if (MINUS == code)
    op[2] = simplify_unary_operation (NEG, imode, op[2], imode);

  /* Work out the shortest sequence.  */

  /* Dry run both flavors to get their lengths, then emit (or report)
     whichever one is shorter.  */
  avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign, out_label);
  avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign, out_label);

  if (plen)
    {
      *plen = (len_minus <= len_plus) ? len_minus : len_plus;
      *pcc  = (len_minus <= len_plus) ? cc_minus : cc_plus;
    }
  else if (len_minus <= len_plus)
    avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
  else
    avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);

  return "";
}
6835 
6836 
6837 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6838    time constant XOP[2]:
6839 
6840       XOP[0] = XOP[0] <op> XOP[2]
6841 
6842    and return "".  If PLEN == NULL, print assembler instructions to perform the
6843    operation; otherwise, set *PLEN to the length of the instruction sequence
6844    (in words) printed with PLEN == NULL.  XOP[3] is either an 8-bit clobber
6845    register or SCRATCH if no clobber register is needed for the operation.
6846    INSN is an INSN_P or a pattern of an insn.  */
6847 
const char*
avr_out_bitop (rtx insn, rtx *xop, int *plen)
{
  /* CODE and MODE of the operation.  */
  rtx xpattern = INSN_P (insn) ? single_set (insn) : insn;
  enum rtx_code code = GET_CODE (SET_SRC (xpattern));
  enum machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value of T-flag (0 or 1) or -1 if unknown.  */
  int set_t = -1;

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit clobber register or SCRATCH
     op[3]: 8-bit register containing 0xff or NULL_RTX  */
  rtx op[4];

  op[2] = xop[3];
  op[3] = NULL_RTX;

  if (plen)
    *plen = 0;

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Number of bits set in the current byte of the constant.  */
      int pop8 = avr_popcount (val8);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = GEN_INT (val8);

      switch (code)
        {
        case IOR:

          if (0 == pop8)
            continue;
          else if (ld_reg_p)
            avr_asm_len ("ori %0,%1", op, plen, 1);
          else if (1 == pop8)
            {
              /* Exactly one bit to set: load T and use BLD; a T-flag
                 set by a previous byte can be reused.  */
              if (set_t != 1)
                avr_asm_len ("set", op, plen, 1);
              set_t = 1;

              op[1] = GEN_INT (exact_log2 (val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else if (8 == pop8)
            {
              /* All bits set: copy 0xff from a register that already
                 holds it, otherwise synthesize it with CLR + DEC and
                 remember the register for later bytes.  */
              if (op[3] != NULL_RTX)
                avr_asm_len ("mov %0,%3", op, plen, 1);
              else
                avr_asm_len ("clr %0" CR_TAB
                             "dec %0", op, plen, 2);

              op[3] = op[0];
            }
          else
            {
              /* Generic case: load the constant into the clobber
                 register unless it still holds this value.  */
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("or %0,%2", op, plen, 1);
            }

          continue; /* IOR */

        case AND:

          if (8 == pop8)
            continue;
          else if (0 == pop8)
            avr_asm_len ("clr %0", op, plen, 1);
          else if (ld_reg_p)
            avr_asm_len ("andi %0,%1", op, plen, 1);
          else if (7 == pop8)
            {
              /* Exactly one bit to clear: clear T and use BLD; a
                 cleared T-flag from a previous byte can be reused.  */
              if (set_t != 0)
                avr_asm_len ("clt", op, plen, 1);
              set_t = 0;

              op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("and %0,%2", op, plen, 1);
            }

          continue; /* AND */

        case XOR:

          if (0 == pop8)
            continue;
          else if (8 == pop8)
            avr_asm_len ("com %0", op, plen, 1);
          else if (ld_reg_p && val8 == (1 << 7))
            /* Subtracting 0x80 toggles bit 7 (mod 256), i.e. XOR of
               the MSB, and SUBI only needs one instruction.  */
            avr_asm_len ("subi %0,%1", op, plen, 1);
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("eor %0,%2", op, plen, 1);
            }

          continue; /* XOR */

        default:
          /* Unknown rtx_code */
          gcc_unreachable();
        }
    } /* for all sub-bytes */

  return "";
}
6988 
6989 
6990 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6991    PLEN != NULL: Set *PLEN to the length of that sequence.
6992    Return "".  */
6993 
6994 const char*
6995 avr_out_addto_sp (rtx *op, int *plen)
6996 {
6997   int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6998   int addend = INTVAL (op[0]);
6999 
7000   if (plen)
7001     *plen = 0;
7002 
7003   if (addend < 0)
7004     {
7005       if (flag_verbose_asm || flag_print_asm_name)
7006         avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
7007 
7008       while (addend <= -pc_len)
7009         {
7010           addend += pc_len;
7011           avr_asm_len ("rcall .", op, plen, 1);
7012         }
7013 
7014       while (addend++ < 0)
7015         avr_asm_len ("push __zero_reg__", op, plen, 1);
7016     }
7017   else if (addend > 0)
7018     {
7019       if (flag_verbose_asm || flag_print_asm_name)
7020         avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
7021 
7022       while (addend-- > 0)
7023         avr_asm_len ("pop __tmp_reg__", op, plen, 1);
7024     }
7025 
7026   return "";
7027 }
7028 
7029 
/* Output the instructions needed for the fixed point type conversion
   requested by INSN with operands OPERANDS[].  This includes converting
   between any fixed point type, as well as converting to any integer
   type.  Conversion between integer types is not supported.

   INTSIGNED is true if an integer operand is to be treated as signed.

   PLEN == NULL: Print the instruction sequence.
   PLEN != NULL: Don't print anything; set *PLEN to the length of the
                 sequence instead.  Return "".

   Converting signed fractional types requires a bit shift if converting
   to or from any unsigned fractional type because the decimal place is
   shifted by 1 bit.  When the destination is a signed fractional, the sign
   is stored in either the carry or T bit.  */

const char*
avr_out_fract (rtx insn, rtx operands[], bool intsigned, int *plen)
{
  size_t i;
  rtx xop[6];
  RTX_CODE shift = UNKNOWN;
  bool sign_in_carry = false;
  bool msb_in_carry = false;
  bool lsb_in_carry = false;
  const char *code_ashift = "lsl %0";


#define MAY_CLOBBER(RR)                                                 \
  /* Shorthand used below.  */                                          \
  ((sign_bytes                                                          \
    && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb))  \
   || (reg_unused_after (insn, all_regs_rtx[RR])                        \
       && !IN_RANGE (RR, dest.regno, dest.regno_msb)))

  struct
  {
    /* bytes       : Length of operand in bytes.
       ibyte       : Length of integral part in bytes.
       fbyte, fbit : Length of fractional part in bytes, bits.
       sbit        : True iff the operand is of a signed type.
       regno       : First hard register number of the operand.
       regno_msb   : Hard register number holding the operand's MSB.  */

    bool sbit;
    unsigned fbit, bytes, ibyte, fbyte;
    unsigned regno, regno_msb;
  } dest, src, *val[2] = { &dest, &src };

  if (plen)
    *plen = 0;

  /* Step 0:  Determine information on source and destination operand we
     ======   will need in the remainder.  */

  for (i = 0; i < sizeof (val) / sizeof (*val); i++)
    {
      enum machine_mode mode;

      xop[i] = operands[i];

      mode = GET_MODE (xop[i]);

      val[i]->bytes = GET_MODE_SIZE (mode);
      val[i]->regno = REGNO (xop[i]);
      val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;

      if (SCALAR_INT_MODE_P (mode))
        {
          val[i]->sbit = intsigned;
          val[i]->fbit = 0;
        }
      else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
        {
          val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
          val[i]->fbit = GET_MODE_FBIT (mode);
        }
      else
        fatal_insn ("unsupported fixed-point conversion", insn);

      val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
      val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
    }

  // Byte offset of the decimal point taking into account different place
  // of the decimal point in input and output and different register numbers
  // of input and output.
  int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;

  // Number of destination bytes that will come from sign / zero extension.
  int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);

  // Number of bytes at the low end to be filled with zeros.
  int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);

  // Do we have a 16-Bit register that is cleared?
  rtx clrw = NULL_RTX;

  bool sign_extend = src.sbit && sign_bytes;

  // Decide whether a 1-bit shift is needed, i.e. whether the sub-byte
  // position of the decimal point differs between source and destination.
  if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
    shift = ASHIFT;
  else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
    shift = ASHIFTRT;
  else if (dest.fbit % 8 == src.fbit % 8)
    shift = UNKNOWN;
  else
    gcc_unreachable();

  /* Step 1:  Clear bytes at the low end and copy payload bits from source
     ======   to destination.  */

  int step = offset < 0 ? 1 : -1;
  unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;

  // We cleared at least that number of registers.
  int clr_n = 0;

  for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
    {
      // Next regno of destination is needed for MOVW
      unsigned d1 = d0 + step;

      // Current and next regno of source
      signed s0 = d0 - offset;
      signed s1 = s0 + step;

      // Must current resp. next regno be CLRed?  This applies to the low
      // bytes of the destination that have no associated source bytes.
      bool clr0 = s0 < (signed) src.regno;
      bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;

      // First gather what code to emit (if any) and additional step to
      // apply if a MOVW is in use.  xop[2] is destination rtx and xop[3]
      // is the source rtx for the current loop iteration.
      const char *code = NULL;
      int stepw = 0;

      if (clr0)
        {
          if (AVR_HAVE_MOVW && clr1 && clrw)
            {
              // Clear two bytes at once by copying a known-zero pair.
              xop[2] = all_regs_rtx[d0 & ~1];
              xop[3] = clrw;
              code = "movw %2,%3";
              stepw = step;
            }
          else
            {
              xop[2] = all_regs_rtx[d0];
              code = "clr %2";

              if (++clr_n >= 2
                  && !clrw
                  && d0 % 2 == (step > 0))
                {
                  // Remember an even-aligned pair of cleared registers
                  // for use as MOVW source above.
                  clrw = all_regs_rtx[d0 & ~1];
                }
            }
        }
      else if (offset && s0 <= (signed) src.regno_msb)
        {
          int movw = AVR_HAVE_MOVW && offset % 2 == 0
            && d0 % 2 == (offset > 0)
            && d1 <= dest.regno_msb && d1 >= dest.regno
            && s1 <= (signed) src.regno_msb  && s1 >= (signed) src.regno;

          xop[2] = all_regs_rtx[d0 & ~movw];
          xop[3] = all_regs_rtx[s0 & ~movw];
          code = movw ? "movw %2,%3" : "mov %2,%3";
          stepw = step * movw;
        }

      if (code)
        {
          if (sign_extend && shift != ASHIFT && !sign_in_carry
              && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
            {
              /* We are going to override the sign bit.  If we sign-extend,
                 store the sign in the Carry flag.  This is not needed if
                 the destination will be left-shifted (ASHIFT) in the
                 remainder, because that ASHIFT will set Carry without an
                 extra instruction.  */

              avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
              sign_in_carry = true;
            }

          unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;

          if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
              && src.ibyte > dest.ibyte
              && (d0 == src_msb || d0 + stepw == src_msb))
            {
              /* We are going to override the MSB.  If we shift right,
                 store the MSB in the Carry flag.  This is only needed if
                 we don't sign-extend because with sign-extension the MSB
                 (the sign) will be produced by the sign extension.  */

              avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
              msb_in_carry = true;
            }

          unsigned src_lsb = dest.regno - offset -1;

          if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
              && (d0 == src_lsb || d0 + stepw == src_lsb))
            {
              /* We are going to override the new LSB; store it into carry.  */

              avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
              code_ashift = "rol %0";
              lsb_in_carry = true;
            }

          avr_asm_len (code, xop, plen, 1);
          d0 += stepw;
        }
    }

  /* Step 2:  Shift destination left by 1 bit position.  This might be needed
     ======   for signed input and unsigned output.  */

  if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
    {
      unsigned s0 = dest.regno - offset -1;

      if (MAY_CLOBBER (s0))
        avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
      else
        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
                     "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      code_ashift = "rol %0";
      lsb_in_carry = true;
    }

  if (shift == ASHIFT)
    {
      for (d0 = dest.regno + zero_bytes;
           d0 <= dest.regno_msb - sign_bytes; d0++)
        {
          avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
          code_ashift = "rol %0";
        }

      lsb_in_carry = false;
      sign_in_carry = true;
    }

  /* Step 4a:  Store MSB in carry if we don't already have it or will produce
     =======   it in sign-extension below.  */

  if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
      && src.ibyte > dest.ibyte)
    {
      unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;

      if (MAY_CLOBBER (s0))
        avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
      else
        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
                     "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      msb_in_carry = true;
    }

  /* Step 3:  Sign-extend or zero-extend the destination as needed.
     ======   */

  if (sign_extend && !sign_in_carry)
    {
      unsigned s0 = src.regno_msb;

      if (MAY_CLOBBER (s0))
        avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
      else
        avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
                     "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);

      sign_in_carry = true;
  }

  gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);

  unsigned copies = 0;
  rtx movw = sign_extend ? NULL_RTX : clrw;

  // Fill the sign_bytes high registers.  "sbc r,r" yields 0x00 or 0xff
  // depending on Carry, i.e. the sign saved above.
  for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
    {
      if (AVR_HAVE_MOVW && movw
          && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
        {
          xop[2] = all_regs_rtx[d0];
          xop[3] = movw;
          avr_asm_len ("movw %2,%3", xop, plen, 1);
          d0++;
        }
      else
        {
          avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
                       &all_regs_rtx[d0], plen, 1);

          if (++copies >= 2 && !movw && d0 % 2 == 1)
            movw = all_regs_rtx[d0-1];
        }
    } /* for */


  /* Step 4:  Right shift the destination.  This might be needed for
     ======   conversions from unsigned to signed.  */

  if (shift == ASHIFTRT)
    {
      const char *code_ashiftrt = "lsr %0";

      if (sign_extend || msb_in_carry)
        code_ashiftrt = "ror %0";

      if (src.sbit && src.ibyte == dest.ibyte)
        code_ashiftrt = "asr %0";

      for (d0 = dest.regno_msb - sign_bytes;
           d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
        {
          avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
          code_ashiftrt = "ror %0";
        }
    }

#undef MAY_CLOBBER

  return "";
}
7354 
7355 
/* Output fixed-point rounding.  XOP[0] = XOP[1] is the operand to round.
   XOP[2] is the rounding point RP, a CONST_INT.  The function prints the
   instruction sequence if PLEN = NULL and computes the length in words
   of the sequence if PLEN != NULL.  Most of this function deals with
   preparing operands for calls to `avr_out_plus' and `avr_out_bitop'.

   The rounding is performed as:  add-saturate 1/2 * 2^(-RP), then
   clear all fraction bits below 2^(-RP).  */

const char*
avr_out_round (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
{
  enum machine_mode mode = GET_MODE (xop[0]);
  /* Integer mode of the same size, used for the bit-clearing AND below.  */
  enum machine_mode imode = int_mode_for_mode (mode);
  // The smallest fractional bit not cleared by the rounding is 2^(-RP).
  int fbit = (int) GET_MODE_FBIT (mode);
  // 1/2 * 2^(-RP) represented in the fixed-point layout of MODE.
  double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
  // Lengths of PLUS and AND parts.
  int len_add = 0, *plen_add = plen ? &len_add : NULL;
  int len_and = 0, *plen_and = plen ? &len_and : NULL;

  // Add-Saturate  1/2 * 2^(-RP).  Don't print the label "0:" when printing
  // the saturated addition so that we can emit the "rjmp 1f" before the
  // "0:" below.

  rtx xadd = const_fixed_from_double_int (i_add, mode);
  rtx xpattern, xsrc, op[4];

  xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
    ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
    : gen_rtx_US_PLUS (mode, xop[1], xadd);
  xpattern = gen_rtx_SET (VOIDmode, xop[0], xsrc);

  op[0] = xop[0];
  op[1] = xop[1];
  op[2] = xadd;
  avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);

  avr_asm_len ("rjmp 1f" CR_TAB
               "0:", NULL, plen_add, 1);

  // Keep  all bits from RP and higher:   ... 2^(-RP)
  // Clear all bits from RP+1 and lower:              2^(-RP-1) ...
  // Rounding point                           ^^^^^^^
  // Added above                                      ^^^^^^^^^
  rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
  // -i_add - i_add == -2 * i_add:  two's complement mask with all ones
  // from the 2^(-RP) bit upwards.
  rtx xmask = immed_double_int_const (-i_add - i_add, imode);

  xpattern = gen_rtx_SET (VOIDmode, xreg, gen_rtx_AND (imode, xreg, xmask));

  op[0] = xreg;
  op[1] = xreg;
  op[2] = xmask;
  op[3] = gen_rtx_SCRATCH (QImode);
  avr_out_bitop (xpattern, op, plen_and);
  // "1:" is the common join point after the rounding sequence.
  avr_asm_len ("1:", NULL, plen, 0);

  if (plen)
    *plen = len_add + len_and;

  return "";
}
7415 
7416 
/* Create RTL split patterns for byte sized rotate expressions.  This
  produces a series of move instructions and considers overlap situations.
  Overlapping non-HImode operands need a scratch register.
  Always returns true.  */

bool
avr_rotate_bytes (rtx operands[])
{
    int i, j;
    enum machine_mode mode = GET_MODE (operands[0]);
    bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
    bool same_reg = rtx_equal_p (operands[0], operands[1]);
    /* Rotate amount in bits, a CONST_INT.  */
    int num = INTVAL (operands[2]);
    rtx scratch = operands[3];
    /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
       Word move if no scratch is needed, otherwise use size of scratch.  */
    enum machine_mode move_mode = QImode;
    int move_size, offset, size;

    if (num & 0xf)
      move_mode = QImode;
    else if ((mode == SImode && !same_reg) || !overlapped)
      move_mode = HImode;
    else
      move_mode = GET_MODE (scratch);

    /* Force DI rotate to use QI moves since other DI moves are currently split
       into QI moves so forward propagation works better.  */
    if (mode == DImode)
      move_mode = QImode;
    /* Make scratch smaller if needed.  */
    if (SCRATCH != GET_CODE (scratch)
        && HImode == GET_MODE (scratch)
        && QImode == move_mode)
      scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

    move_size = GET_MODE_SIZE (move_mode);
    /* Number of bytes/words to rotate.  */
    offset = (num  >> 3) / move_size;
    /* Number of moves needed.  */
    size = GET_MODE_SIZE (mode) / move_size;
    /* HImode byte swap is a special case to avoid a scratch register.  */
    if (mode == HImode && same_reg)
      {
	/* HImode byte swap, using xor.  This is as quick as using scratch.  */
	rtx src, dst;
	src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
	dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
	if (!rtx_equal_p (dst, src))
	  {
	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	     emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	  }
      }
    else
      {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode)  */
	/* Create linked list of moves to determine move order.  */
	struct {
	  rtx src, dst;
	  int links;
	} move[MAX_SIZE + 8];
	int blocked, moves;

	gcc_assert (size <= MAX_SIZE);
	/* Generate list of subreg moves.  */
	for (i = 0; i < size; i++)
	  {
	    int from = i;
	    int to = (from + offset) % size;
	    move[i].src = simplify_gen_subreg (move_mode, operands[1],
						mode, from * move_size);
	    move[i].dst = simplify_gen_subreg (move_mode, operands[0],
						mode, to   * move_size);
	    move[i].links = -1;
	   }
	/* Mark dependence where a dst of one move is the src of another move.
	   The first move is a conflict as it must wait until second is
	   performed.  We ignore moves to self - we catch this later.  */
	if (overlapped)
	  for (i = 0; i < size; i++)
	    if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
	      for (j = 0; j < size; j++)
		if (j != i && rtx_equal_p (move[j].src, move[i].dst))
		  {
		    /* The dst of move i is the src of move j.  */
		    move[i].links = j;
		    break;
		  }

	blocked = -1;
	moves = 0;
	/* Go through move list and perform non-conflicting moves.  As each
	   non-overlapping move is made, it may remove other conflicts
	   so the process is repeated until no conflicts remain.  */
	do
	  {
	    blocked = -1;
	    moves = 0;
	    /* Emit move where dst is not also a src or we have used that
	       src already.  */
	    for (i = 0; i < size; i++)
	      if (move[i].src != NULL_RTX)
		{
		  if (move[i].links == -1
		      || move[move[i].links].src == NULL_RTX)
		    {
		      moves++;
		      /* Ignore NOP moves to self.  */
		      if (!rtx_equal_p (move[i].dst, move[i].src))
			emit_move_insn (move[i].dst, move[i].src);

		      /* Remove conflict from list.  */
		      move[i].src = NULL_RTX;
		    }
		  else
		    blocked = i;
		}

	    /* Check for deadlock.  This is when no moves occurred and we have
	       at least one blocked move.  */
	    if (moves == 0 && blocked != -1)
	      {
		/* Need to use scratch register to break deadlock.
		   Add move to put dst of blocked move into scratch.
		   When this move occurs, it will break chain deadlock.
		   The scratch register is substituted for real move.  */

		gcc_assert (SCRATCH != GET_CODE (scratch));

		move[size].src = move[blocked].dst;
		move[size].dst =  scratch;
		/* Scratch move is never blocked.  */
		move[size].links = -1;
		/* Make sure we have valid link.  */
		gcc_assert (move[blocked].links != -1);
		/* Replace src of  blocking move with scratch reg.  */
		move[move[blocked].links].src = scratch;
		/* Make dependent on scratch move occurring.  */
		move[blocked].links = size;
		size=size+1;
	      }
	  }
	while (blocked != -1);
      }
    return true;
}
7564 
7565 
/* Worker function for `ADJUST_INSN_LENGTH'.  */
/* Modifies the length assigned to instruction INSN.
   LEN is the initially computed length of the insn.  Return LEN,
   possibly replaced by a length computed by the output function
   that is responsible for INSN.  */

int
avr_adjust_insn_length (rtx insn, int len)
{
  rtx *op = recog_data.operand;
  enum attr_adjust_len adjust_len;

  /* Some complex insns don't need length adjustment and therefore
     the length need not/must not be adjusted for these insns.
     It is easier to state this in an insn attribute "adjust_len" than
     to clutter up code here...  */

  if (!NONDEBUG_INSN_P (insn)
      || -1 == recog_memoized (insn))
    {
      return len;
    }

  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */

  adjust_len = get_attr_adjust_len (insn);

  if (adjust_len == ADJUST_LEN_NO)
    {
      /* Nothing to adjust: The length from attribute "length" is fine.
         This is the default.  */

      return len;
    }

  /* Extract insn's operands.  */

  extract_constrain_insn_cached (insn);

  /* Dispatch to right function.  Each output function computes the
     exact length when called with a non-NULL length pointer.  */

  switch (adjust_len)
    {
    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;

    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;

    case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;

    case ADJUST_LEN_MOV8:  output_movqi (insn, op, &len); break;
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
    case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;

    case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
    case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
    case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;

    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;

    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;

    /* Length depends only on whether the device has CALL/JMP.  */
    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;

    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;

    default:
      gcc_unreachable();
    }

  return len;
}
7660 
7661 /* Return nonzero if register REG dead after INSN.  */
7662 
7663 int
7664 reg_unused_after (rtx insn, rtx reg)
7665 {
7666   return (dead_or_set_p (insn, reg)
7667 	  || (REG_P(reg) && _reg_unused_after (insn, reg)));
7668 }
7669 
/* Return nonzero if REG is not used after INSN.
   We assume REG is a reload reg, and therefore does
   not live past labels.  It may live past calls or jumps though.  */

int
_reg_unused_after (rtx insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) != MEM
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  /* Scan forward through the remaining insns.  */
  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;
      code = GET_CODE (insn);

#if 0
      /* If this is a label that existed before reload, then the register
	 is dead here.  However, if this is a label added by reorg, then
	 the register may still be live here.  We can't tell the difference,
	 so we just ignore labels completely.  */
      if (code == CODE_LABEL)
	return 1;
      /* else */
#endif

      if (!INSN_P (insn))
	continue;

      if (code == JUMP_INSN)
	return 0;

      /* If this is a sequence, we must handle them all at once.
	 We could have for instance a call that sets the target register,
	 and an insn in a delay slot that uses the register.  In this case,
	 we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  int retval = 0;

	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	    {
	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
	      rtx set = single_set (this_insn);

	      if (GET_CODE (this_insn) == CALL_INSN)
		code = CALL_INSN;
	      else if (GET_CODE (this_insn) == JUMP_INSN)
		{
		  if (INSN_ANNULLED_BRANCH_P (this_insn))
		    return 0;
		  code = JUMP_INSN;
		}

	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
		return 0;
	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
		{
		  if (GET_CODE (SET_DEST (set)) != MEM)
		    retval = 1;
		  else
		    return 0;
		}
	      if (set == 0
		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
		return 0;
	    }
	  if (retval == 1)
	    return 1;
	  else if (code == JUMP_INSN)
	    return 0;
	}

      if (code == CALL_INSN)
	{
	  rtx tem;
	  /* A register mentioned in the call's USEs is needed by the
	     call, hence still live.  */
	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	    if (GET_CODE (XEXP (tem, 0)) == USE
		&& REG_P (XEXP (XEXP (tem, 0), 0))
		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
	      return 0;
	  if (call_used_regs[REGNO (reg)])
	    return 1;
	}

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
	return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	return GET_CODE (SET_DEST (set)) != MEM;
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	return 0;
    }
  return 1;
}
7774 
7775 
/* Implement `TARGET_ASM_INTEGER'.  */
/* Target hook for assembling integer objects.  The AVR version needs
   special handling for references to certain labels.  */

static bool
avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  /* Pointer-sized references into the text segment are wrapped in the
     gs() operator so the linker resolves them to word addresses
     (possibly via stubs).  */

  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
      && text_segment_operand (x, VOIDmode))
    {
      fputs ("\t.word\tgs(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);

      return true;
    }
  else if (GET_MODE (x) == PSImode)
    {
      /* 24-bit (PSImode) objects are emitted byte-wise with the
         lo8 / hi8 / hh8 operators.
         This needs binutils 2.23+, see PR binutils/13503  */

      fputs ("\t.byte\tlo8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thi8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thh8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      return true;
    }
  else if (CONST_FIXED_P (x))
    {
      unsigned n;

      /* varasm fails to handle big fixed modes that don't fit in hwi.
         Emit such constants one byte at a time via QImode subregs.  */

      for (n = 0; n < size; n++)
        {
          rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
          default_assemble_integer (xn, 1, aligned_p);
        }

      return true;
    }

  /* Everything else is handled by the generic implementation.  */

  return default_assemble_integer (x, size, aligned_p);
}
7827 
7828 
7829 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'.  */
7830 /* Return value is nonzero if pseudos that have been
7831    assigned to registers of class CLASS would likely be spilled
7832    because registers of CLASS are needed for spill registers.  */
7833 
7834 static bool
7835 avr_class_likely_spilled_p (reg_class_t c)
7836 {
7837   return (c != ALL_REGS && c != ADDW_REGS);
7838 }
7839 
7840 
7841 /* Valid attributes:
7842    progmem   -  Put data to program memory.
7843    signal    -  Make a function to be hardware interrupt.
7844                 After function prologue interrupts remain disabled.
7845    interrupt -  Make a function to be hardware interrupt. Before function
7846                 prologue interrupts are enabled by means of SEI.
7847    naked     -  Don't generate function prologue/epilogue and RET
7848                 instruction.  */
7849 
/* Handle a "progmem" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
avr_handle_progmem_attribute (tree *node, tree name,
			      tree args ATTRIBUTE_UNUSED,
			      int flags ATTRIBUTE_UNUSED,
			      bool *no_add_attrs)
{
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
	{
	  /* This is really a decl attribute, not a type attribute,
	     but try to handle it for GCC 3.0 backwards compatibility.  */

	  tree type = TREE_TYPE (*node);
	  tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
	  tree newtype = build_type_attribute_variant (type, attr);

	  TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
	  TREE_TYPE (*node) = newtype;
	  /* The attribute was attached to the type; don't also attach
	     it to the decl.  */
	  *no_add_attrs = true;
	}
      else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
	{
	  /* Variable with static storage duration: keep the attribute.  */
          *no_add_attrs = false;
	}
      else
	{
	  /* Automatic variables can't live in program memory.  */
	  warning (OPT_Wattributes, "%qE attribute ignored",
		   name);
	  *no_add_attrs = true;
	}
    }

  return NULL_TREE;
}
7888 
7889 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
7890    struct attribute_spec.handler.  */
7891 
7892 static tree
7893 avr_handle_fndecl_attribute (tree *node, tree name,
7894 			     tree args ATTRIBUTE_UNUSED,
7895 			     int flags ATTRIBUTE_UNUSED,
7896 			     bool *no_add_attrs)
7897 {
7898   if (TREE_CODE (*node) != FUNCTION_DECL)
7899     {
7900       warning (OPT_Wattributes, "%qE attribute only applies to functions",
7901 	       name);
7902       *no_add_attrs = true;
7903     }
7904 
7905   return NULL_TREE;
7906 }
7907 
/* Handle an attribute requiring a FUNCTION_TYPE; arguments as in
   struct attribute_spec.handler.  Warn and drop the attribute if
   NODE is not a function type.  */

static tree
avr_handle_fntype_attribute (tree *node, tree name,
                             tree args ATTRIBUTE_UNUSED,
                             int flags ATTRIBUTE_UNUSED,
                             bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
	       name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
7923 
7924 
/* AVR attributes.  */
static const struct attribute_spec
avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
    false },
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  /* Sentinel entry terminating the table.  */
  { NULL,        0, 0, false, false, false, NULL, false }
};
7945 
7946 
/* Look if DECL shall be placed in program memory space by
   means of attribute `progmem' or some address-space qualifier.
   Return non-zero if DECL is data that must end up in Flash and
   zero if the data lives in RAM (.bss, .data, .rodata, ...).

   Return 2   if DECL is located in 24-bit flash address-space
   Return 1   if DECL is located in 16-bit flash address-space
   Return -1  if attribute `progmem' occurs in DECL or ATTRIBUTES
   Return 0   otherwise  */

int
avr_progmem_p (tree decl, tree attributes)
{
  tree a;

  /* Only variables can be put into flash.  */

  if (TREE_CODE (decl) != VAR_DECL)
    return 0;

  /* Address-space qualifiers take precedence over the attribute.  */

  if (avr_decl_memx_p (decl))
    return 2;

  if (avr_decl_flash_p (decl))
    return 1;

  if (NULL_TREE
      != lookup_attribute ("progmem", attributes))
    return -1;

  /* The attribute may also sit on the (element) type, e.g. when DECL
     is an array of a progmem-attributed type: strip array layers to
     reach the element type.  */

  a = decl;

  do
    a = TREE_TYPE(a);
  while (TREE_CODE (a) == ARRAY_TYPE);

  if (a == error_mark_node)
    return 0;

  if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
    return -1;

  return 0;
}
7989 
7990 
/* Scan type TYP for pointer references to address space ASn.
   Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
   the AS are also declared to be CONST.
   Otherwise, return the respective address space, i.e. a value != 0.  */

static addr_space_t
avr_nonconst_pointer_addrspace (tree typ)
{
  /* An array of pointers is as good as a pointer: look through to
     the element type.  */

  while (ARRAY_TYPE == TREE_CODE (typ))
    typ = TREE_TYPE (typ);

  if (POINTER_TYPE_P (typ))
    {
      addr_space_t as;
      tree target = TREE_TYPE (typ);

      /* Pointer to function: Test the function's return type.  */

      if (FUNCTION_TYPE == TREE_CODE (target))
        return avr_nonconst_pointer_addrspace (TREE_TYPE (target));

      /* "Ordinary" pointers... strip array layers off the target.  */

      while (TREE_CODE (target) == ARRAY_TYPE)
        target = TREE_TYPE (target);

      /* Pointers to non-generic address space must be const.
         Refuse address spaces outside the device's flash.  */

      as = TYPE_ADDR_SPACE (target);

      if (!ADDR_SPACE_GENERIC_P (as)
          && (!TYPE_READONLY (target)
              || avr_addrspace[as].segment >= avr_current_device->n_flash))
        {
          return as;
        }

      /* Scan pointer's target type recursively, e.g. for pointer to
         pointer.  */

      return avr_nonconst_pointer_addrspace (target);
    }

  return ADDR_SPACE_GENERIC;
}
8036 
8037 
/* Sanity check NODE so that all pointers targeting non-generic address spaces
   go along with CONST qualifier.  Writing to these address spaces should
   be detected and complained about as early as possible.
   Return true if NODE is fine, false if a diagnostic was issued.  */

static bool
avr_pgm_check_var_decl (tree node)
{
  const char *reason = NULL;

  addr_space_t as = ADDR_SPACE_GENERIC;

  /* The code below relies on ADDR_SPACE_GENERIC being 0.  */

  gcc_assert (as == 0);

  if (avr_log.progmem)
    avr_edump ("%?: %t\n", node);

  /* Each case below uses a comma expression: assign the offending
     address space to AS, then test AS for non-zero (non-generic).  */

  switch (TREE_CODE (node))
    {
    default:
      break;

    case VAR_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "variable";
      break;

    case PARM_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "function parameter";
      break;

    case FIELD_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
        reason = "structure field";
      break;

    case FUNCTION_DECL:
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
          as)
        reason = "return type of function";
      break;

    case POINTER_TYPE:
      if (as = avr_nonconst_pointer_addrspace (node), as)
        reason = "pointer";
      break;
    }

  if (reason)
    {
      /* Two distinct diagnostics: the address space lies beyond the
         device's flash, or the pointer target merely lacks const.
         Use the %qT variant for types and %q+D for decls.  */

      if (avr_addrspace[as].segment >= avr_current_device->n_flash)
        {
          if (TYPE_P (node))
            error ("%qT uses address space %qs beyond flash of %qs",
                   node, avr_addrspace[as].name, avr_current_device->name);
          else
            error ("%s %q+D uses address space %qs beyond flash of %qs",
                   reason, node, avr_addrspace[as].name,
                   avr_current_device->name);
        }
      else
        {
          if (TYPE_P (node))
            error ("pointer targeting address space %qs must be const in %qT",
                   avr_addrspace[as].name, node);
          else
            error ("pointer targeting address space %qs must be const"
                   " in %s %q+D",
                   avr_addrspace[as].name, reason, node);
        }
    }

  return reason == NULL;
}
8112 
8113 
/* Add the section attribute if the variable is in progmem.  */

static void
avr_insert_attributes (tree node, tree *attributes)
{
  /* Diagnose non-const pointers into non-generic address spaces.  */

  avr_pgm_check_var_decl (node);

  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
    {
      addr_space_t as;
      tree node0 = node;

      /* For C++, we have to peel arrays in order to get correct
         determination of readonlyness.  */

      do
        node0 = TREE_TYPE (node0);
      while (TREE_CODE (node0) == ARRAY_TYPE);

      if (error_mark_node == node0)
        return;

      as = TYPE_ADDR_SPACE (TREE_TYPE (node));

      if (avr_addrspace[as].segment >= avr_current_device->n_flash)
        {
          error ("variable %q+D located in address space %qs"
                 " beyond flash of %qs",
                 node, avr_addrspace[as].name, avr_current_device->name);
        }

      /* Data in flash must be const: check both the peeled element
         type and the decl itself.  */

      if (!TYPE_READONLY (node0)
          && !TREE_READONLY (node))
        {
          const char *reason = "__attribute__((progmem))";

          /* Name the address-space qualifier in the diagnostic if that,
             rather than the progmem attribute, put the data in flash.  */

          if (!ADDR_SPACE_GENERIC_P (as))
            reason = avr_addrspace[as].name;

          if (avr_log.progmem)
            avr_edump ("\n%?: %t\n%t\n", node, node0);

          error ("variable %q+D must be const in order to be put into"
                 " read-only section by means of %qs", node, reason);
        }
    }
}
8163 
8164 
/* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'.  */
/* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'.  */
/* Track need of __do_clear_bss.  */

void
avr_asm_output_aligned_decl_common (FILE * stream,
                                    const_tree decl ATTRIBUTE_UNUSED,
                                    const char *name,
                                    unsigned HOST_WIDE_INT size,
                                    unsigned int align, bool local_p)
{
  /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
     There is no need to trigger __do_clear_bss code for them.  */

  if (!STR_PREFIX_P (name, "__gnu_lto"))
    avr_need_clear_bss_p = true;

  /* LOCAL_P selects between a local and a common definition.  */

  if (local_p)
    ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
  else
    ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
}
8187 
8188 
/* Unnamed section callback for data_section
   to track need of __do_copy_data.  */

static void
avr_output_data_section_asm_op (const void *data)
{
  /* Record that the startup copy-data code is needed.  */

  avr_need_copy_data_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}
8200 
8201 
/* Unnamed section callback for bss_section
   to track need of __do_clear_bss.  */

static void
avr_output_bss_section_asm_op (const void *data)
{
  /* Record that the startup zero-initialization code is needed.  */

  avr_need_clear_bss_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}
8213 
8214 
/* Unnamed section callback for progmem*.data sections.
   DATA is the name of the section to switch to.  */

static void
avr_output_progmem_section_asm_op (const void *data)
{
  /* Progmem sections are allocatable ("a") but not writable.  */

  fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
           (const char*) data);
}
8223 
8224 
/* Implement `TARGET_ASM_INIT_SECTIONS'.  */

static void
avr_asm_init_sections (void)
{
  /* Set up a section for jump tables.  Alignment is handled by
     ASM_OUTPUT_BEFORE_CASE_LABEL.  */

  if (AVR_HAVE_JMP_CALL)
    {
      /* Devices with JMP/CALL: jump tables go into an allocatable
         data section ("a").  */

      progmem_swtable_section
        = get_unnamed_section (0, output_section_asm_op,
                               "\t.section\t.progmem.gcc_sw_table"
                               ",\"a\",@progbits");
    }
  else
    {
      /* Otherwise flag the section as executable code ("ax").  */

      progmem_swtable_section
        = get_unnamed_section (SECTION_CODE, output_section_asm_op,
                               "\t.section\t.progmem.gcc_sw_table"
                               ",\"ax\",@progbits");
    }

  /* Override section callbacks to keep track of `avr_need_clear_bss_p'
     resp. `avr_need_copy_data_p'.  NOTE(review): readonly_data_section
     deliberately gets the copy-data callback, too -- presumably because
     .rodata is copied to RAM on AVR; confirm against the linker script.  */

  readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
  data_section->unnamed.callback = avr_output_data_section_asm_op;
  bss_section->unnamed.callback = avr_output_bss_section_asm_op;
}
8255 
8256 
/* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'.  */

static section*
avr_asm_function_rodata_section (tree decl)
{
  /* If a function is unused and optimized out by -ffunction-sections
     and --gc-sections, ensure that the same will happen for its jump
     tables by putting them into individual sections.  */

  unsigned int flags;
  section * frodata;

  /* Get the frodata section from the default function in varasm.c
     but treat function-associated data-like jump tables as code
     rather than as user defined data.  AVR has no constant pools.  */
  {
    int fdata = flag_data_sections;

    /* Temporarily let -fdata-sections track -ffunction-sections so
       the default hook creates per-function rodata sections exactly
       when per-function text sections are in use.  */

    flag_data_sections = flag_function_sections;
    frodata = default_function_rodata_section (decl);
    flag_data_sections = fdata;
    flags = frodata->common.flags;
  }

  if (frodata != readonly_data_section
      && flags & SECTION_NAMED)
    {
      /* Adjust section flags and replace section name prefix.  */

      unsigned int i;

      /* Pairs of (old prefix, replacement prefix); the loop below
         steps by 2.  */

      static const char* const prefix[] =
        {
          ".rodata",          ".progmem.gcc_sw_table",
          ".gnu.linkonce.r.", ".gnu.linkonce.t."
        };

      for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
        {
          const char * old_prefix = prefix[i];
          const char * new_prefix = prefix[i+1];
          const char * name = frodata->named.name;

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *rname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              /* The section is code exactly if the device has no
                 JMP/CALL, matching avr_asm_init_sections.  */
              flags &= ~SECTION_CODE;
              flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;

              return get_section (rname, flags, frodata->named.decl);
            }
        }
    }

  /* Fall back to the common jump-table section.  */

  return progmem_swtable_section;
}
8314 
8315 
/* Implement `TARGET_ASM_NAMED_SECTION'.  */
/* Track need of __do_clear_bss, __do_copy_data for named sections.  */

static void
avr_asm_named_section (const char *name, unsigned int flags, tree decl)
{
  if (flags & AVR_SECTION_PROGMEM)
    {
      /* The address space is encoded in the section flags as a
         multiple of SECTION_MACH_DEP; see avr_section_type_flags.  */

      addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
      const char *old_prefix = ".rodata";
      const char *new_prefix = avr_addrspace[as].section_name;

      /* Rename a ".rodata*" section to the matching progmem section,
         keeping any suffix.  */

      if (STR_PREFIX_P (name, old_prefix))
        {
          const char *sname = ACONCAT ((new_prefix,
                                        name + strlen (old_prefix), NULL));
          default_elf_asm_named_section (sname, flags, decl);
          return;
        }

      default_elf_asm_named_section (new_prefix, flags, decl);
      return;
    }

  /* Named sections with these prefixes need the startup copy resp.
     clear code; set the flags sticky (never reset them here).  */

  if (!avr_need_copy_data_p)
    avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
                            || STR_PREFIX_P (name, ".rodata")
                            || STR_PREFIX_P (name, ".gnu.linkonce.d"));

  if (!avr_need_clear_bss_p)
    avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");

  default_elf_asm_named_section (name, flags, decl);
}
8350 
8351 
/* Implement `TARGET_SECTION_TYPE_FLAGS'.  */

static unsigned int
avr_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  /* ".noinit" is only valid for uninitialized variables; mark it
     @nobits so it takes no space in the object file.  */

  if (STR_PREFIX_P (name, ".noinit"))
    {
      if (decl && TREE_CODE (decl) == VAR_DECL
	  && DECL_INITIAL (decl) == NULL_TREE)
	flags |= SECTION_BSS;  /* @nobits */
      else
	warning (0, "only uninitialized variables can be placed in the "
		 ".noinit section");
    }

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* Attribute progmem puts data in generic address space.
         Set section flags as if it was in __flash to get the right
         section prefix in the remainder.  */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      /* Encode the address space in the machine-dependent flag bits
         (decoded again in avr_asm_named_section).  Flash data is
         neither writable nor BSS.  */

      flags |= as * SECTION_MACH_DEP;
      flags &= ~SECTION_WRITE;
      flags &= ~SECTION_BSS;
    }

  return flags;
}
8388 
8389 
/* Implement `TARGET_ENCODE_SECTION_INFO'.  */

static void
avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
{
  /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
     readily available, see PR34734.  So we postpone the warning
     about uninitialized data in program memory section until here.  */

  if (new_decl_p
      && decl && DECL_P (decl)
      && NULL_TREE == DECL_INITIAL (decl)
      && !DECL_EXTERNAL (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      warning (OPT_Wuninitialized,
               "uninitialized variable %q+D put into "
               "program memory area", decl);
    }

  default_encode_section_info (decl, rtl, new_decl_p);

  /* Record the decl's address space on its SYMBOL_REF so later
     passes can distinguish flash from RAM symbols.  */

  if (decl && DECL_P (decl)
      && TREE_CODE (decl) != FUNCTION_DECL
      && MEM_P (rtl)
      && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
   {
      rtx sym = XEXP (rtl, 0);
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* PSTR strings are in generic space but located in flash:
         patch address space.  */

      if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
        as = ADDR_SPACE_FLASH;

      AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
    }
}
8429 
8430 
/* Implement `TARGET_ASM_SELECT_SECTION' */

static section *
avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
{
  section * sect = default_elf_select_section (decl, reloc, align);

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* __progmem__ goes in generic space but shall be allocated to
         .progmem.data  */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      if (sect->common.flags & SECTION_NAMED)
        {
          /* Rename a default ".rodata*" section into the matching
             progmem section, keeping any suffix.  */

          const char * name = sect->named.name;
          const char * old_prefix = ".rodata";
          const char * new_prefix = avr_addrspace[as].section_name;

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *sname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              return get_section (sname, sect->common.flags, sect->named.decl);
            }
        }

      /* Lazily create the unnamed progmem section for this address
         space on first use.  */

      if (!progmem_section[as])
        {
          progmem_section[as]
            = get_unnamed_section (0, avr_output_progmem_section_asm_op,
                                   avr_addrspace[as].section_name);
        }

      return progmem_section[as];
    }

  return sect;
}
8475 
/* Implement `TARGET_ASM_FILE_START'.  */
/* Outputs some text at the start of each assembler file.  */

static void
avr_file_start (void)
{
  /* SFR addresses below are emitted relative to the I/O offset so
     they can be used directly with IN and OUT instructions.  */

  int sfr_offset = avr_current_arch->sfr_offset;

  if (avr_current_arch->asm_only)
    error ("MCU %qs supported for assembler only", avr_current_device->name);

  default_file_start ();

  /* Print I/O addresses of some SFRs used with IN and OUT.  */

  if (AVR_HAVE_SPH)
    fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);

  fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
  fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
  if (AVR_HAVE_RAMPZ)
    fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
  if (AVR_HAVE_RAMPY)
    fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
  if (AVR_HAVE_RAMPX)
    fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
  if (AVR_HAVE_RAMPD)
    fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
  if (AVR_XMEGA)
    fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);

  /* Register numbers of the fixed registers used by the backend.  */

  fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
  fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
}
8509 
8510 
/* Implement `TARGET_ASM_FILE_END'.  */
/* Outputs to the stdio stream FILE some
   appropriate text to go at the end of an assembler file.  */

static void
avr_file_end (void)
{
  /* Output these only if there is anything in the
     .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
     input section(s) - some code size can be saved by not
     linking in the initialization code from libgcc if resp.
     sections are empty, see PR18145.  */

  if (avr_need_copy_data_p)
    fputs (".global __do_copy_data\n", asm_out_file);

  if (avr_need_clear_bss_p)
    fputs (".global __do_clear_bss\n", asm_out_file);
}
8530 
8531 
8532 /* Worker function for `ADJUST_REG_ALLOC_ORDER'.  */
8533 /* Choose the order in which to allocate hard registers for
8534    pseudo-registers local to a basic block.
8535 
8536    Store the desired register order in the array `reg_alloc_order'.
8537    Element 0 should be the register to allocate first; element 1, the
8538    next register; and so on.  */
8539 
8540 void
8541 avr_adjust_reg_alloc_order (void)
8542 {
8543   unsigned int i;
8544   static const int order_0[] =
8545     {
8546       24, 25,
8547       18, 19, 20, 21, 22, 23,
8548       30, 31,
8549       26, 27, 28, 29,
8550       17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8551       0, 1,
8552       32, 33, 34, 35
8553   };
8554   static const int order_1[] =
8555     {
8556       18, 19, 20, 21, 22, 23, 24, 25,
8557       30, 31,
8558       26, 27, 28, 29,
8559       17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8560       0, 1,
8561       32, 33, 34, 35
8562   };
8563   static const int order_2[] =
8564     {
8565       25, 24, 23, 22, 21, 20, 19, 18,
8566       30, 31,
8567       26, 27, 28, 29,
8568       17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
8569       1, 0,
8570       32, 33, 34, 35
8571   };
8572 
8573   const int *order = (TARGET_ORDER_1 ? order_1 :
8574 		      TARGET_ORDER_2 ? order_2 :
8575 		      order_0);
8576   for (i = 0; i < ARRAY_SIZE (order_0); ++i)
8577       reg_alloc_order[i] = order[i];
8578 }
8579 
8580 
8581 /* Implement `TARGET_REGISTER_MOVE_COST' */
8582 
8583 static int
8584 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
8585                         reg_class_t from, reg_class_t to)
8586 {
8587   return (from == STACK_REG ? 6
8588           : to == STACK_REG ? 12
8589           : 2);
8590 }
8591 
8592 
8593 /* Implement `TARGET_MEMORY_MOVE_COST' */
8594 
8595 static int
8596 avr_memory_move_cost (enum machine_mode mode,
8597                       reg_class_t rclass ATTRIBUTE_UNUSED,
8598                       bool in ATTRIBUTE_UNUSED)
8599 {
8600   return (mode == QImode ? 2
8601           : mode == HImode ? 4
8602           : mode == SImode ? 8
8603           : mode == SFmode ? 8
8604           : 16);
8605 }
8606 
8607 
/* Mutually recursive subroutine of avr_rtx_cost for calculating the
   cost of an RTX operand given its context.  X is the rtx of the
   operand, MODE is its mode, and OUTER is the rtx_code of this
   operand's parent operator.  OPNO is X's operand number within the
   parent; SPEED selects costing for speed (true) vs. size (false).  */

static int
avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
		      int opno, bool speed)
{
  enum rtx_code code = GET_CODE (x);
  int total;

  switch (code)
    {
    /* Operands already in registers are free.  */

    case REG:
    case SUBREG:
      return 0;

    /* Loading a constant costs one instruction per byte of MODE.  */

    case CONST_INT:
    case CONST_FIXED:
    case CONST_DOUBLE:
      return COSTS_N_INSNS (GET_MODE_SIZE (mode));

    default:
      break;
    }

  /* Anything else: recurse into the full cost computation.  */

  total = 0;
  avr_rtx_costs (x, code, outer, opno, &total, speed);
  return total;
}
8639 
8640 /* Worker function for AVR backend's rtx_cost function.
8641    X is rtx expression whose cost is to be calculated.
8642    Return true if the complete cost has been computed.
8643    Return false if subexpressions should be scanned.
8644    In either case, *TOTAL contains the cost result.  */
8645 
8646 static bool
8647 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
8648                  int opno ATTRIBUTE_UNUSED, int *total, bool speed)
8649 {
8650   enum rtx_code code = (enum rtx_code) codearg;
8651   enum machine_mode mode = GET_MODE (x);
8652   HOST_WIDE_INT val;
8653 
8654   switch (code)
8655     {
8656     case CONST_INT:
8657     case CONST_FIXED:
8658     case CONST_DOUBLE:
8659     case SYMBOL_REF:
8660     case CONST:
8661     case LABEL_REF:
8662       /* Immediate constants are as cheap as registers.  */
8663       *total = 0;
8664       return true;
8665 
8666     case MEM:
8667       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8668       return true;
8669 
8670     case NEG:
8671       switch (mode)
8672 	{
8673 	case QImode:
8674 	case SFmode:
8675 	  *total = COSTS_N_INSNS (1);
8676 	  break;
8677 
8678         case HImode:
8679         case PSImode:
8680         case SImode:
8681           *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
8682           break;
8683 
8684 	default:
8685 	  return false;
8686 	}
8687       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8688       return true;
8689 
8690     case ABS:
8691       switch (mode)
8692 	{
8693 	case QImode:
8694 	case SFmode:
8695 	  *total = COSTS_N_INSNS (1);
8696 	  break;
8697 
8698 	default:
8699 	  return false;
8700 	}
8701       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8702       return true;
8703 
8704     case NOT:
8705       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8706       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8707       return true;
8708 
8709     case ZERO_EXTEND:
8710       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
8711 			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
8712       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8713       return true;
8714 
8715     case SIGN_EXTEND:
8716       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
8717 			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
8718       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8719       return true;
8720 
8721     case PLUS:
8722       switch (mode)
8723 	{
8724 	case QImode:
8725           if (AVR_HAVE_MUL
8726               && MULT == GET_CODE (XEXP (x, 0))
8727               && register_operand (XEXP (x, 1), QImode))
8728             {
8729               /* multiply-add */
8730               *total = COSTS_N_INSNS (speed ? 4 : 3);
8731               /* multiply-add with constant: will be split and load constant. */
8732               if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8733                 *total = COSTS_N_INSNS (1) + *total;
8734               return true;
8735             }
8736 	  *total = COSTS_N_INSNS (1);
8737 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8738 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8739 	  break;
8740 
8741 	case HImode:
8742           if (AVR_HAVE_MUL
8743               && (MULT == GET_CODE (XEXP (x, 0))
8744                   || ASHIFT == GET_CODE (XEXP (x, 0)))
8745               && register_operand (XEXP (x, 1), HImode)
8746               && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
8747                   || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
8748             {
8749               /* multiply-add */
8750               *total = COSTS_N_INSNS (speed ? 5 : 4);
8751               /* multiply-add with constant: will be split and load constant. */
8752               if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8753                 *total = COSTS_N_INSNS (1) + *total;
8754               return true;
8755             }
8756 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8757 	    {
8758 	      *total = COSTS_N_INSNS (2);
8759 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8760 					      speed);
8761 	    }
8762 	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8763 	    *total = COSTS_N_INSNS (1);
8764 	  else
8765 	    *total = COSTS_N_INSNS (2);
8766 	  break;
8767 
8768         case PSImode:
8769           if (!CONST_INT_P (XEXP (x, 1)))
8770             {
8771               *total = COSTS_N_INSNS (3);
8772               *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8773                                               speed);
8774             }
8775           else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8776             *total = COSTS_N_INSNS (2);
8777           else
8778             *total = COSTS_N_INSNS (3);
8779           break;
8780 
8781 	case SImode:
8782 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8783 	    {
8784 	      *total = COSTS_N_INSNS (4);
8785 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8786 					      speed);
8787 	    }
8788 	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
8789 	    *total = COSTS_N_INSNS (1);
8790 	  else
8791 	    *total = COSTS_N_INSNS (4);
8792 	  break;
8793 
8794 	default:
8795 	  return false;
8796 	}
8797       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8798       return true;
8799 
8800     case MINUS:
8801       if (AVR_HAVE_MUL
8802           && QImode == mode
8803           && register_operand (XEXP (x, 0), QImode)
8804           && MULT == GET_CODE (XEXP (x, 1)))
8805         {
8806           /* multiply-sub */
8807           *total = COSTS_N_INSNS (speed ? 4 : 3);
8808           /* multiply-sub with constant: will be split and load constant. */
8809           if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
8810             *total = COSTS_N_INSNS (1) + *total;
8811           return true;
8812         }
8813       if (AVR_HAVE_MUL
8814           && HImode == mode
8815           && register_operand (XEXP (x, 0), HImode)
8816           && (MULT == GET_CODE (XEXP (x, 1))
8817               || ASHIFT == GET_CODE (XEXP (x, 1)))
8818           && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
8819               || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
8820         {
8821           /* multiply-sub */
8822           *total = COSTS_N_INSNS (speed ? 5 : 4);
8823           /* multiply-sub with constant: will be split and load constant. */
8824           if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
8825             *total = COSTS_N_INSNS (1) + *total;
8826           return true;
8827         }
8828       /* FALLTHRU */
8829     case AND:
8830     case IOR:
8831       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8832       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8833       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8834 	*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8835       return true;
8836 
8837     case XOR:
8838       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
8839       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8840       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8841       return true;
8842 
8843     case MULT:
8844       switch (mode)
8845 	{
8846 	case QImode:
8847 	  if (AVR_HAVE_MUL)
8848 	    *total = COSTS_N_INSNS (!speed ? 3 : 4);
8849 	  else if (!speed)
8850 	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
8851 	  else
8852 	    return false;
8853 	  break;
8854 
8855 	case HImode:
8856 	  if (AVR_HAVE_MUL)
8857             {
8858               rtx op0 = XEXP (x, 0);
8859               rtx op1 = XEXP (x, 1);
8860               enum rtx_code code0 = GET_CODE (op0);
8861               enum rtx_code code1 = GET_CODE (op1);
8862               bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
8863               bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
8864 
8865               if (ex0
8866                   && (u8_operand (op1, HImode)
8867                       || s8_operand (op1, HImode)))
8868                 {
8869                   *total = COSTS_N_INSNS (!speed ? 4 : 6);
8870                   return true;
8871                 }
8872               if (ex0
8873                   && register_operand (op1, HImode))
8874                 {
8875                   *total = COSTS_N_INSNS (!speed ? 5 : 8);
8876                   return true;
8877                 }
8878               else if (ex0 || ex1)
8879                 {
8880                   *total = COSTS_N_INSNS (!speed ? 3 : 5);
8881                   return true;
8882                 }
8883               else if (register_operand (op0, HImode)
8884                        && (u8_operand (op1, HImode)
8885                            || s8_operand (op1, HImode)))
8886                 {
8887                   *total = COSTS_N_INSNS (!speed ? 6 : 9);
8888                   return true;
8889                 }
8890               else
8891                 *total = COSTS_N_INSNS (!speed ? 7 : 10);
8892             }
8893 	  else if (!speed)
8894 	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
8895 	  else
8896 	    return false;
8897 	  break;
8898 
8899         case PSImode:
8900           if (!speed)
8901             *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
8902           else
8903             *total = 10;
8904           break;
8905 
8906 	case SImode:
8907 	  if (AVR_HAVE_MUL)
8908             {
8909               if (!speed)
8910                 {
8911                   /* Add some additional costs besides CALL like moves etc.  */
8912 
8913                   *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
8914                 }
8915               else
8916                 {
8917                   /* Just a rough estimate.  Even with -O2 we don't want bulky
8918                      code expanded inline.  */
8919 
8920                   *total = COSTS_N_INSNS (25);
8921                 }
8922             }
8923           else
8924             {
8925               if (speed)
8926                 *total = COSTS_N_INSNS (300);
8927               else
8928                 /* Add some additional costs besides CALL like moves etc.  */
8929                 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
8930             }
8931 
8932           return true;
8933 
8934 	default:
8935 	  return false;
8936 	}
8937       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8938       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8939       return true;
8940 
8941     case DIV:
8942     case MOD:
8943     case UDIV:
8944     case UMOD:
8945       if (!speed)
8946         *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
8947       else
8948         *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
8949       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8950       /* For div/mod with const-int divisor we have at least the cost of
8951          loading the divisor. */
8952       if (CONST_INT_P (XEXP (x, 1)))
8953         *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
8954       /* Add some overall penaly for clobbering and moving around registers */
8955       *total += COSTS_N_INSNS (2);
8956       return true;
8957 
8958     case ROTATE:
8959       switch (mode)
8960 	{
8961 	case QImode:
8962 	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
8963 	    *total = COSTS_N_INSNS (1);
8964 
8965 	  break;
8966 
8967 	case HImode:
8968 	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
8969 	    *total = COSTS_N_INSNS (3);
8970 
8971 	  break;
8972 
8973 	case SImode:
8974 	  if (CONST_INT_P (XEXP (x, 1)))
8975 	    switch (INTVAL (XEXP (x, 1)))
8976 	      {
8977 	      case 8:
8978 	      case 24:
8979 		*total = COSTS_N_INSNS (5);
8980 		break;
8981 	      case 16:
8982 		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
8983 		break;
8984 	      }
8985 	  break;
8986 
8987 	default:
8988 	  return false;
8989 	}
8990       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8991       return true;
8992 
8993     case ASHIFT:
8994       switch (mode)
8995 	{
8996 	case QImode:
8997 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8998 	    {
8999 	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
9000 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9001 					      speed);
9002 	    }
9003 	  else
9004 	    {
9005 	      val = INTVAL (XEXP (x, 1));
9006 	      if (val == 7)
9007 		*total = COSTS_N_INSNS (3);
9008 	      else if (val >= 0 && val <= 7)
9009 		*total = COSTS_N_INSNS (val);
9010 	      else
9011 		*total = COSTS_N_INSNS (1);
9012 	    }
9013 	  break;
9014 
9015 	case HImode:
9016           if (AVR_HAVE_MUL)
9017             {
9018               if (const_2_to_7_operand (XEXP (x, 1), HImode)
9019                   && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
9020                       || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
9021                 {
9022                   *total = COSTS_N_INSNS (!speed ? 4 : 6);
9023                   return true;
9024                 }
9025             }
9026 
9027           if (const1_rtx == (XEXP (x, 1))
9028               && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
9029             {
9030               *total = COSTS_N_INSNS (2);
9031               return true;
9032             }
9033 
9034 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9035 	    {
9036 	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
9037 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9038 					      speed);
9039 	    }
9040 	  else
9041 	    switch (INTVAL (XEXP (x, 1)))
9042 	      {
9043 	      case 0:
9044 		*total = 0;
9045 		break;
9046 	      case 1:
9047 	      case 8:
9048 		*total = COSTS_N_INSNS (2);
9049 		break;
9050 	      case 9:
9051 		*total = COSTS_N_INSNS (3);
9052 		break;
9053 	      case 2:
9054 	      case 3:
9055 	      case 10:
9056 	      case 15:
9057 		*total = COSTS_N_INSNS (4);
9058 		break;
9059 	      case 7:
9060 	      case 11:
9061 	      case 12:
9062 		*total = COSTS_N_INSNS (5);
9063 		break;
9064 	      case 4:
9065 		*total = COSTS_N_INSNS (!speed ? 5 : 8);
9066 		break;
9067 	      case 6:
9068 		*total = COSTS_N_INSNS (!speed ? 5 : 9);
9069 		break;
9070 	      case 5:
9071 		*total = COSTS_N_INSNS (!speed ? 5 : 10);
9072 		break;
9073 	      default:
9074 	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
9075 	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9076 						speed);
9077 	      }
9078 	  break;
9079 
9080         case PSImode:
9081           if (!CONST_INT_P (XEXP (x, 1)))
9082             {
9083               *total = COSTS_N_INSNS (!speed ? 6 : 73);
9084             }
9085           else
9086             switch (INTVAL (XEXP (x, 1)))
9087               {
9088               case 0:
9089                 *total = 0;
9090                 break;
9091               case 1:
9092               case 8:
9093               case 16:
9094                 *total = COSTS_N_INSNS (3);
9095                 break;
9096               case 23:
9097                 *total = COSTS_N_INSNS (5);
9098                 break;
9099               default:
9100                 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9101                 break;
9102               }
9103           break;
9104 
9105 	case SImode:
9106 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9107 	    {
9108 	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
9109 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9110 					      speed);
9111 	    }
9112 	  else
9113 	    switch (INTVAL (XEXP (x, 1)))
9114 	      {
9115 	      case 0:
9116 		*total = 0;
9117 		break;
9118 	      case 24:
9119 		*total = COSTS_N_INSNS (3);
9120 		break;
9121 	      case 1:
9122 	      case 8:
9123 	      case 16:
9124 		*total = COSTS_N_INSNS (4);
9125 		break;
9126 	      case 31:
9127 		*total = COSTS_N_INSNS (6);
9128 		break;
9129 	      case 2:
9130 		*total = COSTS_N_INSNS (!speed ? 7 : 8);
9131 		break;
9132 	      default:
9133 		*total = COSTS_N_INSNS (!speed ? 7 : 113);
9134 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9135 						speed);
9136 	      }
9137 	  break;
9138 
9139 	default:
9140 	  return false;
9141 	}
9142       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9143       return true;
9144 
9145     case ASHIFTRT:
9146       switch (mode)
9147 	{
9148 	case QImode:
9149 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9150 	    {
9151 	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
9152 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9153 					      speed);
9154 	    }
9155 	  else
9156 	    {
9157 	      val = INTVAL (XEXP (x, 1));
9158 	      if (val == 6)
9159 		*total = COSTS_N_INSNS (4);
9160 	      else if (val == 7)
9161 		*total = COSTS_N_INSNS (2);
9162 	      else if (val >= 0 && val <= 7)
9163 		*total = COSTS_N_INSNS (val);
9164 	      else
9165 		*total = COSTS_N_INSNS (1);
9166 	    }
9167 	  break;
9168 
9169 	case HImode:
9170 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9171 	    {
9172 	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
9173 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9174 					      speed);
9175 	    }
9176 	  else
9177 	    switch (INTVAL (XEXP (x, 1)))
9178 	      {
9179 	      case 0:
9180 		*total = 0;
9181 		break;
9182 	      case 1:
9183 		*total = COSTS_N_INSNS (2);
9184 		break;
9185 	      case 15:
9186 		*total = COSTS_N_INSNS (3);
9187 		break;
9188 	      case 2:
9189 	      case 7:
9190               case 8:
9191               case 9:
9192 		*total = COSTS_N_INSNS (4);
9193 		break;
9194               case 10:
9195 	      case 14:
9196 		*total = COSTS_N_INSNS (5);
9197 		break;
9198               case 11:
9199                 *total = COSTS_N_INSNS (!speed ? 5 : 6);
9200 		break;
9201               case 12:
9202                 *total = COSTS_N_INSNS (!speed ? 5 : 7);
9203 		break;
9204               case 6:
9205 	      case 13:
9206                 *total = COSTS_N_INSNS (!speed ? 5 : 8);
9207 		break;
9208 	      default:
9209 	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
9210 	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9211 						speed);
9212 	      }
9213 	  break;
9214 
9215         case PSImode:
9216           if (!CONST_INT_P (XEXP (x, 1)))
9217             {
9218               *total = COSTS_N_INSNS (!speed ? 6 : 73);
9219             }
9220           else
9221             switch (INTVAL (XEXP (x, 1)))
9222               {
9223               case 0:
9224                 *total = 0;
9225                 break;
9226               case 1:
9227                 *total = COSTS_N_INSNS (3);
9228                 break;
9229               case 16:
9230               case 8:
9231                 *total = COSTS_N_INSNS (5);
9232                 break;
9233               case 23:
9234                 *total = COSTS_N_INSNS (4);
9235                 break;
9236               default:
9237                 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9238                 break;
9239               }
9240           break;
9241 
9242 	case SImode:
9243 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9244 	    {
9245 	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
9246 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9247 					      speed);
9248 	    }
9249 	  else
9250 	    switch (INTVAL (XEXP (x, 1)))
9251 	      {
9252 	      case 0:
9253 		*total = 0;
9254 		break;
9255 	      case 1:
9256 		*total = COSTS_N_INSNS (4);
9257 		break;
9258 	      case 8:
9259 	      case 16:
9260 	      case 24:
9261 		*total = COSTS_N_INSNS (6);
9262 		break;
9263 	      case 2:
9264 		*total = COSTS_N_INSNS (!speed ? 7 : 8);
9265 		break;
9266 	      case 31:
9267 		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
9268 		break;
9269 	      default:
9270 		*total = COSTS_N_INSNS (!speed ? 7 : 113);
9271 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9272 						speed);
9273 	      }
9274 	  break;
9275 
9276 	default:
9277 	  return false;
9278 	}
9279       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9280       return true;
9281 
9282     case LSHIFTRT:
9283       switch (mode)
9284 	{
9285 	case QImode:
9286 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9287 	    {
9288 	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
9289 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9290 					      speed);
9291 	    }
9292 	  else
9293 	    {
9294 	      val = INTVAL (XEXP (x, 1));
9295 	      if (val == 7)
9296 		*total = COSTS_N_INSNS (3);
9297 	      else if (val >= 0 && val <= 7)
9298 		*total = COSTS_N_INSNS (val);
9299 	      else
9300 		*total = COSTS_N_INSNS (1);
9301 	    }
9302 	  break;
9303 
9304 	case HImode:
9305 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9306 	    {
9307 	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
9308 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9309 					      speed);
9310 	    }
9311 	  else
9312 	    switch (INTVAL (XEXP (x, 1)))
9313 	      {
9314 	      case 0:
9315 		*total = 0;
9316 		break;
9317 	      case 1:
9318 	      case 8:
9319 		*total = COSTS_N_INSNS (2);
9320 		break;
9321 	      case 9:
9322 		*total = COSTS_N_INSNS (3);
9323 		break;
9324 	      case 2:
9325 	      case 10:
9326 	      case 15:
9327 		*total = COSTS_N_INSNS (4);
9328 		break;
9329 	      case 7:
9330               case 11:
9331 		*total = COSTS_N_INSNS (5);
9332 		break;
9333 	      case 3:
9334 	      case 12:
9335 	      case 13:
9336 	      case 14:
9337 		*total = COSTS_N_INSNS (!speed ? 5 : 6);
9338 		break;
9339 	      case 4:
9340 		*total = COSTS_N_INSNS (!speed ? 5 : 7);
9341 		break;
9342 	      case 5:
9343 	      case 6:
9344 		*total = COSTS_N_INSNS (!speed ? 5 : 9);
9345 		break;
9346 	      default:
9347 	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
9348 	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9349 						speed);
9350 	      }
9351 	  break;
9352 
9353         case PSImode:
9354           if (!CONST_INT_P (XEXP (x, 1)))
9355             {
9356               *total = COSTS_N_INSNS (!speed ? 6 : 73);
9357             }
9358           else
9359             switch (INTVAL (XEXP (x, 1)))
9360               {
9361               case 0:
9362                 *total = 0;
9363                 break;
9364               case 1:
9365               case 8:
9366               case 16:
9367                 *total = COSTS_N_INSNS (3);
9368                 break;
9369               case 23:
9370                 *total = COSTS_N_INSNS (5);
9371                 break;
9372               default:
9373                 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
9374                 break;
9375               }
9376           break;
9377 
9378 	case SImode:
9379 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9380 	    {
9381 	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
9382 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9383 					      speed);
9384 	    }
9385 	  else
9386 	    switch (INTVAL (XEXP (x, 1)))
9387 	      {
9388 	      case 0:
9389 		*total = 0;
9390 		break;
9391 	      case 1:
9392 		*total = COSTS_N_INSNS (4);
9393 		break;
9394 	      case 2:
9395 		*total = COSTS_N_INSNS (!speed ? 7 : 8);
9396 		break;
9397 	      case 8:
9398 	      case 16:
9399 	      case 24:
9400 		*total = COSTS_N_INSNS (4);
9401 		break;
9402 	      case 31:
9403 		*total = COSTS_N_INSNS (6);
9404 		break;
9405 	      default:
9406 		*total = COSTS_N_INSNS (!speed ? 7 : 113);
9407 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
9408 						speed);
9409 	      }
9410 	  break;
9411 
9412 	default:
9413 	  return false;
9414 	}
9415       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9416       return true;
9417 
9418     case COMPARE:
9419       switch (GET_MODE (XEXP (x, 0)))
9420 	{
9421 	case QImode:
9422 	  *total = COSTS_N_INSNS (1);
9423 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9424 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9425 	  break;
9426 
9427         case HImode:
9428 	  *total = COSTS_N_INSNS (2);
9429 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9430             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9431 	  else if (INTVAL (XEXP (x, 1)) != 0)
9432 	    *total += COSTS_N_INSNS (1);
9433           break;
9434 
9435         case PSImode:
9436           *total = COSTS_N_INSNS (3);
9437           if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
9438             *total += COSTS_N_INSNS (2);
9439           break;
9440 
9441         case SImode:
9442           *total = COSTS_N_INSNS (4);
9443           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
9444             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
9445 	  else if (INTVAL (XEXP (x, 1)) != 0)
9446 	    *total += COSTS_N_INSNS (3);
9447           break;
9448 
9449 	default:
9450 	  return false;
9451 	}
9452       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
9453       return true;
9454 
9455     case TRUNCATE:
9456       if (AVR_HAVE_MUL
9457           && LSHIFTRT == GET_CODE (XEXP (x, 0))
9458           && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
9459           && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
9460         {
9461           if (QImode == mode || HImode == mode)
9462             {
9463               *total = COSTS_N_INSNS (2);
9464               return true;
9465             }
9466         }
9467       break;
9468 
9469     default:
9470       break;
9471     }
9472   return false;
9473 }
9474 
9475 
9476 /* Implement `TARGET_RTX_COSTS'.  */
9477 
9478 static bool
9479 avr_rtx_costs (rtx x, int codearg, int outer_code,
9480 	       int opno, int *total, bool speed)
9481 {
9482   bool done = avr_rtx_costs_1 (x, codearg, outer_code,
9483                                opno, total, speed);
9484 
9485   if (avr_log.rtx_costs)
9486     {
9487       avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
9488                  done, speed ? "speed" : "size", *total, outer_code, x);
9489     }
9490 
9491   return done;
9492 }
9493 
9494 
9495 /* Implement `TARGET_ADDRESS_COST'.  */
9496 
9497 static int
9498 avr_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
9499                   addr_space_t as ATTRIBUTE_UNUSED,
9500                   bool speed ATTRIBUTE_UNUSED)
9501 {
9502   int cost = 4;
9503 
9504   if (GET_CODE (x) == PLUS
9505       && CONST_INT_P (XEXP (x, 1))
9506       && (REG_P (XEXP (x, 0))
9507           || GET_CODE (XEXP (x, 0)) == SUBREG))
9508     {
9509       if (INTVAL (XEXP (x, 1)) >= 61)
9510         cost = 18;
9511     }
9512   else if (CONSTANT_ADDRESS_P (x))
9513     {
9514       if (optimize > 0
9515           && io_address_operand (x, QImode))
9516         cost = 2;
9517     }
9518 
9519   if (avr_log.address_cost)
9520     avr_edump ("\n%?: %d = %r\n", cost, x);
9521 
9522   return cost;
9523 }
9524 
9525 /* Test for extra memory constraint 'Q'.
9526    It's a memory address based on Y or Z pointer with valid displacement.  */
9527 
9528 int
9529 extra_constraint_Q (rtx x)
9530 {
9531   int ok = 0;
9532 
9533   if (GET_CODE (XEXP (x,0)) == PLUS
9534       && REG_P (XEXP (XEXP (x,0), 0))
9535       && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
9536       && (INTVAL (XEXP (XEXP (x,0), 1))
9537 	  <= MAX_LD_OFFSET (GET_MODE (x))))
9538     {
9539       rtx xx = XEXP (XEXP (x,0), 0);
9540       int regno = REGNO (xx);
9541 
9542       ok = (/* allocate pseudos */
9543             regno >= FIRST_PSEUDO_REGISTER
9544             /* strictly check */
9545             || regno == REG_Z || regno == REG_Y
9546             /* XXX frame & arg pointer checks */
9547             || xx == frame_pointer_rtx
9548             || xx == arg_pointer_rtx);
9549 
9550       if (avr_log.constraints)
9551         avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
9552                    ok, reload_completed, reload_in_progress, x);
9553     }
9554 
9555   return ok;
9556 }
9557 
9558 /* Convert condition code CONDITION to the valid AVR condition code.  */
9559 
9560 RTX_CODE
9561 avr_normalize_condition (RTX_CODE condition)
9562 {
9563   switch (condition)
9564     {
9565     case GT:
9566       return GE;
9567     case GTU:
9568       return GEU;
9569     case LE:
9570       return LT;
9571     case LEU:
9572       return LTU;
9573     default:
9574       gcc_unreachable ();
9575     }
9576 }
9577 
9578 /* Helper function for `avr_reorg'.  */
9579 
9580 static rtx
9581 avr_compare_pattern (rtx insn)
9582 {
9583   rtx pattern = single_set (insn);
9584 
9585   if (pattern
9586       && NONJUMP_INSN_P (insn)
9587       && SET_DEST (pattern) == cc0_rtx
9588       && GET_CODE (SET_SRC (pattern)) == COMPARE)
9589     {
9590       enum machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
9591       enum machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
9592 
9593       /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
9594          They must not be swapped, thus skip them.  */
9595 
9596       if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
9597           && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
9598         return pattern;
9599     }
9600 
9601   return NULL_RTX;
9602 }
9603 
9604 /* Helper function for `avr_reorg'.  */
9605 
9606 /* Expansion of switch/case decision trees leads to code like
9607 
9608        cc0 = compare (Reg, Num)
9609        if (cc0 == 0)
9610          goto L1
9611 
9612        cc0 = compare (Reg, Num)
9613        if (cc0 > 0)
9614          goto L2
9615 
9616    The second comparison is superfluous and can be deleted.
9617    The second jump condition can be transformed from a
9618    "difficult" one to a "simple" one because "cc0 > 0" and
9619    "cc0 >= 0" will have the same effect here.
9620 
   This function relies on the way switch/case is being expanded
9622    as binary decision tree.  For example code see PR 49903.
9623 
9624    Return TRUE if optimization performed.
9625    Return FALSE if nothing changed.
9626 
9627    INSN1 is a comparison, i.e. avr_compare_pattern != 0.
9628 
9629    We don't want to do this in text peephole because it is
9630    tedious to work out jump offsets there and the second comparison
   might have been transformed by `avr_reorg'.
9632 
9633    RTL peephole won't do because peephole2 does not scan across
9634    basic blocks.  */
9635 
static bool
avr_reorg_remove_redundant_compare (rtx insn1)
{
  rtx comp1, ifelse1, xcond1, branch1;
  rtx comp2, ifelse2, xcond2, branch2, insn2;
  enum rtx_code code;
  rtx jump, target, cond;

  /* Look out for:  compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  /* Both comparisons must be single sets of cc0 and both branches
     must be single sets of pc with IF_THEN_ELSE sources.  */

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  /* The first branch must test cc0 == 0 against a reg-vs-const
     comparison, both branches must be plain conditional jumps to
     labels (ELSE arm falls through to pc).  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL)   goto L1;
        if (x > VAL)    goto L2;

     with easy

         if (x == VAL)   goto L1;
         if (x >= VAL)   goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT:  case LTU:
    case GE:  case GEU:
      break;

    case LE:  case LEU:
    case GT:  case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  /* Re-emit the first branch (same condition) right after INSN1.  */
  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  /* Re-emit the second branch with the (possibly normalized) code.  */
  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}
9772 
9773 
9774 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'.  */
9775 /* Optimize conditional jumps.  */
9776 
static void
avr_reorg (void)
{
  rtx insn = get_insns();

  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
    {
      /* Only insns that set cc0 from a COMPARE are of interest.  */
      rtx pattern = avr_compare_pattern (insn);

      if (!pattern)
        continue;

      /* First try to delete a second, identical compare/branch pair
         as produced by switch/case decision-tree expansion.  */
      if (optimize
          && avr_reorg_remove_redundant_compare (insn))
        {
          continue;
        }

      if (compare_diff_p (insn))
	{
          /* Now we work under compare insn with difficult branch.  */

          /* NEXT is the conditional branch using the cc0 result of INSN.
             NOTE(review): next_real_insn's result is dereferenced without
             a NULL check; presumably a difficult-branch compare is always
             followed by its branch at this point.  */
          rtx next = next_real_insn (insn);
          rtx pat = PATTERN (next);

          pattern = SET_SRC (pattern);

          if (true_regnum (XEXP (pattern, 0)) >= 0
              && true_regnum (XEXP (pattern, 1)) >= 0)
            {
              /* Both compare operands are registers: swap them and
                 swap the branch condition to compensate.  */
              rtx x = XEXP (pattern, 0);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 0) = XEXP (pattern, 1);
              XEXP (pattern, 1) = x;
              /* Force re-recognition of the modified branch.  */
              INSN_CODE (next) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && XEXP (pattern, 1) == const0_rtx)
            {
              /* This is a tst insn, we can reverse it.  */
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);

              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 1) = XEXP (pattern, 0);
              XEXP (pattern, 0) = const0_rtx;
              INSN_CODE (next) = -1;
              INSN_CODE (insn) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && CONST_INT_P (XEXP (pattern, 1)))
            {
              /* Register compared against a constant: try turning a
                 strict comparison into a non-strict one against the
                 adjacent constant (e.g. x > N  ->  x >= N+1).  */
              rtx x = XEXP (pattern, 1);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              enum machine_mode mode = GET_MODE (XEXP (pattern, 0));

              if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
                {
                  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
                  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
                  INSN_CODE (next) = -1;
                  INSN_CODE (insn) = -1;
                }
            }
        }
    }
}
9847 
/* Returns register number for function return value.
   Return values occupy a register range that ends at R25 and extends
   downwards; see avr_libcall_value / avr_function_value for the layout.  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
9855 
9856 
9857 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'.  */
9858 
9859 static bool
9860 avr_function_value_regno_p (const unsigned int regno)
9861 {
9862   return (regno == avr_ret_register ());
9863 }
9864 
9865 
9866 /* Implement `TARGET_LIBCALL_VALUE'.  */
9867 /* Create an RTX representing the place where a
9868    library function returns a value of mode MODE.  */
9869 
9870 static rtx
9871 avr_libcall_value (enum machine_mode mode,
9872 		   const_rtx func ATTRIBUTE_UNUSED)
9873 {
9874   int offs = GET_MODE_SIZE (mode);
9875 
9876   if (offs <= 4)
9877     offs = (offs + 1) & ~1;
9878 
9879   return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
9880 }
9881 
9882 
9883 /* Implement `TARGET_FUNCTION_VALUE'.  */
9884 /* Create an RTX representing the place where a
9885    function returns a value of data type VALTYPE.  */
9886 
9887 static rtx
9888 avr_function_value (const_tree type,
9889                     const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
9890                     bool outgoing ATTRIBUTE_UNUSED)
9891 {
9892   unsigned int offs;
9893 
9894   if (TYPE_MODE (type) != BLKmode)
9895     return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
9896 
9897   offs = int_size_in_bytes (type);
9898   if (offs < 2)
9899     offs = 2;
9900   if (offs > 2 && offs < GET_MODE_SIZE (SImode))
9901     offs = GET_MODE_SIZE (SImode);
9902   else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
9903     offs = GET_MODE_SIZE (DImode);
9904 
9905   return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
9906 }
9907 
9908 int
9909 test_hard_reg_class (enum reg_class rclass, rtx x)
9910 {
9911   int regno = true_regnum (x);
9912   if (regno < 0)
9913     return 0;
9914 
9915   if (TEST_HARD_REG_CLASS (rclass, regno))
9916     return 1;
9917 
9918   return 0;
9919 }
9920 
9921 
9922 /* Helper for jump_over_one_insn_p:  Test if INSN is a 2-word instruction
9923    and thus is suitable to be skipped by CPSE, SBRC, etc.  */
9924 
9925 static bool
9926 avr_2word_insn_p (rtx insn)
9927 {
9928   if (avr_current_device->errata_skip
9929       || !insn
9930       || 2 != get_attr_length (insn))
9931     {
9932       return false;
9933     }
9934 
9935   switch (INSN_CODE (insn))
9936     {
9937     default:
9938       return false;
9939 
9940     case CODE_FOR_movqi_insn:
9941     case CODE_FOR_movuqq_insn:
9942     case CODE_FOR_movqq_insn:
9943       {
9944         rtx set  = single_set (insn);
9945         rtx src  = SET_SRC (set);
9946         rtx dest = SET_DEST (set);
9947 
9948         /* Factor out LDS and STS from movqi_insn.  */
9949 
9950         if (MEM_P (dest)
9951             && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
9952           {
9953             return CONSTANT_ADDRESS_P (XEXP (dest, 0));
9954           }
9955         else if (REG_P (dest)
9956                  && MEM_P (src))
9957           {
9958             return CONSTANT_ADDRESS_P (XEXP (src, 0));
9959           }
9960 
9961         return false;
9962       }
9963 
9964     case CODE_FOR_call_insn:
9965     case CODE_FOR_call_value_insn:
9966       return true;
9967     }
9968 }
9969 
9970 
9971 int
9972 jump_over_one_insn_p (rtx insn, rtx dest)
9973 {
9974   int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
9975 		      ? XEXP (dest, 0)
9976 		      : dest);
9977   int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
9978   int dest_addr = INSN_ADDRESSES (uid);
9979   int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
9980 
9981   return (jump_offset == 1
9982           || (jump_offset == 2
9983               && avr_2word_insn_p (next_active_insn (insn))));
9984 }
9985 
9986 
9987 /* Worker function for `HARD_REGNO_MODE_OK'.  */
9988 /* Returns 1 if a value of mode MODE can be stored starting with hard
9989    register number REGNO.  On the enhanced core, anything larger than
9990    1 byte must start in even numbered register for "movw" to work
9991    (this way we don't have to check for odd registers everywhere).  */
9992 
9993 int
9994 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
9995 {
9996   /* NOTE: 8-bit values must not be disallowed for R28 or R29.
9997         Disallowing QI et al. in these regs might lead to code like
9998             (set (subreg:QI (reg:HI 28) n) ...)
9999         which will result in wrong code because reload does not
10000         handle SUBREGs of hard regsisters like this.
10001         This could be fixed in reload.  However, it appears
10002         that fixing reload is not wanted by reload people.  */
10003 
10004   /* Any GENERAL_REGS register can hold 8-bit values.  */
10005 
10006   if (GET_MODE_SIZE (mode) == 1)
10007     return 1;
10008 
10009   /* FIXME: Ideally, the following test is not needed.
10010         However, it turned out that it can reduce the number
10011         of spill fails.  AVR and it's poor endowment with
10012         address registers is extreme stress test for reload.  */
10013 
10014   if (GET_MODE_SIZE (mode) >= 4
10015       && regno >= REG_X)
10016     return 0;
10017 
10018   /* All modes larger than 8 bits should start in an even register.  */
10019 
10020   return !(regno & 1);
10021 }
10022 
10023 
10024 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'.  */
10025 
10026 int
10027 avr_hard_regno_call_part_clobbered (unsigned regno, enum machine_mode mode)
10028 {
10029   /* FIXME: This hook gets called with MODE:REGNO combinations that don't
10030         represent valid hard registers like, e.g. HI:29.  Returning TRUE
10031         for such registers can lead to performance degradation as mentioned
10032         in PR53595.  Thus, report invalid hard registers as FALSE.  */
10033 
10034   if (!avr_hard_regno_mode_ok (regno, mode))
10035     return 0;
10036 
10037   /* Return true if any of the following boundaries is crossed:
10038      17/18, 27/28 and 29/30.  */
10039 
10040   return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
10041           || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
10042           || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
10043 }
10044 
10045 
10046 /* Implement `MODE_CODE_BASE_REG_CLASS'.  */
10047 
10048 enum reg_class
10049 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
10050                               addr_space_t as, RTX_CODE outer_code,
10051                               RTX_CODE index_code ATTRIBUTE_UNUSED)
10052 {
10053   if (!ADDR_SPACE_GENERIC_P (as))
10054     {
10055       return POINTER_Z_REGS;
10056     }
10057 
10058   if (!avr_strict_X)
10059     return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
10060 
10061   return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
10062 }
10063 
10064 
10065 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'.  */
10066 
10067 bool
10068 avr_regno_mode_code_ok_for_base_p (int regno,
10069                                    enum machine_mode mode ATTRIBUTE_UNUSED,
10070                                    addr_space_t as ATTRIBUTE_UNUSED,
10071                                    RTX_CODE outer_code,
10072                                    RTX_CODE index_code ATTRIBUTE_UNUSED)
10073 {
10074   bool ok = false;
10075 
10076   if (!ADDR_SPACE_GENERIC_P (as))
10077     {
10078       if (regno < FIRST_PSEUDO_REGISTER
10079           && regno == REG_Z)
10080         {
10081           return true;
10082         }
10083 
10084       if (reg_renumber)
10085         {
10086           regno = reg_renumber[regno];
10087 
10088           if (regno == REG_Z)
10089             {
10090               return true;
10091             }
10092         }
10093 
10094       return false;
10095     }
10096 
10097   if (regno < FIRST_PSEUDO_REGISTER
10098       && (regno == REG_X
10099           || regno == REG_Y
10100           || regno == REG_Z
10101           || regno == ARG_POINTER_REGNUM))
10102     {
10103       ok = true;
10104     }
10105   else if (reg_renumber)
10106     {
10107       regno = reg_renumber[regno];
10108 
10109       if (regno == REG_X
10110           || regno == REG_Y
10111           || regno == REG_Z
10112           || regno == ARG_POINTER_REGNUM)
10113         {
10114           ok = true;
10115         }
10116     }
10117 
10118   if (avr_strict_X
10119       && PLUS == outer_code
10120       && regno == REG_X)
10121     {
10122       ok = false;
10123     }
10124 
10125   return ok;
10126 }
10127 
10128 
10129 /* A helper for `output_reload_insisf' and `output_reload_inhi'.  */
10130 /* Set 32-bit register OP[0] to compile-time constant OP[1].
10131    CLOBBER_REG is a QI clobber register or NULL_RTX.
10132    LEN == NULL: output instructions.
10133    LEN != NULL: set *LEN to the length of the instruction sequence
10134                 (in words) printed with LEN = NULL.
10135    If CLEAR_P is true, OP[0] had been cleard to Zero already.
10136    If CLEAR_P is false, nothing is known about OP[0].
10137 
10138    The effect on cc0 is as follows:
10139 
10140    Load 0 to any register except ZERO_REG : NONE
10141    Load ld register with any value        : NONE
10142    Anything else:                         : CLOBBER  */
10143 
static void
output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
{
  rtx src = op[1];
  rtx dest = op[0];
  rtx xval, xdest[4];
  int ival[4];
  /* Byte value currently held in CLOBBER_REG so it can be reused for
     repeated bytes; 1234 is not a valid byte value, i.e. "unknown".  */
  int clobber_val = 1234;
  bool cooked_clobber_p = false;
  /* True once the T flag has been loaded by a SET instruction below.  */
  bool set_p = false;
  enum machine_mode mode = GET_MODE (dest);
  int n, n_bytes = GET_MODE_SIZE (mode);

  gcc_assert (REG_P (dest)
              && CONSTANT_P (src));

  if (len)
    *len = 0;

  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */

  if (REGNO (dest) < 16
      && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
    {
      clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
    }

  /* We might need a clobber reg but don't have one.  Look at the value to
     be loaded more closely.  A clobber is only needed if it is a symbol
     or contains a byte that is neither 0, -1 or a power of 2.  */

  if (NULL_RTX == clobber_reg
      && !test_hard_reg_class (LD_REGS, dest)
      && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
          || !avr_popcount_each_byte (src, n_bytes,
                                      (1 << 0) | (1 << 1) | (1 << 8))))
    {
      /* We have no clobber register but need one.  Cook one up.
         That's cheaper than loading from constant pool.  */

      cooked_clobber_p = true;
      clobber_reg = all_regs_rtx[REG_Z + 1];
      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
    }

  /* Now start filling DEST from LSB to MSB.  */

  for (n = 0; n < n_bytes; n++)
    {
      int ldreg_p;
      bool done_byte = false;
      int j;
      rtx xop[3];

      /* Crop the n-th destination byte.  */

      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
      ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);

      /* Symbolic constants: each byte is loaded with an LDI of the
         respective lo8/hi8/hlo8/hhi8 part, through CLOBBER_REG when the
         destination byte is no LD_REGS register.  */

      if (!CONST_INT_P (src)
          && !CONST_FIXED_P (src)
          && !CONST_DOUBLE_P (src))
        {
          static const char* const asm_code[][2] =
            {
              { "ldi %2,lo8(%1)"  CR_TAB "mov %0,%2",    "ldi %0,lo8(%1)"  },
              { "ldi %2,hi8(%1)"  CR_TAB "mov %0,%2",    "ldi %0,hi8(%1)"  },
              { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2",    "ldi %0,hlo8(%1)" },
              { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2",    "ldi %0,hhi8(%1)" }
            };

          xop[0] = xdest[n];
          xop[1] = src;
          xop[2] = clobber_reg;

          avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);

          continue;
        }

      /* Crop the n-th source byte.  */

      xval = simplify_gen_subreg (QImode, src, mode, n);
      ival[n] = INTVAL (xval);

      /* Look if we can reuse the low word by means of MOVW.  */

      if (n == 2
          && n_bytes >= 4
          && AVR_HAVE_MOVW)
        {
          rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
          rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);

          if (INTVAL (lo16) == INTVAL (hi16))
            {
              /* The MOVW may be omitted when the high word is zero and
                 the destination is already cleared.  */

              if (0 != INTVAL (lo16)
                  || !clear_p)
                {
                  avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
                }

              break;
            }
        }

      /* Don't use CLR so that cc0 is set as expected.  */

      if (ival[n] == 0)
        {
          if (!clear_p)
            avr_asm_len (ldreg_p ? "ldi %0,0"
                         : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
                         : "mov %0,__zero_reg__",
                         &xdest[n], len, 1);
          continue;
        }

      /* The wanted byte is already in the clobber reg and that reg is
         the destination byte itself: nothing to do.  */

      if (clobber_val == ival[n]
          && REGNO (clobber_reg) == REGNO (xdest[n]))
        {
          continue;
        }

      /* LD_REGS can use LDI to move a constant value */

      if (ldreg_p)
        {
          xop[0] = xdest[n];
          xop[1] = xval;
          avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
          continue;
        }

      /* Try to reuse value already loaded in some lower byte. */

      for (j = 0; j < n; j++)
        if (ival[j] == ival[n])
          {
            xop[0] = xdest[n];
            xop[1] = xdest[j];

            avr_asm_len ("mov %0,%1", xop, len, 1);
            done_byte = true;
            break;
          }

      if (done_byte)
        continue;

      /* Need no clobber reg for -1: Use CLR/DEC */

      if (-1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("dec %0", &xdest[n], len, 1);
          continue;
        }
      else if (1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("inc %0", &xdest[n], len, 1);
          continue;
        }

      /* Use T flag or INC to manage powers of 2 if we have
         no clobber reg.  */

      if (NULL_RTX == clobber_reg
          && single_one_operand (xval, QImode))
        {
          xop[0] = xdest[n];
          xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));

          gcc_assert (constm1_rtx != xop[1]);

          /* The T flag only needs to be set once; BLD copies it into
             the wanted bit position of each byte.  */

          if (!set_p)
            {
              set_p = true;
              avr_asm_len ("set", xop, len, 1);
            }

          if (!clear_p)
            avr_asm_len ("clr %0", xop, len, 1);

          avr_asm_len ("bld %0,%1", xop, len, 1);
          continue;
        }

      /* We actually need the LD_REGS clobber reg.  */

      gcc_assert (NULL_RTX != clobber_reg);

      xop[0] = xdest[n];
      xop[1] = xval;
      xop[2] = clobber_reg;
      clobber_val = ival[n];

      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
                   "mov %0,%2", xop, len, 2);
    }

  /* If we cooked up a clobber reg above, restore it.  */

  if (cooked_clobber_p)
    {
      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
    }
}
10358 
10359 
10360 /* Reload the constant OP[1] into the HI register OP[0].
10361    CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10362    into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
10363    need a clobber reg or have to cook one up.
10364 
10365    PLEN == NULL: Output instructions.
10366    PLEN != NULL: Output nothing.  Set *PLEN to number of words occupied
10367                  by the insns printed.
10368 
10369    Return "".  */
10370 
const char*
output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
{
  /* Delegate to the common worker; OP[0] is not known to be cleared,
     hence CLEAR_P = false.  */
  output_reload_in_const (op, clobber_reg, plen, false);
  return "";
}
10377 
10378 
10379 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
10380    CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
10381    into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
10382    need a clobber reg or have to cook one up.
10383 
10384    LEN == NULL: Output instructions.
10385 
10386    LEN != NULL: Output nothing.  Set *LEN to number of words occupied
10387                 by the insns printed.
10388 
10389    Return "".  */
10390 
const char *
output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
{
  /* The pre-clearing trick below only pays off for integral constants
     loaded into NO_LD_REGS registers on devices with MOVW.  */

  if (AVR_HAVE_MOVW
      && !test_hard_reg_class (LD_REGS, op[0])
      && (CONST_INT_P (op[1])
          || CONST_FIXED_P (op[1])
          || CONST_DOUBLE_P (op[1])))
    {
      int len_clr, len_noclr;

      /* In some cases it is better to clear the destination beforehand, e.g.

             CLR R2   CLR R3   MOVW R4,R2   INC R2

         is shorter than

             CLR R2   INC R2   CLR  R3      CLR R4   CLR R5

         We find it too tedious to work that out in the print function.
         Instead, we call the print function twice to get the lengths of
         both methods and use the shortest one.  */

      /* Passing an int* instead of NULL makes the worker only count
         words, printing nothing (see output_reload_in_const).  */

      output_reload_in_const (op, clobber_reg, &len_clr, true);
      output_reload_in_const (op, clobber_reg, &len_noclr, false);

      if (len_noclr - len_clr == 4)
        {
          /* Default needs 4 CLR instructions: clear register beforehand.  */

          avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
                       "mov %B0,__zero_reg__" CR_TAB
                       "movw %C0,%A0", &op[0], len, 3);

          output_reload_in_const (op, clobber_reg, len, true);

          if (len)
            *len += 3;

          return "";
        }
    }

  /* Default: destination not pre-cleared.  */

  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}
10439 
/* Reload a 24-bit (PSImode) compile time constant OP[1] into register OP[0].
   CLOBBER_REG is a QI clobber register or NULL_RTX; LEN follows the same
   protocol as in output_reload_in_const.  Return "".  */

const char*
avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
{
  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}
10446 
10447 
10448 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'.  */
10449 
10450 void
10451 avr_output_addr_vec_elt (FILE *stream, int value)
10452 {
10453   if (AVR_HAVE_JMP_CALL)
10454     fprintf (stream, "\t.word gs(.L%d)\n", value);
10455   else
10456     fprintf (stream, "\trjmp .L%d\n", value);
10457 }
10458 
10459 
10460 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'.  */
10461 /* Returns true if SCRATCH are safe to be allocated as a scratch
10462    registers (for a define_peephole2) in the current function.  */
10463 
10464 static bool
10465 avr_hard_regno_scratch_ok (unsigned int regno)
10466 {
10467   /* Interrupt functions can only use registers that have already been saved
10468      by the prologue, even if they would normally be call-clobbered.  */
10469 
10470   if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10471       && !df_regs_ever_live_p (regno))
10472     return false;
10473 
10474   /* Don't allow hard registers that might be part of the frame pointer.
10475      Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10476      and don't care for a frame pointer that spans more than one register.  */
10477 
10478   if ((!reload_completed || frame_pointer_needed)
10479       && (regno == REG_Y || regno == REG_Y + 1))
10480     {
10481       return false;
10482     }
10483 
10484   return true;
10485 }
10486 
10487 
10488 /* Worker function for `HARD_REGNO_RENAME_OK'.  */
10489 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */
10490 
10491 int
10492 avr_hard_regno_rename_ok (unsigned int old_reg,
10493 			  unsigned int new_reg)
10494 {
10495   /* Interrupt functions can only use registers that have already been
10496      saved by the prologue, even if they would normally be
10497      call-clobbered.  */
10498 
10499   if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
10500       && !df_regs_ever_live_p (new_reg))
10501     return 0;
10502 
10503   /* Don't allow hard registers that might be part of the frame pointer.
10504      Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
10505      and don't care for a frame pointer that spans more than one register.  */
10506 
10507   if ((!reload_completed || frame_pointer_needed)
10508       && (old_reg == REG_Y || old_reg == REG_Y + 1
10509           || new_reg == REG_Y || new_reg == REG_Y + 1))
10510     {
10511       return 0;
10512     }
10513 
10514   return 1;
10515 }
10516 
/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (EQ or NE compare the bit against zero;
              GE resp. LT are mapped to EQ resp. NE below).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char*
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  bool long_jump = get_attr_length (insn) >= 4;
  /* If the branch target is out of RJMP range, or the skip instruction
     would skip over more than one word, invert the condition and let
     the skip jump over the (longer) branch instead.  */
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:

      /* Addresses in the lower I/O range can be tested directly with
         SBIS / SBIC; others are read into __tmp_reg__ first.  */

      if (low_io_address_operand (operands[1], QImode))
        {
          if (comp == EQ)
            output_asm_insn ("sbis %i1,%2", operands);
          else
            output_asm_insn ("sbic %i1,%2", operands);
        }
      else
        {
          output_asm_insn ("in __tmp_reg__,%i1", operands);
          if (comp == EQ)
            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
          else
            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
        }

      break; /* CONST_INT */

    case REG:

      if (comp == EQ)
        output_asm_insn ("sbrs %T1%T2", operands);
      else
        output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    }        /* switch */

  /* Long branch: the skip jumps over the RJMP which jumps over the JMP.  */

  if (long_jump)
    return ("rjmp .+4" CR_TAB
            "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  /* Reversed short branch: the skip itself jumps over the next insn.  */

  return "";
}
10584 
/* Worker function for `TARGET_ASM_CONSTRUCTOR'.  */

static void
avr_asm_out_ctor (rtx symbol, int priority)
{
  /* Emit a global reference to __do_global_ctors before the default
     constructor-table handling, so that symbol gets linked in.  */
  fputs ("\t.global __do_global_ctors\n", asm_out_file);
  default_ctor_section_asm_out_constructor (symbol, priority);
}
10593 
10594 
/* Worker function for `TARGET_ASM_DESTRUCTOR'.  */

static void
avr_asm_out_dtor (rtx symbol, int priority)
{
  /* Emit a global reference to __do_global_dtors before the default
     destructor-table handling, so that symbol gets linked in.  */
  fputs ("\t.global __do_global_dtors\n", asm_out_file);
  default_dtor_section_asm_out_destructor (symbol, priority);
}
10603 
10604 
10605 /* Worker function for `TARGET_RETURN_IN_MEMORY'.  */
10606 
10607 static bool
10608 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
10609 {
10610   if (TYPE_MODE (type) == BLKmode)
10611     {
10612       HOST_WIDE_INT size = int_size_in_bytes (type);
10613       return (size == -1 || size > 8);
10614     }
10615   else
10616     return false;
10617 }
10618 
10619 
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0, i.e. the
   smallest number of case values for which a jump table is used
   instead of a tree of conditional branches.  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  return 7;
}
10634 
10635 
10636 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'.  */
10637 
10638 static enum machine_mode
10639 avr_addr_space_address_mode (addr_space_t as)
10640 {
10641   return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
10642 }
10643 
10644 
/* Implement `TARGET_ADDR_SPACE_POINTER_MODE'.  */

static enum machine_mode
avr_addr_space_pointer_mode (addr_space_t as)
{
  /* The pointer mode coincides with the address mode for every
     address space.  */
  return avr_addr_space_address_mode (as);
}
10652 
10653 
10654 /* Helper for following function.  */
10655 
10656 static bool
10657 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
10658 {
10659   gcc_assert (REG_P (reg));
10660 
10661   if (strict)
10662     {
10663       return REGNO (reg) == REG_Z;
10664     }
10665 
10666   /* Avoid combine to propagate hard regs.  */
10667 
10668   if (can_create_pseudo_p()
10669       && REGNO (reg) < REG_Z)
10670     {
10671       return false;
10672     }
10673 
10674   return true;
10675 }
10676 
10677 
/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.  */
/* Return true iff X is a legitimate address for MODE in address
   space AS; STRICT requires hard registers.  */

static bool
avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      /* Flash is addressed either by a plain register or by
         post-increment; see avr_reg_ok_for_pgm_addr for which
         registers qualify.  */

      switch (GET_CODE (x))
        {
        case REG:
          ok = avr_reg_ok_for_pgm_addr (x, strict);
          break;

        case POST_INC:
          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
          break;

        default:
          break;
        }

      break; /* FLASH */

    case ADDR_SPACE_MEMX:

      /* 24-bit addresses: a bare (pseudo) register before reload, or a
         LO_SUM of a register holding the high part and Z as low part.  */

      if (REG_P (x))
        ok = (!strict
              && can_create_pseudo_p());

      if (LO_SUM == GET_CODE (x))
        {
          rtx hi = XEXP (x, 0);
          rtx lo = XEXP (x, 1);

          ok = (REG_P (hi)
                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
                && REG_P (lo)
                && REGNO (lo) == REG_Z);
        }

      break; /* MEMX */
    }

  /* Optional debug dump controlled by -mlog=legitimate_address_p.  */

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
10758 
10759 
10760 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'.  */
10761 
10762 static rtx
10763 avr_addr_space_legitimize_address (rtx x, rtx old_x,
10764                                    enum machine_mode mode, addr_space_t as)
10765 {
10766   if (ADDR_SPACE_GENERIC_P (as))
10767     return avr_legitimize_address (x, old_x, mode);
10768 
10769   if (avr_log.legitimize_address)
10770     {
10771       avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
10772     }
10773 
10774   return old_x;
10775 }
10776 
10777 
/* Implement `TARGET_ADDR_SPACE_CONVERT'.  */
/* Convert pointer SRC from address space of TYPE_FROM to that of TYPE_TO
   and return an rtx for the result.  SRC is returned unchanged unless
   the conversion crosses the 16-bit / 24-bit pointer boundary.  */

static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      /* Strip CONST / PLUS wrappers to find an underlying SYMBOL_REF.  */

      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g. PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF == GET_CODE (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory: RAM has bit 23 set.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      /* Zero-extend for segment 0, otherwise prepend the segment byte.  */

      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  /* Conversions between 16-bit spaces need no code.  */

  return src;
}
10845 
10846 
10847 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'.  */
10848 
10849 static bool
10850 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
10851                          addr_space_t superset ATTRIBUTE_UNUSED)
10852 {
10853   /* Allow any kind of pointer mess.  */
10854 
10855   return true;
10856 }
10857 
10858 
/* Implement `TARGET_CONVERT_TO_TYPE'.  */
/* Return the result of converting EXPR to TYPE, or NULL_TREE to let the
   front end apply its default conversion.  The only work done here is to
   emit -Waddr-space-convert diagnostics for pointer casts that change
   the pointed-to address space.  */

static tree
avr_convert_to_type (tree type, tree expr)
{
  /* Print a diagnose for pointer conversion that changes the address
     space of the pointer target to a non-enclosing address space,
     provided -Waddr-space-convert is on.

     FIXME: Filter out cases where the target object is known to
            be located in the right memory, like in

                (const __flash*) PSTR ("text")

            Also try to distinguish between explicit casts requested by
            the user and implicit casts like

                void f (const __flash char*);

                void g (const char *p)
                {
                    f ((const __flash*) p);
                }

            under the assumption that an explicit casts means that the user
            knows what he is doing, e.g. interface with PSTR or old style
            code with progmem and pgm_read_xxx.
  */

  if (avr_warn_addr_space_convert
      && expr != error_mark_node
      && POINTER_TYPE_P (type)
      && POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
      addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));

      if (avr_log.progmem)
        avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);

      /* Casting into __memx is always fine (it encloses the other
         spaces, cf. avr_addr_space_convert); warn about every other
         space-changing cast.  */

      if (as_new != ADDR_SPACE_MEMX
          && as_new != as_old)
        {
          location_t loc = EXPR_LOCATION (expr);
          const char *name_old = avr_addrspace[as_old].name;
          const char *name_new = avr_addrspace[as_new].name;

          warning (OPT_Waddr_space_convert,
                   "conversion from address space %qs to address space %qs",
                   ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
                   ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);

          return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
        }
    }

  return NULL_TREE;
}
10917 
10918 
/* Worker function for movmemhi expander.
   XOP[0]  Destination as MEM:BLK
   XOP[1]  Source      "     "
   XOP[2]  # Bytes to copy

   Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand combination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  enum machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  /* Writing to flash is not supported here.  */

  if (avr_mem_flash_p (xop[0]))
    return false;

  /* Only expand copies whose byte count is known at compile time.  */

  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src  = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      /* 24-bit source address: read through the linearized __memx space.  */

      gcc_assert (as == ADDR_SPACE_MEMX);

      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      /* Split the 24-bit address into its low 16 bits and high 8 bits.  */

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      if (segment
          && avr_current_device->n_flash > 1)
        {
          /* Source lives in a flash segment > 0: load RAMPZ for ELPM.  */

          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          /* Only one flash segment in use: plain LPM addressing will do.  */

          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      /* Note: count == 0x100 still fits a QImode counter because the
         dec/brne loop in avr_out_movmem runs 256 times when started
         at zero.  */

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
        on its own.  Thus, we allocate the pointer registers by hand:
        Z = source address
        X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
        register(s) inside the loop leading to additional move instruction
        to/from stack which could clobber tmp_reg.  Thus, do *not* emit
        load and store as separate insns.  Instead, we perform the copy
        by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      /* Hand the high address byte to the movmemx insns in R23.  */

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
11024 
11025 
/* Print assembler for movmem_qi, movmem_hi insns...
       $0     : Address Space
       $1, $2 : Loop register
       Z      : Source address
       X      : Destination address

   PLEN == NULL: Output the instructions.
   PLEN != NULL: Don't output; only accumulate the code length in *PLEN.
   Each byte is transferred through tmp_reg.  */

const char*
avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  addr_space_t as = (addr_space_t) INTVAL (op[0]);
  enum machine_mode loop_mode = GET_MODE (op[1]);
  /* Whether the loop counter sits in a register pair that allows SBIW.  */
  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
  rtx xop[3];

  if (plen)
    *plen = 0;

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = tmp_reg_rtx;

  /* Loop label */

  avr_asm_len ("0:", xop, plen, 0);

  /* Load with post-increment */

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:

      avr_asm_len ("ld %2,Z+", xop, plen, 1);
      break;

    case ADDR_SPACE_FLASH:

      if (AVR_HAVE_LPMX)
        avr_asm_len ("lpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("lpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;

    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      if (AVR_HAVE_ELPMX)
        avr_asm_len ("elpm %2,Z+", xop, plen, 1);
      else
        avr_asm_len ("elpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;
    }

  /* Store with post-increment */

  avr_asm_len ("st X+,%2", xop, plen, 1);

  /* Decrement loop-counter and set Z-flag */

  if (QImode == loop_mode)
    {
      avr_asm_len ("dec %1", xop, plen, 1);
    }
  else if (sbiw_p)
    {
      avr_asm_len ("sbiw %1,1", xop, plen, 1);
    }
  else
    {
      /* HImode counter outside ADDW_REGS: decrement bytewise.  */

      avr_asm_len ("subi %A1,1" CR_TAB
                   "sbci %B1,0", xop, plen, 2);
    }

  /* Loop until zero */

  return avr_asm_len ("brne 0b", xop, plen, 1);
}
11111 
11112 
11113 
11114 /* Helper for __builtin_avr_delay_cycles */
11115 
11116 static rtx
11117 avr_mem_clobber (void)
11118 {
11119   rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
11120   MEM_VOLATILE_P (mem) = 1;
11121   return mem;
11122 }
11123 
/* Expand __builtin_avr_delay_cycles:  Emit a cascade of delay loops and
   NOPs consuming the number of cycles requested by OPERANDS0 (a 32-bit
   compile-time constant).  Wider counters handle larger counts; the
   remainder of each stage is handed down to the next smaller one.  */

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* 32-bit loop counter: 6 cycles per iteration + 9 cycles overhead.  */

  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 24-bit loop counter: 5 cycles per iteration + 7 cycles overhead.  */

  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 16-bit loop counter: 4 cycles per iteration + 5 cycles overhead.  */

  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* 8-bit loop counter: 3 cycles per iteration.  */

  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
        loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
      }

  /* Burn the remaining cycles with 2-cycle and 1-cycle NOP insns.  */

  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT(2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT(1)));
      cycles--;
    }
}
11185 
11186 
11187 /* Return VAL * BASE + DIGIT.  BASE = 0 is shortcut for BASE = 2^{32}   */
11188 
11189 static double_int
11190 avr_double_int_push_digit (double_int val, int base,
11191                            unsigned HOST_WIDE_INT digit)
11192 {
11193   val = 0 == base
11194     ? val.llshift (32, 64)
11195     : val * double_int::from_uhwi (base);
11196 
11197   return val + double_int::from_uhwi (digit);
11198 }
11199 
11200 
11201 /* Compute the image of x under f, i.e. perform   x --> f(x)    */
11202 
11203 static int
11204 avr_map (double_int f, int x)
11205 {
11206   return 0xf & f.lrshift (4*x, 64).to_uhwi ();
11207 }
11208 
11209 
/* Return some metrics of map A.  */
/* Mode selectors for avr_map_metric below.  A "map" consists of 8
   nibbles; nibble N gives the source position of result bit N, where
   0xf means "taken from the value operand" (cf. avr_out_insert_bits).  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };
11229 
11230 static unsigned
11231 avr_map_metric (double_int a, int mode)
11232 {
11233   unsigned i, metric = 0;
11234 
11235   for (i = 0; i < 8; i++)
11236     {
11237       unsigned ai = avr_map (a, i);
11238 
11239       if (mode == MAP_FIXED_0_7)
11240         metric += ai == i;
11241       else if (mode == MAP_NONFIXED_0_7)
11242         metric += ai < 8 && ai != i;
11243       else if (mode == MAP_MASK_FIXED_0_7)
11244         metric |= ((unsigned) (ai == i)) << i;
11245       else if (mode == MAP_PREIMAGE_0_7)
11246         metric += ai < 8;
11247       else if (mode == MAP_MASK_PREIMAGE_F)
11248         metric |= ((unsigned) (ai == 0xf)) << i;
11249       else
11250         gcc_unreachable();
11251     }
11252 
11253   return metric;
11254 }
11255 
11256 
11257 /* Return true if IVAL has a 0xf in its hexadecimal representation
11258    and false, otherwise.  Only nibbles 0..7 are taken into account.
11259    Used as constraint helper for C0f and Cxf.  */
11260 
11261 bool
11262 avr_has_nibble_0xf (rtx ival)
11263 {
11264   return 0 != avr_map_metric (rtx_to_double_int (ival), MAP_MASK_PREIMAGE_F);
11265 }
11266 
11267 
11268 /* We have a set of bits that are mapped by a function F.
11269    Try to decompose F by means of a second function G so that
11270 
11271       F = F o G^-1 o G
11272 
11273    and
11274 
11275       cost (F o G^-1) + cost (G)  <  cost (F)
11276 
11277    Example:  Suppose builtin insert_bits supplies us with the map
11278    F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
11279    nibble of the result, we can just as well rotate the bits before inserting
11280    them and use the map 0x7654ffff which is cheaper than the original map.
11281    For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */
11282 
typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg), encoded as a nibble map */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  double_int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;
11303 
/* The candidate operations G (rotates and shifts of the value byte)
   together with their inverse nibble map GINV and their cost.  The
   `map' member is just a placeholder here; avr_map_decompose fills it
   with F o G^-1.  */

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, { 0, 0 }, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
  };
11322 
11323 
/* Try to decompose F as F = (F o G^-1) o G as described above.
   The result is a struct representing F o G^-1 and G.
   If result.cost < 0 then such a decomposition does not exist.
   F is the insert_bits map, *G one of the candidates from avr_map_op[],
   and VAL_CONST_P says whether the bits to be inserted are a
   compile-time constant.  */

static avr_map_op_t
avr_map_decompose (double_int f, const avr_map_op_t *g, bool val_const_p)
{
  int i;
  /* true iff F reads the value operand at all (some nibble is 0xf).  */
  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
  avr_map_op_t f_ginv = *g;
  double_int ginv = double_int::from_uhwi (g->ginv);

  /* Cost of -1 marks "no decomposition found".  */

  f_ginv.cost = -1;

  /* Step 1:  Computing F o G^-1  */

  for (i = 7; i >= 0; i--)
    {
      int x = avr_map (f, i);

      if (x <= 7)
        {
          x = avr_map (ginv, x);

          /* The bit is no element of the image of G: no avail (cost = -1)  */

          if (x > 7)
            return f_ginv;
        }

      f_ginv.map = avr_double_int_push_digit (f_ginv.map, 16, x);
    }

  /* Step 2:  Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
      the cost of the insertion plus the cost of the operation.  */

  /* Step 2a:  Compute cost of F o G^-1  */

  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
    {
      /* The mapping consists only of fixed points and can be folded
         to AND/OR logic in the remainder.  Reasonable cost is 3. */

      f_ginv.cost = 2 + (val_used_p && !val_const_p);
    }
  else
    {
      rtx xop[4];

      /* Get the cost of the insn by calling the output worker with some
         fake values.  Mimic effect of reloading xop[3]: Unused operands
         are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (f_ginv.map.to_uhwi (), SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      avr_out_insert_bits (xop, &f_ginv.cost);

      /* A used constant value needs one extra insn to load it.  */

      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
    }

  /* Step 2b:  Add cost of G  */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);

  return f_ginv;
}
11397 
11398 
/* Insert bits from XOP[1] into XOP[0] according to MAP.
   XOP[0] and XOP[1] don't overlap.
   If FIXP_P = true:  Move all bits according to MAP using BLD/BST sequences.
   If FIXP_P = false: Just move the bit if its position in the destination
   is different to its source position.
   PLEN != NULL means don't output assembler; only accumulate the
   instruction count in *PLEN (cf. avr_asm_len).
   XOP[3] is used as scratch operand for the bit numbers.  */

static void
avr_move_bits (rtx *xop, double_int map, bool fixp_p, int *plen)
{
  int bit_dest, b;

  /* T-flag contains this bit of the source, i.e. of XOP[1]  */
  int t_bit_src = -1;

  /* We order the operations according to the requested source bit b.  */

  for (b = 0; b < 8; b++)
    for (bit_dest = 0; bit_dest < 8; bit_dest++)
      {
        int bit_src = avr_map (map, bit_dest);

        if (b != bit_src
            || bit_src >= 8
            /* Same position: No need to copy as requested by FIXP_P.  */
            || (bit_dest == bit_src && !fixp_p))
          continue;

        if (t_bit_src != bit_src)
          {
            /* Source bit is not yet in T: Store it to T.  */

            t_bit_src = bit_src;

            xop[3] = GEN_INT (bit_src);
            avr_asm_len ("bst %T1%T3", xop, plen, 1);
          }

        /* Load destination bit with T.  */

        xop[3] = GEN_INT (bit_dest);
        avr_asm_len ("bld %T0%T3", xop, plen, 1);
      }
}
11442 
11443 
/* PLEN == 0: Print assembler code for `insert_bits'.
   PLEN != 0: Compute code length in bytes.

   OP[0]:  Result
   OP[1]:  The mapping composed of nibbles. If nibble no. N is
           0:   Bit N of result is copied from bit OP[2].0
           ...  ...
           7:   Bit N of result is copied from bit OP[2].7
           0xf: Bit N of result is copied from bit OP[3].N
   OP[2]:  Bits to be inserted
   OP[3]:  Target value  */

const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  double_int map = rtx_to_double_int (op[1]);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  /* xop[0..2] = result, bits-to-insert, target; xop[3] is scratch for
     avr_move_bits.  */

  xop[0] = op[0];
  xop[1] = op[2];
  xop[2] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    fprintf (asm_out_file,
             ASM_COMMENT_START "map = 0x%08" HOST_LONG_FORMAT "x\n",
             map.to_uhwi () & GET_MODE_MASK (SImode));

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
      fixp_p = false;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      if (fixp_p && n_fix - n_nofix > 3)
        {
          /* Copying the fixed bits en bloc with EOR/ANDI/EOR is cheaper
             than moving them one by one; the loop below then only needs
             to handle the non-fixed bits.  */

          xop[3] = gen_int_mode (~mask_fixed, QImode);

          avr_asm_len ("eor %0,%1"   CR_TAB
                       "andi %0,%3"  CR_TAB
                       "eor %0,%1", xop, plen, 3);
          fixp_p = false;
        }
    }
  else
    {
      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
        {
          /* Initialize the result with the source; fixed points are done.  */

          avr_asm_len ("mov %0,%1", xop, plen, 1);
          fixp_p = false;
        }
    }

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}
11532 
11533 
/* IDs for all the AVR builtins.  */
/* Expands to one AVR_BUILTIN_<NAME> enumerator per entry of builtins.def,
   in definition order, so IDs double as indices into avr_bdesc[].  */

enum avr_builtin_id
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)  \
    AVR_BUILTIN_ ## NAME,
#include "builtins.def"
#undef DEF_BUILTIN

    /* Total number of built-ins; keep last.  */
    AVR_BUILTIN_COUNT
  };
11545 
/* Descriptor of one built-in function.  */

struct GTY(()) avr_builtin_description
{
  /* Code of the insn that implements this built-in.  */
  enum insn_code icode;

  /* Number of call arguments.  */
  int n_args;

  /* The registered function declaration; set by avr_init_builtins.  */
  tree fndecl;
};
11552 
11553 
11554 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
11555    that a built-in's ID can be used to access the built-in by means of
11556    avr_bdesc[ID]  */
11557 
11558 static GTY(()) struct avr_builtin_description
11559 avr_bdesc[AVR_BUILTIN_COUNT] =
11560   {
11561 #define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME)         \
11562     { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
11563 #include "builtins.def"
11564 #undef DEF_BUILTIN
11565   };
11566 
11567 
11568 /* Implement `TARGET_BUILTIN_DECL'.  */
11569 
11570 static tree
11571 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
11572 {
11573   if (id < AVR_BUILTIN_COUNT)
11574     return avr_bdesc[id].fndecl;
11575 
11576   return error_mark_node;
11577 }
11578 
11579 
11580 static void
11581 avr_init_builtin_int24 (void)
11582 {
11583   tree int24_type  = make_signed_type (GET_MODE_BITSIZE (PSImode));
11584   tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
11585 
11586   lang_hooks.types.register_builtin_type (int24_type, "__int24");
11587   lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
11588 }
11589 
11590 
/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target.  Builds the function
   type nodes needed by builtins.def, registers each built-in under its
   lower-cased "__builtin_avr_..." name and records the resulting
   declaration in avr_bdesc[].  */

static void
avr_init_builtins (void)
{
  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                char_type_node,
                                NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
                                NULL_TREE);

  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
                                long_unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);

  tree const_memx_void_node
    = build_qualified_type (void_type_node,
                            TYPE_QUAL_CONST
                            | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
                                const_memx_ptr_type_node,
                                NULL);

  /* ITYP (T): the plain integer type with the same precision and
     signedness as fixed-point type T (used for bitsfx / fxbits).  */

#define ITYP(T)                                                         \
  lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))

#define FX_FTYPE_FX(fx)                                                 \
  tree fx##r_ftype_##fx##r                                              \
    = build_function_type_list (node_##fx##r, node_##fx##r, NULL);      \
  tree fx##k_ftype_##fx##k                                              \
    = build_function_type_list (node_##fx##k, node_##fx##k, NULL)

#define FX_FTYPE_FX_INT(fx)                                             \
  tree fx##r_ftype_##fx##r_int                                          \
    = build_function_type_list (node_##fx##r, node_##fx##r,             \
                                integer_type_node, NULL);               \
  tree fx##k_ftype_##fx##k_int                                          \
    = build_function_type_list (node_##fx##k, node_##fx##k,             \
                                integer_type_node, NULL)

#define INT_FTYPE_FX(fx)                                                \
  tree int_ftype_##fx##r                                                \
    = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
  tree int_ftype_##fx##k                                                \
    = build_function_type_list (integer_type_node, node_##fx##k, NULL)

#define INTX_FTYPE_FX(fx)                                               \
  tree int##fx##r_ftype_##fx##r                                         \
    = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
  tree int##fx##k_ftype_##fx##k                                         \
    = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)

#define FX_FTYPE_INTX(fx)                                               \
  tree fx##r_ftype_int##fx##r                                           \
    = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
  tree fx##k_ftype_int##fx##k                                           \
    = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)

  /* Fixed-point type nodes; the names encode signedness and width so
     the macros above can paste them together.  */

  tree node_hr = short_fract_type_node;
  tree node_nr = fract_type_node;
  tree node_lr = long_fract_type_node;
  tree node_llr = long_long_fract_type_node;

  tree node_uhr = unsigned_short_fract_type_node;
  tree node_unr = unsigned_fract_type_node;
  tree node_ulr = unsigned_long_fract_type_node;
  tree node_ullr = unsigned_long_long_fract_type_node;

  tree node_hk = short_accum_type_node;
  tree node_nk = accum_type_node;
  tree node_lk = long_accum_type_node;
  tree node_llk = long_long_accum_type_node;

  tree node_uhk = unsigned_short_accum_type_node;
  tree node_unk = unsigned_accum_type_node;
  tree node_ulk = unsigned_long_accum_type_node;
  tree node_ullk = unsigned_long_long_accum_type_node;


  /* For absfx builtins.  */

  FX_FTYPE_FX (h);
  FX_FTYPE_FX (n);
  FX_FTYPE_FX (l);
  FX_FTYPE_FX (ll);

  /* For roundfx builtins.  */

  FX_FTYPE_FX_INT (h);
  FX_FTYPE_FX_INT (n);
  FX_FTYPE_FX_INT (l);
  FX_FTYPE_FX_INT (ll);

  FX_FTYPE_FX_INT (uh);
  FX_FTYPE_FX_INT (un);
  FX_FTYPE_FX_INT (ul);
  FX_FTYPE_FX_INT (ull);

  /* For countlsfx builtins.  */

  INT_FTYPE_FX (h);
  INT_FTYPE_FX (n);
  INT_FTYPE_FX (l);
  INT_FTYPE_FX (ll);

  INT_FTYPE_FX (uh);
  INT_FTYPE_FX (un);
  INT_FTYPE_FX (ul);
  INT_FTYPE_FX (ull);

  /* For bitsfx builtins.  */

  INTX_FTYPE_FX (h);
  INTX_FTYPE_FX (n);
  INTX_FTYPE_FX (l);
  INTX_FTYPE_FX (ll);

  INTX_FTYPE_FX (uh);
  INTX_FTYPE_FX (un);
  INTX_FTYPE_FX (ul);
  INTX_FTYPE_FX (ull);

  /* For fxbits builtins.  */

  FX_FTYPE_INTX (h);
  FX_FTYPE_INTX (n);
  FX_FTYPE_INTX (l);
  FX_FTYPE_INTX (ll);

  FX_FTYPE_INTX (uh);
  FX_FTYPE_INTX (un);
  FX_FTYPE_INTX (ul);
  FX_FTYPE_INTX (ull);


  /* Register each built-in from builtins.def under its lower-cased
     "__builtin_avr_<name>" name and remember the FNDECL in avr_bdesc[].  */

#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)                  \
  {                                                                     \
    int id = AVR_BUILTIN_ ## NAME;                                      \
    const char *Name = "__builtin_avr_" #NAME;                          \
    char *name = (char*) alloca (1 + strlen (Name));                    \
                                                                        \
    gcc_assert (id < AVR_BUILTIN_COUNT);                                \
    avr_bdesc[id].fndecl                                                \
      = add_builtin_function (avr_tolower (name, Name), TYPE, id,       \
                              BUILT_IN_MD, LIBNAME, NULL_TREE);         \
  }
#include "builtins.def"
#undef DEF_BUILTIN

  /* Register the 24-bit types __int24 and __uint24.  */

  avr_init_builtin_int24 ();
}
11771 
11772 
/* Subroutine of avr_expand_builtin to expand vanilla builtins
   with non-void result and 1 ... 3 arguments.
   ICODE is the insn code implementing the built-in, EXP the CALL_EXPR,
   and TARGET a suggested rtx for the result (may be NULL_RTX or of an
   unsuitable mode, in which case a fresh pseudo is used instead).
   Return the rtx holding the result, or NULL_RTX if the insn could not
   be generated.  */

static rtx
avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat, xop[3];
  int n, n_args = call_expr_nargs (exp);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;

  gcc_assert (n_args >= 1 && n_args <= 3);

  /* Allocate a fresh result register if the suggested TARGET won't do.  */

  if (target == NULL_RTX
      || GET_MODE (target) != tmode
      || !insn_data[icode].operand[0].predicate (target, tmode))
    {
      target = gen_reg_rtx (tmode);
    }

  for (n = 0; n < n_args; n++)
    {
      tree arg = CALL_EXPR_ARG (exp, n);
      rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      enum machine_mode opmode = GET_MODE (op);
      enum machine_mode mode = insn_data[icode].operand[n+1].mode;

      /* An SImode (or VOIDmode constant) argument feeding a HImode
         operand: use its low part.  */

      if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
        {
          opmode = HImode;
          op = gen_lowpart (HImode, op);
        }

      /* In case the insn wants input operands in modes different from
         the result, abort.  */

      gcc_assert (opmode == mode || opmode == VOIDmode);

      /* Reload the operand into a register if it does not satisfy the
         insn's predicate as-is.  */

      if (!insn_data[icode].operand[n+1].predicate (op, mode))
        op = copy_to_mode_reg (mode, op);

      xop[n] = op;
    }

  switch (n_args)
    {
    case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
    case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
    case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;

    default:
      gcc_unreachable();
    }

  if (pat == NULL_RTX)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}
11833 
11834 
11835 /* Implement `TARGET_EXPAND_BUILTIN'.  */
11836 /* Expand an expression EXP that calls a built-in function,
11837    with result going to TARGET if that's convenient
11838    (and in mode MODE if that's convenient).
11839    SUBTARGET may be used as the target for computing one of EXP's operands.
11840    IGNORE is nonzero if the value is to be ignored.  */
11841 
11842 static rtx
11843 avr_expand_builtin (tree exp, rtx target,
11844                     rtx subtarget ATTRIBUTE_UNUSED,
11845                     enum machine_mode mode ATTRIBUTE_UNUSED,
11846                     int ignore)
11847 {
11848   tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
11849   const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
11850   unsigned int id = DECL_FUNCTION_CODE (fndecl);
11851   const struct avr_builtin_description *d = &avr_bdesc[id];
11852   tree arg0;
11853   rtx op0;
11854 
11855   gcc_assert (id < AVR_BUILTIN_COUNT);
11856 
11857   switch (id)
11858     {
11859     case AVR_BUILTIN_NOP:
11860       emit_insn (gen_nopv (GEN_INT(1)));
11861       return 0;
11862 
11863     case AVR_BUILTIN_DELAY_CYCLES:
11864       {
11865         arg0 = CALL_EXPR_ARG (exp, 0);
11866         op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
11867 
11868         if (!CONST_INT_P (op0))
11869           error ("%s expects a compile time integer constant", bname);
11870         else
11871           avr_expand_delay_cycles (op0);
11872 
11873         return NULL_RTX;
11874       }
11875 
11876     case AVR_BUILTIN_INSERT_BITS:
11877       {
11878         arg0 = CALL_EXPR_ARG (exp, 0);
11879         op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
11880 
11881         if (!CONST_INT_P (op0))
11882           {
11883             error ("%s expects a compile time long integer constant"
11884                    " as first argument", bname);
11885             return target;
11886           }
11887 
11888         break;
11889       }
11890 
11891     case AVR_BUILTIN_ROUNDHR:   case AVR_BUILTIN_ROUNDUHR:
11892     case AVR_BUILTIN_ROUNDR:    case AVR_BUILTIN_ROUNDUR:
11893     case AVR_BUILTIN_ROUNDLR:   case AVR_BUILTIN_ROUNDULR:
11894     case AVR_BUILTIN_ROUNDLLR:  case AVR_BUILTIN_ROUNDULLR:
11895 
11896     case AVR_BUILTIN_ROUNDHK:   case AVR_BUILTIN_ROUNDUHK:
11897     case AVR_BUILTIN_ROUNDK:    case AVR_BUILTIN_ROUNDUK:
11898     case AVR_BUILTIN_ROUNDLK:   case AVR_BUILTIN_ROUNDULK:
11899     case AVR_BUILTIN_ROUNDLLK:  case AVR_BUILTIN_ROUNDULLK:
11900 
11901       /* Warn about odd rounding.  Rounding points >= FBIT will have
11902          no effect.  */
11903 
11904       if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
11905         break;
11906 
11907       int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));
11908 
11909       if (rbit >= (int) GET_MODE_FBIT (mode))
11910         {
11911           warning (OPT_Wextra, "rounding to %d bits has no effect for "
11912                    "fixed-point value with %d fractional bits",
11913                    rbit, GET_MODE_FBIT (mode));
11914 
11915           return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
11916                               EXPAND_NORMAL);
11917         }
11918       else if (rbit <= - (int) GET_MODE_IBIT (mode))
11919         {
11920           warning (0, "rounding result will always be 0");
11921           return CONST0_RTX (mode);
11922         }
11923 
11924       /* The rounding points RP satisfies now:  -IBIT < RP < FBIT.
11925 
11926          TR 18037 only specifies results for  RP > 0.  However, the
11927          remaining cases of  -IBIT < RP <= 0  can easily be supported
11928          without any additional overhead.  */
11929 
11930       break; /* round */
11931     }
11932 
11933   /* No fold found and no insn:  Call support function from libgcc.  */
11934 
11935   if (d->icode == CODE_FOR_nothing
11936       && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
11937     {
11938       return expand_call (exp, target, ignore);
11939     }
11940 
11941   /* No special treatment needed: vanilla expand.  */
11942 
11943   gcc_assert (d->icode != CODE_FOR_nothing);
11944   gcc_assert (d->n_args == call_expr_nargs (exp));
11945 
11946   if (d->n_args == 0)
11947     {
11948       emit_insn ((GEN_FCN (d->icode)) (target));
11949       return NULL_RTX;
11950     }
11951 
11952   return avr_default_expand_builtin (d->icode, exp, target);
11953 }
11954 
11955 
11956 /* Helper for `avr_fold_builtin' that folds  absfx (FIXED_CST).  */
11957 
11958 static tree
11959 avr_fold_absfx (tree tval)
11960 {
11961   if (FIXED_CST != TREE_CODE (tval))
11962     return NULL_TREE;
11963 
11964   /* Our fixed-points have no padding:  Use double_int payload directly.  */
11965 
11966   FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
11967   unsigned int bits = GET_MODE_BITSIZE (fval.mode);
11968   double_int ival = fval.data.sext (bits);
11969 
11970   if (!ival.is_negative())
11971     return tval;
11972 
11973   /* ISO/IEC TR 18037, 7.18a.6.2:  The absfx functions are saturating.  */
11974 
11975   fval.data = (ival == double_int::min_value (bits, false).sext (bits))
11976     ? double_int::max_value (bits, false)
11977     : -ival;
11978 
11979   return build_fixed (TREE_TYPE (tval), fval);
11980 }
11981 
11982 
/* Implement `TARGET_FOLD_BUILTIN'.  */
/* Try to fold a call to one of AVR's builtins.  FNDECL is the builtin's
   declaration and ARG the array of its argument trees.  Return the folded
   tree, or NULL_TREE if no folding could be performed.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  /* Type of the builtin's result.  */
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  /* Folding is an optimization; don't bother without -O.  */

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        /* The nibble swap is just a rotate-left by 4 bits.  */

        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_ABSHR:
    case AVR_BUILTIN_ABSR:
    case AVR_BUILTIN_ABSLR:
    case AVR_BUILTIN_ABSLLR:

    case AVR_BUILTIN_ABSHK:
    case AVR_BUILTIN_ABSK:
    case AVR_BUILTIN_ABSLK:
    case AVR_BUILTIN_ABSLLK:
      /* GCC is not good with folding ABS for fixed-point.  Do it by hand.  */

      return avr_fold_absfx (arg[0]);

    case AVR_BUILTIN_BITSHR:    case AVR_BUILTIN_HRBITS:
    case AVR_BUILTIN_BITSHK:    case AVR_BUILTIN_HKBITS:
    case AVR_BUILTIN_BITSUHR:   case AVR_BUILTIN_UHRBITS:
    case AVR_BUILTIN_BITSUHK:   case AVR_BUILTIN_UHKBITS:

    case AVR_BUILTIN_BITSR:     case AVR_BUILTIN_RBITS:
    case AVR_BUILTIN_BITSK:     case AVR_BUILTIN_KBITS:
    case AVR_BUILTIN_BITSUR:    case AVR_BUILTIN_URBITS:
    case AVR_BUILTIN_BITSUK:    case AVR_BUILTIN_UKBITS:

    case AVR_BUILTIN_BITSLR:    case AVR_BUILTIN_LRBITS:
    case AVR_BUILTIN_BITSLK:    case AVR_BUILTIN_LKBITS:
    case AVR_BUILTIN_BITSULR:   case AVR_BUILTIN_ULRBITS:
    case AVR_BUILTIN_BITSULK:   case AVR_BUILTIN_ULKBITS:

    case AVR_BUILTIN_BITSLLR:   case AVR_BUILTIN_LLRBITS:
    case AVR_BUILTIN_BITSLLK:   case AVR_BUILTIN_LLKBITS:
    case AVR_BUILTIN_BITSULLR:  case AVR_BUILTIN_ULLRBITS:
    case AVR_BUILTIN_BITSULLK:  case AVR_BUILTIN_ULLKBITS:

      /* The bitsfx / fxbits builtins merely reinterpret the bit pattern
         between an integer type and a fixed-point type of the same
         precision, i.e. a VIEW_CONVERT_EXPR.  */

      gcc_assert (TYPE_PRECISION (val_type)
                  == TYPE_PRECISION (TREE_TYPE (arg[0])));

      return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);

    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        double_int map;
        /* Set if we replaced an argument and must re-emit the call.  */
        bool changed = false;
        unsigned i;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin.  */

            break;
          }

        map = tree_to_double_int (arg[0]);
        tmap = double_int_to_tree (map_type, map);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted. If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    /* Destination bit I comes from source bit MI:
                       set it via IOR if the source bit is 1, clear it
                       via AND if the source bit is 0.  */
                    if (bits & (1 << mi))     mask_ior |=  (1 << i);
                    else                      mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            /* (tbits ^ tval) & ~mask_f ^ tval  selects TBITS where the
               map says "insert" and keeps TVAL elsewhere.  */

            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try decomposing the map to reduce overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %X\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        /* Search all known decomposition operations for the cheapest
           one that is applicable to MAP.  */

        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = double_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
12176 
12177 
12178 
/* Initialize the GCC target structure.  */

/* Assembler output: integer pseudo-ops and file / function framing.  */

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef  TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef  TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

/* Function return values.  */

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

/* Attributes and sections.  */

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

/* Costs, machine-dependent reorg and argument passing.  */

#undef  TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef  TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

/* Register allocation, frame layout and miscellaneous hooks.  */

#undef  TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef  TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef  TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef  TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef  TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

/* Builtin functions (see avr_init_builtins, avr_expand_builtin,
   avr_fold_builtin above).  */

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef  TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef  TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef  TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

#undef  TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef  TARGET_CONVERT_TO_TYPE
#define TARGET_CONVERT_TO_TYPE avr_convert_to_type

/* Named address spaces (__flash, __memx etc.).  */

#undef  TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef  TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef  TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef  TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef  TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P  \
  avr_addr_space_legitimate_address_p

#undef  TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

#undef  TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD avr_secondary_reload

/* Operand printing.  */

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

/* The one and only target vector, built from the macros above.  */

struct gcc_target targetm = TARGET_INITIALIZER;


/* Machine-generated garbage-collector root tables for this file
   (GTY machinery); must be included last.  */

#include "gt-avr.h"
12342