xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/config/avr/avr.c (revision bdc22b2e01993381dcefeff2bc9b56ca75a4235c)
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2    Copyright (C) 1998-2015 Free Software Foundation, Inc.
3    Contributed by Denis Chertykov (chertykov@gmail.com)
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify
8    it under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3, or (at your option)
10    any later version.
11 
12    GCC is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15    GNU General Public License for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING3.  If not see
19    <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "tm.h"
26 #include "hard-reg-set.h"
27 #include "rtl.h"
28 #include "hash-set.h"
29 #include "symtab.h"
30 #include "inchash.h"
31 #include "tree.h"
32 #include "function.h"
33 #include "hash-map.h"
34 #include "plugin-api.h"
35 #include "ipa-ref.h"
36 #include "cgraph.h"
37 #include "regs.h"
38 #include "insn-config.h"
39 #include "conditions.h"
40 #include "insn-attr.h"
41 #include "insn-codes.h"
42 #include "flags.h"
43 #include "reload.h"
44 #include "hash-set.h"
45 #include "machmode.h"
46 #include "vec.h"
47 #include "double-int.h"
48 #include "input.h"
49 #include "alias.h"
50 #include "symtab.h"
51 #include "wide-int.h"
52 #include "inchash.h"
53 #include "tree.h"
54 #include "fold-const.h"
55 #include "varasm.h"
56 #include "print-tree.h"
57 #include "calls.h"
58 #include "stor-layout.h"
59 #include "stringpool.h"
60 #include "output.h"
61 #include "hashtab.h"
62 #include "function.h"
63 #include "statistics.h"
64 #include "real.h"
65 #include "fixed-value.h"
66 #include "expmed.h"
67 #include "dojump.h"
68 #include "explow.h"
69 #include "emit-rtl.h"
70 #include "stmt.h"
71 #include "expr.h"
72 #include "c-family/c-common.h"
73 #include "diagnostic-core.h"
74 #include "obstack.h"
75 #include "recog.h"
76 #include "optabs.h"
77 #include "ggc.h"
78 #include "langhooks.h"
79 #include "tm_p.h"
80 #include "target.h"
81 #include "target-def.h"
82 #include "params.h"
83 #include "dominance.h"
84 #include "cfg.h"
85 #include "cfgrtl.h"
86 #include "cfganal.h"
87 #include "lcm.h"
88 #include "cfgbuild.h"
89 #include "cfgcleanup.h"
90 #include "predict.h"
91 #include "basic-block.h"
92 #include "df.h"
93 #include "builtins.h"
94 #include "context.h"
95 #include "tree-pass.h"
96 
/* Maximal allowed offset for an address in the LD command.
   The LDD displacement field holds 0..63, so the last byte of an
   object of MODE must still fit below 64.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))

/* Return true if STR starts with PREFIX and false, otherwise.  */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))

/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags.  */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
112 
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.

   Fixed: the original body referenced a lower-case `sym' instead of the
   macro parameter SYM, silently capturing whatever variable of that name
   happened to be in scope at the expansion site.  Use the parameter.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM (same `sym' ->
   SYM fix as above).  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
125 
/* AVR_TINY cores lack ADIW/SBIW; emulate a 16-bit add of constant I to
   the register pair REG2:REG1 with SUBI/SBCI of the negated constant.  */
#define TINY_ADIW(REG1, REG2, I)                                \
    "subi " #REG1 ",lo8(-(" #I "))" CR_TAB                      \
    "sbci " #REG2 ",hi8(-(" #I "))"

/* Likewise, emulate a 16-bit subtract of constant I from REG2:REG1.  */
#define TINY_SBIW(REG1, REG2, I)                                \
    "subi " #REG1 ",lo8((" #I "))" CR_TAB                       \
    "sbci " #REG2 ",hi8((" #I "))"

/* Tmp and zero registers live at different hard regs on AVR_TINY.  */
#define AVR_TMP_REGNO (AVR_TINY ? TMP_REGNO_TINY : TMP_REGNO)
#define AVR_ZERO_REGNO (AVR_TINY ? ZERO_REGNO_TINY : ZERO_REGNO)
136 
/* Known address spaces.  The order must be the same as in the respective
   enum from avr.h (or designated initialized must be used).
   NOTE(review): field meanings are presumably { id, in-flash flag,
   pointer size in bytes, keyword name, flash segment, section name } --
   confirm against avr_addrspace_t in avr.h.  */
const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
{
  { ADDR_SPACE_RAM,  0, 2, "", 0, NULL },
  { ADDR_SPACE_FLASH,  1, 2, "__flash",   0, ".progmem.data" },
  { ADDR_SPACE_FLASH1, 1, 2, "__flash1",  1, ".progmem1.data" },
  { ADDR_SPACE_FLASH2, 1, 2, "__flash2",  2, ".progmem2.data" },
  { ADDR_SPACE_FLASH3, 1, 2, "__flash3",  3, ".progmem3.data" },
  { ADDR_SPACE_FLASH4, 1, 2, "__flash4",  4, ".progmem4.data" },
  { ADDR_SPACE_FLASH5, 1, 2, "__flash5",  5, ".progmem5.data" },
  { ADDR_SPACE_MEMX, 1, 3, "__memx",  0, ".progmemx.data" },
};
150 
151 
/* Holding RAM addresses of some SFRs used by the compiler and that
   are unique over all devices in an architecture like 'avr4'.
   Filled in by avr_option_override from avr_arch->sfr_offset.  */

typedef struct
{
  /* SREG: The processor status */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM */
  int rampz;

  /* SP: The stack pointer and its low and high byte */
  int sp_l;
  int sp_h;
} avr_addr_t;

/* The per-architecture SFR addresses for the selected device.  */
static avr_addr_t avr_addr;
175 
176 
/* Prototypes for local helper functions.  */

/* Output helpers for QI/HI/SI moves between registers and memory.  */
static const char* out_movqi_r_mr (rtx_insn *, rtx[], int*);
static const char* out_movhi_r_mr (rtx_insn *, rtx[], int*);
static const char* out_movsi_r_mr (rtx_insn *, rtx[], int*);
static const char* out_movqi_mr_r (rtx_insn *, rtx[], int*);
static const char* out_movhi_mr_r (rtx_insn *, rtx[], int*);
static const char* out_movsi_mr_r (rtx_insn *, rtx[], int*);

static int get_sequence_length (rtx_insn *insns);
static int sequent_regs_live (void);
static const char *ptrreg_to_str (int);
static const char *cond_string (enum rtx_code);
static int avr_num_arg_regs (machine_mode, const_tree);
static int avr_operand_rtx_cost (rtx, machine_mode, enum rtx_code,
                                 int, bool);
static void output_reload_in_const (rtx*, rtx, int*, bool);
static struct machine_function * avr_init_machine_status (void);


/* Prototypes for hook implementors if needed before their implementation.  */

static bool avr_rtx_costs (rtx, int, int, int, int*, bool);


/* Allocate registers from r25 to r8 for parameters for function calls.
   FIRST_CUM_REG is one past the first (highest) argument register.  */
#define FIRST_CUM_REG 26

/* Last call saved register */
#define LAST_CALLEE_SAVED_REG (AVR_TINY ? 19 : 17)
207 
/* GC-rooted RTX singletons, created once in avr_init_expanders.  */

/* Implicit target register of LPM instruction (R0) */
extern GTY(()) rtx lpm_reg_rtx;
rtx lpm_reg_rtx;

/* (Implicit) address register of LPM instruction (R31:R30 = Z) */
extern GTY(()) rtx lpm_addr_reg_rtx;
rtx lpm_addr_reg_rtx;

/* Temporary register RTX (reg:QI TMP_REGNO) */
extern GTY(()) rtx tmp_reg_rtx;
rtx tmp_reg_rtx;

/* Zeroed register RTX (reg:QI ZERO_REGNO) */
extern GTY(()) rtx zero_reg_rtx;
rtx zero_reg_rtx;

/* RTXs for all general purpose registers as QImode */
extern GTY(()) rtx all_regs_rtx[32];
rtx all_regs_rtx[32];

/* SREG, the processor status */
extern GTY(()) rtx sreg_rtx;
rtx sreg_rtx;

/* RAMP* special function registers */
extern GTY(()) rtx rampd_rtx;
extern GTY(()) rtx rampx_rtx;
extern GTY(()) rtx rampy_rtx;
extern GTY(()) rtx rampz_rtx;
rtx rampd_rtx;
rtx rampx_rtx;
rtx rampy_rtx;
rtx rampz_rtx;

/* RTX containing the strings "" and "e", respectively */
static GTY(()) rtx xstring_empty;
static GTY(()) rtx xstring_e;

/* Current architecture.  */
const avr_arch_t *avr_arch;

/* Section to put switch tables in.  */
static GTY(()) section *progmem_swtable_section;

/* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
   or to address space __flash* or __memx.  Only used as singletons inside
   avr_asm_select_section, but it must not be local there because of GTY.  */
static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];

/* Condition for insns/expanders from avr-dimode.md.  */
bool avr_have_dimode = true;

/* To track if code will use .bss and/or .data.  */
bool avr_need_clear_bss_p = false;
bool avr_need_copy_data_p = false;
263 
264 
265 /* Transform UP into lowercase and write the result to LO.
266    You must provide enough space for LO.  Return LO.  */
267 
268 static char*
269 avr_tolower (char *lo, const char *up)
270 {
271   char *lo0 = lo;
272 
273   for (; *up; up++, lo++)
274     *lo = TOLOWER (*up);
275 
276   *lo = '\0';
277 
278   return lo0;
279 }
280 
281 
/* Custom function to count number of set bits in VAL.  Uses
   Kernighan's method: each iteration clears the lowest set bit.  */

static inline int
avr_popcount (unsigned int val)
{
  int n_bits = 0;

  for (; val != 0; val &= val - 1)
    n_bits++;

  return n_bits;
}
297 
298 
/* Constraint helper function.  XVAL is a CONST_INT or a CONST_DOUBLE.
   Return true if the least significant N_BYTES bytes of XVAL all have a
   popcount in POP_MASK and false, otherwise.  POP_MASK represents a subset
   of integers which contains an integer N iff bit N of POP_MASK is set.  */

bool
avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
{
  int i;

  machine_mode mode = GET_MODE (xval);

  /* CONST_INTs carry VOIDmode; treat them as 32-bit values.  */
  if (VOIDmode == mode)
    mode = SImode;

  for (i = 0; i < n_bytes; i++)
    {
      /* Extract byte number I of the constant and mask it to 8 bits.  */
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Fail as soon as one byte's popcount is outside the set.  */
      if (0 == (pop_mask & (1 << avr_popcount (val8))))
        return false;
    }

  return true;
}
325 
326 
327 /* Access some RTX as INT_MODE.  If X is a CONST_FIXED we can get
328    the bit representation of X by "casting" it to CONST_INT.  */
329 
330 rtx
331 avr_to_int_mode (rtx x)
332 {
333   machine_mode mode = GET_MODE (x);
334 
335   return VOIDmode == mode
336     ? x
337     : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
338 }
339 
340 
/* Pass descriptor for the note-recomputing RTL pass below.  The empty
   name is overwritten by the avr_pass_recompute_notes constructor.  */

static const pass_data avr_pass_data_recompute_notes =
{
  RTL_PASS,      // type
  "",            // name (will be patched)
  OPTGROUP_NONE, // optinfo_flags
  TV_DF_SCAN,    // tv_id
  0,             // properties_required
  0,             // properties_provided
  0,             // properties_destroyed
  0,             // todo_flags_start
  TODO_df_finish | TODO_df_verify // todo_flags_finish
};
353 
354 
/* RTL pass that re-runs dataflow analysis with the note problem added,
   refreshing insn notes such as REG_DEAD.  Registered from
   avr_register_passes; see the rationale there.  */

class avr_pass_recompute_notes : public rtl_opt_pass
{
public:
  /* CTXT is the compiler context; NAME replaces the empty pass name
     from avr_pass_data_recompute_notes.  */
  avr_pass_recompute_notes (gcc::context *ctxt, const char *name)
    : rtl_opt_pass (avr_pass_data_recompute_notes, ctxt)
  {
    this->name = name;
  }

  virtual unsigned int execute (function*)
  {
    /* Request the DF note problem and recompute dataflow.  */
    df_note_add_problem ();
    df_analyze ();

    return 0;
  }
}; // avr_pass_recompute_notes
372 
373 
/* Register all avr-specific passes with the pass manager.  Called once
   from avr_option_override.  */

static void
avr_register_passes (void)
{
  /* This avr-specific pass (re)computes insn notes, in particular REG_DEAD
     notes which are used by `avr.c::reg_unused_after' and branch offset
     computations.  These notes must be correct, i.e. there must be no
     dangling REG_DEAD notes; otherwise wrong code might result, cf. PR64331.

     DF needs (correct) CFG, hence right before free_cfg is the last
     opportunity to rectify notes.  */

  register_pass (new avr_pass_recompute_notes (g, "avr-notes-free-cfg"),
                 PASS_POS_INSERT_BEFORE, "*free_cfg", 1);
}
388 
389 
/* Set `avr_arch' (and default `avr_n_flash') as specified by `-mmcu='.
   Return true on success; emit an error and return false if the MCU
   name is unknown.  */

static bool
avr_set_core_architecture (void)
{
  /* Search for mcu core architecture.  */

  if (!avr_mmcu)
    avr_mmcu = AVR_MMCU_DEFAULT;

  /* Fall back to the first entry until a match is found.  */
  avr_arch = &avr_arch_types[0];

  for (const avr_mcu_t *mcu = avr_mcu_types; ; mcu++)
    {
      if (NULL == mcu->name)
        {
          /* Reached the end of `avr_mcu_types'.  This should actually never
             happen as options are provided by device-specs.  It could be a
             typo in a device-specs or calling the compiler proper directly
             with -mmcu=<device>. */

          error ("unknown core architecture %qs specified with %qs",
                 avr_mmcu, "-mmcu=");
          avr_inform_core_architectures ();
          break;
        }
      else if (0 == strcmp (mcu->name, avr_mmcu)
               // Is this a proper architecture ?
               && NULL == mcu->macro)
        {
          avr_arch = &avr_arch_types[mcu->arch_id];
          /* Only take the table's flash-segment count if the user did
             not specify one on the command line.  */
          if (avr_n_flash < 0)
            avr_n_flash = mcu->n_flash;

          return true;
        }
    }

  return false;
}
431 
432 
/* Implement `TARGET_OPTION_OVERRIDE'.  */
/* Validate and adjust command-line options, resolve the MCU/architecture,
   cache per-architecture SFR addresses and register avr-specific passes.  */

static void
avr_option_override (void)
{
  /* Disable -fdelete-null-pointer-checks option for AVR target.
     With this option the compiler assumes that dereferencing of a null
     pointer would halt the program.  For AVR this assumption is not true
     and programs can safely dereference null pointers.  Changes made by
     this option may not work properly for AVR.  So disable this option. */

  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries to save/restore them around calls
     in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no more able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  if (avr_strict_X)
    flag_caller_saves = 0;

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  /* Position-independent code is not supported on AVR; warn, do not error. */

  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  if (flag_pie == 1)
    warning (OPT_fpie, "-fpie is not supported");
  if (flag_pie == 2)
    warning (OPT_fPIE, "-fPIE is not supported");

  /* Bail out early on an unknown -mmcu=; an error was already issued.  */
  if (!avr_set_core_architecture())
    return;

  /* RAM addresses of some SFRs common to all devices in respective arch. */

  /* SREG: Status Register containing flags like I (global IRQ) */
  avr_addr.sreg = 0x3F + avr_arch->sfr_offset;

  /* RAMPZ: Address' high part when loading via ELPM */
  avr_addr.rampz = 0x3B + avr_arch->sfr_offset;

  avr_addr.rampy = 0x3A + avr_arch->sfr_offset;
  avr_addr.rampx = 0x39 + avr_arch->sfr_offset;
  avr_addr.rampd = 0x38 + avr_arch->sfr_offset;
  avr_addr.ccp = (AVR_TINY ? 0x3C : 0x34) + avr_arch->sfr_offset;

  /* SP: Stack Pointer (SP_H:SP_L) */
  avr_addr.sp_l = 0x3D + avr_arch->sfr_offset;
  avr_addr.sp_h = avr_addr.sp_l + 1;

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();

  /* Register some avr-specific pass(es).  There is no canonical place for
     pass registration.  This function is convenient.  */

  avr_register_passes ();
}
506 
/* Function to set up the backend function structure.  Allocates a
   zero-initialized, garbage-collected machine_function; installed as
   init_machine_status in avr_option_override.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}
514 
515 
/* Implement `INIT_EXPANDERS'.  */
/* The function works like a singleton: it creates the GC-rooted RTX
   objects (register and SFR-memory RTXs) used throughout this file.  */

void
avr_init_expanders (void)
{
  int regno;

  for (regno = 0; regno < 32; regno ++)
    all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);

  lpm_reg_rtx  = all_regs_rtx[LPM_REGNO];
  tmp_reg_rtx  = all_regs_rtx[AVR_TMP_REGNO];
  zero_reg_rtx = all_regs_rtx[AVR_ZERO_REGNO];

  lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);

  /* The SFRs are accessed as MEMs at their RAM addresses computed in
     avr_option_override.  */
  sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
  rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
  rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
  rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
  rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));

  xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
  xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");

  /* TINY core does not have regs r10-r16, but avr-dimode.md expects them
     to be present */
  if (AVR_TINY)
    avr_have_dimode = false;
}
547 
548 
/* Implement `REGNO_REG_CLASS'.  */
/* Return register class for register R.  The table below has one entry
   per hard register r0..r31 plus the two stack pointer bytes (regno 32
   and 33); anything beyond that falls back to ALL_REGS.  */

enum reg_class
avr_regno_reg_class (int r)
{
  static const enum reg_class reg_class_tab[] =
    {
      R0_REG,
      /* r1 - r15 */
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      /* r16 - r23 */
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      /* r24, r25 */
      ADDW_REGS, ADDW_REGS,
      /* X: r26, 27 */
      POINTER_X_REGS, POINTER_X_REGS,
      /* Y: r28, r29 */
      POINTER_Y_REGS, POINTER_Y_REGS,
      /* Z: r30, r31 */
      POINTER_Z_REGS, POINTER_Z_REGS,
      /* SP: SPL, SPH */
      STACK_REG, STACK_REG
    };

  if (r <= 33)
    return reg_class_tab[r];

  return ALL_REGS;
}
583 
584 
585 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'.  */
586 
587 static bool
588 avr_scalar_mode_supported_p (machine_mode mode)
589 {
590   if (ALL_FIXED_POINT_MODE_P (mode))
591     return true;
592 
593   if (PSImode == mode)
594     return true;
595 
596   return default_scalar_mode_supported_p (mode);
597 }
598 
599 
600 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise.  */
601 
602 static bool
603 avr_decl_flash_p (tree decl)
604 {
605   if (TREE_CODE (decl) != VAR_DECL
606       || TREE_TYPE (decl) == error_mark_node)
607     {
608       return false;
609     }
610 
611   return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
612 }
613 
614 
615 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
616    address space and FALSE, otherwise.  */
617 
618 static bool
619 avr_decl_memx_p (tree decl)
620 {
621   if (TREE_CODE (decl) != VAR_DECL
622       || TREE_TYPE (decl) == error_mark_node)
623     {
624       return false;
625     }
626 
627   return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
628 }
629 
630 
/* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise.
   Any non-generic address space on a MEM means flash here.  */

bool
avr_mem_flash_p (rtx x)
{
  return (MEM_P (x)
          && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
}
639 
640 
/* Return TRUE if X is a MEM rtx located in the 24-bit flash
   address space (__memx) and FALSE, otherwise.  */

bool
avr_mem_memx_p (rtx x)
{
  return (MEM_P (x)
          && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
}
650 
651 
/* A helper for the subsequent function attribute used to dig for
   attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE.
   Looks at the decl's own attributes first, then at the attributes of
   its (function or method) type.  Returns nonzero iff found.  */

static inline int
avr_lookup_function_attribute1 (const_tree func, const char *name)
{
  if (FUNCTION_DECL == TREE_CODE (func))
    {
      if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
        {
          return true;
        }

      /* Not on the decl -- fall through to the decl's type.  */
      func = TREE_TYPE (func);
    }

  gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
              || TREE_CODE (func) == METHOD_TYPE);

  return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
}
673 
/* Return nonzero if FUNC is a naked function, i.e. carries the
   "naked" attribute on its decl or type.  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}
681 
/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute on its decl or type.  */

static int
avr_interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}
690 
/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute on its decl or type.  */

static int
avr_signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}
699 
/* Return nonzero if FUNC is an OS_task function ("OS_task" attribute).  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}
707 
/* Return nonzero if FUNC is an OS_main function ("OS_main" attribute).  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}
715 
716 
/* Implement `TARGET_SET_CURRENT_FUNCTION'.  */
/* Sanity checking for the above function attributes: record them in
   cfun->machine and diagnose conflicting or suspicious combinations.
   Runs at most once per function (attributes_checked_p).  */

static void
avr_set_current_function (tree decl)
{
  location_t loc;
  const char *isr;

  /* Nothing to do without a real, machine-annotated function, and do it
     only once per function.  */
  if (decl == NULL_TREE
      || current_function_decl == NULL_TREE
      || current_function_decl == error_mark_node
      || ! cfun->machine
      || cfun->machine->attributes_checked_p)
    return;

  loc = DECL_SOURCE_LOCATION (decl);

  cfun->machine->is_naked = avr_naked_function_p (decl);
  cfun->machine->is_signal = avr_signal_function_p (decl);
  cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (decl);

  isr = cfun->machine->is_interrupt ? "interrupt" : "signal";

  /* Too many attributes make no sense as they request conflicting features. */

  if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
      + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
    error_at (loc, "function attributes %qs, %qs and %qs are mutually"
               " exclusive", "OS_task", "OS_main", isr);

  /* 'naked' will hide effects of 'OS_task' and 'OS_main'.  */

  if (cfun->machine->is_naked
      && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
                " no effect on %qs function", "OS_task", "OS_main", "naked");

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
      tree ret = TREE_TYPE (TREE_TYPE (decl));
      const char *name;

      name = DECL_ASSEMBLER_NAME_SET_P (decl)
        ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
        : IDENTIFIER_POINTER (DECL_NAME (decl));

      /* Skip a leading '*' that might still prefix the assembler name,
         e.g. in non-LTO runs.  */

      name = default_strip_name_encoding (name);

      /* Interrupt handlers must be  void __vector (void)  functions.  */

      if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
        error_at (loc, "%qs function cannot have arguments", isr);

      if (TREE_CODE (ret) != VOID_TYPE)
        error_at (loc, "%qs function cannot return a value", isr);

#if defined WITH_AVRLIBC
      /* Silently ignore 'signal' if 'interrupt' is present.  AVR-LibC started
         using this when it switched from SIGNAL and INTERRUPT to ISR.  */

      if (cfun->machine->is_interrupt)
        cfun->machine->is_signal = 0;

      /* If the function has the 'signal' or 'interrupt' attribute, ensure
         that the name of the function is "__vector_NN" so as to catch
         when the user misspells the vector name.  */

      if (!STR_PREFIX_P (name, "__vector"))
        warning_at (loc, OPT_Wmisspelled_isr, "%qs appears to be a misspelled "
                    "%qs handler, missing %<__vector%> prefix", name, isr);
#endif // AVR-LibC naming conventions
    }

#if defined WITH_AVRLIBC
  // Common problem is using "ISR" without first including avr/interrupt.h.
  const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
  name = default_strip_name_encoding (name);
  if (0 == strcmp ("ISR", name)
      || 0 == strcmp ("INTERRUPT", name)
      || 0 == strcmp ("SIGNAL", name))
    {
      warning_at (loc, OPT_Wmisspelled_isr, "%qs is a reserved identifier"
                  " in AVR-LibC.  Consider %<#include <avr/interrupt.h>%>"
                  " before using the %qs macro", name, name);
    }
#endif // AVR-LibC naming conventions

  /* Don't print the above diagnostics more than once.  */

  cfun->machine->attributes_checked_p = 1;
}
815 
816 
/* Implement `ACCUMULATE_OUTGOING_ARGS'.  */
/* Outside of a function context, just report the target default.
   Inside a function, never accumulate when setjmp or nonlocal labels
   are involved (see FIXME below).  */

int
avr_accumulate_outgoing_args (void)
{
  if (!cfun)
    return TARGET_ACCUMULATE_OUTGOING_ARGS;

  /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
        what offset is correct.  In some cases it is relative to
        virtual_outgoing_args_rtx and in others it is relative to
        virtual_stack_vars_rtx.  For example code see
            gcc.c-torture/execute/built-in-setjmp.c
            gcc.c-torture/execute/builtins/sprintf-chk.c   */

  return (TARGET_ACCUMULATE_OUTGOING_ARGS
          && !(cfun->calls_setjmp
               || cfun->has_nonlocal_label));
}
836 
837 
/* Report contribution of accumulated outgoing arguments to stack size.
   Zero when outgoing arguments are pushed instead of accumulated.  */

static inline int
avr_outgoing_args_size (void)
{
  return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
}
845 
846 
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  The extra 1 skips the
   byte the frame pointer itself points at.  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
856 
857 
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.
   SET may be NULL if only the count is wanted.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */

  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */

      if (fixed_regs[reg])
        continue;

      /* Save a register if it is (a) call-used in a non-leaf ISR, or
         (b) live across calls and either callee-saved or in an ISR --
         except the frame pointer pair, handled in the prologue.  */
      if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* Don't record frame pointer registers here.  They are treated
                 individually in prologue.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
902 
903 
/* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS'.  */
/* Naked functions get no compiler-generated argument slots: their
   prologue/epilogue is entirely user-written assembly.  */

static bool
avr_allocate_stack_slots_for_args (void)
{
  return !cfun->machine->is_naked;
}
911 
912 
913 /* Return true if register FROM can be eliminated via register TO.  */
914 
915 static bool
916 avr_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
917 {
918   return ((frame_pointer_needed && to == FRAME_POINTER_REGNUM)
919           || !frame_pointer_needed);
920 }
921 
922 
/* Implement `TARGET_WARN_FUNC_RETURN'.  */
/* Return true if missing-return warnings should be issued for DECL.  */

static bool
avr_warn_func_return (tree decl)
{
  /* Naked functions are implemented entirely in assembly, including the
     return sequence, so suppress warnings about this.  */

  return !avr_naked_function_p (decl);
}
933 
/* Compute offset between arg_pointer and frame_pointer.
   FROM/TO are the registers being eliminated.  The offset accounts for
   the frame size, accumulated outgoing args, the saved return address
   (2 or 3 bytes of PC), the saved frame pointer (2 bytes if needed),
   the pushed call-saved registers, and 1 byte of bias.  */

int
avr_initial_elimination_offset (int from, int to)
{
  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;
  else
    {
      /* 2 bytes for the saved frame pointer, when it is pushed.  */
      int offset = frame_pointer_needed ? 2 : 0;
      /* Return address size: 3 bytes on devices with EIJMP/EICALL.  */
      int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;

      offset += avr_regs_to_save (NULL);
      return (get_frame_size () + avr_outgoing_args_size()
              + avr_pc_size + 1 + offset);
    }
}
951 
952 
/* Helper for the function below.  Build a fixed-point type node for
   machine mode MODE with saturation as per SAT_P and byte alignment,
   storing it in *NODE.  */

static void
avr_adjust_type_node (tree *node, machine_mode mode, int sat_p)
{
  *node = make_node (FIXED_POINT_TYPE);
  TYPE_SATURATING (*node) = sat_p;
  TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
  TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
  TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
  TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
  /* Everything on AVR is byte-aligned.  */
  TYPE_ALIGN (*node) = 8;
  SET_TYPE_MODE (*node, mode);

  layout_type (*node);
}
969 
970 
/* Implement `TARGET_BUILD_BUILTIN_VA_LIST'.  */
/* Abused here as an early hook to repair the long long accum type nodes
   (see comment below) before delegating to the default va_list builder.  */

static tree
avr_build_builtin_va_list (void)
{
  /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
     This is more appropriate for the 8-bit machine AVR than 128-bit modes.
     The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
     which is auto-generated by genmodes, but the compiler assigns [U]DAmode
     to the long long accum modes instead of the desired [U]TAmode.

     Fix this now, right after node setup in tree.c:build_common_tree_nodes().
     This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
     which built-in defines macros like __ULLACCUM_FBIT__ that are used by
     libgcc to detect IBIT and FBIT.  */

  avr_adjust_type_node (&ta_type_node, TAmode, 0);
  avr_adjust_type_node (&uta_type_node, UTAmode, 0);
  avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
  avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);

  unsigned_long_long_accum_type_node = uta_type_node;
  long_long_accum_type_node = ta_type_node;
  sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
  sat_long_long_accum_type_node = sat_ta_type_node;

  /* Dispatch to the default handler.  */

  return std_build_builtin_va_list ();
}
1001 
1002 
1003 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'.  */
1004 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
1005    frame pointer by +STARTING_FRAME_OFFSET.
1006    Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
1007    avoids creating add/sub of offset in nonlocal goto and setjmp.  */
1008 
1009 static rtx
1010 avr_builtin_setjmp_frame_value (void)
1011 {
1012   rtx xval = gen_reg_rtx (Pmode);
1013   emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
1014                          gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
1015   return xval;
1016 }
1017 
1018 
1019 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
1020    This is return address of function.  */
1021 
rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address. Others not supported.  */
  if (count)
     return NULL;

  /* .L__stack_usage is emitted by avr_asm_function_end_prologue; the
     offset +1 (+2 for 3-byte PC) skips past the post-decrement bias.  */
  if (AVR_3_BYTE_PC)
    {
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "%<builtin_return_address%> contains only 2 bytes"
               " of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  /* TEM is the frame base supplied by the caller; the return address
     lives at TEM + .L__stack_usage + offset.  */
  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  /* Rotate by 8 swaps the two bytes of the HImode value -- presumably
     because the return address is stored byte-swapped on the stack.  */
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return  r;
}
1045 
1046 /* Return 1 if the function epilogue is just a single "ret".  */
1047 
1048 int
1049 avr_simple_epilogue (void)
1050 {
1051   return (! frame_pointer_needed
1052           && get_frame_size () == 0
1053           && avr_outgoing_args_size() == 0
1054           && avr_regs_to_save (NULL) == 0
1055           && ! cfun->machine->is_interrupt
1056           && ! cfun->machine->is_signal
1057           && ! cfun->machine->is_naked
1058           && ! TREE_THIS_VOLATILE (current_function_decl));
1059 }
1060 
1061 /* This function checks sequence of live registers.  */
1062 
static int
sequent_regs_live (void)
{
  int reg;
  /* Total count of live call-saved registers seen so far.  */
  int live_seq = 0;
  /* Length of the current run of consecutively live registers; reset
     to 0 whenever a dead register interrupts the run.  */
  int cur_seq = 0;

  for (reg = 0; reg <= LAST_CALLEE_SAVED_REG; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;
        }
    }

  /* r28/r29 (REG_Y) participate in the sequence, too:  either count
     them if actually live, or account for them unconditionally when
     they will be used as frame pointer.  */
  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      cur_seq += 2;
      live_seq += 2;
    }

  /* cur_seq == live_seq holds exactly when all live registers form one
     contiguous run ending at the top; only then is the sequence usable
     by the prologue_saves/epilogue_restores library routines.  */
  return (cur_seq == live_seq) ? live_seq : 0;
}
1120 
1121 /* Obtain the length sequence of insns.  */
1122 
1123 int
1124 get_sequence_length (rtx_insn *insns)
1125 {
1126   rtx_insn *insn;
1127   int length;
1128 
1129   for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
1130     length += get_attr_length (insn);
1131 
1132   return length;
1133 }
1134 
1135 
1136 /*  Implement `INCOMING_RETURN_ADDR_RTX'.  */
1137 
rtx
avr_incoming_return_addr_rtx (void)
{
  /* The return address is at the top of the stack.  Note that the push
     was via post-decrement, which means the actual address is off by one.
     Hence the +1 bias below; the address is read as a 2-byte (HImode)
     value.  */
  return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
}
1145 
1146 /*  Helper for expand_prologue.  Emit a push of a byte register.  */
1147 
static void
emit_push_byte (unsigned regno, bool frame_related_p)
{
  rtx mem, reg;
  rtx_insn *insn;

  /* PUSH uses post-decrement addressing on the stack pointer.  */
  mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  /* Track stack depth for .L__stack_usage / -fstack-usage.  */
  cfun->machine->stack_usage++;
}
1164 
1165 
1166 /*  Helper for expand_prologue.  Emit a push of a SFR via tmp_reg.
1167     SFR is a MEM representing the memory location of the SFR.
1168     If CLR_P then clear the SFR after the push using zero_reg.  */
1169 
static void
emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
{
  rtx_insn *insn;

  /* SFRs are memory-mapped; we can only push them by first loading
     them into the temporary register.  */
  gcc_assert (MEM_P (sfr));

  /* IN __tmp_reg__, IO(SFR) */
  insn = emit_move_insn (tmp_reg_rtx, sfr);
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  /* PUSH __tmp_reg__ */
  emit_push_byte (AVR_TMP_REGNO, frame_related_p);

  if (clr_p)
    {
      /* OUT IO(SFR), __zero_reg__ */
      insn = emit_move_insn (sfr, const0_rtx);
      if (frame_related_p)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
}
1193 
/* Helper for avr_expand_prologue:  Set up the frame of the current
   function.  SIZE is the frame size in bytes, SET is the set of hard
   registers that have to be saved.  Depending on -mcall-prologues and
   the shape of the function, either emit a call to the prologue_saves
   library routine, or push the registers one by one and adjust the
   stack pointer explicitly.  */

static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx_insn *insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  /* The largest frame we can address in one adjustment:  SP is only
     8 bits wide on devices with AVR_HAVE_8BIT_SP.  */
  HOST_WIDE_INT size_max
    = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);

  /* -mcall-prologues is only usable when the registers to save form one
     contiguous sequence (live_seq != 0) and nothing about the function
     (ISR, OS_task/OS_main, avrtiny) rules the library routine out.  */
  bool minimize = (TARGET_CALL_PROLOGUES
                   && size < size_max
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main
                   && !AVR_TINY);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      /* The prologue_saves routine expects the frame size in X.  */
      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET (VOIDmode, (frame_pointer_needed
                                            ? frame_pointer_rtx
                                            : stack_pointer_rtx),
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = (LAST_CALLEE_SAVED_REG + 1) - (live_seq - 2);

      /* Emit one REG_CFA_OFFSET note per saved register, walking from
         r29 down through r28 and then the low callee-saved registers.  */
      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? LAST_CALLEE_SAVED_REG : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
                                                  offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      int reg;

      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /*  Creating a frame can be done by direct manipulation of the
              stack or via the frame pointer. These two methods are:
                  fp =  sp
                  fp -= size
                  sp =  fp
              or
                  sp -= size
                  fp =  sp    (*)
              the optimum method depends on function type, stack and
              frame size.  To avoid a complex logic, both methods are
              tested and shortest is selected.

              There is also the case where SIZE != 0 and no frame pointer is
              needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
              In that case, insn (*) is not needed.
              We use the X register as scratch.  This is safe because X
              is call-clobbered.
                 In an interrupt routine, the case of SIZE != 0 together with
              !frame_pointer_needed can only occur if the function is not a
              leaf function and thus X has already been saved.  */

          int irq_state = -1;
          HOST_WIDE_INT size_cfa = size, neg_size;
          rtx_insn *fp_plus_insns;
          rtx fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !crtl->is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /* Cut down size and avoid size = 0 so that we don't run
             into ICE like PR52488 in the remainder.  */

          if (size > size_max)
            {
              /* Don't error so that insane code from newlib still compiles
                 and does not break building newlib.  As PR51345 is implemented
                 now, there are multilib variants with -msp8.

                 If user wants sanity checks he can use -Wstack-usage=
                 or similar options.

                 For CFA we emit the original, non-saturated size so that
                 the generic machinery is aware of the real stack usage and
                 will print the above diagnostic as expected.  */

              size = size_max;
            }

          size = trunc_int_for_mode (size, GET_MODE (my_fp));
          neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
                                                       my_fp, neg_size));

          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp,
                                         plus_constant (Pmode, fp,
                                                        -size_cfa)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
            {
              rtx_insn *sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (Pmode, stack_pointer_rtx,
                                                    -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************ Use shortest method  ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          cfun->machine->stack_usage += size_cfa;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
1462 
1463 
/*  Expand the function prologue as RTL.  */
1465 
void
avr_expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  size = get_frame_size() + avr_outgoing_args_size();

  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    {
      return;
    }

  avr_regs_to_save (&set);

  /* ISRs get extra setup before the ordinary frame is built:  interrupts
     on (for "interrupt" only), fixed registers and SREG saved, and any
     RAMP special function registers pushed and cleared.  */
  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (AVR_ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (AVR_TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_push_sfr (sreg_rtx, false, false /* clr */);

      /* Clear zero reg.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);

      /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.  */

      if (AVR_HAVE_RAMPD)
        emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
        }
    }  /* is_interrupt is_signal */

  /* Save registers and set up the frame (or call prologue_saves).  */
  avr_prologue_setup_frame (size, set);

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
}
1540 
1541 
1542 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'.  */
1543 /* Output summary at end of function prologue.  */
1544 
1545 static void
1546 avr_asm_function_end_prologue (FILE *file)
1547 {
1548   if (cfun->machine->is_naked)
1549     {
1550       fputs ("/* prologue: naked */\n", file);
1551     }
1552   else
1553     {
1554       if (cfun->machine->is_interrupt)
1555         {
1556           fputs ("/* prologue: Interrupt */\n", file);
1557         }
1558       else if (cfun->machine->is_signal)
1559         {
1560           fputs ("/* prologue: Signal */\n", file);
1561         }
1562       else
1563         fputs ("/* prologue: function */\n", file);
1564     }
1565 
1566   if (ACCUMULATE_OUTGOING_ARGS)
1567     fprintf (file, "/* outgoing args size = %d */\n",
1568              avr_outgoing_args_size());
1569 
1570   fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1571                  get_frame_size());
1572   fprintf (file, "/* stack size = %d */\n",
1573                  cfun->machine->stack_usage);
1574   /* Create symbol stack offset here so all functions have it. Add 1 to stack
1575      usage for offset so that SP + .L__stack_offset = return address.  */
1576   fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1577 }
1578 
1579 
1580 /* Implement `EPILOGUE_USES'.  */
1581 
1582 int
1583 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1584 {
1585   if (reload_completed
1586       && cfun->machine
1587       && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1588     return 1;
1589   return 0;
1590 }
1591 
1592 /*  Helper for avr_expand_epilogue.  Emit a pop of a byte register.  */
1593 
static void
emit_pop_byte (unsigned regno)
{
  rtx mem, reg;

  /* POP uses pre-increment addressing on the stack pointer -- the
     mirror image of the post-decrement push in emit_push_byte.  */
  mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
}
1605 
1606 /*  Output RTL epilogue.  */
1607 
void
avr_expand_epilogue (bool sibcall_p)
{
  int reg;
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  size = get_frame_size() + avr_outgoing_args_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();

  /* Mirror of the -mcall-prologues decision in avr_prologue_setup_frame:
     use the epilogue_restores library routine when applicable.  */
  minimize = (TARGET_CALL_PROLOGUES
              && live_seq
              && !isr_p
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main
              && !AVR_TINY);

  if (minimize
      && (live_seq > 4
          || frame_pointer_needed
          || size))
    {
      /*  Get rid of frame.  */

      if (!frame_pointer_needed)
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      if (size)
        {
          emit_move_insn (frame_pointer_rtx,
                          plus_constant (Pmode, frame_pointer_rtx, size));
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (size)
    {
      /* Try two methods to adjust stack and select shortest.  */

      int irq_state = -1;
      rtx fp, my_fp;
      rtx_insn *fp_plus_insns;
      HOST_WIDE_INT size_max;

      gcc_assert (frame_pointer_needed
                  || !isr_p
                  || !crtl->is_leaf);

      fp = my_fp = (frame_pointer_needed
                    ? frame_pointer_rtx
                    : gen_rtx_REG (Pmode, REG_X));

      if (AVR_HAVE_8BIT_SP)
        {
          /* The high byte (r29) does not change:
             Prefer SUBI (1 cycle) over SBIW (2 cycles).  */

          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
        }

      /* For rationale see comment in prologue generation.  */

      size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
      if (size > size_max)
        size = size_max;
      size = trunc_int_for_mode (size, GET_MODE (my_fp));

      /********** Method 1: Adjust fp register  **********/

      start_sequence ();

      if (!frame_pointer_needed)
        emit_move_insn (fp, stack_pointer_rtx);

      emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));

      /* Copy to stack pointer.  */

      if (TARGET_NO_INTERRUPTS)
        irq_state = 0;

      if (AVR_HAVE_8BIT_SP)
        irq_state = 2;

      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
                                 GEN_INT (irq_state)));

      fp_plus_insns = get_insns ();
      end_sequence ();

      /********** Method 2: Adjust Stack pointer  **********/

      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
        {
          rtx_insn *sp_plus_insns;

          start_sequence ();

          emit_move_insn (stack_pointer_rtx,
                          plus_constant (Pmode, stack_pointer_rtx, size));

          sp_plus_insns = get_insns ();
          end_sequence ();

          /************ Use shortest method  ************/

          emit_insn (get_sequence_length (sp_plus_insns)
                     < get_sequence_length (fp_plus_insns)
                     ? sp_plus_insns
                     : fp_plus_insns);
        }
      else
        emit_insn (fp_plus_insns);
    } /* size != 0 */

  if (frame_pointer_needed
      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    {
      /* Restore previous frame_pointer.  See avr_expand_prologue for
         rationale for not using pophi.  */

      emit_pop_byte (REG_Y + 1);
      emit_pop_byte (REG_Y);
    }

  /* Restore used registers.  */

  for (reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (isr_p)
    {
      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
         The conditions to restore them must be the same as in prologue.
         NOTE(review): these pops use TMP_REGNO while the prologue and
         the SREG restore below use AVR_TMP_REGNO -- confirm the two
         macros agree on all devices that have RAMP registers.  */

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampz_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampy_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampx_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPD)
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampd_rtx, tmp_reg_rtx);
        }

      /* Restore SREG using tmp_reg as scratch.  */

      emit_pop_byte (AVR_TMP_REGNO);
      emit_move_insn (sreg_rtx, tmp_reg_rtx);

      /* Restore tmp REG.  */
      emit_pop_byte (AVR_TMP_REGNO);

      /* Restore zero REG.  */
      emit_pop_byte (AVR_ZERO_REGNO);
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}
1808 
1809 
1810 /* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */
1811 
1812 static void
1813 avr_asm_function_begin_epilogue (FILE *file)
1814 {
1815   fprintf (file, "/* epilogue start */\n");
1816 }
1817 
1818 
/* Implement `TARGET_CANNOT_MODIFY_JUMPS_P'.  */
1820 
1821 static bool
1822 avr_cannot_modify_jumps_p (void)
1823 {
1824 
1825   /* Naked Functions must not have any instructions after
1826      their epilogue, see PR42240 */
1827 
1828   if (reload_completed
1829       && cfun->machine
1830       && cfun->machine->is_naked)
1831     {
1832       return true;
1833     }
1834 
1835   return false;
1836 }
1837 
1838 
1839 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'.  */
1840 
1841 static bool
1842 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
1843 {
1844   /* FIXME:  Non-generic addresses are not mode-dependent in themselves.
1845        This hook just serves to hack around PR rtl-optimization/52543 by
1846        claiming that non-generic addresses were mode-dependent so that
1847        lower-subreg.c will skip these addresses.  lower-subreg.c sets up fake
1848        RTXes to probe SET and MEM costs and assumes that MEM is always in the
1849        generic address space which is not true.  */
1850 
1851   return !ADDR_SPACE_GENERIC_P (as);
1852 }
1853 
1854 
1855 /* Helper function for `avr_legitimate_address_p'.  */
1856 
1857 static inline bool
1858 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1859                        RTX_CODE outer_code, bool strict)
1860 {
1861   return (REG_P (reg)
1862           && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1863                                                  as, outer_code, UNKNOWN)
1864               || (!strict
1865                   && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1866 }
1867 
1868 
1869 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1870    machine for a memory operand of mode MODE.  */
1871 
static bool
avr_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
  /* Constant addresses (symbols, labels, plain integers) are accepted
     by default; the AVR_TINY check near the end restricts them.  */
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      /* Multi-byte accesses through X cannot use displacement addressing,
         so reject wide modes based on X when checking strictly.  */
      if (strict
          && GET_MODE_SIZE (mode) > 4
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        /* Base register plus non-negative constant displacement.  */
        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            /* The displacement must fit the LDD/STD offset range for
               this mode.  */
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                /* Frame/arg pointer based addresses are always fine:
                   reload can fix them up later.  */
                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  if (AVR_TINY
      && CONSTANT_ADDRESS_P (x))
    {
      /* avrtiny's load / store instructions only cover addresses 0..0xbf:
         IN / OUT range is 0..0x3f and LDS / STS can access 0x40..0xbf.  */

      ok = (CONST_INT_P (x)
            && IN_RANGE (INTVAL (x), 0, 0xc0 - GET_MODE_SIZE (mode)));
    }

  /* Optional debug dump controlled by -mlog=legitimate_address_p.  */
  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
1965 
1966 
1967 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1968    now only a helper for avr_addr_space_legitimize_address.  */
1969 /* Attempts to replace X with a valid
1970    memory address for an operand of mode MODE  */
1971 
1972 static rtx
1973 avr_legitimize_address (rtx x, rtx oldx, machine_mode mode)
1974 {
1975   bool big_offset_p = false;
1976 
1977   x = oldx;
1978 
1979   if (GET_CODE (oldx) == PLUS
1980       && REG_P (XEXP (oldx, 0)))
1981     {
1982       if (REG_P (XEXP (oldx, 1)))
1983         x = force_reg (GET_MODE (oldx), oldx);
1984       else if (CONST_INT_P (XEXP (oldx, 1)))
1985         {
1986           int offs = INTVAL (XEXP (oldx, 1));
1987           if (frame_pointer_rtx != XEXP (oldx, 0)
1988               && offs > MAX_LD_OFFSET (mode))
1989             {
1990               big_offset_p = true;
1991               x = force_reg (GET_MODE (oldx), oldx);
1992             }
1993         }
1994     }
1995 
1996   if (avr_log.legitimize_address)
1997     {
1998       avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1999 
2000       if (x != oldx)
2001         avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
2002     }
2003 
2004   return x;
2005 }
2006 
2007 
2008 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'.  */
2009 /* This will allow register R26/27 to be used where it is no worse than normal
2010    base pointers R28/29 or R30/31.  For example, if base offset is greater
2011    than 63 bytes or for R++ or --R addressing.  */
2012 
2013 rtx
2014 avr_legitimize_reload_address (rtx *px, machine_mode mode,
2015                                int opnum, int type, int addr_type,
2016                                int ind_levels ATTRIBUTE_UNUSED,
2017                                rtx (*mk_memloc)(rtx,int))
2018 {
2019   rtx x = *px;
2020 
2021   if (avr_log.legitimize_reload_address)
2022     avr_edump ("\n%?:%m %r\n", mode, x);
2023 
2024   if (1 && (GET_CODE (x) == POST_INC
2025             || GET_CODE (x) == PRE_DEC))
2026     {
2027       push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
2028                    POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
2029                    opnum, RELOAD_OTHER);
2030 
2031       if (avr_log.legitimize_reload_address)
2032         avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
2033                    POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
2034 
2035       return x;
2036     }
2037 
2038   if (GET_CODE (x) == PLUS
2039       && REG_P (XEXP (x, 0))
2040       && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
2041       && CONST_INT_P (XEXP (x, 1))
2042       && INTVAL (XEXP (x, 1)) >= 1)
2043     {
2044       bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
2045 
2046       if (fit)
2047         {
2048           if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
2049             {
2050               int regno = REGNO (XEXP (x, 0));
2051               rtx mem = mk_memloc (x, regno);
2052 
2053               push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
2054                            POINTER_REGS, Pmode, VOIDmode, 0, 0,
2055                            1, (enum reload_type) addr_type);
2056 
2057               if (avr_log.legitimize_reload_address)
2058                 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2059                            POINTER_REGS, XEXP (mem, 0), NULL_RTX);
2060 
2061               push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
2062                            BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
2063                            opnum, (enum reload_type) type);
2064 
2065               if (avr_log.legitimize_reload_address)
2066                 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2067                            BASE_POINTER_REGS, mem, NULL_RTX);
2068 
2069               return x;
2070             }
2071         }
2072       else if (! (frame_pointer_needed
2073                   && XEXP (x, 0) == frame_pointer_rtx))
2074         {
2075           push_reload (x, NULL_RTX, px, NULL,
2076                        POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
2077                        opnum, (enum reload_type) type);
2078 
2079           if (avr_log.legitimize_reload_address)
2080             avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
2081                        POINTER_REGS, x, NULL_RTX);
2082 
2083           return x;
2084         }
2085     }
2086 
2087   return NULL_RTX;
2088 }
2089 
2090 
/* Implement `TARGET_SECONDARY_RELOAD' */
/* For an input reload (IN_P) of X in mode MODE from one of the
   non-generic 16-bit address spaces (i.e. neither the generic space nor
   __memx), request a d-class scratch register by pointing SRI->icode at
   the mode-specific reload_in<mode> expander.  Always returns NO_REGS:
   the scratch requirement is communicated solely through SRI.  */

static reg_class_t
avr_secondary_reload (bool in_p, rtx x,
                      reg_class_t reload_class ATTRIBUTE_UNUSED,
                      machine_mode mode, secondary_reload_info *sri)
{
  if (in_p
      && MEM_P (x)
      && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
      && ADDR_SPACE_MEMX != MEM_ADDR_SPACE (x))
    {
      /* For the non-generic 16-bit spaces we need a d-class scratch.  */

      switch (mode)
        {
        default:
          gcc_unreachable();

        case QImode:  sri->icode = CODE_FOR_reload_inqi; break;
        case QQmode:  sri->icode = CODE_FOR_reload_inqq; break;
        case UQQmode: sri->icode = CODE_FOR_reload_inuqq; break;

        case HImode:  sri->icode = CODE_FOR_reload_inhi; break;
        case HQmode:  sri->icode = CODE_FOR_reload_inhq; break;
        case HAmode:  sri->icode = CODE_FOR_reload_inha; break;
        case UHQmode: sri->icode = CODE_FOR_reload_inuhq; break;
        case UHAmode: sri->icode = CODE_FOR_reload_inuha; break;

        case PSImode: sri->icode = CODE_FOR_reload_inpsi; break;

        case SImode:  sri->icode = CODE_FOR_reload_insi; break;
        case SFmode:  sri->icode = CODE_FOR_reload_insf; break;
        case SQmode:  sri->icode = CODE_FOR_reload_insq; break;
        case SAmode:  sri->icode = CODE_FOR_reload_insa; break;
        case USQmode: sri->icode = CODE_FOR_reload_inusq; break;
        case USAmode: sri->icode = CODE_FOR_reload_inusa; break;
        }
    }

  return NO_REGS;
}
2133 
2134 
2135 /* Helper function to print assembler resp. track instruction
2136    sequence lengths.  Always return "".
2137 
2138    If PLEN == NULL:
2139        Output assembler code from template TPL with operands supplied
2140        by OPERANDS.  This is just forwarding to output_asm_insn.
2141 
2142    If PLEN != NULL:
2143        If N_WORDS >= 0  Add N_WORDS to *PLEN.
2144        If N_WORDS < 0   Set *PLEN to -N_WORDS.
2145        Don't output anything.
2146 */
2147 
2148 static const char*
2149 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
2150 {
2151   if (NULL == plen)
2152     {
2153       output_asm_insn (tpl, operands);
2154     }
2155   else
2156     {
2157       if (n_words < 0)
2158         *plen = -n_words;
2159       else
2160         *plen += n_words;
2161     }
2162 
2163   return "";
2164 }
2165 
2166 
2167 /* Return a pointer register name as a string.  */
2168 
2169 static const char*
2170 ptrreg_to_str (int regno)
2171 {
2172   switch (regno)
2173     {
2174     case REG_X: return "X";
2175     case REG_Y: return "Y";
2176     case REG_Z: return "Z";
2177     default:
2178       output_operand_lossage ("address operand requires constraint for"
2179                               " X, Y, or Z register");
2180     }
2181   return NULL;
2182 }
2183 
2184 /* Return the condition name as a string.
2185    Used in conditional jump constructing  */
2186 
2187 static const char*
2188 cond_string (enum rtx_code code)
2189 {
2190   switch (code)
2191     {
2192     case NE:
2193       return "ne";
2194     case EQ:
2195       return "eq";
2196     case GE:
2197       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2198         return "pl";
2199       else
2200         return "ge";
2201     case LT:
2202       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2203         return "mi";
2204       else
2205         return "lt";
2206     case GEU:
2207       return "sh";
2208     case LTU:
2209       return "lo";
2210     default:
2211       gcc_unreachable ();
2212     }
2213 
2214   return "";
2215 }
2216 
2217 
2218 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'.  */
2219 /* Output ADDR to FILE as address.  */
2220 
2221 static void
2222 avr_print_operand_address (FILE *file, rtx addr)
2223 {
2224   switch (GET_CODE (addr))
2225     {
2226     case REG:
2227       fprintf (file, ptrreg_to_str (REGNO (addr)));
2228       break;
2229 
2230     case PRE_DEC:
2231       fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2232       break;
2233 
2234     case POST_INC:
2235       fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
2236       break;
2237 
2238     default:
2239       if (CONSTANT_ADDRESS_P (addr)
2240           && text_segment_operand (addr, VOIDmode))
2241         {
2242           rtx x = addr;
2243           if (GET_CODE (x) == CONST)
2244             x = XEXP (x, 0);
2245           if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
2246             {
2247               /* Assembler gs() will implant word address.  Make offset
2248                  a byte offset inside gs() for assembler.  This is
2249                  needed because the more logical (constant+gs(sym)) is not
2250                  accepted by gas.  For 128K and smaller devices this is ok.
2251                  For large devices it will create a trampoline to offset
2252                  from symbol which may not be what the user really wanted.  */
2253 
2254               fprintf (file, "gs(");
2255               output_addr_const (file, XEXP (x,0));
2256               fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
2257                        2 * INTVAL (XEXP (x, 1)));
2258               if (AVR_3_BYTE_PC)
2259                 if (warning (0, "pointer offset from symbol maybe incorrect"))
2260                   {
2261                     output_addr_const (stderr, addr);
2262                     fprintf(stderr,"\n");
2263                   }
2264             }
2265           else
2266             {
2267               fprintf (file, "gs(");
2268               output_addr_const (file, addr);
2269               fprintf (file, ")");
2270             }
2271         }
2272       else
2273         output_addr_const (file, addr);
2274     }
2275 }
2276 
2277 
2278 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */
2279 
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */
/* Only '~' and '!' are recognized punctuation codes in operand output.  */

static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  switch (code)
    {
    case '~':
    case '!':
      return true;
    default:
      return false;
    }
}
2285 
2286 
2287 /* Implement `TARGET_PRINT_OPERAND'.  */
2288 /* Output X as assembler operand to file FILE.
2289    For a description of supported %-codes, see top of avr.md.  */
2290 
2291 static void
2292 avr_print_operand (FILE *file, rtx x, int code)
2293 {
2294   int abcd = 0, ef = 0, ij = 0;
2295 
2296   if (code >= 'A' && code <= 'D')
2297     abcd = code - 'A';
2298   else if (code == 'E' || code == 'F')
2299     ef = code - 'E';
2300   else if (code == 'I' || code == 'J')
2301     ij = code - 'I';
2302 
2303   if (code == '~')
2304     {
2305       if (!AVR_HAVE_JMP_CALL)
2306         fputc ('r', file);
2307     }
2308   else if (code == '!')
2309     {
2310       if (AVR_HAVE_EIJMP_EICALL)
2311         fputc ('e', file);
2312     }
2313   else if (code == 't'
2314            || code == 'T')
2315     {
2316       static int t_regno = -1;
2317       static int t_nbits = -1;
2318 
2319       if (REG_P (x) && t_regno < 0 && code == 'T')
2320         {
2321           t_regno = REGNO (x);
2322           t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
2323         }
2324       else if (CONST_INT_P (x) && t_regno >= 0
2325                && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
2326         {
2327           int bpos = INTVAL (x);
2328 
2329           fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
2330           if (code == 'T')
2331             fprintf (file, ",%d", bpos % 8);
2332 
2333           t_regno = -1;
2334         }
2335       else
2336         fatal_insn ("operands to %T/%t must be reg + const_int:", x);
2337     }
2338   else if (code == 'E' || code == 'F')
2339     {
2340       rtx op = XEXP(x, 0);
2341       fprintf (file, reg_names[REGNO (op) + ef]);
2342     }
2343   else if (code == 'I' || code == 'J')
2344     {
2345       rtx op = XEXP(XEXP(x, 0), 0);
2346       fprintf (file, reg_names[REGNO (op) + ij]);
2347     }
2348   else if (REG_P (x))
2349     {
2350       if (x == zero_reg_rtx)
2351         fprintf (file, "__zero_reg__");
2352       else if (code == 'r' && REGNO (x) < 32)
2353         fprintf (file, "%d", (int) REGNO (x));
2354       else
2355         fprintf (file, reg_names[REGNO (x) + abcd]);
2356     }
2357   else if (CONST_INT_P (x))
2358     {
2359       HOST_WIDE_INT ival = INTVAL (x);
2360 
2361       if ('i' != code)
2362         fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
2363       else if (low_io_address_operand (x, VOIDmode)
2364                || high_io_address_operand (x, VOIDmode))
2365         {
2366           if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
2367             fprintf (file, "__RAMPZ__");
2368           else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
2369             fprintf (file, "__RAMPY__");
2370           else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
2371             fprintf (file, "__RAMPX__");
2372           else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
2373             fprintf (file, "__RAMPD__");
2374           else if ((AVR_XMEGA || AVR_TINY) && ival == avr_addr.ccp)
2375             fprintf (file, "__CCP__");
2376           else if (ival == avr_addr.sreg)   fprintf (file, "__SREG__");
2377           else if (ival == avr_addr.sp_l)   fprintf (file, "__SP_L__");
2378           else if (ival == avr_addr.sp_h)   fprintf (file, "__SP_H__");
2379           else
2380             {
2381               fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2382                        ival - avr_arch->sfr_offset);
2383             }
2384         }
2385       else
2386         fatal_insn ("bad address, not an I/O address:", x);
2387     }
2388   else if (MEM_P (x))
2389     {
2390       rtx addr = XEXP (x, 0);
2391 
2392       if (code == 'm')
2393         {
2394           if (!CONSTANT_P (addr))
2395             fatal_insn ("bad address, not a constant:", addr);
2396           /* Assembler template with m-code is data - not progmem section */
2397           if (text_segment_operand (addr, VOIDmode))
2398             if (warning (0, "accessing data memory with"
2399                          " program memory address"))
2400               {
2401                 output_addr_const (stderr, addr);
2402                 fprintf(stderr,"\n");
2403               }
2404           output_addr_const (file, addr);
2405         }
2406       else if (code == 'i')
2407         {
2408           avr_print_operand (file, addr, 'i');
2409         }
2410       else if (code == 'o')
2411         {
2412           if (GET_CODE (addr) != PLUS)
2413             fatal_insn ("bad address, not (reg+disp):", addr);
2414 
2415           avr_print_operand (file, XEXP (addr, 1), 0);
2416         }
2417       else if (code == 'b')
2418         {
2419           if (GET_CODE (addr) != PLUS)
2420                fatal_insn ("bad address, not (reg+disp):", addr);
2421 
2422           avr_print_operand_address (file, XEXP (addr, 0));
2423         }
2424       else if (code == 'p' || code == 'r')
2425         {
2426           if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
2427             fatal_insn ("bad address, not post_inc or pre_dec:", addr);
2428 
2429           if (code == 'p')
2430             avr_print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
2431           else
2432             avr_print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
2433         }
2434       else if (GET_CODE (addr) == PLUS)
2435         {
2436           avr_print_operand_address (file, XEXP (addr,0));
2437           if (REGNO (XEXP (addr, 0)) == REG_X)
2438             fatal_insn ("internal compiler error.  Bad address:"
2439                         ,addr);
2440           fputc ('+', file);
2441           avr_print_operand (file, XEXP (addr,1), code);
2442         }
2443       else
2444         avr_print_operand_address (file, addr);
2445     }
2446   else if (code == 'i')
2447     {
2448       if (GET_CODE (x) == SYMBOL_REF && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO))
2449 	avr_print_operand_address
2450 	  (file, plus_constant (HImode, x, -avr_arch->sfr_offset));
2451       else
2452 	fatal_insn ("bad address, not an I/O address:", x);
2453     }
2454   else if (code == 'x')
2455     {
2456       /* Constant progmem address - like used in jmp or call */
2457       if (0 == text_segment_operand (x, VOIDmode))
2458         if (warning (0, "accessing program memory"
2459                      " with data memory address"))
2460           {
2461             output_addr_const (stderr, x);
2462             fprintf(stderr,"\n");
2463           }
2464       /* Use normal symbol for direct address no linker trampoline needed */
2465       output_addr_const (file, x);
2466     }
2467   else if (CONST_FIXED_P (x))
2468     {
2469       HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
2470       if (code != 0)
2471         output_operand_lossage ("Unsupported code '%c' for fixed-point:",
2472                                 code);
2473       fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
2474     }
2475   else if (GET_CODE (x) == CONST_DOUBLE)
2476     {
2477       long val;
2478       REAL_VALUE_TYPE rv;
2479       if (GET_MODE (x) != SFmode)
2480         fatal_insn ("internal compiler error.  Unknown mode:", x);
2481       REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2482       REAL_VALUE_TO_TARGET_SINGLE (rv, val);
2483       fprintf (file, "0x%lx", val);
2484     }
2485   else if (GET_CODE (x) == CONST_STRING)
2486     fputs (XSTR (x, 0), file);
2487   else if (code == 'j')
2488     fputs (cond_string (GET_CODE (x)), file);
2489   else if (code == 'k')
2490     fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2491   else
2492     avr_print_operand_address (file, x);
2493 }
2494 
2495 
2496 /* Worker function for `NOTICE_UPDATE_CC'.  */
2497 /* Update the condition code in the INSN.  */
2498 
void
avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx_insn *insn)
{
  rtx set;
  enum attr_cc cc = get_attr_cc (insn);

  /* First pass: map operand-dependent CC attribute values (CC_PLUS,
     CC_LDI) to one of the standard CC_* values handled below.  */

  switch (cc)
    {
    default:
      break;

    case CC_PLUS:
    case CC_LDI:
      {
        rtx *op = recog_data.operand;
        int len_dummy, icc;

        /* Extract insn's operands.  */
        extract_constrain_insn_cached (insn);

        switch (cc)
          {
          default:
            gcc_unreachable();

          case CC_PLUS:
            /* avr_out_plus computes the resulting CC into ICC as a side
               effect of working out the output sequence.  */
            avr_out_plus (insn, op, &len_dummy, &icc);
            cc = (enum attr_cc) icc;
            break;

          case CC_LDI:

            cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
                  && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
              /* Loading zero-reg with 0 uses CLR and thus clobbers cc0.  */
              ? CC_CLOBBER
              /* Any other "r,rL" combination does not alter cc0.  */
              : CC_NONE;

            break;
          } /* inner switch */

        break;
      }
    } /* outer switch */

  /* Second pass: update cc_status according to the (now standard)
     CC_* value.  */

  switch (cc)
    {
    default:
      /* Special values like CC_OUT_PLUS from above have been
         mapped to "standard" CC_* values so we never come here.  */

      gcc_unreachable();
      break;

    case CC_NONE:
      /* Insn does not affect CC at all, but it might set some registers
         that are stored in cc_status.  If such a register is affected by
         the current insn, for example by means of a SET or a CLOBBER,
         then we must reset cc_status; cf. PR77326.

         Unfortunately, set_of cannot be used as reg_overlap_mentioned_p
         will abort on COMPARE (which might be found in cc_status.value1/2).
         Thus work out the registers set by the insn and regs mentioned
         in cc_status.value1/2.  */

      if (cc_status.value1
          || cc_status.value2)
        {
          HARD_REG_SET regs_used;
          HARD_REG_SET regs_set;
          CLEAR_HARD_REG_SET (regs_used);

          if (cc_status.value1
              && !CONSTANT_P (cc_status.value1))
            {
              find_all_hard_regs (cc_status.value1, &regs_used);
            }

          if (cc_status.value2
              && !CONSTANT_P (cc_status.value2))
            {
              find_all_hard_regs (cc_status.value2, &regs_used);
            }

          find_all_hard_reg_sets (insn, &regs_set, false);

          if (hard_reg_set_intersect_p (regs_used, regs_set))
            {
              CC_STATUS_INIT;
            }
        }

      break; // CC_NONE

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      /* Z and N are valid for the SET destination; V is unknown, so
         mark overflow as unusable via CC_NO_OVERFLOW.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_VZN:
      /* Insn like INC, DEC, NEG that set Z,N,V.  We currently don't make use
         of this combination, cf. also PR61055.  */
      CC_STATUS_INIT;
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;
    }
}
2640 
2641 /* Choose mode for jump insn:
2642    1 - relative jump in range -63 <= x <= 62 ;
2643    2 - relative jump in range -2046 <= x <= 2045 ;
2644    3 - absolute jump (only for ATmega[16]03).  */
2645 
2646 int
2647 avr_jump_mode (rtx x, rtx_insn *insn)
2648 {
2649   int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2650                                             ? XEXP (x, 0) : x));
2651   int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2652   int jump_distance = cur_addr - dest_addr;
2653 
2654   if (-63 <= jump_distance && jump_distance <= 62)
2655     return 1;
2656   else if (-2046 <= jump_distance && jump_distance <= 2045)
2657     return 2;
2658   else if (AVR_HAVE_JMP_CALL)
2659     return 3;
2660 
2661   return 2;
2662 }
2663 
2664 /* Return an AVR condition jump commands.
2665    X is a comparison RTX.
2666    LEN is a number returned by avr_jump_mode function.
2667    If REVERSE nonzero then condition code in X must be reversed.  */
2668 
const char*
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  /* GT/GTU/LE/LEU have no single branch insn on AVR; synthesize them
     from BREQ plus a signed/unsigned branch.  When the previous insn
     left the V flag unusable (CC_OVERFLOW_UNUSABLE), only the N flag
     can be tested, so BRPL/BRMI replace BRGE/BRLT.  LEN selects the
     template per avr_jump_mode: 1 = short branch, 2 = branch around
     RJMP, otherwise branch around JMP.  */

  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brpl %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brmi .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brmi .+4" CR_TAB
                 "jmp %0"));

      else
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brge %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brlt .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brlt .+4" CR_TAB
                 "jmp %0"));
    case GTU:
      return (len == 1 ? ("breq .+2" CR_TAB
                          "brsh %0") :
              len == 2 ? ("breq .+4" CR_TAB
                          "brlo .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+6" CR_TAB
               "brlo .+4" CR_TAB
               "jmp %0"));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq %0" CR_TAB
                            "brmi %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brpl .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brpl .+4" CR_TAB
                 "jmp %0"));
      else
        return (len == 1 ? ("breq %0" CR_TAB
                            "brlt %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brge .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brge .+4" CR_TAB
                 "jmp %0"));
    case LEU:
      return (len == 1 ? ("breq %0" CR_TAB
                          "brlo %0") :
              len == 2 ? ("breq .+2" CR_TAB
                          "brsh .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+2" CR_TAB
               "brsh .+4" CR_TAB
               "jmp %0"));
    default:
      /* All other conditions map 1:1 to a branch insn: %j1 prints the
         condition, %k1 the reversed condition (see avr_print_operand).  */
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return "br%k1 %0";
            case 2:
              return ("br%j1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%j1 .+4" CR_TAB
                      "jmp %0");
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return "br%j1 %0";
            case 2:
              return ("br%k1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%k1 .+4" CR_TAB
                      "jmp %0");
            }
        }
    }
  return "";
}
2765 
2766 
2767 /* Worker function for `FINAL_PRESCAN_INSN'.  */
2768 /* Output insn cost for next insn.  */
2769 
2770 void
2771 avr_final_prescan_insn (rtx_insn *insn, rtx *operand ATTRIBUTE_UNUSED,
2772                         int num_operands ATTRIBUTE_UNUSED)
2773 {
2774   if (avr_log.rtx_costs)
2775     {
2776       rtx set = single_set (insn);
2777 
2778       if (set)
2779         fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
2780                  set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2781       else
2782         fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d.  */\n",
2783                  rtx_cost (PATTERN (insn), INSN, 0,
2784                            optimize_insn_for_speed_p()));
2785     }
2786 }
2787 
/* Return 0 if undefined, 1 if always true or always false.
   OP compares some value of mode MODE against constant X; decidable
   when X has bits outside the representable range of MODE (halved for
   signed comparisons).  */

int
avr_simplify_comparison_p (machine_mode mode, RTX_CODE op, rtx x)
{
  /* All-ones mask of MODE's value range; 0 for unhandled modes.  */
  unsigned int max = (mode == QImode ? 0xff :
                      mode == HImode ? 0xffff :
                      mode == PSImode ? 0xffffff :
                      mode == SImode ? 0xffffffff : 0);
  if (max && op && CONST_INT_P (x))
    {
      /* Signed comparison: only half the range is representable.  */
      if (unsigned_condition (op) != op)
        max >>= 1;

      /* NOTE(review): the extra "!= 0xff" exemption looks odd -- it
         keeps 0xff undecided even for signed QImode; presumably
         intentional for some pattern, but confirm before relying on it.  */
      if (max != (INTVAL (x) & max)
          && INTVAL (x) != 0xff)
        return 1;
    }
  return 0;
}
2808 
2809 
2810 /* Worker function for `FUNCTION_ARG_REGNO_P'.  */
2811 /* Returns nonzero if REGNO is the number of a hard
2812    register in which function arguments are sometimes passed.  */
2813 
2814 int
2815 avr_function_arg_regno_p(int r)
2816 {
2817   return (AVR_TINY ? r >= 20 && r <= 25 : r >= 8 && r <= 25);
2818 }
2819 
2820 
/* Worker function for `INIT_CUMULATIVE_ARGS'.  */
/* Initializing the variable cum for the state at the beginning
   of the argument list.  NREGS counts the registers still available
   for arguments (6 on avrtiny, 18 otherwise); REGNO is one past the
   next register to allocate (arguments grow downwards from it).
   Variadic functions (stdarg) pass everything on the stack.  */

void
avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                          tree fndecl ATTRIBUTE_UNUSED)
{
  cum->nregs = AVR_TINY ? 6 : 18;
  cum->regno = FIRST_CUM_REG;
  if (!libname && stdarg_p (fntype))
    cum->nregs = 0;

  /* Assume the callee may be tail called.  */

  cfun->machine->sibcall_fails = 0;
}
2838 
2839 /* Returns the number of registers to allocate for a function argument.  */
2840 
2841 static int
2842 avr_num_arg_regs (machine_mode mode, const_tree type)
2843 {
2844   int size;
2845 
2846   if (mode == BLKmode)
2847     size = int_size_in_bytes (type);
2848   else
2849     size = GET_MODE_SIZE (mode);
2850 
2851   /* Align all function arguments to start in even-numbered registers.
2852      Odd-sized arguments leave holes above them.  */
2853 
2854   return (size + 1) & ~1;
2855 }
2856 
2857 
2858 /* Implement `TARGET_FUNCTION_ARG'.  */
2859 /* Controls whether a function argument is passed
2860    in a register, and which register.  */
2861 
2862 static rtx
2863 avr_function_arg (cumulative_args_t cum_v, machine_mode mode,
2864                   const_tree type, bool named ATTRIBUTE_UNUSED)
2865 {
2866   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2867   int bytes = avr_num_arg_regs (mode, type);
2868 
2869   if (cum->nregs && bytes <= cum->nregs)
2870     return gen_rtx_REG (mode, cum->regno - bytes);
2871 
2872   return NULL_RTX;
2873 }
2874 
2875 
/* Implement `TARGET_FUNCTION_ARG_ADVANCE'.  */
/* Update the summarizer variable CUM to advance past an argument
   in the argument list.  Also records in cfun->machine whether a
   tail call becomes impossible, and warns when a fixed register
   would be needed to pass the argument.  */

static void
avr_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  /* Registers are allocated downwards from CUM->regno.  */
  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register.  As the original
     contents of these regs has to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called.  */

  if (cum->regno >= 8
      && cum->nregs >= 0
      && !call_used_regs[cum->regno])
    {
      /* FIXME: We ship info on failing tail-call in struct machine_function.
         This uses internals of calls.c:expand_call() and the way args_so_far
         is used.  targetm.function_ok_for_sibcall() needs to be extended to
         pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
         dependent so that such an extension is not wanted.  */

      cfun->machine->sibcall_fails = 1;
    }

  /* Test if all registers needed by the ABI are actually available.  If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call will clobber that fixed register.  See PR45099 for an example.  */

  if (cum->regno >= 8
      && cum->nregs >= 0)
    {
      int regno;

      for (regno = cum->regno; regno < cum->regno + bytes; regno++)
        if (fixed_regs[regno])
          warning (0, "fixed register %s used to pass parameter to function",
                   reg_names[regno]);
    }

  /* Out of argument registers: all further arguments go on the stack;
     reset the counters for consistency.  */

  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}
2929 
2930 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2931 /* Decide whether we can make a sibling call to a function.  DECL is the
2932    declaration of the function being targeted by the call and EXP is the
2933    CALL_EXPR representing the call.  */
2934 
2935 static bool
2936 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2937 {
2938   tree fntype_callee;
2939 
2940   /* Tail-calling must fail if callee-saved regs are used to pass
2941      function args.  We must not tail-call when `epilogue_restores'
2942      is used.  Unfortunately, we cannot tell at this point if that
2943      actually will happen or not, and we cannot step back from
2944      tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */
2945 
2946   if (cfun->machine->sibcall_fails
2947       || TARGET_CALL_PROLOGUES)
2948     {
2949       return false;
2950     }
2951 
2952   fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2953 
2954   if (decl_callee)
2955     {
2956       decl_callee = TREE_TYPE (decl_callee);
2957     }
2958   else
2959     {
2960       decl_callee = fntype_callee;
2961 
2962       while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2963              && METHOD_TYPE != TREE_CODE (decl_callee))
2964         {
2965           decl_callee = TREE_TYPE (decl_callee);
2966         }
2967     }
2968 
2969   /* Ensure that caller and callee have compatible epilogues */
2970 
2971   if (cfun->machine->is_interrupt
2972       || cfun->machine->is_signal
2973       || cfun->machine->is_naked
2974       || avr_naked_function_p (decl_callee)
2975       /* FIXME: For OS_task and OS_main, this might be over-conservative.  */
2976       || (avr_OS_task_function_p (decl_callee)
2977           != cfun->machine->is_OS_task)
2978       || (avr_OS_main_function_p (decl_callee)
2979           != cfun->machine->is_OS_main))
2980     {
2981       return false;
2982     }
2983 
2984   return true;
2985 }
2986 
2987 /***********************************************************************
2988   Functions for outputting various mov's for a various modes
2989 ************************************************************************/
2990 
2991 /* Return true if a value of mode MODE is read from flash by
2992    __load_* function from libgcc.  */
2993 
2994 bool
2995 avr_load_libgcc_p (rtx op)
2996 {
2997   machine_mode mode = GET_MODE (op);
2998   int n_bytes = GET_MODE_SIZE (mode);
2999 
3000   return (n_bytes > 2
3001           && !AVR_HAVE_LPMX
3002           && avr_mem_flash_p (op));
3003 }
3004 
3005 /* Return true if a value of mode MODE is read by __xload_* function.  */
3006 
3007 bool
3008 avr_xload_libgcc_p (machine_mode mode)
3009 {
3010   int n_bytes = GET_MODE_SIZE (mode);
3011 
3012   return (n_bytes > 1
3013           || avr_n_flash > 1);
3014 }
3015 
3016 
3017 /* Fixme: This is a hack because secondary reloads don't works as expected.
3018 
3019    Find an unused d-register to be used as scratch in INSN.
3020    EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
3021    is a register, skip all possible return values that overlap EXCLUDE.
3022    The policy for the returned register is similar to that of
3023    `reg_unused_after', i.e. the returned register may overlap the SET_DEST
3024    of INSN.
3025 
3026    Return a QImode d-register or NULL_RTX if nothing found.  */
3027 
3028 static rtx
3029 avr_find_unused_d_reg (rtx_insn *insn, rtx exclude)
3030 {
3031   int regno;
3032   bool isr_p = (avr_interrupt_function_p (current_function_decl)
3033                 || avr_signal_function_p (current_function_decl));
3034 
3035   for (regno = 16; regno < 32; regno++)
3036     {
3037       rtx reg = all_regs_rtx[regno];
3038 
3039       if ((exclude
3040            && reg_overlap_mentioned_p (exclude, reg))
3041           || fixed_regs[regno])
3042         {
3043           continue;
3044         }
3045 
3046       /* Try non-live register */
3047 
3048       if (!df_regs_ever_live_p (regno)
3049           && (TREE_THIS_VOLATILE (current_function_decl)
3050               || cfun->machine->is_OS_task
3051               || cfun->machine->is_OS_main
3052               || (!isr_p && call_used_regs[regno])))
3053         {
3054           return reg;
3055         }
3056 
3057       /* Any live register can be used if it is unused after.
3058          Prologue/epilogue will care for it as needed.  */
3059 
3060       if (df_regs_ever_live_p (regno)
3061           && reg_unused_after (insn, reg))
3062         {
3063           return reg;
3064         }
3065     }
3066 
3067   return NULL_RTX;
3068 }
3069 
3070 
/* Helper function for the next function in the case where only restricted
   version of LPM instruction is available.

   INSN is the load insn, XOP the operand array prepared by avr_out_lpm
   (%0 = destination register, %1 = address, %2 = Z register, %4 = "e"
   prefix for ELPM or empty; %3 is set to r0 here).  PLEN works as in
   avr_asm_len: NULL means output the asm, otherwise accumulate the
   instruction count in *PLEN.  Returns "".  */

static const char*
avr_out_lpm_no_lpmx (rtx_insn *insn, rtx *xop, int *plen)
{
  rtx dest = xop[0];
  rtx addr = xop[1];
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int regno_dest;

  regno_dest = REGNO (dest);

  /* The implicit target register of LPM.  */
  xop[3] = lpm_reg_rtx;

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          /* Single byte: LPM loads into r0; copy to DEST unless DEST
             already is r0.  */
          avr_asm_len ("%4lpm", xop, plen, 1);

          if (regno_dest != LPM_REGNO)
            avr_asm_len ("mov %0,%3", xop, plen, 1);

          return "";

        case 2:
          /* DEST is the Z register pair itself: writing the low byte
             directly would clobber the address, so park it on the
             stack until the high byte has been read.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm"      CR_TAB
                                "push %3"    CR_TAB
                                "adiw %2,1"  CR_TAB
                                "%4lpm"      CR_TAB
                                "mov %B0,%3" CR_TAB
                                "pop %A0", xop, plen, 6);

          avr_asm_len ("%4lpm"      CR_TAB
                       "mov %A0,%3" CR_TAB
                       "adiw %2,1"  CR_TAB
                       "%4lpm"      CR_TAB
                       "mov %B0,%3", xop, plen, 5);

          /* Restore Z if its value is still needed after this insn.  */
          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,1", xop, plen, 1);

          break; /* 2 */
        }

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      /* Post-increment: one LPM / ADIW pair per byte, leaving Z
         advanced past the loaded value.  */

      if (regno_dest == LPM_REGNO)
        avr_asm_len ("%4lpm"      CR_TAB
                     "adiw %2,1", xop, plen, 2);
      else
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %A0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 2)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %B0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 3)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %C0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 4)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %D0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  return "";
}
3166 
3167 
/* If PLEN == NULL: Output instructions to load a value from a memory location
   OP[1] in AS1 to register OP[0].
   If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
   Return "".  */

const char*
avr_out_lpm (rtx_insn *insn, rtx *op, int *plen)
{
  rtx xop[7];
  rtx dest = op[0];
  rtx src = SET_SRC (single_set (insn));
  rtx addr;
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int segment;
  RTX_CODE code;
  addr_space_t as = MEM_ADDR_SPACE (src);

  if (plen)
    *plen = 0;

  /* A MEM destination would be a store into the named address space,
     which is not supported: diagnose and bail out.  */

  if (MEM_P (dest))
    {
      warning (0, "writing to address space %qs not supported",
               avr_addrspace[MEM_ADDR_SPACE (dest)].name);

      return "";
    }

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  /* Operand layout used by the templates below:
     %0 = destination register
     %1 = source address (Z or Z post-increment)
     %2 = the Z address register pair
     %3 = scratch d-register (only set when RAMPZ must be loaded)
     %4 = "e" prefix to form ELPM, or empty; temporarily holds the
          segment number while RAMPZ is being set up
     %5 = the tmp register r0
     %6 = the RAMPZ special function register  */

  xop[0] = dest;
  xop[1] = addr;
  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;
  xop[6] = XEXP (rampz_rtx, 0);

  segment = avr_addrspace[as].segment;

  /* Set RAMPZ as needed.  */

  if (segment)
    {
      xop[4] = GEN_INT (segment);
      xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);

      if (xop[3] != NULL_RTX)
        {
          /* A d-register is free: load the segment number into it.  */

          avr_asm_len ("ldi %3,%4" CR_TAB
                       "out %i6,%3", xop, plen, 2);
        }
      else if (segment == 1)
        {
          /* No free d-register, but segment 1 can be synthesized in r0
             with CLR/INC.  */

          avr_asm_len ("clr %5" CR_TAB
                       "inc %5" CR_TAB
                       "out %i6,%5", xop, plen, 3);
        }
      else
        {
          /* Worst case: borrow ZL for the LDI, saving and restoring it
             via r0.  */

          avr_asm_len ("mov %5,%2"         CR_TAB
                       "ldi %2,%4"         CR_TAB
                       "out %i6,%2"  CR_TAB
                       "mov %2,%5", xop, plen, 4);
        }

      xop[4] = xstring_e;

      if (!AVR_HAVE_ELPMX)
        return avr_out_lpm_no_lpmx (insn, xop, plen);
    }
  else if (!AVR_HAVE_LPMX)
    {
      return avr_out_lpm_no_lpmx (insn, xop, plen);
    }

  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);

        case 2:
          /* DEST is Z itself: buffer the low byte in r0 so the address
             survives until the high byte is loaded.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %B0,%a2" CR_TAB
                                "mov %A0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                           "%4lpm %B0,%a2", xop, plen, 2);

              /* Restore Z if it is still live after this insn.  */
              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,1", xop, plen, 1);
            }

          break; /* 2 */

        case 3:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+" CR_TAB
                       "%4lpm %C0,%a2", xop, plen, 3);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,2", xop, plen, 1);

          break; /* 3 */

        case 4:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+", xop, plen, 2);

          /* DEST starting at r28 means its upper two bytes are r30/r31,
             i.e. Z: buffer byte C in r0 until byte D is read.  */
          if (REGNO (dest) == REG_Z - 2)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %C0,%a2"          CR_TAB
                                "mov %D0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
                           "%4lpm %D0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,3", xop, plen, 1);
            }

          break; /* 4 */
        } /* n_bytes */

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      avr_asm_len                    ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2)  avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3)  avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4)  avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
    {
      /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM.  */

      xop[0] = zero_reg_rtx;
      avr_asm_len ("out %i6,%0", xop, plen, 1);
    }

  return "";
}
3340 
3341 
/* Worker function for xload_8 insn.

   OP[0] is the destination register, OP[1] a byte whose bit 7 selects
   between flash and RAM (NOTE(review): the exact semantics of bit 7
   follow from the xload_8 pattern in avr.md -- confirm there).  PLEN
   works as in avr_asm_len.  Returns "".  */

const char*
avr_out_xload (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  rtx xop[4];

  /* %3 is where the byte lands first: DEST directly with LPMX,
     otherwise r0 (the implicit LPM target).  */
  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = lpm_addr_reg_rtx;
  xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;

  /* Read the byte from flash at Z ...  */
  avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);

  /* ... but if bit 7 of %1 is set, replace it with a read from RAM.  */
  avr_asm_len ("sbrc %1,7" CR_TAB
               "ld %3,%a2", xop, plen, 2);

  /* Move the result into DEST when it was staged in r0.  */
  if (REGNO (xop[0]) != REGNO (xop[3]))
    avr_asm_len ("mov %0,%3", xop, plen, 1);

  return "";
}
3364 
3365 
/* Output assembler code for a QImode (1 byte) move and return "".
   OPERANDS[0] is the destination, OPERANDS[1] the source.  If PLEN is
   not NULL, accumulate the instruction count in *PLEN instead of
   emitting asm (see avr_asm_len).  */

const char*
output_movqi (rtx_insn *insn, rtx operands[], int *plen)
{
  rtx dest = operands[0];
  rtx src = operands[1];

  /* Moves touching flash memory are handled by the [E]LPM machinery.  */

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, plen);
    }

  gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          /* The stack pointer is accessed as an I/O register.  */
          if (test_hard_reg_class (STACK_REG, dest))
            return avr_asm_len ("out %0,%1", operands, plen, -1);
          else if (test_hard_reg_class (STACK_REG, src))
            return avr_asm_len ("in %0,%1", operands, plen, -1);

          return avr_asm_len ("mov %0,%1", operands, plen, -1);
        }
      else if (CONSTANT_P (src))
        {
          output_reload_in_const (operands, NULL_RTX, plen, false);
          return "";
        }
      else if (MEM_P (src))
        return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      /* When storing zero, use the fixed zero register instead of a
         literal 0 source.  */
      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movqi_mr_r (insn, xop, plen);
    }

  return "";
}
3411 
3412 
/* Output assembler code for a HImode (2 byte) move and return "".
   XOP[0] is the destination, XOP[1] the source.  If PLEN is not NULL,
   accumulate the instruction count in *PLEN instead of emitting asm
   (see avr_asm_len).  */

const char *
output_movhi (rtx_insn *insn, rtx xop[], int *plen)
{
  rtx dest = xop[0];
  rtx src = xop[1];

  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);

  /* Moves touching flash memory are handled by the [E]LPM machinery.  */

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, xop, plen);
    }

  /* NOTE(review): repeats the size assertion from the top of the
     function.  */
  gcc_assert (2 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            {
              /* Writing the stack pointer.  With an 8-bit SP only the
                 low half exists.  */
              if (AVR_HAVE_8BIT_SP)
                return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);

              if (AVR_XMEGA)
                return avr_asm_len ("out __SP_L__,%A1" CR_TAB
                                    "out __SP_H__,%B1", xop, plen, -2);

              /* Use simple load of SP if no interrupts are  used.  */

              return TARGET_NO_INTERRUPTS
                ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -2)
                /* Otherwise keep interrupts disabled while SP is in an
                   inconsistent half-written state.  */
                : avr_asm_len ("in __tmp_reg__,__SREG__"  CR_TAB
                               "cli"                      CR_TAB
                               "out __SP_H__,%B1"         CR_TAB
                               "out __SREG__,__tmp_reg__" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -5);
            }
          else if (test_hard_reg_class (STACK_REG, src))
            {
              /* Reading the stack pointer; devices without SPH get a
                 cleared high byte.  */
              return !AVR_HAVE_SPH
                ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "clr %B0", xop, plen, -2)

                : avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "in %B0,__SP_H__", xop, plen, -2);
            }

          return AVR_HAVE_MOVW
            ? avr_asm_len ("movw %0,%1", xop, plen, -1)

            : avr_asm_len ("mov %A0,%A1" CR_TAB
                           "mov %B0,%B1", xop, plen, -2);
        } /* REG_P (src) */
      else if (CONSTANT_P (src))
        {
          return output_reload_inhi (xop, NULL, plen);
        }
      else if (MEM_P (src))
        {
          return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
        }
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      /* When storing zero, use the fixed zero register instead of a
         literal 0 source.  */
      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movhi_mr_r (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);

  return "";
}
3492 
3493 
/* Same as out_movqi_r_mr, but TINY does not have ADIW or SBIW */

static const char*
avr_out_movqi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  /* Advance the base register by the displacement (TINY_ADIW emits the
     add-immediate sequence available on Tiny), then do a plain load.  */
  avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
               "ld %0,%b1" , op, plen, -3);

  /* Undo the displacement unless the base register was overwritten by
     the load or is unused after this insn.  */
  if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
      && !reg_unused_after (insn, XEXP (x,0)))
    avr_asm_len (TINY_SBIW (%I1, %J1, %o1), op, plen, 2);

  return "";
}
3512 
/* Output asm to load a QImode value from memory OP[1] into register
   OP[0].  If PLEN is not NULL only count instructions in *PLEN.  */

static const char*
out_movqi_r_mr (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* LDS is 1 word on Tiny, 2 words on other cores.  */
      int n_words = AVR_TINY ? 1 : 2;
      return io_address_operand (x, QImode)
        ? avr_asm_len ("in %0,%i1", op, plen, -1)
        : avr_asm_len ("lds %0,%m1", op, plen, -n_words);
    }

  if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (AVR_TINY)
        return avr_out_movqi_r_mr_reg_disp_tiny (insn, op, plen);

      /* Displacement out of LDD range: the base must be Y; adjust it,
         load, and restore it afterwards.  */

      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
        {
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
                                "ldd %0,Y+63"     CR_TAB
                                "sbiw r28,%o1-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %0,Y"            CR_TAB
                              "subi r28,lo8(%o1)"  CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal optimizing options.  */

          /* X has no displacement addressing: adjust, load, restore.  */

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %0,X", op, plen, -2);

          /* Only restore X when it was not clobbered by the load and is
             still live after this insn.  */

          if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
              && !reg_unused_after (insn, XEXP (x,0)))
            {
              avr_asm_len ("sbiw r26,%o1", op, plen, 1);
            }

          return "";
        }

      return avr_asm_len ("ldd %0,%1", op, plen, -1);
    }

  return avr_asm_len ("ld %0,%1", op, plen, -1);
}
3577 
3578 
/* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */

static const char*
avr_out_movhi_r_mr_reg_no_disp_tiny (rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);

  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  /* DEST is the base pointer itself: buffer the low byte in r0 so the
     address survives until the high byte has been loaded.  */
  if (reg_dest == reg_base)         /* R = (R) */
    return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
			"ld %B0,%1"          CR_TAB
			"mov %A0,__tmp_reg__", op, plen, -3);

  /* No LDD on Tiny: bump the pointer for the high byte, then restore
     its original value.  */
  return avr_asm_len ("ld %A0,%1"             CR_TAB
                      TINY_ADIW (%E1, %F1, 1) CR_TAB
                      "ld %B0,%1"             CR_TAB
                      TINY_SBIW (%E1, %F1, 1), op, plen, -6);
}
3601 
3602 
/* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */

static const char*
avr_out_movhi_r_mr_reg_disp_tiny (rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);  /* A (PLUS reg disp) address.  */

  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (XEXP (base, 0));

  if (reg_base == reg_dest)
    {
      /* DEST overlaps the pointer: buffer the low byte in r0 so the
         address stays valid for the high-byte load.  No restore is
         emitted since the pointer is overwritten anyway.  */
      return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                          "ld __tmp_reg__,%b1+"     CR_TAB
                          "ld %B0,%b1"              CR_TAB
                          "mov %A0,__tmp_reg__", op, plen, -5);
    }
  else
    {
      /* Advance the pointer by the displacement, load low byte with
         post-increment and high byte plain, then subtract %o1+1 to
         restore the original pointer value.  */
      return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                          "ld %A0,%b1+"             CR_TAB
                          "ld %B0,%b1"              CR_TAB
                          TINY_SBIW (%I1, %J1, %o1+1), op, plen, -6);
    }
}
3630 
3631 
/* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */

static const char*
avr_out_movhi_r_mr_pre_dec_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  int mem_volatile_p = 0;
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);

  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  mem_volatile_p = MEM_VOLATILE_P (src);

  /* A pre-decrement load must not clobber its own address register.  */
  if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
    fatal_insn ("incorrect insn:", insn);

  /* Non-volatile: just read high then low, each load pre-decrementing.  */
  if (!mem_volatile_p)
    return avr_asm_len ("ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -2);

  /* Volatile: step the pointer back by 2 up front so the low byte can
     be read first; the final SBIW leaves the pointer at its net
     pre-decremented value (original - 2).  */
  return avr_asm_len (TINY_SBIW (%I1, %J1, 2)  CR_TAB
                      "ld %A0,%p1+"            CR_TAB
                      "ld %B0,%p1"             CR_TAB
                      TINY_SBIW (%I1, %J1, 1), op, plen, -6);
}
3658 
3659 
/* Output asm to load a HImode value from memory OP[1] into register
   pair OP[0].  If PLEN is not NULL only count instructions in *PLEN.  */

static const char*
out_movhi_r_mr (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movhi_r_mr_reg_no_disp_tiny (op, plen);

      /* DEST is the base pointer itself: buffer the low byte in r0.  */
      if (reg_dest == reg_base)         /* R = (R) */
        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                            "ld %B0,%1"          CR_TAB
                            "mov %A0,__tmp_reg__", op, plen, -3);

      if (reg_base != REG_X)
        return avr_asm_len ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1", op, plen, -2);

      /* X has no LDD: use post-increment, then undo the increment if X
         is still live after this insn.  */

      avr_asm_len ("ld %A0,X+" CR_TAB
                   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
        avr_asm_len ("sbiw r26,1", op, plen, 1);

      return "";
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      /* Shadows the function-scope reg_base: here it is the base
         register inside the PLUS.  */
      int reg_base = true_regnum (XEXP (base, 0));

      if (AVR_TINY)
        return avr_out_movhi_r_mr_reg_disp_tiny (op, plen);

      /* Displacement out of LDD range: the base must be Y; adjust it,
         load, and restore it afterwards.  */

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
                           "ldd %A0,Y+62"    CR_TAB
                           "ldd %B0,Y+63"    CR_TAB
                           "sbiw r28,%o1-62", op, plen, -4)

              : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                           "sbci r29,hi8(-%o1)" CR_TAB
                           "ld %A0,Y"           CR_TAB
                           "ldd %B0,Y+1"        CR_TAB
                           "subi r28,lo8(%o1)"  CR_TAB
                           "sbci r29,hi8(%o1)", op, plen, -6);
        }

      /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
         it but I have this situation with extremal
         optimization options.  */

      if (reg_base == REG_X)
        return reg_base == reg_dest
          ? avr_asm_len ("adiw r26,%o1"      CR_TAB
                         "ld __tmp_reg__,X+" CR_TAB
                         "ld %B0,X"          CR_TAB
                         "mov %A0,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld %A0,X+"    CR_TAB
                         "ld %B0,X"     CR_TAB
                         "sbiw r26,%o1+1", op, plen, -4);

      /* Within LDD range: when DEST overlaps the base, buffer the low
         byte in r0.  */

      return reg_base == reg_dest
        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
                       "ldd %B0,%B1"         CR_TAB
                       "mov %A0,__tmp_reg__", op, plen, -3)

        : avr_asm_len ("ldd %A0,%A1" CR_TAB
                       "ldd %B0,%B1", op, plen, -2);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (AVR_TINY)
	return avr_out_movhi_r_mr_pre_dec_tiny (insn, op, plen);

      /* A pre-decrement load must not clobber its own address
         register.  */
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (!mem_volatile_p)
        return avr_asm_len ("ld %B0,%1" CR_TAB
                            "ld %A0,%1", op, plen, -2);

      /* Volatile: read the low byte first (see comment above).  */

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2"  CR_TAB
                       "ld %A0,X+"   CR_TAB
                       "ld %B0,X"    CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r1,2"  CR_TAB
                       "ld %A0,%p1"  CR_TAB
                       "ldd %B0,%p1+1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1"  CR_TAB
                          "ld %B0,%1", op, plen, -2);
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* LDS is 1 word per byte on Tiny, 2 words on other cores.  */
      int n_words = AVR_TINY ? 2 : 4;
      return io_address_operand (base, HImode)
        ? avr_asm_len ("in %A0,%i1" CR_TAB
                       "in %B0,%i1+1", op, plen, -2)

        : avr_asm_len ("lds %A0,%m1" CR_TAB
                       "lds %B0,%m1+1", op, plen, -n_words);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3789 
/* Load an SImode value from memory OP[1] (a plain register address)
   into OP[0] on TINY cores, which lack ADIW, SBIW and LDD.  Unlike the
   avr_asm_len-based helpers above, this returns the asm template and
   stores the sequence length in *L.  */

static const char*
avr_out_movsi_r_mr_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_dest == reg_base)
    {
      /* DEST and the pointer coincide: load downwards from the top
         byte, buffering byte B in r0 until the pointer is done with.  */
      /* "ld r26,-X" is undefined */
      return *l = 9, (TINY_ADIW (%E1, %F1, 3) CR_TAB
		      "ld %D0,%1"             CR_TAB
		      "ld %C0,-%1"            CR_TAB
		      "ld __tmp_reg__,-%1"    CR_TAB
		      TINY_SBIW (%E1, %F1, 1) CR_TAB
		      "ld %A0,%1"             CR_TAB
		      "mov %B0,__tmp_reg__");
    }
  else if (reg_dest == reg_base - 2)
    {
      /* The upper half of DEST overlaps the pointer: buffer byte C
         in r0 until byte D has been loaded.  */
      return *l = 5, ("ld %A0,%1+"            CR_TAB
		      "ld %B0,%1+"            CR_TAB
		      "ld __tmp_reg__,%1+"    CR_TAB
		      "ld %D0,%1"             CR_TAB
		      "mov %C0,__tmp_reg__");
    }
  else if (reg_unused_after (insn, base))
    {
      /* Pointer is dead after this insn: no need to restore it.  */
      return *l = 4, ("ld %A0,%1+"    CR_TAB
		      "ld %B0,%1+"    CR_TAB
		      "ld %C0,%1+"    CR_TAB
		      "ld %D0,%1");
    }
  else
    {
      /* Restore the original pointer value after the four loads.  */
      return *l = 6, ("ld %A0,%1+"    CR_TAB
		      "ld %B0,%1+"    CR_TAB
		      "ld %C0,%1+"    CR_TAB
		      "ld %D0,%1"     CR_TAB
		      TINY_SBIW (%E1, %F1, 3));
    }
}
3834 
3835 
/* Load an SImode value from memory OP[1] (a reg+displacement address)
   into OP[0] on TINY cores, which lack ADIW, SBIW and LDD.  Returns the
   asm template and stores the sequence length in *L.  */

static const char*
avr_out_movsi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (XEXP (base, 0));

  if (reg_dest == reg_base)
    {
      /* DEST and the pointer coincide: load downwards from the top
         byte, buffering byte B in r0 until the pointer is done with.  */
      /* "ld r26,-X" is undefined */
      return *l = 9, (TINY_ADIW (%I1, %J1, %o1+3) CR_TAB
                      "ld %D0,%b1"                CR_TAB
                      "ld %C0,-%b1"               CR_TAB
                      "ld __tmp_reg__,-%b1"       CR_TAB
                      TINY_SBIW (%I1, %J1, 1)     CR_TAB
                      "ld %A0,%b1"                CR_TAB
                      "mov %B0,__tmp_reg__");
    }
  else if (reg_dest == reg_base - 2)
    {
      /* The upper half of DEST overlaps the pointer: buffer byte C
         in r0 until byte D has been loaded.  */
      return *l = 7, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+"             CR_TAB
                      "ld %B0,%b1+"             CR_TAB
                      "ld __tmp_reg__,%b1+"     CR_TAB
                      "ld %D0,%b1"              CR_TAB
                      "mov %C0,__tmp_reg__");
    }
  else if (reg_unused_after (insn, XEXP (base, 0)))
    {
      /* Pointer is dead after this insn: skip the restore.  */
      return *l = 6, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+"             CR_TAB
                      "ld %B0,%b1+"             CR_TAB
                      "ld %C0,%b1+"             CR_TAB
                      "ld %D0,%b1");
    }
  else
    {
      /* Restore the original pointer value after the four loads.  */
      return *l = 8, (TINY_ADIW (%I1, %J1, %o1)  CR_TAB
                      "ld %A0,%b1+"              CR_TAB
                      "ld %B0,%b1+"              CR_TAB
                      "ld %C0,%b1+"              CR_TAB
                      "ld %D0,%b1"               CR_TAB
                      TINY_SBIW (%I1, %J1, %o1+3));
    }
}
3883 
/* Output an SImode (4-byte) load from memory into a register:
   OP[0] = destination REG, OP[1] = source MEM.  Handles plain register
   addresses, (reg + displacement), pre-decrement, post-increment and
   constant addresses.  If L is non-NULL, *L receives the number of
   output instructions; the assembler template is returned.  */

static const char*
out_movsi_r_mr (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      /* Reduced-core devices have their own sequences (no LDD/STD).  */
      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_no_disp_tiny (insn, op, l);

      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
	    /* Destination overlaps X itself: load top-down via
	       __tmp_reg__ so X survives until its last use.  */
	    /* "ld r26,-X" is undefined */
	    return *l=7, ("adiw r26,3"        CR_TAB
			  "ld r29,X"          CR_TAB
			  "ld r28,-X"         CR_TAB
			  "ld __tmp_reg__,-X" CR_TAB
			  "sbiw r26,1"        CR_TAB
			  "ld r26,X"          CR_TAB
			  "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            /* %C0/%D0 are X: buffer byte C in __tmp_reg__ and load
               byte D (r27) last.  */
            return *l=5, ("ld %A0,X+"          CR_TAB
                          "ld %B0,X+"          CR_TAB
                          "ld __tmp_reg__,X+"  CR_TAB
                          "ld %D0,X"           CR_TAB
                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            /* X dies here — no need to restore it.  */
            return  *l=4, ("ld %A0,X+"  CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X+" CR_TAB
                           "ld %D0,X");
          else
            return  *l=5, ("ld %A0,X+"  CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X+" CR_TAB
                           "ld %D0,X"  CR_TAB
                           "sbiw r26,3");
        }
      else
        {
          /* Y or Z base: LDD with displacement is available, so the
             pointer never needs adjusting; only operand overlap between
             destination and base has to be worked around.  */
          if (reg_dest == reg_base)
            return *l=5, ("ldd %D0,%1+3" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd __tmp_reg__,%1+1"  CR_TAB
                          "ld %A0,%1"  CR_TAB
                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            return *l=5, ("ld %A0,%1"             CR_TAB
                          "ldd %B0,%1+1"          CR_TAB
                          "ldd __tmp_reg__,%1+2"  CR_TAB
                          "ldd %D0,%1+3"          CR_TAB
                          "mov %C0,__tmp_reg__");
          else
            return *l=4, ("ld %A0,%1"    CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd %D0,%1+3");
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_disp_tiny (insn, op, l);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
	{
	  /* Displacement out of LDD range: only Y is expected here.  */
	  if (REGNO (XEXP (base, 0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  /* Slightly out of range: shift Y by ADIW and use the top of
	     the LDD window (offsets 60..63).  */
	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
	    return *l = 6, ("adiw r28,%o1-60" CR_TAB
			    "ldd %A0,Y+60"    CR_TAB
			    "ldd %B0,Y+61"    CR_TAB
			    "ldd %C0,Y+62"    CR_TAB
			    "ldd %D0,Y+63"    CR_TAB
			    "sbiw r28,%o1-60");

	  /* Far out of range: full 16-bit add/subtract of Y.  */
	  return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
			  "sbci r29,hi8(-%o1)" CR_TAB
			  "ld %A0,Y"           CR_TAB
			  "ldd %B0,Y+1"        CR_TAB
			  "ldd %C0,Y+2"        CR_TAB
			  "ldd %D0,Y+3"        CR_TAB
			  "subi r28,lo8(%o1)"  CR_TAB
			  "sbci r29,hi8(%o1)");
	}

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
	{
	  /* R = (X + d) */
	  if (reg_dest == REG_X)
	    {
	      *l = 7;
	      /* "ld r26,-X" is undefined */
	      return ("adiw r26,%o1+3"    CR_TAB
		      "ld r29,X"          CR_TAB
		      "ld r28,-X"         CR_TAB
		      "ld __tmp_reg__,-X" CR_TAB
		      "sbiw r26,1"        CR_TAB
		      "ld r26,X"          CR_TAB
		      "mov r27,__tmp_reg__");
	    }
	  *l = 6;
	  if (reg_dest == REG_X - 2)
	    /* Destination is r24..r27: keep r26 in __tmp_reg__ until
	       last, since it is the pointer's low byte.  */
	    return ("adiw r26,%o1"      CR_TAB
		    "ld r24,X+"         CR_TAB
		    "ld r25,X+"         CR_TAB
		    "ld __tmp_reg__,X+" CR_TAB
		    "ld r27,X"          CR_TAB
		    "mov r26,__tmp_reg__");

	  return ("adiw r26,%o1" CR_TAB
		  "ld %A0,X+"    CR_TAB
		  "ld %B0,X+"    CR_TAB
		  "ld %C0,X+"    CR_TAB
		  "ld %D0,X"     CR_TAB
		  "sbiw r26,%o1+3");
	}
      /* Y/Z + small displacement: plain LDDs, ordered to cope with
         destination/base overlap.  */
      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1"          CR_TAB
                      "ldd %C0,%C1"          CR_TAB
                      "ldd __tmp_reg__,%B1"  CR_TAB
                      "ldd %A0,%A1"          CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1"          CR_TAB
                      "ldd %B0,%B1"          CR_TAB
                      "ldd __tmp_reg__,%C1"  CR_TAB
                      "ldd %D0,%D1"          CR_TAB
                      "mov %C0,__tmp_reg__");
      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB
                    "ldd %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB
		  "ld %C0,%1" CR_TAB
		  "ld %B0,%1" CR_TAB
		  "ld %A0,%1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB
		  "ld %B0,%1" CR_TAB
		  "ld %C0,%1" CR_TAB
		  "ld %D0,%1");
  else if (CONSTANT_ADDRESS_P (base))
    {
      if (io_address_operand (base, SImode))
        {
          /* Address maps into I/O space: use IN instructions.  */
          *l = 4;
          return ("in %A0,%i1"   CR_TAB
                  "in %B0,%i1+1" CR_TAB
                  "in %C0,%i1+2" CR_TAB
                  "in %D0,%i1+3");
        }
      else
        {
          /* LDS is 1 word on reduced cores, 2 words otherwise.  */
          *l = AVR_TINY ? 4 : 8;
          return ("lds %A0,%m1"   CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB
                  "lds %D0,%m1+3");
        }
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4064 
/* AVR_TINY:  Output an SImode (4-byte) store to memory addressed by a
   plain pointer register:  OP[0] = destination MEM, OP[1] = source REG.
   Sets *L to the number of output instructions and returns the
   template.  %E0/%F0 name the two bytes of the pointer register.  */

static const char*
avr_out_movsi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  if (reg_base == reg_src)
    {
      /* Source %A1/%B1 are the pointer itself: store %A1 before the
	 pointer moves, park %B1 in __tmp_reg__, then advance.  */
	  /* "ld r26,-X" is undefined */
      if (reg_unused_after (insn, base))
        {
          return *l = 7, ("mov __tmp_reg__, %B1"  CR_TAB
			  "st %0,%A1"             CR_TAB
			  TINY_ADIW (%E0, %F0, 1) CR_TAB
			  "st %0+,__tmp_reg__"    CR_TAB
			  "st %0+,%C1"            CR_TAB
			  "st %0+,%D1");
        }
      else
        {
          /* Same sequence, plus restoring the pointer afterwards.  */
          return *l = 9, ("mov __tmp_reg__, %B1"  CR_TAB
			  "st %0,%A1"             CR_TAB
			  TINY_ADIW (%E0, %F0, 1) CR_TAB
			  "st %0+,__tmp_reg__"    CR_TAB
			  "st %0+,%C1"            CR_TAB
			  "st %0+,%D1"            CR_TAB
			  TINY_SBIW (%E0, %F0, 3));
        }
    }
  else if (reg_base == reg_src + 2)
    {
      /* Source %C1/%D1 are the pointer: save them in __zero_reg__ and
	 __tmp_reg__ before the stores advance the pointer, then
	 restore __zero_reg__ to zero.  */
      if (reg_unused_after (insn, base))
	return *l = 7, ("mov __zero_reg__,%C1" CR_TAB
                        "mov __tmp_reg__,%D1"  CR_TAB
                        "st %0+,%A1"           CR_TAB
                        "st %0+,%B1"           CR_TAB
                        "st %0+,__zero_reg__"  CR_TAB
                        "st %0,__tmp_reg__"    CR_TAB
                        "clr __zero_reg__");
      else
	return *l = 9, ("mov __zero_reg__,%C1" CR_TAB
			"mov __tmp_reg__,%D1"  CR_TAB
			"st %0+,%A1"           CR_TAB
			"st %0+,%B1"           CR_TAB
			"st %0+,__zero_reg__"  CR_TAB
			"st %0,__tmp_reg__"    CR_TAB
			"clr __zero_reg__"     CR_TAB
			TINY_SBIW (%E0, %F0, 3));
    }

  /* No overlap: straight ascending stores, then restore the pointer.
     NOTE(review): unlike the branches above, this path emits the
     restoring SBIW unconditionally, even when the base register is
     unused after INSN — looks like a missed optimization; confirm.  */
  return *l = 6, ("st %0+,%A1" CR_TAB
		  "st %0+,%B1" CR_TAB
		  "st %0+,%C1" CR_TAB
		  "st %0,%D1"  CR_TAB
		  TINY_SBIW (%E0, %F0, 3));
}
4124 
/* AVR_TINY:  Output an SImode (4-byte) store where the destination
   address is (pointer register + displacement):  OP[0] = destination
   MEM with PLUS address, OP[1] = source REG.  Sets *L and returns the
   template.

   NOTE(review): the two overlap branches below substitute operand 2
   (%A2 .. %D2) although this function only reads op[0]/op[1], and the
   second branch stores %A0/%B0 — letter-bytes of the MEM operand —
   where %A1/%B1 (source bytes) would be expected.  Verify against the
   movsi insn pattern that provides the operand vector.  */

static const char*
avr_out_movsi_mr_r_reg_disp_tiny (rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src =true_regnum (src);

  if (reg_base == reg_src)
    {
      /* Source low half overlaps the pointer: copy it to the fixed
         scratch registers before adjusting the pointer.  */
      *l = 11;
      return ("mov __tmp_reg__,%A2"        CR_TAB
              "mov __zero_reg__,%B2"       CR_TAB
              TINY_ADIW (%I0, %J0, %o0)    CR_TAB
              "st %b0+,__tmp_reg__"        CR_TAB
              "st %b0+,__zero_reg__"       CR_TAB
              "st %b0+,%C2"                CR_TAB
              "st %b0,%D2"                 CR_TAB
              "clr __zero_reg__"           CR_TAB
              TINY_SBIW (%I0, %J0, %o0+3));
    }
  else if (reg_src == reg_base - 2)
    {
      /* Source high half overlaps the pointer: buffer it in the
         scratch registers before adjusting the pointer.  */
      *l = 11;
      return ("mov __tmp_reg__,%C2"         CR_TAB
              "mov __zero_reg__,%D2"        CR_TAB
              TINY_ADIW (%I0, %J0, %o0)     CR_TAB
              "st %b0+,%A0"                 CR_TAB
              "st %b0+,%B0"                 CR_TAB
              "st %b0+,__tmp_reg__"         CR_TAB
              "st %b0,__zero_reg__"         CR_TAB
              "clr __zero_reg__"            CR_TAB
              TINY_SBIW (%I0, %J0, %o0+3));
    }
  /* No overlap: advance, store the four bytes, restore the pointer.  */
  *l = 8;
  return (TINY_ADIW (%I0, %J0, %o0)     CR_TAB
          "st %b0+,%A1"                 CR_TAB
          "st %b0+,%B1"                 CR_TAB
          "st %b0+,%C1"                 CR_TAB
          "st %b0,%D1"                  CR_TAB
          TINY_SBIW (%I0, %J0, %o0+3));
}
4168 
/* Output an SImode (4-byte) store from a register to memory:
   OP[0] = destination MEM, OP[1] = source REG.  Handles constant
   addresses, plain register addresses, (reg + displacement),
   pre-decrement and post-increment.  If L is non-NULL, *L receives the
   number of output instructions; the template is returned.  */

static const char*
out_movsi_mr_r (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    {
      if (io_address_operand (base, SImode))
        {
          /* Address maps into I/O space: use OUT instructions.  */
          return *l=4,("out %i0, %A1"  CR_TAB
                       "out %i0+1,%B1" CR_TAB
                       "out %i0+2,%C1" CR_TAB
                       "out %i0+3,%D1");
        }
      else
        {
          /* STS is 1 word on reduced cores, 2 words otherwise.  */
          *l = AVR_TINY ? 4 : 8;
          return ("sts %m0,%A1"   CR_TAB
                  "sts %m0+1,%B1" CR_TAB
                  "sts %m0+2,%C1" CR_TAB
                  "sts %m0+3,%D1");
        }
    }

  if (reg_base > 0)                 /* (r) */
    {
      /* Reduced-core devices have their own sequences (no LDD/STD).  */
      if (AVR_TINY)
        return avr_out_movsi_mr_r_reg_no_disp_tiny (insn, op, l);

      if (reg_base == REG_X)                /* (R26) */
        {
          if (reg_src == REG_X)
            {
	      /* Source r26..r29 overlaps X: store r26 before X moves
		 and keep r27 in __tmp_reg__ until the pointer has
		 advanced past it.  */
	      /* "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
		return *l=6, ("mov __tmp_reg__,r27" CR_TAB
			      "st X,r26"            CR_TAB
			      "adiw r26,1"          CR_TAB
			      "st X+,__tmp_reg__"   CR_TAB
			      "st X+,r28"           CR_TAB
			      "st X,r29");
              else
                return *l=7, ("mov __tmp_reg__,r27" CR_TAB
			      "st X,r26"            CR_TAB
			      "adiw r26,1"          CR_TAB
			      "st X+,__tmp_reg__"   CR_TAB
			      "st X+,r28"           CR_TAB
			      "st X,r29"            CR_TAB
			      "sbiw r26,3");
            }
          else if (reg_base == reg_src + 2)
            {
              /* Source high half (%C1/%D1) is X: save both bytes into
                 the fixed scratch registers first, then zero
                 __zero_reg__ again afterwards.  */
              if (reg_unused_after (insn, base))
                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1"  CR_TAB
                              "st %0+,%A1"           CR_TAB
                              "st %0+,%B1"           CR_TAB
                              "st %0+,__zero_reg__"  CR_TAB
                              "st %0,__tmp_reg__"    CR_TAB
                              "clr __zero_reg__");
              else
                return *l=8, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1"  CR_TAB
                              "st %0+,%A1"           CR_TAB
                              "st %0+,%B1"           CR_TAB
                              "st %0+,__zero_reg__"  CR_TAB
                              "st %0,__tmp_reg__"    CR_TAB
                              "clr __zero_reg__"     CR_TAB
                              "sbiw r26,3");
            }
          return *l=5, ("st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,%C1" CR_TAB
                        "st %0,%D1"  CR_TAB
                        "sbiw r26,3");
        }
      else
        /* Y or Z base: STD with displacement, no pointer adjustment.  */
        return *l=4, ("st %0,%A1"    CR_TAB
		      "std %0+1,%B1" CR_TAB
		      "std %0+2,%C1" CR_TAB
		      "std %0+3,%D1");
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movsi_mr_r_reg_disp_tiny (op, l);

      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
	{
	  /* Displacement out of STD range: only Y is expected here.  */
	  if (reg_base != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  /* Slightly out of range: shift Y by ADIW and use the top of
	     the STD window (offsets 60..63).  */
	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 6, ("adiw r28,%o0-60" CR_TAB
			    "std Y+60,%A1"    CR_TAB
			    "std Y+61,%B1"    CR_TAB
			    "std Y+62,%C1"    CR_TAB
			    "std Y+63,%D1"    CR_TAB
			    "sbiw r28,%o0-60");

	  /* Far out of range: full 16-bit add/subtract of Y.  */
	  return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
			  "sbci r29,hi8(-%o0)" CR_TAB
			  "st Y,%A1"           CR_TAB
			  "std Y+1,%B1"        CR_TAB
			  "std Y+2,%C1"        CR_TAB
			  "std Y+3,%D1"        CR_TAB
			  "subi r28,lo8(%o0)"  CR_TAB
			  "sbci r29,hi8(%o0)");
	}
      if (reg_base == REG_X)
	{
	  /* (X + d) = R */
	  if (reg_src == REG_X)
	    {
	      /* Source r26..r29 is X itself: save r26/r27 into the
		 fixed scratch registers before ADIW clobbers them.  */
	      *l = 9;
	      return ("mov __tmp_reg__,r26"  CR_TAB
		      "mov __zero_reg__,r27" CR_TAB
		      "adiw r26,%o0"         CR_TAB
		      "st X+,__tmp_reg__"    CR_TAB
		      "st X+,__zero_reg__"   CR_TAB
		      "st X+,r28"            CR_TAB
		      "st X,r29"             CR_TAB
		      "clr __zero_reg__"     CR_TAB
		      "sbiw r26,%o0+3");
	    }
	  else if (reg_src == REG_X - 2)
	    {
	      /* Source r24..r27 overlaps X's bytes r26/r27: same
		 scratch-register trick for the upper half.  */
	      *l = 9;
	      return ("mov __tmp_reg__,r26"  CR_TAB
		      "mov __zero_reg__,r27" CR_TAB
		      "adiw r26,%o0"         CR_TAB
		      "st X+,r24"            CR_TAB
		      "st X+,r25"            CR_TAB
		      "st X+,__tmp_reg__"    CR_TAB
		      "st X,__zero_reg__"    CR_TAB
		      "clr __zero_reg__"     CR_TAB
		      "sbiw r26,%o0+3");
	    }
	  *l = 6;
	  return ("adiw r26,%o0" CR_TAB
		  "st X+,%A1"    CR_TAB
		  "st X+,%B1"    CR_TAB
		  "st X+,%C1"    CR_TAB
		  "st X,%D1"     CR_TAB
		  "sbiw r26,%o0+3");
	}
      /* Y/Z + small displacement: plain STDs.  */
      return *l=4, ("std %A0,%A1" CR_TAB
		    "std %B0,%B1" CR_TAB
		    "std %C0,%C1" CR_TAB
		    "std %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("st %0,%D1" CR_TAB
		  "st %0,%C1" CR_TAB
		  "st %0,%B1" CR_TAB
		  "st %0,%A1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("st %0,%A1" CR_TAB
		  "st %0,%B1" CR_TAB
		  "st %0,%C1" CR_TAB
		  "st %0,%D1");
  fatal_insn ("unknown move insn:",insn);
  return "";
}
4344 
4345 const char *
4346 output_movsisf (rtx_insn *insn, rtx operands[], int *l)
4347 {
4348   int dummy;
4349   rtx dest = operands[0];
4350   rtx src = operands[1];
4351   int *real_l = l;
4352 
4353   if (avr_mem_flash_p (src)
4354       || avr_mem_flash_p (dest))
4355     {
4356       return avr_out_lpm (insn, operands, real_l);
4357     }
4358 
4359   if (!l)
4360     l = &dummy;
4361 
4362   gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));
4363   if (REG_P (dest))
4364     {
4365       if (REG_P (src)) /* mov r,r */
4366 	{
4367 	  if (true_regnum (dest) > true_regnum (src))
4368 	    {
4369 	      if (AVR_HAVE_MOVW)
4370 		{
4371 		  *l = 2;
4372 		  return ("movw %C0,%C1" CR_TAB
4373 			  "movw %A0,%A1");
4374 		}
4375 	      *l = 4;
4376 	      return ("mov %D0,%D1" CR_TAB
4377 		      "mov %C0,%C1" CR_TAB
4378 		      "mov %B0,%B1" CR_TAB
4379 		      "mov %A0,%A1");
4380 	    }
4381 	  else
4382 	    {
4383 	      if (AVR_HAVE_MOVW)
4384 		{
4385 		  *l = 2;
4386 		  return ("movw %A0,%A1" CR_TAB
4387 			  "movw %C0,%C1");
4388 		}
4389 	      *l = 4;
4390 	      return ("mov %A0,%A1" CR_TAB
4391 		      "mov %B0,%B1" CR_TAB
4392 		      "mov %C0,%C1" CR_TAB
4393 		      "mov %D0,%D1");
4394 	    }
4395 	}
4396       else if (CONSTANT_P (src))
4397 	{
4398           return output_reload_insisf (operands, NULL_RTX, real_l);
4399         }
4400       else if (MEM_P (src))
4401 	return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
4402     }
4403   else if (MEM_P (dest))
4404     {
4405       const char *templ;
4406 
4407       if (src == CONST0_RTX (GET_MODE (dest)))
4408 	  operands[1] = zero_reg_rtx;
4409 
4410       templ = out_movsi_mr_r (insn, operands, real_l);
4411 
4412       if (!real_l)
4413 	output_asm_insn (templ, operands);
4414 
4415       operands[1] = src;
4416       return "";
4417     }
4418   fatal_insn ("invalid insn:", insn);
4419   return "";
4420 }
4421 
4422 
4423 /* Handle loads of 24-bit types from memory to register.  */
4424 
4425 static const char*
4426 avr_out_load_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
4427 {
4428   rtx dest = op[0];
4429   rtx src = op[1];
4430   rtx base = XEXP (src, 0);
4431   int reg_dest = true_regnum (dest);
4432   int reg_base = true_regnum (base);
4433 
4434   if (reg_base == reg_dest)
4435     {
4436       return avr_asm_len (TINY_ADIW (%E1, %F1, 2)   CR_TAB
4437                           "ld %C0,%1"               CR_TAB
4438                           "ld __tmp_reg__,-%1"      CR_TAB
4439                           TINY_SBIW (%E1, %F1, 1)   CR_TAB
4440                           "ld %A0,%1"               CR_TAB
4441                           "mov %B0,__tmp_reg__", op, plen, -8);
4442     }
4443   else
4444     {
4445       return avr_asm_len ("ld %A0,%1+"  CR_TAB
4446                           "ld %B0,%1+"  CR_TAB
4447                           "ld %C0,%1", op, plen, -3);
4448 
4449       if (reg_dest != reg_base - 2 &&
4450           !reg_unused_after (insn, base))
4451         {
4452           avr_asm_len (TINY_SBIW (%E1, %F1, 2), op, plen, 2);
4453         }
4454       return "";
4455     }
4456 }
4457 
4458 static const char*
4459 avr_out_load_psi_reg_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
4460 {
4461   rtx dest = op[0];
4462   rtx src = op[1];
4463   rtx base = XEXP (src, 0);
4464   int reg_dest = true_regnum (dest);
4465   int reg_base = true_regnum (base);
4466 
4467   reg_base = true_regnum (XEXP (base, 0));
4468   if (reg_base == reg_dest)
4469     {
4470       return avr_asm_len (TINY_ADIW (%I1, %J1, %o1+2) CR_TAB
4471                           "ld %C0,%b1"                CR_TAB
4472                           "ld __tmp_reg__,-%b1"       CR_TAB
4473                           TINY_SBIW (%I1, %J1, 1)     CR_TAB
4474                           "ld %A0,%b1"                CR_TAB
4475                           "mov %B0,__tmp_reg__", op, plen, -8);
4476    }
4477   else
4478     {
4479       avr_asm_len (TINY_ADIW (%I1, %J1, %o1)   CR_TAB
4480                           "ld %A0,%b1+"              CR_TAB
4481                           "ld %B0,%b1+"              CR_TAB
4482                           "ld %C0,%b1", op, plen, -5);
4483 
4484       if (reg_dest != (reg_base - 2)
4485           && !reg_unused_after (insn, XEXP (base, 0)))
4486           avr_asm_len (TINY_SBIW (%I1, %J1, %o1+2), op, plen, 2);
4487 
4488       return "";
4489     }
4490 }
4491 
/* Output a 24-bit (PSImode) load from memory into a register:
   OP[0] = destination REG, OP[1] = source MEM.  Handles plain register
   addresses, (reg + displacement), pre-decrement, post-increment and
   constant addresses.  Returns the template; *PLEN accumulates the
   length in words via avr_asm_len.  */

static const char*
avr_out_load_psi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base > 0)
    {
      /* Reduced-core devices have their own sequences (no LDD/STD).  */
      if (AVR_TINY)
        return avr_out_load_psi_reg_no_disp_tiny (insn, op, plen);

      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
            /* Destination overlaps X: load top-down, keeping the middle
               byte in __tmp_reg__ until r26 has been read last.  */
            /* "ld r26,-X" is undefined */
            return avr_asm_len ("adiw r26,2"        CR_TAB
                                "ld r28,X"          CR_TAB
                                "ld __tmp_reg__,-X" CR_TAB
                                "sbiw r26,1"        CR_TAB
                                "ld r26,X"          CR_TAB
                                "mov r27,__tmp_reg__", op, plen, -6);
          else
            {
              avr_asm_len ("ld %A0,X+" CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X", op, plen, -3);

              /* Restore X unless it died here or was clobbered by the
                 load (reg_dest == REG_X - 2 puts %C0 on r26).  */
              if (reg_dest != REG_X - 2
                  && !reg_unused_after (insn, base))
                {
                  avr_asm_len ("sbiw r26,2", op, plen, 1);
                }

              return "";
            }
        }
      else /* reg_base != REG_X */
        {
          if (reg_dest == reg_base)
            /* Destination overlaps the base: load top-down via
               __tmp_reg__.  */
            return avr_asm_len ("ldd %C0,%1+2"          CR_TAB
                                "ldd __tmp_reg__,%1+1"  CR_TAB
                                "ld  %A0,%1"            CR_TAB
                                "mov %B0,__tmp_reg__", op, plen, -4);
          else
            return avr_asm_len ("ld  %A0,%1"    CR_TAB
                                "ldd %B0,%1+1"  CR_TAB
                                "ldd %C0,%1+2", op, plen, -3);
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_load_psi_reg_disp_tiny (insn, op, plen);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement out of LDD range: only Y is expected here.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Slightly out of range: shift Y by ADIW and use the top of
             the LDD window (offsets 61..63).  */
          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
                                "ldd %A0,Y+61"    CR_TAB
                                "ldd %B0,Y+62"    CR_TAB
                                "ldd %C0,Y+63"    CR_TAB
                                "sbiw r28,%o1-61", op, plen, -5);

          /* Far out of range: full 16-bit add/subtract of Y.  */
          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld  %A0,Y"           CR_TAB
                              "ldd %B0,Y+1"        CR_TAB
                              "ldd %C0,Y+2"        CR_TAB
                              "subi r28,lo8(%o1)"  CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -7);
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              /* "ld r26,-X" is undefined */
              return avr_asm_len ("adiw r26,%o1+2"     CR_TAB
                                  "ld  r28,X"          CR_TAB
                                  "ld  __tmp_reg__,-X" CR_TAB
                                  "sbiw r26,1"         CR_TAB
                                  "ld  r26,X"          CR_TAB
                                  "mov r27,__tmp_reg__", op, plen, -6);
            }

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %A0,X+"    CR_TAB
                       "ld %B0,X+"    CR_TAB
                       "ld %C0,X", op, plen, -4);

          /* NOTE(review): REG_W here presumably equals REG_X - 2,
             mirroring the reg_dest != REG_X - 2 test in the
             no-displacement path above — confirm against the register
             definitions.  */
          if (reg_dest != REG_W
              && !reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);

          return "";
        }

      if (reg_dest == reg_base)
        return avr_asm_len ("ldd %C0,%C1" CR_TAB
                            "ldd __tmp_reg__,%B1"  CR_TAB
                            "ldd %A0,%A1" CR_TAB
                            "mov %B0,__tmp_reg__", op, plen, -4);

        return avr_asm_len ("ldd %A0,%A1" CR_TAB
                            "ldd %B0,%B1" CR_TAB
                            "ldd %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("ld %C0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("ld %A0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %C0,%1", op, plen, -3);

  else if (CONSTANT_ADDRESS_P (base))
    {
      /* LDS is 1 word on reduced cores, 2 words otherwise.  */
      int n_words = AVR_TINY ? 3 : 6;
      return avr_asm_len ("lds %A0,%m1" CR_TAB
                          "lds %B0,%m1+1" CR_TAB
                          "lds %C0,%m1+2", op, plen , -n_words);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4629 
4630 
4631 static const char*
4632 avr_out_store_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
4633 {
4634   rtx dest = op[0];
4635   rtx src = op[1];
4636   rtx base = XEXP (dest, 0);
4637   int reg_base = true_regnum (base);
4638   int reg_src = true_regnum (src);
4639 
4640   if (reg_base == reg_src)
4641     {
4642       avr_asm_len ("st %0,%A1"              CR_TAB
4643                    "mov __tmp_reg__,%B1"    CR_TAB
4644                    TINY_ADIW (%E0, %F0, 1)  CR_TAB /* st X+, r27 is undefined */
4645                    "st %0+,__tmp_reg__"     CR_TAB
4646                    "st %0,%C1", op, plen, -6);
4647 
4648     }
4649   else if (reg_src == reg_base - 2)
4650     {
4651       avr_asm_len ("st %0,%A1"              CR_TAB
4652                    "mov __tmp_reg__,%C1"    CR_TAB
4653                    TINY_ADIW (%E0, %F0, 1)  CR_TAB
4654                    "st %0+,%B1"             CR_TAB
4655                    "st %0,__tmp_reg__", op, plen, 6);
4656     }
4657   else
4658     {
4659       avr_asm_len ("st %0+,%A1"  CR_TAB
4660                    "st %0+,%B1" CR_TAB
4661                    "st %0,%C1", op, plen, -3);
4662     }
4663 
4664   if (!reg_unused_after (insn, base))
4665     avr_asm_len (TINY_SBIW (%E0, %F0, 2), op, plen, 2);
4666 
4667   return "";
4668 }
4669 
/* AVR_TINY:  Output a 24-bit (PSImode) store where the destination
   address is (pointer register + displacement):  OP[0] = destination
   MEM with PLUS address, OP[1] = source REG.  Returns the template;
   *PLEN accumulates the length in words via avr_asm_len.  */

static const char*
avr_out_store_psi_reg_disp_tiny (rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  if (reg_src == reg_base)
    {
      /* Source low half is the pointer: copy %A1/%B1 into the fixed
         scratch registers before ADIW clobbers them, and re-zero
         __zero_reg__ afterwards.  */
      return avr_asm_len ("mov __tmp_reg__,%A1"          CR_TAB
                          "mov __zero_reg__,%B1"         CR_TAB
                          TINY_ADIW (%I0, %J0, %o0)      CR_TAB
                          "st %b0+,__tmp_reg__"          CR_TAB
                          "st %b0+,__zero_reg__"         CR_TAB
                          "st %b0,%C1"                   CR_TAB
                          "clr __zero_reg__"             CR_TAB
                          TINY_SBIW (%I0, %J0, %o0+2), op, plen, -10);
    }
  else if (reg_src == reg_base - 2)
    {
      /* %C1 is the pointer's low byte: buffer it in __tmp_reg__
         before the pointer is adjusted.  */
      return avr_asm_len ("mov __tmp_reg__,%C1"          CR_TAB
                          TINY_ADIW (%I0, %J0, %o0)      CR_TAB
                          "st %b0+,%A1"                  CR_TAB
                          "st %b0+,%B1"                  CR_TAB
                          "st %b0,__tmp_reg__"           CR_TAB
                          TINY_SBIW (%I0, %J0, %o0+2), op, plen, -8);
    }

  /* No overlap: advance, store the three bytes, restore the pointer.  */
  return avr_asm_len (TINY_ADIW (%I0, %J0, %o0)      CR_TAB
                          "st %b0+,%A1"                  CR_TAB
                          "st %b0+,%B1"                  CR_TAB
                          "st %b0,%C1"                   CR_TAB
                          TINY_SBIW (%I0, %J0, %o0+2), op, plen, -7);
}
4706 
4707 /* Handle store of 24-bit type from register or zero to memory.  */
4708 
static const char*
avr_out_store_psi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* Absolute address: "sts" is 1 word on AVR_TINY, 2 words
         otherwise, hence 3 resp. 6 words for the three bytes.  */
      int n_words = AVR_TINY ? 3 : 6;
      return avr_asm_len ("sts %m0,%A1"   CR_TAB
                          "sts %m0+1,%B1" CR_TAB
                          "sts %m0+2,%C1", op, plen, -n_words);
    }

  if (reg_base > 0)                 /* (r) */
    {
      if (AVR_TINY)
        return avr_out_store_psi_reg_no_disp_tiny (insn, op, plen);

      if (reg_base == REG_X)        /* (R26) */
        {
          /* X has no displacement addressing: store with post-
             increment, then undo the increment if X is still live.  */
          gcc_assert (!reg_overlap_mentioned_p (base, src));

          avr_asm_len ("st %0+,%A1"  CR_TAB
                       "st %0+,%B1" CR_TAB
                       "st %0,%C1", op, plen, -3);

          if (!reg_unused_after (insn, base))
            avr_asm_len ("sbiw r26,2", op, plen, 1);

          return "";
        }
      else
        return avr_asm_len ("st %0,%A1"    CR_TAB
                            "std %0+1,%B1" CR_TAB
                            "std %0+2,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_store_psi_reg_disp_tiny (op, plen);

      reg_base = REGNO (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement out of range for "std" (0..63); only Y
             supports the adjust-store-restore fallback.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-61" CR_TAB
                                "std Y+61,%A1"    CR_TAB
                                "std Y+62,%B1"    CR_TAB
                                "std Y+63,%C1"    CR_TAB
                                "sbiw r28,%o0-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%A1"           CR_TAB
                              "std Y+1,%B1"        CR_TAB
                              "std Y+2,%C1"        CR_TAB
                              "subi r28,lo8(%o0)"  CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -7);
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));

          avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1"    CR_TAB
                       "st X+,%B1"    CR_TAB
                       "st X,%C1", op, plen, -4);

          if (!reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %A0,%A1" CR_TAB
                          "std %B0,%B1" CR_TAB
                          "std %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("st %0,%C1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%A1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("st %0,%A1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%C1", op, plen, -3);

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4809 
4810 
4811 /* Move around 24-bit stuff.  */
4812 
const char *
avr_out_movpsi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];

  /* Accesses to program memory must go through LPM.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, op, plen);
    }

  if (register_operand (dest, VOIDmode))
    {
      if (register_operand (src, VOIDmode)) /* mov r,r */
        {
          /* Copy bytes in an order that never clobbers a source byte
             that is still needed: high-to-low when DEST > SRC, else
             low-to-high.  */
          if (true_regnum (dest) > true_regnum (src))
            {
              avr_asm_len ("mov %C0,%C1", op, plen, -1);

              if (AVR_HAVE_MOVW)
                return avr_asm_len ("movw %A0,%A1", op, plen, 1);
              else
                return avr_asm_len ("mov %B0,%B1"  CR_TAB
                                    "mov %A0,%A1", op, plen, 2);
            }
          else
            {
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %A0,%A1", op, plen, -1);
              else
                avr_asm_len ("mov %A0,%A1"  CR_TAB
                             "mov %B0,%B1", op, plen, -2);

              return avr_asm_len ("mov %C0,%C1", op, plen, 1);
            }
        }
      else if (CONSTANT_P (src))
        {
          return avr_out_reload_inpsi (op, NULL_RTX, plen);
        }
      else if (MEM_P (src))
        return avr_out_load_psi (insn, op, plen); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      /* A constant-0 source is stored from the fixed zero register.  */
      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return avr_out_store_psi (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);
  return "";
}
4870 
/* AVR_TINY helper: store a QI register SRC to memory addressed by
   (base-reg + displacement).  AVR_TINY has no "std", so the base is
   advanced with TINY_ADIW, and restored afterwards if still live.  */

static const char*
avr_out_movqi_mr_r_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  /* If SRC overlaps the base register, park it in __tmp_reg__ before
     the base is advanced by the displacement.  */
  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
    {
      avr_asm_len ("mov __tmp_reg__,%1"      CR_TAB
                   TINY_ADIW (%I0, %J0, %o0) CR_TAB
                   "st %b0,__tmp_reg__", op, plen, -4);
    }
    else
    {
      avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
          "st %b0,%1" , op, plen, -3);
    }

  /* Undo the base adjustment if the base register is still live.  */
  if (!reg_unused_after (insn, XEXP (x,0)))
      avr_asm_len (TINY_SBIW (%I0, %J0, %o0), op, plen, 2);

  return "";
}
4895 
/* Output asm to store an 8-bit value SRC (op[1]) to memory DEST (op[0]).
   Returns "".  If PLEN != NULL, only accumulate the instruction count.  */

static const char*
out_movqi_mr_r (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* Prefer "out" for I/O addresses; "sts" is 1 word on AVR_TINY,
         2 words otherwise.  */
      int n_words = AVR_TINY ? 1 : 2;
      return io_address_operand (x, QImode)
        ? avr_asm_len ("out %i0,%1", op, plen, -1)
        : avr_asm_len ("sts %m0,%1", op, plen, -n_words);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (AVR_TINY)
        return avr_out_movqi_mr_r_reg_disp_tiny (insn, op, plen);

      /* Displacement too large for "std" (max. 63)?  Only Y allows
         the adjust-store-restore fallback.  */
      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
        {
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
                                "std Y+63,%1"     CR_TAB
                                "sbiw r28,%o0-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%1"            CR_TAB
                              "subi r28,lo8(%o0)"  CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -5);
        }
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          /* X has no displacement addressing: adjust X, store, and
             undo the adjustment if X is still live.  Save SRC first
             if it overlaps X.  */
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                           "adiw r26,%o0"       CR_TAB
                           "st X,__tmp_reg__", op, plen, -3);
            }
          else
            {
              avr_asm_len ("adiw r26,%o0" CR_TAB
                           "st X,%1", op, plen, -2);
            }

          if (!reg_unused_after (insn, XEXP (x,0)))
            avr_asm_len ("sbiw r26,%o0", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %0,%1", op, plen, -1);
    }

  return avr_asm_len ("st %0,%1", op, plen, -1);
}
4962 
4963 
4964 /* Helper for the next function for XMEGA.  It does the same
4965    but with low byte first.  */
4966 
static const char*
avr_out_movhi_mr_r_xmega (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* Absolute address: prefer "out" for I/O addresses.  */
      return io_address_operand (base, HImode)
        ? avr_asm_len ("out %i0,%A1" CR_TAB
                       "out %i0+1,%B1", op, plen, -2)

        : avr_asm_len ("sts %m0,%A1" CR_TAB
                       "sts %m0+1,%B1", op, plen, -4);
    }

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                     "st X,r26"            CR_TAB
                     "adiw r26,1"          CR_TAB
                     "st X,__tmp_reg__", op, plen, -4);
      else
        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      /* Restore X only if it is still live.  */
      return reg_unused_after (insn, base)
        ? ""
        : avr_asm_len ("sbiw r26,1", op, plen, 1);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement out of "std" range; only Y supports the
             adjust-store-restore fallback.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1"    CR_TAB
                           "std Y+63,%B1"    CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "st Y,%A1"           CR_TAB
                           "std Y+1,%B1"        CR_TAB
                           "subi r28,lo8(%o0)"  CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26"  CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0"         CR_TAB
                       "st X+,__tmp_reg__"    CR_TAB
                       "st X,__zero_reg__"    CR_TAB
                       "clr __zero_reg__"     CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1"    CR_TAB
                       "st X,%B1"     CR_TAB
                       "sbiw r26,%o0+1", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      /* Volatile: emulate pre-decrement with low byte written first.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2"  CR_TAB
                       "st X+,%A1"   CR_TAB
                       "st X,%B1"    CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r0,2"  CR_TAB
                       "st %p0,%A1"  CR_TAB
                       "std %p0+1,%B1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      return avr_asm_len ("st %0,%A1"  CR_TAB
                          "st %0,%B1", op, plen, -2);

    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
5076 
/* AVR_TINY helper: store HI register SRC to memory addressed by a
   plain base register (no displacement).  */

static const char*
avr_out_movhi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  /* SRC and base are the same register pair: save the high byte in
     __tmp_reg__ before the first store clobbers it.  */
  if (reg_base == reg_src)
    {
      return !mem_volatile_p && reg_unused_after (insn, src)
        ? avr_asm_len ("mov __tmp_reg__,%B1"   CR_TAB
                       "st %0,%A1"             CR_TAB
                       TINY_ADIW (%E0, %F0, 1) CR_TAB
                       "st %0,__tmp_reg__", op, plen, -5)
        : avr_asm_len ("mov __tmp_reg__,%B1"   CR_TAB
                       TINY_ADIW (%E0, %F0, 1) CR_TAB
                       "st %0,__tmp_reg__"      CR_TAB
                       TINY_SBIW (%E0, %F0, 1) CR_TAB
                       "st %0, %A1", op, plen, -7);
    }

  /* Post-increment is cheapest when the base register dies here;
     volatile accesses write the high byte first instead.  */
  return !mem_volatile_p && reg_unused_after (insn, base)
      ? avr_asm_len ("st %0+,%A1" CR_TAB
                     "st %0,%B1", op, plen, -2)
      : avr_asm_len (TINY_ADIW (%E0, %F0, 1) CR_TAB
                     "st %0,%B1"             CR_TAB
                     "st -%0,%A1", op, plen, -4);
}
5108 
/* AVR_TINY helper: store HI register SRC to (base-reg + displacement).
   Writes the high byte at disp+1, the low byte at disp, then restores
   the base register.  */

static const char*
avr_out_movhi_mr_r_reg_disp_tiny (rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  /* If SRC overlaps the base, park it in __tmp_reg__ / __zero_reg__
     before the base is advanced; __zero_reg__ is cleared again.  */
  return reg_src == reg_base
        ? avr_asm_len ("mov __tmp_reg__,%A1"          CR_TAB
                       "mov __zero_reg__,%B1"         CR_TAB
                       TINY_ADIW (%I0, %J0, %o0+1)    CR_TAB
                       "st %b0,__zero_reg__"          CR_TAB
                       "st -%b0,__tmp_reg__"          CR_TAB
                       "clr __zero_reg__"             CR_TAB
                       TINY_SBIW (%I0, %J0, %o0), op, plen, -9)

        : avr_asm_len (TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
                       "st %b0,%B1"                CR_TAB
                       "st -%b0,%A1"               CR_TAB
                       TINY_SBIW (%I0, %J0, %o0), op, plen, -6);
}
5132 
/* AVR_TINY helper for a volatile (R++) HI store: write the high byte
   at R+1, then the low byte at R, then advance the base by 2 to
   realize the post-increment.  */

static const char*
avr_out_movhi_mr_r_post_inc_tiny (rtx op[], int *plen)
{
  return avr_asm_len (TINY_ADIW (%I0, %J0, 1)  CR_TAB
                      "st %p0,%B1"    CR_TAB
                      "st -%p0,%A1"   CR_TAB
                      TINY_ADIW (%I0, %J0, 2), op, plen, -6);
}
5141 
/* Output asm to store a 16-bit value SRC (op[1]) to memory DEST (op[0]).
   Returns "".  If PLEN != NULL, only accumulate the instruction count.  */

static const char*
out_movhi_mr_r (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p;

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like SP.  */

  if (AVR_XMEGA)
    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* Absolute address; "sts" is 1 word on AVR_TINY, 2 otherwise.  */
      int n_words = AVR_TINY ? 2 : 4;
      return io_address_operand (base, HImode)
        ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                       "out %i0,%A1", op, plen, -2)

        : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                       "sts %m0,%A1", op, plen, -n_words);
    }

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_no_disp_tiny (insn, op, plen);

      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,r26"            CR_TAB
                         "adiw r26,1"          CR_TAB
                         "st X,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "adiw r26,1"          CR_TAB
                         "st X,__tmp_reg__"    CR_TAB
                         "sbiw r26,1"          CR_TAB
                         "st X,r26", op, plen, -5);

      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1"   CR_TAB
                       "st -X,%A1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_disp_tiny (op, plen);

      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Displacement out of "std" range; only Y supports the
             adjust-store-restore fallback.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1"    CR_TAB
                           "std Y+62,%A1"    CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1"        CR_TAB
                           "st Y,%A1"           CR_TAB
                           "subi r28,lo8(%o0)"  CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26"  CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1"       CR_TAB
                       "st X,__zero_reg__"    CR_TAB
                       "st -X,__tmp_reg__"    CR_TAB
                       "clr __zero_reg__"     CR_TAB
                       "sbiw r26,%o0", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                       "st X,%B1"       CR_TAB
                       "st -X,%A1"      CR_TAB
                       "sbiw r26,%o0", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1"  CR_TAB
                            "st %0,%B1", op, plen, -2);

      if (AVR_TINY)
        return avr_out_movhi_mr_r_post_inc_tiny (op, plen);

      /* Volatile: emulate post-increment with high byte first.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1"  CR_TAB
                       "st X,%B1"    CR_TAB
                       "st -X,%A1"   CR_TAB
                       "adiw r26,2", op, plen, -4)

        : avr_asm_len ("std %p0+1,%B1" CR_TAB
                       "st %p0,%A1"    CR_TAB
                       "adiw %r0,2", op, plen, -3);
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
5274 
5275 /* Return 1 if frame pointer for current function required.  */
5276 
5277 static bool
5278 avr_frame_pointer_required_p (void)
5279 {
5280   return (cfun->calls_alloca
5281           || cfun->calls_setjmp
5282           || cfun->has_nonlocal_label
5283           || crtl->args.info.nregs == 0
5284           || get_frame_size () > 0);
5285 }
5286 
5287 /* Returns the condition of compare insn INSN, or UNKNOWN.  */
5288 
5289 static RTX_CODE
5290 compare_condition (rtx_insn *insn)
5291 {
5292   rtx_insn *next = next_real_insn (insn);
5293 
5294   if (next && JUMP_P (next))
5295     {
5296       rtx pat = PATTERN (next);
5297       rtx src = SET_SRC (pat);
5298 
5299       if (IF_THEN_ELSE == GET_CODE (src))
5300         return GET_CODE (XEXP (src, 0));
5301     }
5302 
5303   return UNKNOWN;
5304 }
5305 
5306 
5307 /* Returns true iff INSN is a tst insn that only tests the sign.  */
5308 
5309 static bool
5310 compare_sign_p (rtx_insn *insn)
5311 {
5312   RTX_CODE cond = compare_condition (insn);
5313   return (cond == GE || cond == LT);
5314 }
5315 
5316 
5317 /* Returns true iff the next insn is a JUMP_INSN with a condition
5318    that needs to be swapped (GT, GTU, LE, LEU).  */
5319 
5320 static bool
5321 compare_diff_p (rtx_insn *insn)
5322 {
5323   RTX_CODE cond = compare_condition (insn);
5324   return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
5325 }
5326 
5327 /* Returns true iff INSN is a compare insn with the EQ or NE condition.  */
5328 
5329 static bool
5330 compare_eq_p (rtx_insn *insn)
5331 {
5332   RTX_CODE cond = compare_condition (insn);
5333   return (cond == EQ || cond == NE);
5334 }
5335 
5336 
5337 /* Output compare instruction
5338 
5339       compare (XOP[0], XOP[1])
5340 
5341    for a register XOP[0] and a compile-time constant XOP[1].  Return "".
5342    XOP[2] is an 8-bit scratch register as needed.
5343 
5344    PLEN == NULL:  Output instructions.
5345    PLEN != NULL:  Set *PLEN to the length (in words) of the sequence.
5346                   Don't output anything.  */
5347 
const char*
avr_out_compare (rtx_insn *insn, rtx *xop, int *plen)
{
  /* Register to compare and value to compare against. */
  rtx xreg = xop[0];
  rtx xval = xop[1];

  /* MODE of the comparison.  */
  machine_mode mode;

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (GET_MODE (xreg));

  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown.  */
  int clobber_val = -1;

  /* Map fixed mode operands to integer operands with the same binary
     representation.  They are easier to handle in the remainder.  */

  if (CONST_FIXED_P (xval))
    {
      xreg = avr_to_int_mode (xop[0]);
      xval = avr_to_int_mode (xop[1]);
    }

  mode = GET_MODE (xreg);

  gcc_assert (REG_P (xreg));
  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));

  if (plen)
    *plen = 0;

  /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
     against 0 by ORing the bytes.  This is one instruction shorter.
     Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
     and therefore don't use this.  */

  if (!test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && reg_unused_after (insn, xreg))
    {
      if (xval == const1_rtx)
        {
          /* x == 1  <=>  (x-1) | high bytes == 0  */
          avr_asm_len ("dec %A0" CR_TAB
                       "or %A0,%B0", xop, plen, 2);

          if (n_bytes >= 3)
            avr_asm_len ("or %A0,%C0", xop, plen, 1);

          if (n_bytes >= 4)
            avr_asm_len ("or %A0,%D0", xop, plen, 1);

          return "";
        }
      else if (xval == constm1_rtx)
        {
          /* x == -1  <=>  ~(x_A & x_B & ...) == 0  */
          if (n_bytes >= 4)
            avr_asm_len ("and %A0,%D0", xop, plen, 1);

          if (n_bytes >= 3)
            avr_asm_len ("and %A0,%C0", xop, plen, 1);

          return avr_asm_len ("and %A0,%B0" CR_TAB
                              "com %A0", xop, plen, 2);
        }
    }

  for (i = 0; i < n_bytes; i++)
    {
      /* We compare byte-wise.  */
      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to compare with this byte.  */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      xop[0] = reg8;
      xop[1] = gen_int_mode (val8, QImode);

      /* Word registers >= R24 can use SBIW/ADIW with 0..63.  */

      if (i == 0
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);

          if (IN_RANGE (val16, 0, 63)
              && (val8 == 0
                  || reg_unused_after (insn, xreg)))
            {
              if (AVR_TINY)
                avr_asm_len (TINY_SBIW (%A0, %B0, %1), xop, plen, 2);
              else
                avr_asm_len ("sbiw %0,%1", xop, plen, 1);

              /* SBIW handled two bytes at once; skip the next one.  */
              i++;
              continue;
            }

          if (n_bytes == 2
              && IN_RANGE (val16, -63, -1)
              && compare_eq_p (insn)
              && reg_unused_after (insn, xreg))
            {
              /* For EQ / NE only:  x == -N  <=>  x + N == 0.  */
              return AVR_TINY
                  ? avr_asm_len (TINY_ADIW (%A0, %B0, %n1), xop, plen, 2)
                  : avr_asm_len ("adiw %0,%n1", xop, plen, 1);
            }
        }

      /* Comparing against 0 is easy.  */

      if (val8 == 0)
        {
          avr_asm_len (i == 0
                       ? "cp %0,__zero_reg__"
                       : "cpc %0,__zero_reg__", xop, plen, 1);
          continue;
        }

      /* Upper registers can compare and subtract-with-carry immediates.
         Notice that compare instructions do the same as respective subtract
         instruction; the only difference is that comparisons don't write
         the result back to the target register.  */

      if (ld_reg_p)
        {
          if (i == 0)
            {
              avr_asm_len ("cpi %0,%1", xop, plen, 1);
              continue;
            }
          else if (reg_unused_after (insn, xreg))
            {
              avr_asm_len ("sbci %0,%1", xop, plen, 1);
              continue;
            }
        }

      /* Must load the value into the scratch register.  */

      gcc_assert (REG_P (xop[2]));

      if (clobber_val != (int) val8)
        avr_asm_len ("ldi %2,%1", xop, plen, 1);
      clobber_val = (int) val8;

      avr_asm_len (i == 0
                   ? "cp %0,%2"
                   : "cpc %0,%2", xop, plen, 1);
    }

  return "";
}
5507 
5508 
5509 /* Prepare operands of compare_const_di2 to be used with avr_out_compare.  */
5510 
5511 const char*
5512 avr_out_compare64 (rtx_insn *insn, rtx *op, int *plen)
5513 {
5514   rtx xop[3];
5515 
5516   xop[0] = gen_rtx_REG (DImode, 18);
5517   xop[1] = op[0];
5518   xop[2] = op[1];
5519 
5520   return avr_out_compare (insn, xop, plen);
5521 }
5522 
5523 /* Output test instruction for HImode.  */
5524 
5525 const char*
5526 avr_out_tsthi (rtx_insn *insn, rtx *op, int *plen)
5527 {
5528   if (compare_sign_p (insn))
5529     {
5530       avr_asm_len ("tst %B0", op, plen, -1);
5531     }
5532   else if (reg_unused_after (insn, op[0])
5533            && compare_eq_p (insn))
5534     {
5535       /* Faster than sbiw if we can clobber the operand.  */
5536       avr_asm_len ("or %A0,%B0", op, plen, -1);
5537     }
5538   else
5539     {
5540       avr_out_compare (insn, op, plen);
5541     }
5542 
5543   return "";
5544 }
5545 
5546 
5547 /* Output test instruction for PSImode.  */
5548 
5549 const char*
5550 avr_out_tstpsi (rtx_insn *insn, rtx *op, int *plen)
5551 {
5552   if (compare_sign_p (insn))
5553     {
5554       avr_asm_len ("tst %C0", op, plen, -1);
5555     }
5556   else if (reg_unused_after (insn, op[0])
5557            && compare_eq_p (insn))
5558     {
5559       /* Faster than sbiw if we can clobber the operand.  */
5560       avr_asm_len ("or %A0,%B0" CR_TAB
5561                    "or %A0,%C0", op, plen, -2);
5562     }
5563   else
5564     {
5565       avr_out_compare (insn, op, plen);
5566     }
5567 
5568   return "";
5569 }
5570 
5571 
5572 /* Output test instruction for SImode.  */
5573 
5574 const char*
5575 avr_out_tstsi (rtx_insn *insn, rtx *op, int *plen)
5576 {
5577   if (compare_sign_p (insn))
5578     {
5579       avr_asm_len ("tst %D0", op, plen, -1);
5580     }
5581   else if (reg_unused_after (insn, op[0])
5582            && compare_eq_p (insn))
5583     {
5584       /* Faster than sbiw if we can clobber the operand.  */
5585       avr_asm_len ("or %A0,%B0" CR_TAB
5586                    "or %A0,%C0" CR_TAB
5587                    "or %A0,%D0", op, plen, -3);
5588     }
5589   else
5590     {
5591       avr_out_compare (insn, op, plen);
5592     }
5593 
5594   return "";
5595 }
5596 
5597 
5598 /* Generate asm equivalent for various shifts.  This only handles cases
5599    that are not already carefully hand-optimized in ?sh??i3_out.
5600 
5601    OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
5602    OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
5603    OPERANDS[3] is a QImode scratch register from LD regs if
5604                available and SCRATCH, otherwise (no scratch available)
5605 
5606    TEMPL is an assembler template that shifts by one position.
5607    T_LEN is the length of this template.  */
5608 
void
out_shift_with_cnt (const char *templ, rtx_insn *insn, rtx operands[],
		    int *plen, int t_len)
{
  bool second_label = true;
  bool saved_in_tmp = false;
  bool use_zero_reg = false;
  rtx op[5];

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (plen)
    *plen = 0;

  if (CONST_INT_P (operands[2]))
    {
      /* Constant shift count: emit the template COUNT times inline
         when short enough, otherwise set up a counted loop.  */
      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
          return;

      if (count < 8 && !scratch)
        use_zero_reg = true;

      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster.  */

          while (count-- > 0)
            avr_asm_len (templ, op, plen, t_len);

          return;
        }

      if (scratch)
        {
          /* Load the loop counter into the scratch register.  */
          avr_asm_len ("ldi %3,%2", op, plen, 1);
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);
        }

      second_label = false;
    }
  else if (MEM_P (op[2]))
    {
      /* Shift count in memory: load it into __tmp_reg__.  */
      rtx op_mov[2];

      op_mov[0] = op[3] = tmp_reg_rtx;
      op_mov[1] = op[2];

      out_movqi_r_mr (insn, op_mov, plen);
    }
  else if (register_operand (op[2], QImode))
    {
      /* Shift count in a register; copy it to __tmp_reg__ if it is
         still live afterwards or overlaps the shifted operand.  */
      op[3] = op[2];

      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  /* With a run-time count, enter at the decrement/test so that a
     count of 0 performs no shift at all.  */
  if (second_label)
      avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

  if (second_label)
    avr_asm_len ("2:", op, plen, 0);

  /* The __zero_reg__ trick shifts the single set bit right until the
     counter reads 0; otherwise decrement and loop while non-zero.  */
  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

  if (saved_in_tmp)
    avr_asm_len ("mov %3,%4", op, plen, 1);
}
5719 
5720 
/* 8bit shift left ((char)x << i)

   Return the assembler template for the shift; if LEN is non-NULL,
   store the instruction count there.  Constant counts get the
   hand-optimized sequences below; anything else is delegated to
   out_shift_with_cnt.  */

const char *
ashlqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      /* Make sure there is always somewhere to store the length.  */
      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* Count >= 8: the result is zero.  */
	  *len = 1;
	  return "clr %0";

	case 1:
	  *len = 1;
	  return "lsl %0";

	case 2:
	  *len = 2;
	  return ("lsl %0" CR_TAB
		  "lsl %0");

	case 3:
	  *len = 3;
	  return ("lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0");

	case 4:
	  /* On LD regs, swap nibbles and mask beats four single shifts.  */
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 2;
	      return ("swap %0" CR_TAB
		      "andi %0,0xf0");
	    }
	  *len = 4;
	  return ("lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0");

	case 5:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 3;
	      return ("swap %0" CR_TAB
		      "lsl %0"  CR_TAB
		      "andi %0,0xe0");
	    }
	  *len = 5;
	  return ("lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0");

	case 6:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 4;
	      return ("swap %0" CR_TAB
		      "lsl %0"  CR_TAB
		      "lsl %0"  CR_TAB
		      "andi %0,0xc0");
	    }
	  *len = 6;
	  return ("lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0");

	case 7:
	  /* Rotate bit 0 into carry, clear, rotate carry into bit 7.  */
	  *len = 3;
	  return ("ror %0" CR_TAB
		  "clr %0" CR_TAB
		  "ror %0");
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  /* Variable or memory-resident shift count: generic shift loop.  */
  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);
  return "";
}
5816 
5817 
/* 16bit shift left ((short)x << i)

   %A0/%B0 are the low/high byte of operand 0.  Return the assembler
   template; if LEN is non-NULL, store the instruction count there.  */

const char *
ashlhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern supplies a scratch register as %3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      /* Remember the caller's LEN (possibly NULL) so it can be restored
	 before falling back to out_shift_with_cnt.  */
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 16)
	    break;

	  /* Count >= 16: result is zero.  */
	  *len = 2;
	  return ("clr %B0" CR_TAB
		  "clr %A0");

	case 4:
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (ldi_ok)
	    {
	      /* Nibble-swap both bytes, then mask and merge.  */
	      *len = 6;
	      return ("swap %A0"      CR_TAB
		      "swap %B0"      CR_TAB
		      "andi %B0,0xf0" CR_TAB
		      "eor %B0,%A0"   CR_TAB
		      "andi %A0,0xf0" CR_TAB
		      "eor %B0,%A0");
	    }
	  if (scratch)
	    {
	      *len = 7;
	      return ("swap %A0"    CR_TAB
		      "swap %B0"    CR_TAB
		      "ldi %3,0xf0" CR_TAB
		      "and %B0,%3"      CR_TAB
		      "eor %B0,%A0" CR_TAB
		      "and %A0,%3"      CR_TAB
		      "eor %B0,%A0");
	    }
	  break;  /* optimize_size ? 6 : 8 */

	case 5:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  if (ldi_ok)
	    {
	      /* Shift by 1, then use the nibble-swap trick for 4.  */
	      *len = 8;
	      return ("lsl %A0"       CR_TAB
		      "rol %B0"       CR_TAB
		      "swap %A0"      CR_TAB
		      "swap %B0"      CR_TAB
		      "andi %B0,0xf0" CR_TAB
		      "eor %B0,%A0"   CR_TAB
		      "andi %A0,0xf0" CR_TAB
		      "eor %B0,%A0");
	    }
	  if (scratch)
	    {
	      *len = 9;
	      return ("lsl %A0"     CR_TAB
		      "rol %B0"     CR_TAB
		      "swap %A0"    CR_TAB
		      "swap %B0"    CR_TAB
		      "ldi %3,0xf0" CR_TAB
		      "and %B0,%3"      CR_TAB
		      "eor %B0,%A0" CR_TAB
		      "and %A0,%3"      CR_TAB
		      "eor %B0,%A0");
	    }
	  break;  /* 10 */

	case 6:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  /* Implement << 6 as >> 2 into __tmp_reg__ plus a byte move.  */
	  *len = 9;
	  return ("clr __tmp_reg__" CR_TAB
		  "lsr %B0"         CR_TAB
		  "ror %A0"         CR_TAB
		  "ror __tmp_reg__" CR_TAB
		  "lsr %B0"         CR_TAB
		  "ror %A0"         CR_TAB
		  "ror __tmp_reg__" CR_TAB
		  "mov %B0,%A0"     CR_TAB
		  "mov %A0,__tmp_reg__");

	case 7:
	  /* << 7 == byte move combined with >> 1.  */
	  *len = 5;
	  return ("lsr %B0"     CR_TAB
		  "mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "ror %B0"     CR_TAB
		  "ror %A0");

	case 8:
	  /* Whole-byte shift: move low byte of source to high byte.  */
	  return *len = 2, ("mov %B0,%A1" CR_TAB
			    "clr %A0");

	case 9:
	  *len = 3;
	  return ("mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "lsl %B0");

	case 10:
	  *len = 4;
	  return ("mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0");

	case 11:
	  *len = 5;
	  return ("mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0");

	case 12:
	  if (ldi_ok)
	    {
	      *len = 4;
	      return ("mov %B0,%A0" CR_TAB
		      "clr %A0"     CR_TAB
		      "swap %B0"    CR_TAB
		      "andi %B0,0xf0");
	    }
	  if (scratch)
	    {
	      *len = 5;
	      return ("mov %B0,%A0" CR_TAB
		      "clr %A0"     CR_TAB
		      "swap %B0"    CR_TAB
		      "ldi %3,0xf0" CR_TAB
		      "and %B0,%3");
	    }
	  *len = 6;
	  return ("mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0");

	case 13:
	  if (ldi_ok)
	    {
	      *len = 5;
	      return ("mov %B0,%A0" CR_TAB
		      "clr %A0"     CR_TAB
		      "swap %B0"    CR_TAB
		      "lsl %B0"     CR_TAB
		      "andi %B0,0xe0");
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      /* Multiply by 0x20 puts the result's high byte in r0.
		 MUL clobbers r1 (__zero_reg__), so restore it.  */
	      *len = 5;
	      return ("ldi %3,0x20" CR_TAB
		      "mul %A0,%3"  CR_TAB
		      "mov %B0,r0"  CR_TAB
		      "clr %A0"     CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (scratch)
	    {
	      *len = 6;
	      return ("mov %B0,%A0" CR_TAB
		      "clr %A0"     CR_TAB
		      "swap %B0"    CR_TAB
		      "lsl %B0"     CR_TAB
		      "ldi %3,0xe0" CR_TAB
		      "and %B0,%3");
	    }
	  if (AVR_HAVE_MUL)
	    {
	      /* Build the constant 0x20 in r1 via SET/BLD, then MUL.  */
	      *len = 6;
	      return ("set"            CR_TAB
		      "bld r1,5"   CR_TAB
		      "mul %A0,r1" CR_TAB
		      "mov %B0,r0" CR_TAB
		      "clr %A0"    CR_TAB
		      "clr __zero_reg__");
	    }
	  *len = 7;
	  return ("mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0");

	case 14:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return ("ldi %B0,0x40" CR_TAB
		      "mul %A0,%B0"  CR_TAB
		      "mov %B0,r0"   CR_TAB
		      "clr %A0"      CR_TAB
		      "clr __zero_reg__");
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      *len = 5;
	      return ("ldi %3,0x40" CR_TAB
		      "mul %A0,%3"  CR_TAB
		      "mov %B0,r0"  CR_TAB
		      "clr %A0"     CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && ldi_ok)
	    {
	      /* Size-optimized: small explicit shift loop.  */
	      *len = 5;
	      return ("mov %B0,%A0" CR_TAB
		      "ldi %A0,6" "\n1:\t"
		      "lsl %B0"     CR_TAB
		      "dec %A0"     CR_TAB
		      "brne 1b");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  /* << 14 == >> 2 with the bytes crossed.  */
	  *len = 6;
	  return ("clr %B0" CR_TAB
		  "lsr %A0" CR_TAB
		  "ror %B0" CR_TAB
		  "lsr %A0" CR_TAB
		  "ror %B0" CR_TAB
		  "clr %A0");

	case 15:
	  /* Only bit 0 survives; rotate it into bit 15.  */
	  *len = 4;
	  return ("clr %B0" CR_TAB
		  "lsr %A0" CR_TAB
		  "ror %B0" CR_TAB
		  "clr %A0");
	}
      /* Restore the caller's LEN before the generic fallback.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
  return "";
}
6073 
6074 
/* 24-bit shift left

   OP[0] is the destination, OP[1] the source and OP[2] the shift count.
   Return "" after emitting the code (or, with PLEN non-NULL, after
   accumulating the instruction count in *PLEN instead).  */

const char*
avr_out_ashlpsi3 (rtx_insn *insn, rtx *op, int *plen)
{
  if (plen)
    *plen = 0;

  if (CONST_INT_P (op[2]))
    {
      switch (INTVAL (op[2]))
        {
        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* Count >= 24: result is zero.  */
          return avr_asm_len ("clr %A0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %C0", op, plen, 3);

        case 8:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* Order the byte moves so an overlapping source is read
               before it is overwritten.  */
            if (reg0 >= reg1)
              return avr_asm_len ("mov %C0,%B1"  CR_TAB
                                  "mov %B0,%A1"  CR_TAB
                                  "clr %A0", op, plen, 3);
            else
              return avr_asm_len ("clr %A0"      CR_TAB
                                  "mov %B0,%A1"  CR_TAB
                                  "mov %C0,%B1", op, plen, 3);
          }

        case 16:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* If %C0 already is %A1, the move is redundant.  */
            if (reg0 + 2 != reg1)
              avr_asm_len ("mov %C0,%A0", op, plen, 1);

            return avr_asm_len ("clr %B0"  CR_TAB
                                "clr %A0", op, plen, 2);
          }

        case 23:
          /* Only bit 0 survives; rotate it into bit 23.  */
          return avr_asm_len ("clr %C0" CR_TAB
                              "lsr %A0" CR_TAB
                              "ror %C0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %A0", op, plen, 5);
        }
    }

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0", insn, op, plen, 3);
  return "";
}
6136 
6137 
/* 32bit shift left ((long)x << i)

   %A0..%D0 are the bytes of operand 0, low to high.  Return the
   assembler template; if LEN is non-NULL, store the instruction
   count there.  */

const char *
ashlsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      /* Remember the caller's LEN (possibly NULL) so it can be restored
	 before falling back to out_shift_with_cnt.  */
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* Count >= 32: result is zero.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 3, ("clr %D0" CR_TAB
			      "clr %C0" CR_TAB
			      "movw %A0,%C0");
	  *len = 4;
	  return ("clr %D0" CR_TAB
		  "clr %C0" CR_TAB
		  "clr %B0" CR_TAB
		  "clr %A0");

	case 8:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len = 4;
	    /* Order byte moves so an overlapping source is read first.  */
	    if (reg0 >= reg1)
	      return ("mov %D0,%C1"  CR_TAB
		      "mov %C0,%B1"  CR_TAB
		      "mov %B0,%A1"  CR_TAB
		      "clr %A0");
	    else
	      return ("clr %A0"      CR_TAB
		      "mov %B0,%A1"  CR_TAB
		      "mov %C0,%B1"  CR_TAB
		      "mov %D0,%C1");
	  }

	case 16:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    /* Upper word already in place when reg0 + 2 == reg1.  */
	    if (reg0 + 2 == reg1)
	      return *len = 2, ("clr %B0"      CR_TAB
				"clr %A0");
	    if (AVR_HAVE_MOVW)
	      return *len = 3, ("movw %C0,%A1" CR_TAB
				"clr %B0"      CR_TAB
				"clr %A0");
	    else
	      return *len = 4, ("mov %C0,%A1"  CR_TAB
				"mov %D0,%B1"  CR_TAB
				"clr %B0"      CR_TAB
				"clr %A0");
	  }

	case 24:
	  *len = 4;
	  return ("mov %D0,%A1"  CR_TAB
		  "clr %C0"      CR_TAB
		  "clr %B0"      CR_TAB
		  "clr %A0");

	case 31:
	  /* Only bit 0 survives; rotate it into bit 31.  */
	  *len = 6;
	  return ("clr %D0" CR_TAB
		  "lsr %A0" CR_TAB
		  "ror %D0" CR_TAB
		  "clr %C0" CR_TAB
		  "clr %B0" CR_TAB
		  "clr %A0");
	}
      /* Restore the caller's LEN before the generic fallback.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
  return "";
}
6226 
/* 8bit arithmetic shift right  ((signed char)x >> i)

   Return the assembler template; if LEN is non-NULL, store the
   instruction count there.  */

const char *
ashrqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	case 1:
	  *len = 1;
	  return "asr %0";

	case 2:
	  *len = 2;
	  return ("asr %0" CR_TAB
		  "asr %0");

	case 3:
	  *len = 3;
	  return ("asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0");

	case 4:
	  *len = 4;
	  return ("asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0");

	case 5:
	  *len = 5;
	  return ("asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0");

	case 6:
	  /* Save bit 6, spread the sign over the byte, restore bit 6
	     as the new bit 0.  */
	  *len = 4;
	  return ("bst %0,6"  CR_TAB
		  "lsl %0"    CR_TAB
		  "sbc %0,%0" CR_TAB
		  "bld %0,0");

	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* fall through */

	case 7:
	  /* Replicate the sign bit: LSL moves it into carry, then
	     SBC with itself yields 0x00 or 0xff.  */
	  *len = 2;
	  return ("lsl %0" CR_TAB
		  "sbc %0,%0");
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);
  return "";
}
6297 
6298 
/* 16bit arithmetic shift right  ((signed short)x >> i)

   %A0/%B0 are the low/high byte of operand 0.  Return the assembler
   template; if LEN is non-NULL, store the instruction count there.  */

const char *
ashrhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern supplies a scratch register as %3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      /* Remember the caller's LEN (possibly NULL) so it can be restored
	 before falling back to out_shift_with_cnt.  */
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	case 4:
	case 5:
	  /* XXX try to optimize this too? */
	  break;

	case 6:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  /* Implement >> 6 as << 2 into __tmp_reg__ plus byte moves,
	     with the sign spread via SBC.  */
	  *len = 8;
	  return ("mov __tmp_reg__,%A0" CR_TAB
		  "mov %A0,%B0"         CR_TAB
		  "lsl __tmp_reg__"     CR_TAB
		  "rol %A0"             CR_TAB
		  "sbc %B0,%B0"         CR_TAB
		  "lsl __tmp_reg__"     CR_TAB
		  "rol %A0"             CR_TAB
		  "rol %B0");

	case 7:
	  /* >> 7 == << 1 with bytes crossed and sign extension.  */
	  *len = 4;
	  return ("lsl %A0"     CR_TAB
		  "mov %A0,%B0" CR_TAB
		  "rol %A0"     CR_TAB
		  "sbc %B0,%B0");

	case 8:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    /* Whole-byte shift plus sign extension of the high byte.  */
	    if (reg0 == reg1)
	      return *len = 3, ("mov %A0,%B0" CR_TAB
				"lsl %B0"     CR_TAB
				"sbc %B0,%B0");
	    else
	      return *len = 4, ("mov %A0,%B1" CR_TAB
			        "clr %B0"     CR_TAB
			        "sbrc %A0,7"  CR_TAB
			        "dec %B0");
	  }

	case 9:
	  *len = 4;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0"      CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0");

	case 10:
	  *len = 5;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0"     CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0");

	case 11:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      /* MULS by 0x20 == signed shift; high byte lands in r1.
		 MUL clobbers r1 (__zero_reg__), so restore it.  */
	      *len = 5;
	      return ("ldi %A0,0x20" CR_TAB
		      "muls %B0,%A0" CR_TAB
		      "mov %A0,r1"   CR_TAB
		      "sbc %B0,%B0"  CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 6;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0"     CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0");

	case 12:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return ("ldi %A0,0x10" CR_TAB
		      "muls %B0,%A0" CR_TAB
		      "mov %A0,r1"   CR_TAB
		      "sbc %B0,%B0"  CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 7;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0"     CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0");

	case 13:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return ("ldi %A0,0x08" CR_TAB
		      "muls %B0,%A0" CR_TAB
		      "mov %A0,r1"   CR_TAB
		      "sbc %B0,%B0"  CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size)
	    break;  /* scratch ? 5 : 7 */
	  *len = 8;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0"     CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0");

	case 14:
	  /* Sign-extend first, then rotate the two remaining data
	     bits into place.  */
	  *len = 5;
	  return ("lsl %B0"     CR_TAB
		  "sbc %A0,%A0" CR_TAB
		  "lsl %B0"     CR_TAB
		  "mov %B0,%A0" CR_TAB
		  "rol %A0");

	default:
	  if (INTVAL (operands[2]) < 16)
	    break;

	  /* fall through */

	case 15:
	  /* Result is the sign bit replicated through both bytes.  */
	  return *len = 3, ("lsl %B0"     CR_TAB
			    "sbc %A0,%A0" CR_TAB
			    "mov %B0,%A0");
	}
      /* Restore the caller's LEN before the generic fallback.  */
      len = t;
    }
  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
6460 
6461 
6462 /* 24-bit arithmetic shift right */
6463 
6464 const char*
6465 avr_out_ashrpsi3 (rtx_insn *insn, rtx *op, int *plen)
6466 {
6467   int dest = REGNO (op[0]);
6468   int src = REGNO (op[1]);
6469 
6470   if (CONST_INT_P (op[2]))
6471     {
6472       if (plen)
6473         *plen = 0;
6474 
6475       switch (INTVAL (op[2]))
6476         {
6477         case 8:
6478           if (dest <= src)
6479             return avr_asm_len ("mov %A0,%B1" CR_TAB
6480                                 "mov %B0,%C1" CR_TAB
6481                                 "clr %C0"     CR_TAB
6482                                 "sbrc %B0,7"  CR_TAB
6483                                 "dec %C0", op, plen, 5);
6484           else
6485             return avr_asm_len ("clr %C0"     CR_TAB
6486                                 "sbrc %C1,7"  CR_TAB
6487                                 "dec %C0"     CR_TAB
6488                                 "mov %B0,%C1" CR_TAB
6489                                 "mov %A0,%B1", op, plen, 5);
6490 
6491         case 16:
6492           if (dest != src + 2)
6493             avr_asm_len ("mov %A0,%C1", op, plen, 1);
6494 
6495           return avr_asm_len ("clr %B0"     CR_TAB
6496                               "sbrc %A0,7"  CR_TAB
6497                               "com %B0"     CR_TAB
6498                               "mov %C0,%B0", op, plen, 4);
6499 
6500         default:
6501           if (INTVAL (op[2]) < 24)
6502             break;
6503 
6504           /* fall through */
6505 
6506         case 23:
6507           return avr_asm_len ("lsl %C0"     CR_TAB
6508                               "sbc %A0,%A0" CR_TAB
6509                               "mov %B0,%A0" CR_TAB
6510                               "mov %C0,%A0", op, plen, 4);
6511         } /* switch */
6512     }
6513 
6514   out_shift_with_cnt ("asr %C0" CR_TAB
6515                       "ror %B0" CR_TAB
6516                       "ror %A0", insn, op, plen, 3);
6517   return "";
6518 }
6519 
6520 
/* 32-bit arithmetic shift right  ((signed long)x >> i)

   %A0..%D0 are the bytes of operand 0, low to high.  Return the
   assembler template; if LEN is non-NULL, store the instruction
   count there.  */

const char *
ashrsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      /* Remember the caller's LEN (possibly NULL) so it can be restored
	 before falling back to out_shift_with_cnt.  */
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	case 8:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len=6;
	    /* Order byte moves so an overlapping source is read before
	       it is overwritten; sign-extend the new high byte.  */
	    if (reg0 <= reg1)
	      return ("mov %A0,%B1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "clr %D0"     CR_TAB
		      "sbrc %C0,7"  CR_TAB
		      "dec %D0");
	    else
	      return ("clr %D0"     CR_TAB
		      "sbrc %D1,7"  CR_TAB
		      "dec %D0"     CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %A0,%B1");
	  }

	case 16:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    /* Low word already in place when reg0 == reg1 + 2.  */
	    if (reg0 == reg1 + 2)
	      return *len = 4, ("clr %D0"     CR_TAB
				"sbrc %B0,7"  CR_TAB
				"com %D0"     CR_TAB
				"mov %C0,%D0");
	    if (AVR_HAVE_MOVW)
	      return *len = 5, ("movw %A0,%C1" CR_TAB
				"clr %D0"      CR_TAB
				"sbrc %B0,7"   CR_TAB
				"com %D0"      CR_TAB
				"mov %C0,%D0");
	    else
	      return *len = 6, ("mov %B0,%D1" CR_TAB
				"mov %A0,%C1" CR_TAB
				"clr %D0"     CR_TAB
				"sbrc %B0,7"  CR_TAB
				"com %D0"     CR_TAB
				"mov %C0,%D0");
	  }

	case 24:
	  return *len = 6, ("mov %A0,%D1" CR_TAB
			    "clr %D0"     CR_TAB
			    "sbrc %A0,7"  CR_TAB
			    "com %D0"     CR_TAB
			    "mov %B0,%D0" CR_TAB
			    "mov %C0,%D0");

	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* fall through */

	case 31:
	  /* Result is the sign bit replicated through all bytes.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 4, ("lsl %D0"     CR_TAB
			      "sbc %A0,%A0" CR_TAB
			      "mov %B0,%A0" CR_TAB
			      "movw %C0,%A0");
	  else
	    return *len = 5, ("lsl %D0"     CR_TAB
			      "sbc %A0,%A0" CR_TAB
			      "mov %B0,%A0" CR_TAB
			      "mov %C0,%A0" CR_TAB
			      "mov %D0,%A0");
	}
      /* Restore the caller's LEN before the generic fallback.  */
      len = t;
    }
  out_shift_with_cnt ("asr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
6617 
/* 8-bit logic shift right ((unsigned char)x >> i)

   Return the assembler template; if LEN is non-NULL, store the
   instruction count there.  */

const char *
lshrqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* Count >= 8: the result is zero.  */
	  *len = 1;
	  return "clr %0";

	case 1:
	  *len = 1;
	  return "lsr %0";

	case 2:
	  *len = 2;
	  return ("lsr %0" CR_TAB
		  "lsr %0");
	case 3:
	  *len = 3;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 4:
	  /* On LD regs, swap nibbles and mask beats four single shifts.  */
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len=2;
	      return ("swap %0" CR_TAB
		      "andi %0,0x0f");
	    }
	  *len = 4;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 5:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 3;
	      return ("swap %0" CR_TAB
		      "lsr %0"  CR_TAB
		      "andi %0,0x7");
	    }
	  *len = 5;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 6:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 4;
	      return ("swap %0" CR_TAB
		      "lsr %0"  CR_TAB
		      "lsr %0"  CR_TAB
		      "andi %0,0x3");
	    }
	  *len = 6;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 7:
	  /* Rotate bit 7 into carry, clear, rotate carry into bit 0.  */
	  *len = 3;
	  return ("rol %0" CR_TAB
		  "clr %0" CR_TAB
		  "rol %0");
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt ("lsr %0",
                      insn, operands, len, 1);
  return "";
}
6712 
6713 /* 16-bit logic shift right ((unsigned short)x >> i) */
6714 
6715 const char *
6716 lshrhi3_out (rtx_insn *insn, rtx operands[], int *len)
6717 {
6718   if (GET_CODE (operands[2]) == CONST_INT)
6719     {
6720       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
6721       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
6722       int k;
6723       int *t = len;
6724 
6725       if (!len)
6726 	len = &k;
6727 
6728       switch (INTVAL (operands[2]))
6729 	{
6730 	default:
6731 	  if (INTVAL (operands[2]) < 16)
6732 	    break;
6733 
6734 	  *len = 2;
6735 	  return ("clr %B0" CR_TAB
6736 		  "clr %A0");
6737 
6738 	case 4:
6739 	  if (optimize_size && scratch)
6740 	    break;  /* 5 */
6741 	  if (ldi_ok)
6742 	    {
6743 	      *len = 6;
6744 	      return ("swap %B0"      CR_TAB
6745 		      "swap %A0"      CR_TAB
6746 		      "andi %A0,0x0f" CR_TAB
6747 		      "eor %A0,%B0"   CR_TAB
6748 		      "andi %B0,0x0f" CR_TAB
6749 		      "eor %A0,%B0");
6750 	    }
6751 	  if (scratch)
6752 	    {
6753 	      *len = 7;
6754 	      return ("swap %B0"    CR_TAB
6755 		      "swap %A0"    CR_TAB
6756 		      "ldi %3,0x0f" CR_TAB
6757 		      "and %A0,%3"      CR_TAB
6758 		      "eor %A0,%B0" CR_TAB
6759 		      "and %B0,%3"      CR_TAB
6760 		      "eor %A0,%B0");
6761 	    }
6762 	  break;  /* optimize_size ? 6 : 8 */
6763 
6764 	case 5:
6765 	  if (optimize_size)
6766 	    break;  /* scratch ? 5 : 6 */
6767 	  if (ldi_ok)
6768 	    {
6769 	      *len = 8;
6770 	      return ("lsr %B0"       CR_TAB
6771 		      "ror %A0"       CR_TAB
6772 		      "swap %B0"      CR_TAB
6773 		      "swap %A0"      CR_TAB
6774 		      "andi %A0,0x0f" CR_TAB
6775 		      "eor %A0,%B0"   CR_TAB
6776 		      "andi %B0,0x0f" CR_TAB
6777 		      "eor %A0,%B0");
6778 	    }
6779 	  if (scratch)
6780 	    {
6781 	      *len = 9;
6782 	      return ("lsr %B0"     CR_TAB
6783 		      "ror %A0"     CR_TAB
6784 		      "swap %B0"    CR_TAB
6785 		      "swap %A0"    CR_TAB
6786 		      "ldi %3,0x0f" CR_TAB
6787 		      "and %A0,%3"      CR_TAB
6788 		      "eor %A0,%B0" CR_TAB
6789 		      "and %B0,%3"      CR_TAB
6790 		      "eor %A0,%B0");
6791 	    }
6792 	  break;  /* 10 */
6793 
6794 	case 6:
6795 	  if (optimize_size)
6796 	    break;  /* scratch ? 5 : 6 */
6797 	  *len = 9;
6798 	  return ("clr __tmp_reg__" CR_TAB
6799 		  "lsl %A0"         CR_TAB
6800 		  "rol %B0"         CR_TAB
6801 		  "rol __tmp_reg__" CR_TAB
6802 		  "lsl %A0"         CR_TAB
6803 		  "rol %B0"         CR_TAB
6804 		  "rol __tmp_reg__" CR_TAB
6805 		  "mov %A0,%B0"     CR_TAB
6806 		  "mov %B0,__tmp_reg__");
6807 
6808 	case 7:
6809 	  *len = 5;
6810 	  return ("lsl %A0"     CR_TAB
6811 		  "mov %A0,%B0" CR_TAB
6812 		  "rol %A0"     CR_TAB
6813 		  "sbc %B0,%B0" CR_TAB
6814 		  "neg %B0");
6815 
6816 	case 8:
6817 	  return *len = 2, ("mov %A0,%B1" CR_TAB
6818 			    "clr %B0");
6819 
6820 	case 9:
6821 	  *len = 3;
6822 	  return ("mov %A0,%B0" CR_TAB
6823 		  "clr %B0"     CR_TAB
6824 		  "lsr %A0");
6825 
6826 	case 10:
6827 	  *len = 4;
6828 	  return ("mov %A0,%B0" CR_TAB
6829 		  "clr %B0"     CR_TAB
6830 		  "lsr %A0"     CR_TAB
6831 		  "lsr %A0");
6832 
6833 	case 11:
6834 	  *len = 5;
6835 	  return ("mov %A0,%B0" CR_TAB
6836 		  "clr %B0"     CR_TAB
6837 		  "lsr %A0"     CR_TAB
6838 		  "lsr %A0"     CR_TAB
6839 		  "lsr %A0");
6840 
6841 	case 12:
6842 	  if (ldi_ok)
6843 	    {
6844 	      *len = 4;
6845 	      return ("mov %A0,%B0" CR_TAB
6846 		      "clr %B0"     CR_TAB
6847 		      "swap %A0"    CR_TAB
6848 		      "andi %A0,0x0f");
6849 	    }
6850 	  if (scratch)
6851 	    {
6852 	      *len = 5;
6853 	      return ("mov %A0,%B0" CR_TAB
6854 		      "clr %B0"     CR_TAB
6855 		      "swap %A0"    CR_TAB
6856 		      "ldi %3,0x0f" CR_TAB
6857 		      "and %A0,%3");
6858 	    }
6859 	  *len = 6;
6860 	  return ("mov %A0,%B0" CR_TAB
6861 		  "clr %B0"     CR_TAB
6862 		  "lsr %A0"     CR_TAB
6863 		  "lsr %A0"     CR_TAB
6864 		  "lsr %A0"     CR_TAB
6865 		  "lsr %A0");
6866 
6867 	case 13:
6868 	  if (ldi_ok)
6869 	    {
6870 	      *len = 5;
6871 	      return ("mov %A0,%B0" CR_TAB
6872 		      "clr %B0"     CR_TAB
6873 		      "swap %A0"    CR_TAB
6874 		      "lsr %A0"     CR_TAB
6875 		      "andi %A0,0x07");
6876 	    }
6877 	  if (AVR_HAVE_MUL && scratch)
6878 	    {
6879 	      *len = 5;
6880 	      return ("ldi %3,0x08" CR_TAB
6881 		      "mul %B0,%3"  CR_TAB
6882 		      "mov %A0,r1"  CR_TAB
6883 		      "clr %B0"     CR_TAB
6884 		      "clr __zero_reg__");
6885 	    }
6886 	  if (optimize_size && scratch)
6887 	    break;  /* 5 */
6888 	  if (scratch)
6889 	    {
6890 	      *len = 6;
6891 	      return ("mov %A0,%B0" CR_TAB
6892 		      "clr %B0"     CR_TAB
6893 		      "swap %A0"    CR_TAB
6894 		      "lsr %A0"     CR_TAB
6895 		      "ldi %3,0x07" CR_TAB
6896 		      "and %A0,%3");
6897 	    }
6898 	  if (AVR_HAVE_MUL)
6899 	    {
6900 	      *len = 6;
6901 	      return ("set"            CR_TAB
6902 		      "bld r1,3"   CR_TAB
6903 		      "mul %B0,r1" CR_TAB
6904 		      "mov %A0,r1" CR_TAB
6905 		      "clr %B0"    CR_TAB
6906 		      "clr __zero_reg__");
6907 	    }
6908 	  *len = 7;
6909 	  return ("mov %A0,%B0" CR_TAB
6910 		  "clr %B0"     CR_TAB
6911 		  "lsr %A0"     CR_TAB
6912 		  "lsr %A0"     CR_TAB
6913 		  "lsr %A0"     CR_TAB
6914 		  "lsr %A0"     CR_TAB
6915 		  "lsr %A0");
6916 
6917 	case 14:
6918 	  if (AVR_HAVE_MUL && ldi_ok)
6919 	    {
6920 	      *len = 5;
6921 	      return ("ldi %A0,0x04" CR_TAB
6922 		      "mul %B0,%A0"  CR_TAB
6923 		      "mov %A0,r1"   CR_TAB
6924 		      "clr %B0"      CR_TAB
6925 		      "clr __zero_reg__");
6926 	    }
6927 	  if (AVR_HAVE_MUL && scratch)
6928 	    {
6929 	      *len = 5;
6930 	      return ("ldi %3,0x04" CR_TAB
6931 		      "mul %B0,%3"  CR_TAB
6932 		      "mov %A0,r1"  CR_TAB
6933 		      "clr %B0"     CR_TAB
6934 		      "clr __zero_reg__");
6935 	    }
6936 	  if (optimize_size && ldi_ok)
6937 	    {
6938 	      *len = 5;
6939 	      return ("mov %A0,%B0" CR_TAB
6940 		      "ldi %B0,6" "\n1:\t"
6941 		      "lsr %A0"     CR_TAB
6942 		      "dec %B0"     CR_TAB
6943 		      "brne 1b");
6944 	    }
6945 	  if (optimize_size && scratch)
6946 	    break;  /* 5 */
6947 	  *len = 6;
6948 	  return ("clr %A0" CR_TAB
6949 		  "lsl %B0" CR_TAB
6950 		  "rol %A0" CR_TAB
6951 		  "lsl %B0" CR_TAB
6952 		  "rol %A0" CR_TAB
6953 		  "clr %B0");
6954 
6955 	case 15:
6956 	  *len = 4;
6957 	  return ("clr %A0" CR_TAB
6958 		  "lsl %B0" CR_TAB
6959 		  "rol %A0" CR_TAB
6960 		  "clr %B0");
6961 	}
6962       len = t;
6963     }
6964   out_shift_with_cnt ("lsr %B0" CR_TAB
6965                       "ror %A0", insn, operands, len, 2);
6966   return "";
6967 }
6968 
6969 
6970 /* 24-bit logic shift right */
6971 
6972 const char*
6973 avr_out_lshrpsi3 (rtx_insn *insn, rtx *op, int *plen)
6974 {
6975   int dest = REGNO (op[0]);
6976   int src = REGNO (op[1]);
6977 
6978   if (CONST_INT_P (op[2]))
6979     {
6980       if (plen)
6981         *plen = 0;
6982 
6983       switch (INTVAL (op[2]))
6984         {
6985         case 8:
6986           if (dest <= src)
6987             return avr_asm_len ("mov %A0,%B1" CR_TAB
6988                                 "mov %B0,%C1" CR_TAB
6989                                 "clr %C0", op, plen, 3);
6990           else
6991             return avr_asm_len ("clr %C0"     CR_TAB
6992                                 "mov %B0,%C1" CR_TAB
6993                                 "mov %A0,%B1", op, plen, 3);
6994 
6995         case 16:
6996           if (dest != src + 2)
6997             avr_asm_len ("mov %A0,%C1", op, plen, 1);
6998 
6999           return avr_asm_len ("clr %B0"  CR_TAB
7000                               "clr %C0", op, plen, 2);
7001 
7002         default:
7003           if (INTVAL (op[2]) < 24)
7004             break;
7005 
7006           /* fall through */
7007 
7008         case 23:
7009           return avr_asm_len ("clr %A0"    CR_TAB
7010                               "sbrc %C0,7" CR_TAB
7011                               "inc %A0"    CR_TAB
7012                               "clr %B0"    CR_TAB
7013                               "clr %C0", op, plen, 5);
7014         } /* switch */
7015     }
7016 
7017   out_shift_with_cnt ("lsr %C0" CR_TAB
7018                       "ror %B0" CR_TAB
7019                       "ror %A0", insn, op, plen, 3);
7020   return "";
7021 }
7022 
7023 
7024 /* 32-bit logic shift right ((unsigned int)x >> i) */
7025 
const char *
lshrsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      /* T remembers the caller's length pointer; if none was supplied,
         count into dummy K so the cases below can store through LEN
         unconditionally.  */
      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* Shift count of 32 or more: the result is all zeros.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 3, ("clr %D0" CR_TAB
			      "clr %C0" CR_TAB
			      "movw %A0,%C0");
	  *len = 4;
	  return ("clr %D0" CR_TAB
		  "clr %C0" CR_TAB
		  "clr %B0" CR_TAB
		  "clr %A0");

	case 8:
	  {
	    /* Shift by one whole byte: move bytes down one position.
	       The copy order avoids clobbering a byte of an overlapping
	       source before it has been read.  */
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len = 4;
	    if (reg0 <= reg1)
	      return ("mov %A0,%B1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "clr %D0");
	    else
	      return ("clr %D0"     CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %A0,%B1");
	  }

	case 16:
	  {
	    /* Shift by two bytes: copy the high word down; MOVW does
	       this in a single instruction when available.  */
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    if (reg0 == reg1 + 2)
	      return *len = 2, ("clr %C0"     CR_TAB
				"clr %D0");
	    if (AVR_HAVE_MOVW)
	      return *len = 3, ("movw %A0,%C1" CR_TAB
				"clr %C0"      CR_TAB
				"clr %D0");
	    else
	      return *len = 4, ("mov %B0,%D1" CR_TAB
				"mov %A0,%C1" CR_TAB
				"clr %C0"     CR_TAB
				"clr %D0");
	  }

	case 24:
	  /* Only byte D of the source survives.  */
	  return *len = 4, ("mov %A0,%D1" CR_TAB
			    "clr %B0"     CR_TAB
			    "clr %C0"     CR_TAB
			    "clr %D0");

	case 31:
	  /* Only the MSB is left: materialize it as 0 or 1 in %A0.  */
	  *len = 6;
	  return ("clr %A0"    CR_TAB
		  "sbrc %D0,7" CR_TAB
		  "inc %A0"    CR_TAB
		  "clr %B0"    CR_TAB
		  "clr %C0"    CR_TAB
		  "clr %D0");
	}
      /* Restore the caller's (possibly NULL) length pointer for the
         generic fallback below.  */
      len = t;
    }
  out_shift_with_cnt ("lsr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
7112 
7113 
7114 /* Output addition of register XOP[0] and compile time constant XOP[2].
7115    CODE == PLUS:  perform addition by using ADD instructions or
7116    CODE == MINUS: perform addition by using SUB instructions:
7117 
7118       XOP[0] = XOP[0] + XOP[2]
7119 
7120    Or perform addition/subtraction with register XOP[2] depending on CODE:
7121 
7122       XOP[0] = XOP[0] +/- XOP[2]
7123 
7124    If PLEN == NULL, print assembler instructions to perform the operation;
7125    otherwise, set *PLEN to the length of the instruction sequence (in words)
7126    printed with PLEN == NULL.  XOP[3] is an 8-bit scratch register or NULL_RTX.
7127    Set *PCC to effect on cc0 according to respective CC_* insn attribute.
7128 
7129    CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
7130    CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
7131    If  CODE_SAT != UNKNOWN  then SIGN contains the sign of the summand resp.
7132    the subtrahend in the original insn, provided it is a compile time constant.
7133    In all other cases, SIGN is 0.
7134 
7135    If OUT_LABEL is true, print the final 0: label which is needed for
7136    saturated addition / subtraction.  The only case where OUT_LABEL = false
7137    is useful is for saturated addition / subtraction performed during
7138    fixed-point rounding, cf. `avr_out_round'.  */
7139 
static void
avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
                enum rtx_code code_sat, int sign, bool out_label)
{
  /* MODE of the operation.  */
  machine_mode mode = GET_MODE (xop[0]);

  /* INT_MODE of the same size.  */
  machine_mode imode = int_mode_for_mode (mode);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit scratch register */
  rtx op[3];

  /* Started the operation?  Before starting the operation we may skip
     adding 0.  This is no more true after the operation started because
     carry must be taken into account.  */
  bool started = false;

  /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */
  rtx xval = xop[2];

  /* Output a BRVC instruction.  Only needed with saturation.  */
  bool out_brvc = true;

  if (plen)
    *plen = 0;

  /* A register summand/subtrahend is handled by a plain byte-wise
     ADD/ADC resp. SUB/SBC chain.  */

  if (REG_P (xop[2]))
    {
      *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_CLOBBER;

      for (i = 0; i < n_bytes; i++)
        {
          /* We operate byte-wise on the destination.  */
          op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
          op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);

          if (i == 0)
            avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
                         op, plen, 1);
          else
            avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
                         op, plen, 1);
        }

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));

          /* $0 - $0 is zero, so there is nothing left to saturate.  */
          if (MINUS == code)
            return;
        }

      goto saturate;
    }

  /* Except in the case of ADIW with 16-bit register (see below)
     addition does not set cc0 in a usable way.  */

  *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;

  if (CONST_FIXED_P (xval))
    xval = avr_to_int_mode (xval);

  /* Adding/Subtracting zero is a no-op.  */

  if (xval == const0_rtx)
    {
      *pcc = CC_NONE;
      return;
    }

  if (MINUS == code)
    xval = simplify_unary_operation (NEG, imode, xval, imode);

  op[2] = xop[3];

  if (SS_PLUS == code_sat && MINUS == code
      && sign < 0
      && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
                  & GET_MODE_MASK (QImode)))
    {
      /* We compute x + 0x80 by means of SUB instructions.  We negated the
         constant subtrahend above and are left with  x - (-128)  so that we
         need something like SUBI r,128 which does not exist because SUBI sets
         V according to the sign of the subtrahend.  Notice the only case
         where this must be done is when NEG overflowed in case [2s] because
         the V computation needs the right sign of the subtrahend.  */

      rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);

      avr_asm_len ("subi %0,128" CR_TAB
                   "brmi 0f", &msb, plen, 2);
      out_brvc = false;

      goto saturate;
    }

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = gen_int_mode (val8, QImode);

      /* To get usable cc0 no low-bytes must have been skipped.  */

      if (i && !started)
        *pcc = CC_CLOBBER;

      if (!started
          && i % 2 == 0
          && i + 2 <= n_bytes
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
          unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);

          /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
             i.e. operate word-wise.  */

          if (val16 < 64)
            {
              if (val16 != 0)
                {
                  started = true;
                  avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
                               op, plen, 1);

                  if (n_bytes == 2 && PLUS == code)
                    *pcc = CC_SET_CZN;
                }

              /* Skip the high byte of the word just handled.  */
              i++;
              continue;
            }
        }

      if (val8 == 0)
        {
          /* Zero byte: only propagate a pending carry/borrow.  */
          if (started)
            avr_asm_len (code == PLUS
                         ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
                         op, plen, 1);
          continue;
        }
      else if ((val8 == 1 || val8 == 0xff)
               && UNKNOWN == code_sat
               && !started
               && i == n_bytes - 1)
        {
          /* +-1 confined to the topmost byte: a single INC/DEC does it
             (adding 0xff equals subtracting 1 modulo 256).  */
          avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
                       op, plen, 1);
          *pcc = CC_CLOBBER;
          break;
        }

      switch (code)
        {
        case PLUS:

          gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));

          if (plen != NULL && UNKNOWN != code_sat)
            {
              /* This belongs to the x + 0x80 corner case.  The code with
                 ADD instruction is not smaller, thus make this case
                 expensive so that the caller won't pick it.  */

              *plen += 10;
              break;
            }

          if (clobber_val != (int) val8)
            avr_asm_len ("ldi %2,%1", op, plen, 1);
          clobber_val = (int) val8;

          avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);

          break; /* PLUS */

        case MINUS:

          if (ld_reg_p)
            avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
          else
            {
              gcc_assert (plen != NULL || REG_P (op[2]));

              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
            }

          break; /* MINUS */

        default:
          /* Unknown code */
          gcc_unreachable();
        }

      started = true;

    } /* for all sub-bytes */

 saturate:

  if (UNKNOWN == code_sat)
    return;

  *pcc = (int) CC_CLOBBER;

  /* Vanilla addition/subtraction is done.  We are left with saturation.

     We have to compute  A = A <op> B  where  A  is a register and
     B is a register or a non-zero compile time constant CONST.
     A is register class "r" if unsigned && B is REG.  Otherwise, A is in "d".
     B stands for the original operand $2 in INSN.  In the case of B = CONST,
     SIGN in { -1, 1 } is the sign of B.  Otherwise, SIGN is 0.

     CODE is the instruction flavor we use in the asm sequence to perform <op>.


     unsigned
     operation        |  code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  C == 1  |  const, reg  | u+ = 0xff |  [1u]
     +  as  a - (-b)  |  sub  |  C == 0  |  const       | u+ = 0xff |  [2u]
     -  as  a - b     |  sub  |  C == 1  |  const, reg  | u- = 0    |  [3u]
     -  as  a + (-b)  |  add  |  C == 0  |  const       | u- = 0    |  [4u]


     signed
     operation        |  code |  sat if  |    b is      | sat value |  case
     -----------------+-------+----------+--------------+-----------+-------
     +  as  a + b     |  add  |  V == 1  |  const, reg  | s+        |  [1s]
     +  as  a - (-b)  |  sub  |  V == 1  |  const       | s+        |  [2s]
     -  as  a - b     |  sub  |  V == 1  |  const, reg  | s-        |  [3s]
     -  as  a + (-b)  |  add  |  V == 1  |  const       | s-        |  [4s]

     s+  =  b < 0  ?  -0x80 :  0x7f
     s-  =  b < 0  ?   0x7f : -0x80

     The cases a - b actually perform  a - (-(-b))  if B is CONST.
  */

  /* op[0] = MSB, op[1] = byte below the MSB (if any).  */

  op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
  op[1] = n_bytes > 1
    ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
    : NULL_RTX;

  bool need_copy = true;
  /* Length of a [R]CALL: 2 words on devices with JMP/CALL, else 1.  */
  int len_call = 1 + AVR_HAVE_JMP_CALL;

  switch (code_sat)
    {
    default:
      gcc_unreachable();

    case SS_PLUS:
    case SS_MINUS:

      if (out_brvc)
        avr_asm_len ("brvc 0f", op, plen, 1);

      if (reg_overlap_mentioned_p (xop[0], xop[2]))
        {
          /* [1s,reg] */

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 2);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "ldi %1,0xff" CR_TAB
                         "adc %1,__zero_reg__" CR_TAB
                         "adc %0,__zero_reg__", op, plen, 4);
        }
      else if (sign == 0 && PLUS == code)
        {
          /* [1s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "sbrs %2,7"   CR_TAB
                         "dec %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x80" CR_TAB
                         "cp %2,%0"    CR_TAB
                         "sbc %1,%1"   CR_TAB
                         "sbci %0,0", op, plen, 4);
        }
      else if (sign == 0 && MINUS == code)
        {
          /* [3s,reg] */

          op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);

          if (n_bytes == 1)
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "sbrs %2,7"   CR_TAB
                         "inc %0", op, plen, 3);
          else
            avr_asm_len ("ldi %0,0x7f" CR_TAB
                         "cp %0,%2"    CR_TAB
                         "sbc %1,%1"   CR_TAB
                         "sbci %0,-1", op, plen, 4);
        }
      else if ((sign < 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B < 0] [2s,B < 0] */
          /* [3s,const,B > 0] [4s,B > 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("%~call __clr_8", op, plen, len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x80", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("clr %1", op, plen, 1);
        }
      else if ((sign > 0) ^ (SS_MINUS == code_sat))
        {
          /* [1s,const,B > 0] [2s,B > 0] */
          /* [3s,const,B < 0] [4s,B < 0] */

          if (n_bytes == 8)
            {
              avr_asm_len ("sec" CR_TAB
                           "%~call __sbc_8", op, plen, 1 + len_call);
              need_copy = false;
            }

          avr_asm_len ("ldi %0,0x7f", op, plen, 1);
          if (n_bytes > 1 && need_copy)
            avr_asm_len ("ldi %1,0xff", op, plen, 1);
        }
      else
        gcc_unreachable();

      break;

    case US_PLUS:
      /* [1u] : [2u] */

      avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          if (MINUS == code)
            avr_asm_len ("sec", op, plen, 1);
          avr_asm_len ("%~call __sbc_8", op, plen, len_call);

          need_copy = false;
        }
      else
        {
          if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
            avr_asm_len ("sec" CR_TAB
                         "sbc %0,%0", op, plen, 2);
          else
            avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
                         op, plen, 1);
        }
      break; /* US_PLUS */

    case US_MINUS:
      /* [4u] : [3u] */

      avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);

      if (n_bytes == 8)
        {
          avr_asm_len ("%~call __clr_8", op, plen, len_call);
          need_copy = false;
        }
      else
        avr_asm_len ("clr %0", op, plen, 1);

      break;
    }

  /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
     Now copy the right value to the LSBs.  */

  if (need_copy && n_bytes > 1)
    {
      if (US_MINUS == code_sat || US_PLUS == code_sat)
        {
          avr_asm_len ("mov %1,%0", op, plen, 1);

          if (n_bytes > 2)
            {
              op[0] = xop[0];
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %0,%1", op, plen, 1);
              else
                avr_asm_len ("mov %A0,%1" CR_TAB
                             "mov %B0,%1", op, plen, 2);
            }
        }
      else if (n_bytes > 2)
        {
          op[0] = xop[0];
          avr_asm_len ("mov %A0,%1" CR_TAB
                       "mov %B0,%1", op, plen, 2);
        }
    }

  if (need_copy && n_bytes == 8)
    {
      if (AVR_HAVE_MOVW)
        avr_asm_len ("movw %r0+2,%0" CR_TAB
                     "movw %r0+4,%0", xop, plen, 2);
      else
        avr_asm_len ("mov %r0+2,%0" CR_TAB
                     "mov %r0+3,%0" CR_TAB
                     "mov %r0+4,%0" CR_TAB
                     "mov %r0+5,%0", xop, plen, 4);
    }

  /* Branch target of the saturation branches emitted above.  */

  if (out_label)
    avr_asm_len ("0:", op, plen, 0);
}
7586 
7587 
7588 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
   is not a compile-time constant:
7590 
7591       XOP[0] = XOP[0] +/- XOP[2]
7592 
7593    This is a helper for the function below.  The only insns that need this
7594    are additions/subtraction for pointer modes, i.e. HImode and PSImode.  */
7595 
7596 static const char*
7597 avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
7598 {
7599   machine_mode mode = GET_MODE (xop[0]);
7600 
7601   /* Only pointer modes want to add symbols.  */
7602 
7603   gcc_assert (mode == HImode || mode == PSImode);
7604 
7605   *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
7606 
7607   avr_asm_len (PLUS == code
7608                ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
7609                : "subi %A0,lo8(%2)"    CR_TAB "sbci %B0,hi8(%2)",
7610                xop, plen, -2);
7611 
7612   if (PSImode == mode)
7613     avr_asm_len (PLUS == code
7614                  ? "sbci %C0,hlo8(-(%2))"
7615                  : "sbci %C0,hlo8(%2)", xop, plen, 1);
7616   return "";
7617 }
7618 
7619 
7620 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
7621 
7622    INSN is a single_set insn or an insn pattern with a binary operation as
7623    SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
7624 
7625    XOP are the operands of INSN.  In the case of 64-bit operations with
7626    constant XOP[] has just one element:  The summand/subtrahend in XOP[0].
7627    The non-saturating insns up to 32 bits may or may not supply a "d" class
7628    scratch as XOP[3].
7629 
7630    If PLEN == NULL output the instructions.
7631    If PLEN != NULL set *PLEN to the length of the sequence in words.
7632 
7633    PCC is a pointer to store the instructions' effect on cc0.
7634    PCC may be NULL.
7635 
7636    PLEN and PCC default to NULL.
7637 
7638    OUT_LABEL defaults to TRUE.  For a description, see AVR_OUT_PLUS_1.
7639 
7640    Return ""  */
7641 
const char*
avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)
{
  int cc_plus, cc_minus, cc_dummy;
  int len_plus, len_minus;
  rtx op[4];
  /* XPATTERN: the SET rtx of the operation, whether INSN is a real insn
     or already a bare pattern.  */
  rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
  rtx xdest = SET_DEST (xpattern);
  machine_mode mode = GET_MODE (xdest);
  machine_mode imode = int_mode_for_mode (mode);
  int n_bytes = GET_MODE_SIZE (mode);
  enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
  enum rtx_code code
    = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
       ? PLUS : MINUS);

  if (!pcc)
    pcc = &cc_dummy;

  /* PLUS and MINUS don't saturate:  Use modular wrap-around.  */

  if (PLUS == code_sat || MINUS == code_sat)
    code_sat = UNKNOWN;

  /* With a register operand there is only one way to do it: emit
     directly, no need to compare sequence lengths below.  */

  if (n_bytes <= 4 && REG_P (xop[2]))
    {
      avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);
      return "";
    }

  if (8 == n_bytes)
    {
      /* 64-bit operations work on the fixed accumulator registers;
         XOP[] holds just the constant summand/subtrahend.  */
      op[0] = gen_rtx_REG (DImode, ACC_A);
      op[1] = gen_rtx_REG (DImode, ACC_A);
      op[2] = avr_to_int_mode (xop[0]);
    }
  else
    {
      /* Neither register nor numeric constant: a symbolic operand.  */
      if (!REG_P (xop[2])
          && !CONST_INT_P (xop[2])
          && !CONST_FIXED_P (xop[2]))
        {
          return avr_out_plus_symbol (xop, code, plen, pcc);
        }

      op[0] = avr_to_int_mode (xop[0]);
      op[1] = avr_to_int_mode (xop[1]);
      op[2] = avr_to_int_mode (xop[2]);
    }

  /* Saturations and 64-bit operations don't have a clobber operand.
     For the other cases, the caller will provide a proper XOP[3].  */

  xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
  op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;

  /* Saturation will need the sign of the original operand.  */

  rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
  int sign = INTVAL (xmsb) < 0 ? -1 : 1;

  /* If we subtract and the subtrahend is a constant, then negate it
     so that avr_out_plus_1 can be used.  */

  if (MINUS == code)
    op[2] = simplify_unary_operation (NEG, imode, op[2], imode);

  /* Work out the shortest sequence.  */

  /* Dry-run both the SUB- and the ADD-flavored sequence (R -= -VAL vs.
     R += VAL), then pick whichever is shorter.  */

  avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign, out_label);
  avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign, out_label);

  if (plen)
    {
      *plen = (len_minus <= len_plus) ? len_minus : len_plus;
      *pcc  = (len_minus <= len_plus) ? cc_minus : cc_plus;
    }
  else if (len_minus <= len_plus)
    avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
  else
    avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);

  return "";
}
7726 
7727 
7728 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
7729    time constant XOP[2]:
7730 
7731       XOP[0] = XOP[0] <op> XOP[2]
7732 
7733    and return "".  If PLEN == NULL, print assembler instructions to perform the
7734    operation; otherwise, set *PLEN to the length of the instruction sequence
7735    (in words) printed with PLEN == NULL.  XOP[3] is either an 8-bit clobber
7736    register or SCRATCH if no clobber register is needed for the operation.
7737    INSN is an INSN_P or a pattern of an insn.  */
7738 
const char*
avr_out_bitop (rtx insn, rtx *xop, int *plen)
{
  /* CODE and MODE of the operation.  */
  rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
  enum rtx_code code = GET_CODE (SET_SRC (xpattern));
  machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value of T-flag (0 or 1) or -1 if unknown.  */
  int set_t = -1;

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit clobber register or SCRATCH
     op[3]: 8-bit register containing 0xff or NULL_RTX  */
  rtx op[4];

  op[2] = xop[3];
  op[3] = NULL_RTX;

  if (plen)
    *plen = 0;

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Number of bits set in the current byte of the constant.  */
      int pop8 = avr_popcount (val8);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = GEN_INT (val8);

      switch (code)
        {
        case IOR:

          /* OR with 0x00 is a no-op for this byte.  */
          if (0 == pop8)
            continue;
          else if (ld_reg_p)
            avr_asm_len ("ori %0,%1", op, plen, 1);
          else if (1 == pop8)
            {
              /* A single bit: set T once and copy it in with BLD.  */
              if (set_t != 1)
                avr_asm_len ("set", op, plen, 1);
              set_t = 1;

              op[1] = GEN_INT (exact_log2 (val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else if (8 == pop8)
            {
              /* OR with 0xff yields 0xff: reuse a register already
                 holding 0xff if we made one, else build it as
                 CLR + DEC (0 - 1 = 0xff).  */
              if (op[3] != NULL_RTX)
                avr_asm_len ("mov %0,%3", op, plen, 1);
              else
                avr_asm_len ("clr %0" CR_TAB
                             "dec %0", op, plen, 2);

              op[3] = op[0];
            }
          else
            {
              /* General case: load the mask into the clobber register
                 unless it already holds this value.  */
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("or %0,%2", op, plen, 1);
            }

          continue; /* IOR */

        case AND:

          /* AND with 0xff is a no-op for this byte.  */
          if (8 == pop8)
            continue;
          else if (0 == pop8)
            avr_asm_len ("clr %0", op, plen, 1);
          else if (ld_reg_p)
            avr_asm_len ("andi %0,%1", op, plen, 1);
          else if (7 == pop8)
            {
              /* All bits but one: clear T once and BLD the zero in.  */
              if (set_t != 0)
                avr_asm_len ("clt", op, plen, 1);
              set_t = 0;

              op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("and %0,%2", op, plen, 1);
            }

          continue; /* AND */

        case XOR:

          /* XOR with 0x00 is a no-op for this byte.  */
          if (0 == pop8)
            continue;
          else if (8 == pop8)
            avr_asm_len ("com %0", op, plen, 1);
          else if (ld_reg_p && val8 == (1 << 7))
            /* x ^ 0x80 == x - 0x80 (mod 256): a single SUBI does it
               on an upper register.  */
            avr_asm_len ("subi %0,%1", op, plen, 1);
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("eor %0,%2", op, plen, 1);
            }

          continue; /* XOR */

        default:
          /* Unknown rtx_code */
          gcc_unreachable();
        }
    } /* for all sub-bytes */

  return "";
}
7879 
7880 
7881 /* Output sign extension from XOP[1] to XOP[0] and return "".
7882    If PLEN == NULL, print assembler instructions to perform the operation;
7883    otherwise, set *PLEN to the length of the instruction sequence (in words)
7884    as printed with PLEN == NULL.  */
7885 
7886 const char*
7887 avr_out_sign_extend (rtx_insn *insn, rtx *xop, int *plen)
7888 {
7889   // Size in bytes of source resp. destination operand.
7890   unsigned n_src = GET_MODE_SIZE (GET_MODE (xop[1]));
7891   unsigned n_dest = GET_MODE_SIZE (GET_MODE (xop[0]));
7892   rtx r_msb = all_regs_rtx[REGNO (xop[1]) + n_src - 1];
7893 
7894   if (plen)
7895     *plen = 0;
7896 
7897   // Copy destination to source
7898 
7899   if (REGNO (xop[0]) != REGNO (xop[1]))
7900     {
7901       gcc_assert (n_src <= 2);
7902 
7903       if (n_src == 2)
7904         avr_asm_len (AVR_HAVE_MOVW
7905                      ? "movw %0,%1"
7906                      : "mov %B0,%B1", xop, plen, 1);
7907       if (n_src == 1 || !AVR_HAVE_MOVW)
7908         avr_asm_len ("mov %A0,%A1", xop, plen, 1);
7909     }
7910 
7911   // Set Carry to the sign bit MSB.7...
7912 
7913   if (REGNO (xop[0]) == REGNO (xop[1])
7914       || !reg_unused_after (insn, r_msb))
7915     {
7916       avr_asm_len ("mov __tmp_reg__,%0", &r_msb, plen, 1);
7917       r_msb = tmp_reg_rtx;
7918     }
7919 
7920   avr_asm_len ("lsl %0", &r_msb, plen, 1);
7921 
7922   // ...and propagate it to all the new sign bits
7923 
7924   for (unsigned n = n_src; n < n_dest; n++)
7925     avr_asm_len ("sbc %0,%0", &all_regs_rtx[REGNO (xop[0]) + n], plen, 1);
7926 
7927   return "";
7928 }
7929 
7930 
7931 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
7932    PLEN != NULL: Set *PLEN to the length of that sequence.
7933    Return "".  */
7934 
7935 const char*
7936 avr_out_addto_sp (rtx *op, int *plen)
7937 {
7938   int pc_len = AVR_2_BYTE_PC ? 2 : 3;
7939   int addend = INTVAL (op[0]);
7940 
7941   if (plen)
7942     *plen = 0;
7943 
7944   if (addend < 0)
7945     {
7946       if (flag_verbose_asm || flag_print_asm_name)
7947         avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
7948 
7949       while (addend <= -pc_len)
7950         {
7951           addend += pc_len;
7952           avr_asm_len ("rcall .", op, plen, 1);
7953         }
7954 
7955       while (addend++ < 0)
7956         avr_asm_len ("push __zero_reg__", op, plen, 1);
7957     }
7958   else if (addend > 0)
7959     {
7960       if (flag_verbose_asm || flag_print_asm_name)
7961         avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
7962 
7963       while (addend-- > 0)
7964         avr_asm_len ("pop __tmp_reg__", op, plen, 1);
7965     }
7966 
7967   return "";
7968 }
7969 
7970 
7971 /* Outputs instructions needed for fixed point type conversion.
7972    This includes converting between any fixed point type, as well
7973    as converting to any integer type.  Conversion between integer
7974    types is not supported.
7975 
7976    Converting signed fractional types requires a bit shift if converting
7977    to or from any unsigned fractional type because the decimal place is
7978    shifted by 1 bit.  When the destination is a signed fractional, the sign
7979    is stored in either the carry or T bit.  */
7980 
7981 const char*
7982 avr_out_fract (rtx_insn *insn, rtx operands[], bool intsigned, int *plen)
7983 {
7984   size_t i;
7985   rtx xop[6];
7986   RTX_CODE shift = UNKNOWN;
7987   bool sign_in_carry = false;
7988   bool msb_in_carry = false;
7989   bool lsb_in_tmp_reg = false;
7990   bool lsb_in_carry = false;
7991   bool frac_rounded = false;
7992   const char *code_ashift = "lsl %0";
7993 
7994 
7995 #define MAY_CLOBBER(RR)                                                 \
7996   /* Shorthand used below.  */                                          \
7997   ((sign_bytes                                                          \
7998     && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb))  \
7999    || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb))		\
8000    || (reg_unused_after (insn, all_regs_rtx[RR])                        \
8001        && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
8002 
8003   struct
8004   {
8005     /* bytes       : Length of operand in bytes.
8006        ibyte       : Length of integral part in bytes.
8007        fbyte, fbit : Length of fractional part in bytes, bits.  */
8008 
8009     bool sbit;
8010     unsigned fbit, bytes, ibyte, fbyte;
8011     unsigned regno, regno_msb;
8012   } dest, src, *val[2] = { &dest, &src };
8013 
8014   if (plen)
8015     *plen = 0;
8016 
8017   /* Step 0:  Determine information on source and destination operand we
8018      ======   will need in the remainder.  */
8019 
8020   for (i = 0; i < sizeof (val) / sizeof (*val); i++)
8021     {
8022       machine_mode mode;
8023 
8024       xop[i] = operands[i];
8025 
8026       mode = GET_MODE (xop[i]);
8027 
8028       val[i]->bytes = GET_MODE_SIZE (mode);
8029       val[i]->regno = REGNO (xop[i]);
8030       val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;
8031 
8032       if (SCALAR_INT_MODE_P (mode))
8033         {
8034           val[i]->sbit = intsigned;
8035           val[i]->fbit = 0;
8036         }
8037       else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
8038         {
8039           val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
8040           val[i]->fbit = GET_MODE_FBIT (mode);
8041         }
8042       else
8043         fatal_insn ("unsupported fixed-point conversion", insn);
8044 
8045       val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
8046       val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
8047     }
8048 
8049   // Byte offset of the decimal point taking into account different place
8050   // of the decimal point in input and output and different register numbers
8051   // of input and output.
8052   int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;
8053 
8054   // Number of destination bytes that will come from sign / zero extension.
8055   int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);
8056 
8057   // Number of bytes at the low end to be filled with zeros.
8058   int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);
8059 
8060   // Do we have a 16-Bit register that is cleared?
8061   rtx clrw = NULL_RTX;
8062 
8063   bool sign_extend = src.sbit && sign_bytes;
8064 
8065   if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
8066     shift = ASHIFT;
8067   else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
8068     shift = ASHIFTRT;
8069   else if (dest.fbit % 8 == src.fbit % 8)
8070     shift = UNKNOWN;
8071   else
8072     gcc_unreachable();
8073 
8074   /* If we need to round the fraction part, we might need to save/round it
8075      before clobbering any of it in Step 1.  Also, we might want to do
8076      the rounding now to make use of LD_REGS.  */
8077   if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8078       && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8079       && !TARGET_FRACT_CONV_TRUNC)
8080     {
8081       bool overlap
8082         = (src.regno <=
8083            (offset ? dest.regno_msb - sign_bytes : dest.regno + zero_bytes - 1)
8084            && dest.regno - offset -1 >= dest.regno);
8085       unsigned s0 = dest.regno - offset -1;
8086       bool use_src = true;
8087       unsigned sn;
8088       unsigned copied_msb = src.regno_msb;
8089       bool have_carry = false;
8090 
8091       if (src.ibyte > dest.ibyte)
8092         copied_msb -= src.ibyte - dest.ibyte;
8093 
8094       for (sn = s0; sn <= copied_msb; sn++)
8095         if (!IN_RANGE (sn, dest.regno, dest.regno_msb)
8096             && !reg_unused_after (insn, all_regs_rtx[sn]))
8097           use_src = false;
8098       if (use_src && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0))
8099         {
8100           avr_asm_len ("tst %0" CR_TAB "brpl 0f",
8101                        &all_regs_rtx[src.regno_msb], plen, 2);
8102           sn = src.regno;
8103           if (sn < s0)
8104             {
8105               if (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], sn))
8106                 avr_asm_len ("cpi %0,1", &all_regs_rtx[sn], plen, 1);
8107               else
8108                 avr_asm_len ("sec" CR_TAB
8109                              "cpc %0,__zero_reg__",
8110                              &all_regs_rtx[sn], plen, 2);
8111               have_carry = true;
8112             }
8113           while (++sn < s0)
8114             avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8115 
8116           avr_asm_len (have_carry ? "sbci %0,128" : "subi %0,129",
8117                        &all_regs_rtx[s0], plen, 1);
8118           for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8119             avr_asm_len ("sbci %0,255", &all_regs_rtx[sn], plen, 1);
8120           avr_asm_len ("\n0:", NULL, plen, 0);
8121           frac_rounded = true;
8122         }
8123       else if (use_src && overlap)
8124         {
8125           avr_asm_len ("clr __tmp_reg__" CR_TAB
8126                        "sbrc %1,0"       CR_TAB
8127                        "dec __tmp_reg__", xop, plen, 1);
8128           sn = src.regno;
8129           if (sn < s0)
8130             {
8131               avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8132               have_carry = true;
8133             }
8134 
8135           while (++sn < s0)
8136             avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8137 
8138           if (have_carry)
8139             avr_asm_len ("clt"                CR_TAB
8140                          "bld __tmp_reg__,7"  CR_TAB
8141                          "adc %0,__tmp_reg__",
8142                          &all_regs_rtx[s0], plen, 1);
8143           else
8144             avr_asm_len ("lsr __tmp_reg" CR_TAB
8145                          "add %0,__tmp_reg__",
8146                          &all_regs_rtx[s0], plen, 2);
8147           for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8148             avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8149           frac_rounded = true;
8150         }
8151       else if (overlap)
8152         {
8153           bool use_src
8154             = (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0)
8155                && (IN_RANGE (s0, dest.regno, dest.regno_msb)
8156                    || reg_unused_after (insn, all_regs_rtx[s0])));
8157           xop[2] = all_regs_rtx[s0];
8158           unsigned sn = src.regno;
8159           if (!use_src || sn == s0)
8160             avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
8161           /* We need to consider to-be-discarded bits
8162              if the value is negative.  */
8163           if (sn < s0)
8164             {
8165               avr_asm_len ("tst %0" CR_TAB
8166                            "brpl 0f",
8167                            &all_regs_rtx[src.regno_msb], plen, 2);
8168               /* Test to-be-discarded bytes for any nozero bits.
8169                  ??? Could use OR or SBIW to test two registers at once.  */
8170               if (sn < s0)
8171                 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8172 
8173               while (++sn < s0)
8174                 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8175               /* Set bit 0 in __tmp_reg__ if any of the lower bits was set.  */
8176               if (use_src)
8177                 avr_asm_len ("breq 0f" CR_TAB
8178                              "ori %2,1"
8179                              "\n0:\t" "mov __tmp_reg__,%2",
8180                              xop, plen, 3);
8181               else
8182                 avr_asm_len ("breq 0f" CR_TAB
8183                              "set"     CR_TAB
8184                              "bld __tmp_reg__,0\n0:",
8185                              xop, plen, 3);
8186             }
8187           lsb_in_tmp_reg = true;
8188         }
8189     }
8190 
8191   /* Step 1:  Clear bytes at the low end and copy payload bits from source
8192      ======   to destination.  */
8193 
8194   int step = offset < 0 ? 1 : -1;
8195   unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;
8196 
8197   // We cleared at least that number of registers.
8198   int clr_n = 0;
8199 
8200   for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
8201     {
8202       // Next regno of destination is needed for MOVW
8203       unsigned d1 = d0 + step;
8204 
8205       // Current and next regno of source
8206       signed s0 = d0 - offset;
8207       signed s1 = s0 + step;
8208 
8209       // Must current resp. next regno be CLRed?  This applies to the low
8210       // bytes of the destination that have no associated source bytes.
8211       bool clr0 = s0 < (signed) src.regno;
8212       bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;
8213 
8214       // First gather what code to emit (if any) and additional step to
8215       // apply if a MOVW is in use.  xop[2] is destination rtx and xop[3]
8216       // is the source rtx for the current loop iteration.
8217       const char *code = NULL;
8218       int stepw = 0;
8219 
8220       if (clr0)
8221         {
8222           if (AVR_HAVE_MOVW && clr1 && clrw)
8223             {
8224               xop[2] = all_regs_rtx[d0 & ~1];
8225               xop[3] = clrw;
8226               code = "movw %2,%3";
8227               stepw = step;
8228             }
8229           else
8230             {
8231               xop[2] = all_regs_rtx[d0];
8232               code = "clr %2";
8233 
8234               if (++clr_n >= 2
8235                   && !clrw
8236                   && d0 % 2 == (step > 0))
8237                 {
8238                   clrw = all_regs_rtx[d0 & ~1];
8239                 }
8240             }
8241         }
8242       else if (offset && s0 <= (signed) src.regno_msb)
8243         {
8244           int movw = AVR_HAVE_MOVW && offset % 2 == 0
8245             && d0 % 2 == (offset > 0)
8246             && d1 <= dest.regno_msb && d1 >= dest.regno
8247             && s1 <= (signed) src.regno_msb  && s1 >= (signed) src.regno;
8248 
8249           xop[2] = all_regs_rtx[d0 & ~movw];
8250           xop[3] = all_regs_rtx[s0 & ~movw];
8251           code = movw ? "movw %2,%3" : "mov %2,%3";
8252           stepw = step * movw;
8253         }
8254 
8255       if (code)
8256         {
8257           if (sign_extend && shift != ASHIFT && !sign_in_carry
8258               && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
8259             {
8260               /* We are going to override the sign bit.  If we sign-extend,
8261                  store the sign in the Carry flag.  This is not needed if
8262                  the destination will be ASHIFT in the remainder because
8263                  the ASHIFT will set Carry without extra instruction.  */
8264 
8265               avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
8266               sign_in_carry = true;
8267             }
8268 
8269           unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;
8270 
8271           if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
8272               && src.ibyte > dest.ibyte
8273               && (d0 == src_msb || d0 + stepw == src_msb))
8274             {
8275               /* We are going to override the MSB.  If we shift right,
8276                  store the MSB in the Carry flag.  This is only needed if
8277                  we don't sign-extend becaue with sign-extension the MSB
8278                  (the sign) will be produced by the sign extension.  */
8279 
8280               avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
8281               msb_in_carry = true;
8282             }
8283 
8284           unsigned src_lsb = dest.regno - offset -1;
8285 
8286           if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
8287 	      && !lsb_in_tmp_reg
8288               && (d0 == src_lsb || d0 + stepw == src_lsb))
8289             {
8290               /* We are going to override the new LSB; store it into carry.  */
8291 
8292               avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
8293               code_ashift = "rol %0";
8294               lsb_in_carry = true;
8295             }
8296 
8297           avr_asm_len (code, xop, plen, 1);
8298           d0 += stepw;
8299         }
8300     }
8301 
8302   /* Step 2:  Shift destination left by 1 bit position.  This might be needed
8303      ======   for signed input and unsigned output.  */
8304 
8305   if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
8306     {
8307       unsigned s0 = dest.regno - offset -1;
8308 
8309       /* n1169 4.1.4 says:
8310 	 "Conversions from a fixed-point to an integer type round toward zero."
8311 	 Hence, converting a fract type to integer only gives a non-zero result
8312 	 for -1.  */
8313       if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8314 	  && SCALAR_FRACT_MODE_P (GET_MODE (xop[1]))
8315 	  && !TARGET_FRACT_CONV_TRUNC)
8316 	{
8317 	  gcc_assert (s0 == src.regno_msb);
8318 	  /* Check if the input is -1.  We do that by checking if negating
8319 	     the input causes an integer overflow.  */
8320 	  unsigned sn = src.regno;
8321 	  avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8322 	  while (sn <= s0)
8323 	    avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8324 
8325 	  /* Overflow goes with set carry.  Clear carry otherwise.  */
8326 	  avr_asm_len ("brvs 0f" CR_TAB
8327                        "clc\n0:", NULL, plen, 2);
8328 	}
8329       /* Likewise, when converting from accumulator types to integer, we
8330 	 need to round up negative values.  */
8331       else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8332 	       && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8333 	       && !TARGET_FRACT_CONV_TRUNC
8334 	       && !frac_rounded)
8335 	{
8336 	  bool have_carry = false;
8337 
8338 	  xop[2] = all_regs_rtx[s0];
8339 	  if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
8340 	    avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
8341 	  avr_asm_len ("tst %0" CR_TAB "brpl 0f",
8342 		       &all_regs_rtx[src.regno_msb], plen, 2);
8343 	  if (!lsb_in_tmp_reg)
8344 	    {
8345 	      unsigned sn = src.regno;
8346 	      if (sn < s0)
8347 		{
8348 		  avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],
8349 			       plen, 1);
8350 		  have_carry = true;
8351 		}
8352 	      while (++sn < s0)
8353 		avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn], plen, 1);
8354 	      lsb_in_tmp_reg = !MAY_CLOBBER (s0);
8355 	    }
8356 	  /* Add in C and the rounding value 127.  */
8357 	  /* If the destination msb is a sign byte, and in LD_REGS,
8358 	     grab it as a temporary.  */
8359 	  if (sign_bytes
8360 	      && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],
8361 				    dest.regno_msb))
8362 	    {
8363 	      xop[3] = all_regs_rtx[dest.regno_msb];
8364 	      avr_asm_len ("ldi %3,127", xop, plen, 1);
8365 	      avr_asm_len ((have_carry && lsb_in_tmp_reg ? "adc __tmp_reg__,%3"
8366 			   : have_carry ? "adc %2,%3"
8367 			   : lsb_in_tmp_reg ? "add __tmp_reg__,%3"
8368 			   : "add %2,%3"),
8369 			   xop, plen, 1);
8370 	    }
8371 	  else
8372 	    {
8373 	      /* Fall back to use __zero_reg__ as a temporary.  */
8374 	      avr_asm_len ("dec __zero_reg__", NULL, plen, 1);
8375 	      if (have_carry)
8376 		avr_asm_len ("clt" CR_TAB
8377                              "bld __zero_reg__,7", NULL, plen, 2);
8378 	      else
8379 		avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);
8380 	      avr_asm_len (have_carry && lsb_in_tmp_reg
8381                            ? "adc __tmp_reg__,__zero_reg__"
8382                            : have_carry ? "adc %2,__zero_reg__"
8383                            : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
8384                            : "add %2,__zero_reg__",
8385 			   xop, plen, 1);
8386 	      avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);
8387 	    }
8388 
8389           for (d0 = dest.regno + zero_bytes;
8390 	       d0 <= dest.regno_msb - sign_bytes; d0++)
8391 	    avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);
8392 
8393           avr_asm_len (lsb_in_tmp_reg
8394 		       ? "\n0:\t" "lsl __tmp_reg__"
8395                        : "\n0:\t" "lsl %2",
8396 		       xop, plen, 1);
8397 	}
8398       else if (MAY_CLOBBER (s0))
8399         avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
8400       else
8401         avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8402                      "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8403 
8404       code_ashift = "rol %0";
8405       lsb_in_carry = true;
8406     }
8407 
8408   if (shift == ASHIFT)
8409     {
8410       for (d0 = dest.regno + zero_bytes;
8411            d0 <= dest.regno_msb - sign_bytes; d0++)
8412         {
8413           avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
8414           code_ashift = "rol %0";
8415         }
8416 
8417       lsb_in_carry = false;
8418       sign_in_carry = true;
8419     }
8420 
8421   /* Step 4a:  Store MSB in carry if we don't already have it or will produce
8422      =======   it in sign-extension below.  */
8423 
8424   if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
8425       && src.ibyte > dest.ibyte)
8426     {
8427       unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;
8428 
8429       if (MAY_CLOBBER (s0))
8430         avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
8431       else
8432         avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8433                      "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8434 
8435       msb_in_carry = true;
8436     }
8437 
8438   /* Step 3:  Sign-extend or zero-extend the destination as needed.
8439      ======   */
8440 
8441   if (sign_extend && !sign_in_carry)
8442     {
8443       unsigned s0 = src.regno_msb;
8444 
8445       if (MAY_CLOBBER (s0))
8446         avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
8447       else
8448         avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8449                      "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8450 
8451       sign_in_carry = true;
8452   }
8453 
8454   gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);
8455 
8456   unsigned copies = 0;
8457   rtx movw = sign_extend ? NULL_RTX : clrw;
8458 
8459   for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
8460     {
8461       if (AVR_HAVE_MOVW && movw
8462           && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
8463         {
8464           xop[2] = all_regs_rtx[d0];
8465           xop[3] = movw;
8466           avr_asm_len ("movw %2,%3", xop, plen, 1);
8467           d0++;
8468         }
8469       else
8470         {
8471           avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
8472                        &all_regs_rtx[d0], plen, 1);
8473 
8474           if (++copies >= 2 && !movw && d0 % 2 == 1)
8475             movw = all_regs_rtx[d0-1];
8476         }
8477     } /* for */
8478 
8479 
8480   /* Step 4:  Right shift the destination.  This might be needed for
8481      ======   conversions from unsigned to signed.  */
8482 
8483   if (shift == ASHIFTRT)
8484     {
8485       const char *code_ashiftrt = "lsr %0";
8486 
8487       if (sign_extend || msb_in_carry)
8488         code_ashiftrt = "ror %0";
8489 
8490       if (src.sbit && src.ibyte == dest.ibyte)
8491         code_ashiftrt = "asr %0";
8492 
8493       for (d0 = dest.regno_msb - sign_bytes;
8494            d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
8495         {
8496           avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
8497           code_ashiftrt = "ror %0";
8498         }
8499     }
8500 
8501 #undef MAY_CLOBBER
8502 
8503   return "";
8504 }
8505 
8506 
8507 /* Output fixed-point rounding.  XOP[0] = XOP[1] is the operand to round.
8508    XOP[2] is the rounding point, a CONST_INT.  The function prints the
8509    instruction sequence if PLEN = NULL and computes the length in words
8510    of the sequence if PLEN != NULL.  Most of this function deals with
8511    preparing operands for calls to `avr_out_plus' and `avr_out_bitop'.  */
8512 
const char*
avr_out_round (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
{
  machine_mode mode = GET_MODE (xop[0]);
  /* Integer mode of the same size, used for the bit-clearing AND below.  */
  machine_mode imode = int_mode_for_mode (mode);
  // The smallest fractional bit not cleared by the rounding is 2^(-RP).
  int fbit = (int) GET_MODE_FBIT (mode);
  /* The round-to-nearest addend 1/2 * 2^(-RP), built twice:  as a
     double_int for const_fixed_from_double_int below, and as a wide_int
     for computing the AND mask.  Both must describe the same bit.  */
  double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
  wide_int wi_add = wi::set_bit_in_zero (fbit-1 - INTVAL (xop[2]),
					 GET_MODE_PRECISION (imode));
  // Lengths of PLUS and AND parts.
  int len_add = 0, *plen_add = plen ? &len_add : NULL;
  int len_and = 0, *plen_and = plen ? &len_and : NULL;

  // Add-Saturate  1/2 * 2^(-RP).  Don't print the label "0:" when printing
  // the saturated addition so that we can emit the "rjmp 1f" before the
  // "0:" below.

  rtx xadd = const_fixed_from_double_int (i_add, mode);
  rtx xpattern, xsrc, op[4];

  /* Pick saturating-signed or saturating-unsigned PLUS as appropriate.  */
  xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
    ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
    : gen_rtx_US_PLUS (mode, xop[1], xadd);
  xpattern = gen_rtx_SET (VOIDmode, xop[0], xsrc);

  op[0] = xop[0];
  op[1] = xop[1];
  op[2] = xadd;
  avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);

  /* If the addition saturated it jumps to "0:"; skip the masking then,
     because the saturated value must be kept as is.  */
  avr_asm_len ("rjmp 1f" CR_TAB
               "0:", NULL, plen_add, 1);

  // Keep  all bits from RP and higher:   ... 2^(-RP)
  // Clear all bits from RP+1 and lower:              2^(-RP-1) ...
  // Rounding point                           ^^^^^^^
  // Added above                                      ^^^^^^^^^
  /* -2 * wi_add has ones at bit (fbit-RP) and above, zeros below -- the
     mask that truncates the rounded value at the rounding point.  The AND
     is done on the integer-mode view of XOP[0].  */
  rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
  rtx xmask = immed_wide_int_const (-wi_add - wi_add, imode);

  xpattern = gen_rtx_SET (VOIDmode, xreg, gen_rtx_AND (imode, xreg, xmask));

  op[0] = xreg;
  op[1] = xreg;
  op[2] = xmask;
  op[3] = gen_rtx_SCRATCH (QImode);
  avr_out_bitop (xpattern, op, plen_and);
  avr_asm_len ("1:", NULL, plen, 0);

  /* Total length is the sum of the two parts computed separately above.  */
  if (plen)
    *plen = len_add + len_and;

  return "";
}
8568 
8569 
8570 /* Create RTL split patterns for byte sized rotate expressions.  This
8571   produces a series of move instructions and considers overlap situations.
8572   Overlapping non-HImode operands need a scratch register.  */
8573 
bool
avr_rotate_bytes (rtx operands[])
{
    int i, j;
    machine_mode mode = GET_MODE (operands[0]);
    bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
    bool same_reg = rtx_equal_p (operands[0], operands[1]);
    /* Rotate amount in bits (a CONST_INT operand).  */
    int num = INTVAL (operands[2]);
    rtx scratch = operands[3];
    /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
       Word move if no scratch is needed, otherwise use size of scratch.  */
    machine_mode move_mode = QImode;
    int move_size, offset, size;

    /* A rotate that is not a multiple of 16 bits cannot be done with
       word-sized moves.  */
    if (num & 0xf)
      move_mode = QImode;
    else if ((mode == SImode && !same_reg) || !overlapped)
      move_mode = HImode;
    else
      move_mode = GET_MODE (scratch);

    /* Force DI rotate to use QI moves since other DI moves are currently split
       into QI moves so forward propagation works better.  */
    if (mode == DImode)
      move_mode = QImode;
    /* Make scratch smaller if needed.  */
    if (SCRATCH != GET_CODE (scratch)
        && HImode == GET_MODE (scratch)
        && QImode == move_mode)
      scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

    move_size = GET_MODE_SIZE (move_mode);
    /* Number of bytes/words to rotate.  */
    offset = (num  >> 3) / move_size;
    /* Number of moves needed.  */
    size = GET_MODE_SIZE (mode) / move_size;
    /* HImode byte swap is special case to avoid a scratch register.  */
    if (mode == HImode && same_reg)
      {
	/* HImode byte swap, using xor.  This is as quick as using scratch.
	   The classic three-XOR exchange swaps the two bytes in place.  */
	rtx src, dst;
	src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
	dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
	if (!rtx_equal_p (dst, src))
	  {
	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	     emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
	  }
      }
    else
      {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode)  */
	/* Create linked list of moves to determine move order.  Each entry
	   is one subreg-to-subreg move; LINKS is the index of the move that
	   must be performed before this one (or -1 if unconstrained).  */
	struct {
	  rtx src, dst;
	  int links;
	} move[MAX_SIZE + 8];
	int blocked, moves;

	gcc_assert (size <= MAX_SIZE);
	/* Generate list of subreg moves.  Byte/word I of the source goes
	   to position (I + OFFSET) mod SIZE of the destination.  */
	for (i = 0; i < size; i++)
          {
	    int from = i;
	    int to = (from + offset) % size;
	    move[i].src = simplify_gen_subreg (move_mode, operands[1],
                                               mode, from * move_size);
	    move[i].dst = simplify_gen_subreg (move_mode, operands[0],
                                               mode, to * move_size);
            move[i].links = -1;
          }
	/* Mark dependence where a dst of one move is the src of another move.
	   The first move is a conflict as it must wait until second is
	   performed.  We ignore moves to self - we catch this later.  */
	if (overlapped)
	  for (i = 0; i < size; i++)
	    if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
	      for (j = 0; j < size; j++)
		if (j != i && rtx_equal_p (move[j].src, move[i].dst))
		  {
		    /* The dst of move i is the src of move j.  */
		    move[i].links = j;
		    break;
		  }

	blocked = -1;
	moves = 0;
	/* Go through move list and perform non-conflicting moves.  As each
	   non-overlapping move is made, it may remove other conflicts
	   so the process is repeated until no conflicts remain.  */
	do
	  {
	    blocked = -1;
	    moves = 0;
	    /* Emit move where dst is not also a src or we have used that
	       src already.  */
	    for (i = 0; i < size; i++)
	      if (move[i].src != NULL_RTX)
		{
		  if (move[i].links == -1
		      || move[move[i].links].src == NULL_RTX)
		    {
		      moves++;
		      /* Ignore NOP moves to self.  */
		      if (!rtx_equal_p (move[i].dst, move[i].src))
			emit_move_insn (move[i].dst, move[i].src);

		      /* Remove  conflict from list.  */
		      move[i].src = NULL_RTX;
		    }
		  else
		    blocked = i;
		}

	    /* Check for deadlock. This is when no moves occurred and we have
	       at least one blocked move.  */
	    if (moves == 0 && blocked != -1)
	      {
		/* Need to use scratch register to break deadlock.
		   Add move to put dst of blocked move into scratch.
		   When this move occurs, it will break chain deadlock.
		   The scratch register is substituted for real move.  */

		gcc_assert (SCRATCH != GET_CODE (scratch));

		move[size].src = move[blocked].dst;
		move[size].dst =  scratch;
		/* Scratch move is never blocked.  */
		move[size].links = -1;
		/* Make sure we have valid link.  */
		gcc_assert (move[blocked].links != -1);
		/* Replace src of  blocking move with scratch reg.  */
		move[move[blocked].links].src = scratch;
		/* Make dependent on scratch move occurring.  */
		move[blocked].links = size;
		size=size+1;
	      }
	  }
	while (blocked != -1);
      }
    return true;
}
8717 
8718 
8719 /* Worker function for `ADJUST_INSN_LENGTH'.  */
8720 /* Modifies the length assigned to instruction INSN
8721    LEN is the initially computed length of the insn.  */
8722 
int
avr_adjust_insn_length (rtx_insn *insn, int len)
{
  /* Returns the (possibly corrected) length of INSN in words.
     LEN is the length computed from the "length" insn attribute;
     it is returned unchanged unless the insn's "adjust_len" attribute
     requests a recomputation, in which case the respective avr_out_* /
     *_out worker recomputes *&len as a side effect of (re-)generating
     the insn's assembler template.  */

  rtx *op = recog_data.operand;
  enum attr_adjust_len adjust_len;

  /* Some complex insns don't need length adjustment and therefore
     the length need not/must not be adjusted for these insns.
     It is easier to state this in an insn attribute "adjust_len" than
     to clutter up code here...  */

  if (!NONDEBUG_INSN_P (insn)
      || -1 == recog_memoized (insn))
    {
      return len;
    }

  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */

  adjust_len = get_attr_adjust_len (insn);

  if (adjust_len == ADJUST_LEN_NO)
    {
      /* Nothing to adjust: The length from attribute "length" is fine.
         This is the default.  */

      return len;
    }

  /* Extract insn's operands.  */

  extract_constrain_insn_cached (insn);

  /* Dispatch to right function.  Each worker gets &len and overwrites it
     with the exact instruction count for the operands at hand.  */

  switch (adjust_len)
    {
    /* Reload of constants/addresses into registers.  */
    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;

    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;

    case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;

    /* Moves of 1..4 bytes and special loads.  */
    case ADJUST_LEN_MOV8:  output_movqi (insn, op, &len); break;
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
    case ADJUST_LEN_LPM: avr_out_lpm (insn, op, &len); break;
    case ADJUST_LEN_SEXT: avr_out_sign_extend (insn, op, &len); break;

    /* Fixed-point conversions and rounding.  */
    case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
    case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
    case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;

    /* Tests and comparisons.  */
    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;

    /* Shifts: logical right, arithmetic right, left, per mode size.  */
    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;

    /* Calls: 2 words with JMP/CALL hardware, else 1 (RCALL).  */
    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;

    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;

    default:
      gcc_unreachable();
    }

  return len;
}
8814 
8815 /* Return nonzero if register REG dead after INSN.  */
8816 
8817 int
8818 reg_unused_after (rtx_insn *insn, rtx reg)
8819 {
8820   return (dead_or_set_p (insn, reg)
8821 	  || (REG_P(reg) && _reg_unused_after (insn, reg)));
8822 }
8823 
8824 /* Return nonzero if REG is not used after INSN.
8825    We assume REG is a reload reg, and therefore does
8826    not live past labels.  It may live past calls or jumps though.  */
8827 
int
_reg_unused_after (rtx_insn *insn, rtx reg)
{
  /* Forward-scan the insn stream starting after INSN and decide whether
     REG can still be read.  Returns 1 if REG is provably unused, 0 if it
     is (or may be) used.  Conservative: any jump makes us give up.  */

  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) != MEM
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  /* Walk every following insn until we can prove use or non-use.  */
  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;
      code = GET_CODE (insn);

#if 0
      /* If this is a label that existed before reload, then the register
	 if dead here.  However, if this is a label added by reorg, then
	 the register may still be live here.  We can't tell the difference,
	 so we just ignore labels completely.  */
      if (code == CODE_LABEL)
	return 1;
      /* else */
#endif

      if (!INSN_P (insn))
	continue;

      /* Control may go anywhere after a jump: assume REG is live.  */
      if (code == JUMP_INSN)
	return 0;

      /* If this is a sequence, we must handle them all at once.
	 We could have for instance a call that sets the target register,
	 and an insn in a delay slot that uses the register.  In this case,
	 we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
	  int i;
	  int retval = 0;

	  for (i = 0; i < seq->len (); i++)
	    {
	      rtx_insn *this_insn = seq->insn (i);
	      rtx set = single_set (this_insn);

	      /* Remember whether the sequence contains a call or jump
		 so we can act on it after scanning all elements.  */
	      if (CALL_P (this_insn))
		code = CALL_INSN;
	      else if (JUMP_P (this_insn))
		{
		  if (INSN_ANNULLED_BRANCH_P (this_insn))
		    return 0;
		  code = JUMP_INSN;
		}

	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
		return 0;
	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
		{
		  /* Register set: dead from here on -- unless it is part
		     of a store address, which is a use.  */
		  if (GET_CODE (SET_DEST (set)) != MEM)
		    retval = 1;
		  else
		    return 0;
		}
	      if (set == 0
		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
		return 0;
	    }
	  if (retval == 1)
	    return 1;
	  else if (code == JUMP_INSN)
	    return 0;
	}

      if (code == CALL_INSN)
	{
	  /* A call uses REG if REG appears in its USE list; a call-used
	     (caller-saved) REG is clobbered by the call, hence dead.  */
	  rtx tem;
	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	    if (GET_CODE (XEXP (tem, 0)) == USE
		&& REG_P (XEXP (XEXP (tem, 0), 0))
		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
	      return 0;
	  if (call_used_regs[REGNO (reg)])
	    return 1;
	}

      /* Ordinary insn: REG read -> used; REG written (not through a
	 memory address) -> dead; complex pattern mentioning REG -> assume
	 used.  */
      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
	return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	return GET_CODE (SET_DEST (set)) != MEM;
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	return 0;
    }

  /* End of insn stream reached without a use.  */
  return 1;
}
8929 
8930 
8931 /* Implement `TARGET_ASM_INTEGER'.  */
8932 /* Target hook for assembling integer objects.  The AVR version needs
8933    special handling for references to certain labels.  */
8934 
8935 static bool
8936 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
8937 {
8938   if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
8939       && text_segment_operand (x, VOIDmode))
8940     {
8941       fputs ("\t.word\tgs(", asm_out_file);
8942       output_addr_const (asm_out_file, x);
8943       fputs (")\n", asm_out_file);
8944 
8945       return true;
8946     }
8947   else if (GET_MODE (x) == PSImode)
8948     {
8949       /* This needs binutils 2.23+, see PR binutils/13503  */
8950 
8951       fputs ("\t.byte\tlo8(", asm_out_file);
8952       output_addr_const (asm_out_file, x);
8953       fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8954 
8955       fputs ("\t.byte\thi8(", asm_out_file);
8956       output_addr_const (asm_out_file, x);
8957       fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8958 
8959       fputs ("\t.byte\thh8(", asm_out_file);
8960       output_addr_const (asm_out_file, x);
8961       fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);
8962 
8963       return true;
8964     }
8965   else if (CONST_FIXED_P (x))
8966     {
8967       unsigned n;
8968 
8969       /* varasm fails to handle big fixed modes that don't fit in hwi.  */
8970 
8971       for (n = 0; n < size; n++)
8972         {
8973           rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
8974           default_assemble_integer (xn, 1, aligned_p);
8975         }
8976 
8977       return true;
8978     }
8979 
8980   return default_assemble_integer (x, size, aligned_p);
8981 }
8982 
8983 
8984 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'.  */
8985 /* Return value is nonzero if pseudos that have been
8986    assigned to registers of class CLASS would likely be spilled
8987    because registers of CLASS are needed for spill registers.  */
8988 
8989 static bool
8990 avr_class_likely_spilled_p (reg_class_t c)
8991 {
8992   return (c != ALL_REGS &&
8993            (AVR_TINY ? 1 : c != ADDW_REGS));
8994 }
8995 
8996 
8997 /* Valid attributes:
8998    progmem   -  Put data to program memory.
8999    signal    -  Make a function to be hardware interrupt.
9000                 After function prologue interrupts remain disabled.
9001    interrupt -  Make a function to be hardware interrupt. Before function
9002                 prologue interrupts are enabled by means of SEI.
9003    naked     -  Don't generate function prologue/epilogue and RET
9004                 instruction.  */
9005 
9006 /* Handle a "progmem" attribute; arguments as in
9007    struct attribute_spec.handler.  */
9008 
9009 static tree
9010 avr_handle_progmem_attribute (tree *node, tree name,
9011 			      tree args ATTRIBUTE_UNUSED,
9012 			      int flags ATTRIBUTE_UNUSED,
9013 			      bool *no_add_attrs)
9014 {
9015   if (DECL_P (*node))
9016     {
9017       if (TREE_CODE (*node) == TYPE_DECL)
9018 	{
9019 	  /* This is really a decl attribute, not a type attribute,
9020 	     but try to handle it for GCC 3.0 backwards compatibility.  */
9021 
9022 	  tree type = TREE_TYPE (*node);
9023 	  tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
9024 	  tree newtype = build_type_attribute_variant (type, attr);
9025 
9026 	  TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
9027 	  TREE_TYPE (*node) = newtype;
9028 	  *no_add_attrs = true;
9029 	}
9030       else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
9031 	{
9032           *no_add_attrs = false;
9033 	}
9034       else
9035 	{
9036 	  warning (OPT_Wattributes, "%qE attribute ignored",
9037 		   name);
9038 	  *no_add_attrs = true;
9039 	}
9040     }
9041 
9042   return NULL_TREE;
9043 }
9044 
9045 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
9046    struct attribute_spec.handler.  */
9047 
9048 static tree
9049 avr_handle_fndecl_attribute (tree *node, tree name,
9050 			     tree args ATTRIBUTE_UNUSED,
9051 			     int flags ATTRIBUTE_UNUSED,
9052 			     bool *no_add_attrs)
9053 {
9054   if (TREE_CODE (*node) != FUNCTION_DECL)
9055     {
9056       warning (OPT_Wattributes, "%qE attribute only applies to functions",
9057 	       name);
9058       *no_add_attrs = true;
9059     }
9060 
9061   return NULL_TREE;
9062 }
9063 
9064 static tree
9065 avr_handle_fntype_attribute (tree *node, tree name,
9066                              tree args ATTRIBUTE_UNUSED,
9067                              int flags ATTRIBUTE_UNUSED,
9068                              bool *no_add_attrs)
9069 {
9070   if (TREE_CODE (*node) != FUNCTION_TYPE)
9071     {
9072       warning (OPT_Wattributes, "%qE attribute only applies to functions",
9073 	       name);
9074       *no_add_attrs = true;
9075     }
9076 
9077   return NULL_TREE;
9078 }
9079 
static tree
avr_handle_addr_attribute (tree *node, tree name, tree args,
			   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  /* Shared handler for the "io", "io_low" and "address" variable
     attributes.  Validates that the attribute is on a variable, that
     its (optional for io/io_low, mandatory for address) argument is an
     integer constant in range, and that no competing attribute already
     supplies an address.  Sets *NO_ADD when the attribute must be
     dropped.  */

  /* "io" is a prefix of "io_low", so this matches both io variants.  */
  bool io_p = (strncmp (IDENTIFIER_POINTER (name), "io", 2) == 0);
  location_t loc = DECL_SOURCE_LOCATION (*node);

  if (!VAR_P (*node))
    {
      warning_at (loc, OPT_Wattributes, "%qE attribute only applies to "
		  "variables", name);
      *no_add = true;
      return NULL_TREE;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper the front end may have added.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      tree arg = TREE_VALUE (args);
      if (TREE_CODE (arg) != INTEGER_CST)
	{
	  warning_at (loc, OPT_Wattributes, "%qE attribute allows only an "
		      "integer constant argument", name);
	  *no_add = true;
	}
      else if (io_p
	       && (!tree_fits_shwi_p (arg)
		   || !(strcmp (IDENTIFIER_POINTER (name), "io_low") == 0
			? low_io_address_operand : io_address_operand)
			 (GEN_INT (TREE_INT_CST_LOW (arg)), QImode)))
	{
	  /* io/io_low addresses must fall in the device's I/O range;
	     io_low is additionally restricted to the SBI/CBI range.  */
	  warning_at (loc, OPT_Wattributes, "%qE attribute address "
		      "out of range", name);
	  *no_add = true;
	}
      else
	{
	  /* Reject a second address-carrying attribute on the same decl.  */
	  tree attribs = DECL_ATTRIBUTES (*node);
	  const char *names[] = { "io", "io_low", "address", NULL };
	  for (const char **p = names; *p; p++)
	    {
	      tree other = lookup_attribute (*p, attribs);
	      if (other && TREE_VALUE (other))
		{
		  warning_at (loc, OPT_Wattributes,
			      "both %s and %qE attribute provide address",
			      *p, name);
		  *no_add = true;
		  break;
		}
	    }
	}
    }

  /* An I/O mapped variable that isn't volatile is almost certainly
     a user error; accesses could be optimized away.  */
  if (*no_add == false && io_p && !TREE_THIS_VOLATILE (*node))
    warning_at (loc, OPT_Wattributes, "%qE attribute on non-volatile variable",
		name);

  return NULL_TREE;
}
9141 
9142 rtx
9143 avr_eval_addr_attrib (rtx x)
9144 {
9145   if (GET_CODE (x) == SYMBOL_REF
9146       && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_ADDRESS))
9147     {
9148       tree decl = SYMBOL_REF_DECL (x);
9149       tree attr = NULL_TREE;
9150 
9151       if (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO)
9152 	{
9153 	  attr = lookup_attribute ("io", DECL_ATTRIBUTES (decl));
9154 	  if (!attr || !TREE_VALUE (attr))
9155 	    attr = lookup_attribute ("io_low", DECL_ATTRIBUTES (decl));
9156 	  gcc_assert (attr);
9157 	}
9158       if (!attr || !TREE_VALUE (attr))
9159 	attr = lookup_attribute ("address", DECL_ATTRIBUTES (decl));
9160       gcc_assert (attr && TREE_VALUE (attr) && TREE_VALUE (TREE_VALUE (attr)));
9161       return GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr))));
9162     }
9163   return x;
9164 }
9165 
9166 
9167 /* AVR attributes.  */
static const struct attribute_spec
avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  /* Data placement: put the object into program memory (flash).  */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
    false },
  /* Interrupt handlers: "signal" keeps interrupts disabled on entry,
     "interrupt" re-enables them with SEI in the prologue.  */
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  /* Prologue/epilogue control for functions.  */
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  /* Fixed-address variables: "io"/"io_low" take an optional I/O address,
     "address" requires an explicit RAM address.  */
  { "io",        0, 1, true, false, false,  avr_handle_addr_attribute,
    false },
  { "io_low",    0, 1, true, false, false,  avr_handle_addr_attribute,
    false },
  { "address",   1, 1, true, false, false,  avr_handle_addr_attribute,
    false },
  { NULL,        0, 0, false, false, false, NULL, false }
};
9193 
9194 
9195 /* Look if DECL shall be placed in program memory space by
9196    means of attribute `progmem' or some address-space qualifier.
9197    Return non-zero if DECL is data that must end up in Flash and
9198    zero if the data lives in RAM (.bss, .data, .rodata, ...).
9199 
9200    Return 2   if DECL is located in 24-bit flash address-space
9201    Return 1   if DECL is located in 16-bit flash address-space
9202    Return -1  if attribute `progmem' occurs in DECL or ATTRIBUTES
9203    Return 0   otherwise  */
9204 
9205 int
9206 avr_progmem_p (tree decl, tree attributes)
9207 {
9208   tree a;
9209 
9210   if (TREE_CODE (decl) != VAR_DECL)
9211     return 0;
9212 
9213   if (avr_decl_memx_p (decl))
9214     return 2;
9215 
9216   if (avr_decl_flash_p (decl))
9217     return 1;
9218 
9219   if (NULL_TREE
9220       != lookup_attribute ("progmem", attributes))
9221     return -1;
9222 
9223   a = decl;
9224 
9225   do
9226     a = TREE_TYPE(a);
9227   while (TREE_CODE (a) == ARRAY_TYPE);
9228 
9229   if (a == error_mark_node)
9230     return 0;
9231 
9232   if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
9233     return -1;
9234 
9235   return 0;
9236 }
9237 
9238 
9239 /* Scan type TYP for pointer references to address space ASn.
9240    Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
9241    the AS are also declared to be CONST.
9242    Otherwise, return the respective address space, i.e. a value != 0.  */
9243 
9244 static addr_space_t
9245 avr_nonconst_pointer_addrspace (tree typ)
9246 {
9247   while (ARRAY_TYPE == TREE_CODE (typ))
9248     typ = TREE_TYPE (typ);
9249 
9250   if (POINTER_TYPE_P (typ))
9251     {
9252       addr_space_t as;
9253       tree target = TREE_TYPE (typ);
9254 
9255       /* Pointer to function: Test the function's return type.  */
9256 
9257       if (FUNCTION_TYPE == TREE_CODE (target))
9258         return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
9259 
9260       /* "Ordinary" pointers... */
9261 
9262       while (TREE_CODE (target) == ARRAY_TYPE)
9263         target = TREE_TYPE (target);
9264 
9265       /* Pointers to non-generic address space must be const.
9266          Refuse address spaces outside the device's flash.  */
9267 
9268       as = TYPE_ADDR_SPACE (target);
9269 
9270       if (!ADDR_SPACE_GENERIC_P (as)
9271           && (!TYPE_READONLY (target)
9272               || avr_addrspace[as].segment >= avr_n_flash
9273 	      /* Also refuse __memx address space if we can't support it.  */
9274 	      || (!AVR_HAVE_LPM && avr_addrspace[as].pointer_size > 2)))
9275         {
9276           return as;
9277         }
9278 
9279       /* Scan pointer's target type.  */
9280 
9281       return avr_nonconst_pointer_addrspace (target);
9282     }
9283 
9284   return ADDR_SPACE_GENERIC;
9285 }
9286 
9287 
9288 /* Sanity check NODE so that all pointers targeting non-generic address spaces
9289    go along with CONST qualifier.  Writing to these address spaces should
9290    be detected and complained about as early as possible.  */
9291 
9292 static bool
9293 avr_pgm_check_var_decl (tree node)
9294 {
9295   const char *reason = NULL;
9296 
9297   addr_space_t as = ADDR_SPACE_GENERIC;
9298 
9299   gcc_assert (as == 0);
9300 
9301   if (avr_log.progmem)
9302     avr_edump ("%?: %t\n", node);
9303 
9304   switch (TREE_CODE (node))
9305     {
9306     default:
9307       break;
9308 
9309     case VAR_DECL:
9310       if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
9311         reason = "variable";
9312       break;
9313 
9314     case PARM_DECL:
9315       if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
9316         reason = "function parameter";
9317       break;
9318 
9319     case FIELD_DECL:
9320       if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
9321         reason = "structure field";
9322       break;
9323 
9324     case FUNCTION_DECL:
9325       if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
9326           as)
9327         reason = "return type of function";
9328       break;
9329 
9330     case POINTER_TYPE:
9331       if (as = avr_nonconst_pointer_addrspace (node), as)
9332         reason = "pointer";
9333       break;
9334     }
9335 
9336   if (reason)
9337     {
9338       if (avr_addrspace[as].segment >= avr_n_flash)
9339         {
9340           if (TYPE_P (node))
9341             error ("%qT uses address space %qs beyond flash of %d KiB",
9342                    node, avr_addrspace[as].name, 64 * avr_n_flash);
9343           else
9344             error ("%s %q+D uses address space %qs beyond flash of %d KiB",
9345                    reason, node, avr_addrspace[as].name, 64 * avr_n_flash);
9346         }
9347       else
9348         {
9349           if (TYPE_P (node))
9350             error ("pointer targeting address space %qs must be const in %qT",
9351                    avr_addrspace[as].name, node);
9352           else
9353             error ("pointer targeting address space %qs must be const"
9354                    " in %s %q+D",
9355                    avr_addrspace[as].name, reason, node);
9356         }
9357     }
9358 
9359   return reason == NULL;
9360 }
9361 
9362 
9363 /* Add the section attribute if the variable is in progmem.  */
9364 
static void
avr_insert_attributes (tree node, tree *attributes)
{
  /* Implements TARGET_INSERT_ATTRIBUTES.  Diagnoses non-const pointers
     into flash address spaces and checks that variables destined for
     program memory are read-only and fit the device's flash.  */

  avr_pgm_check_var_decl (node);

  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
    {
      addr_space_t as;
      tree node0 = node;

      /* For C++, we have to peel arrays in order to get correct
         determination of readonlyness.  */

      do
        node0 = TREE_TYPE (node0);
      while (TREE_CODE (node0) == ARRAY_TYPE);

      if (error_mark_node == node0)
        return;

      as = TYPE_ADDR_SPACE (TREE_TYPE (node));

      /* Refuse address spaces that don't exist on this device.  */

      if (avr_addrspace[as].segment >= avr_n_flash)
        {
          error ("variable %q+D located in address space %qs beyond flash "
                 "of %d KiB", node, avr_addrspace[as].name, 64 * avr_n_flash);
        }
      else if (!AVR_HAVE_LPM && avr_addrspace[as].pointer_size > 2)
	{
          error ("variable %q+D located in address space %qs"
                 " which is not supported for architecture %qs",
                 node, avr_addrspace[as].name, avr_arch->name);
	}

      /* Data placed in flash is read-only by construction; demand the
         matching const qualification on the declaration.  */

      if (!TYPE_READONLY (node0)
          && !TREE_READONLY (node))
        {
          const char *reason = "__attribute__((progmem))";

          if (!ADDR_SPACE_GENERIC_P (as))
            reason = avr_addrspace[as].name;

          if (avr_log.progmem)
            avr_edump ("\n%?: %t\n%t\n", node, node0);

          error ("variable %q+D must be const in order to be put into"
                 " read-only section by means of %qs", node, reason);
        }
    }
}
9417 
9418 
9419 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'.  */
9420 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'.  */
9421 /* Track need of __do_clear_bss.  */
9422 
void
avr_asm_output_aligned_decl_common (FILE * stream,
                                    tree decl,
                                    const char *name,
                                    unsigned HOST_WIDE_INT size,
                                    unsigned int align, bool local_p)
{
  /* Emit a common/local symbol for DECL.  Symbols carrying an "io",
     "io_low" or "address" attribute get no storage at all: they are
     emitted as an absolute symbol assignment (NAME = <address>) so all
     accesses resolve to the fixed address.  Everything else goes through
     the standard .comm/.local path and flags that __do_clear_bss is
     needed at startup.  */

  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  if (mem != NULL_RTX && MEM_P (mem)
      && GET_CODE ((symbol = XEXP (mem, 0))) == SYMBOL_REF
      && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
    {

      if (!local_p)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      if (SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS)
	{
	  /* Emit the absolute-address definition: NAME = <value>.  */
	  assemble_name (stream, name);
	  fprintf (stream, " = %ld\n",
		   (long) INTVAL (avr_eval_addr_attrib (symbol)));
	}
      else if (local_p)
	/* A static I/O symbol without an explicit address cannot be
	   resolved by the linker, so it must be rejected here.  */
	error_at (DECL_SOURCE_LOCATION (decl),
		  "static IO declaration for %q+D needs an address", decl);
      return;
    }

  /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
     There is no need to trigger __do_clear_bss code for them.  */

  if (!STR_PREFIX_P (name, "__gnu_lto"))
    avr_need_clear_bss_p = true;

  if (local_p)
    ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
  else
    ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
}
9467 
9468 void
9469 avr_asm_asm_output_aligned_bss (FILE *file, tree decl, const char *name,
9470 				unsigned HOST_WIDE_INT size, int align,
9471 				void (*default_func)
9472 				  (FILE *, tree, const char *,
9473 				   unsigned HOST_WIDE_INT, int))
9474 {
9475   rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
9476   rtx symbol;
9477 
9478   if (mem != NULL_RTX && MEM_P (mem)
9479       && GET_CODE ((symbol = XEXP (mem, 0))) == SYMBOL_REF
9480       && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
9481     {
9482       if (!(SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS))
9483 	error_at (DECL_SOURCE_LOCATION (decl),
9484 		  "IO definition for %q+D needs an address", decl);
9485       avr_asm_output_aligned_decl_common (file, decl, name, size, align, false);
9486     }
9487   else
9488     default_func (file, decl, name, size, align);
9489 }
9490 
9491 
9492 /* Unnamed section callback for data_section
9493    to track need of __do_copy_data.  */
9494 
static void
avr_output_data_section_asm_op (const void *data)
{
  /* Record that startup code must copy .data from flash to RAM.  */
  avr_need_copy_data_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}
9503 
9504 
9505 /* Unnamed section callback for bss_section
9506    to track need of __do_clear_bss.  */
9507 
static void
avr_output_bss_section_asm_op (const void *data)
{
  /* Record that startup code must zero out .bss.  */
  avr_need_clear_bss_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}
9516 
9517 
9518 /* Unnamed section callback for progmem*.data sections.  */
9519 
static void
avr_output_progmem_section_asm_op (const void *data)
{
  /* DATA is the section name; emit the .section directive for an
     allocatable progmem data section.  */
  fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
           (const char*) data);
}
9526 
9527 
9528 /* Implement `TARGET_ASM_INIT_SECTIONS'.  */
9529 
9530 static void
9531 avr_asm_init_sections (void)
9532 {
9533   /* Set up a section for jump tables.  Alignment is handled by
9534      ASM_OUTPUT_BEFORE_CASE_LABEL.  */
9535 
9536   if (AVR_HAVE_JMP_CALL)
9537     {
9538       progmem_swtable_section
9539         = get_unnamed_section (0, output_section_asm_op,
9540                                "\t.section\t.progmem.gcc_sw_table"
9541                                ",\"a\",@progbits");
9542     }
9543   else
9544     {
9545       progmem_swtable_section
9546         = get_unnamed_section (SECTION_CODE, output_section_asm_op,
9547                                "\t.section\t.progmem.gcc_sw_table"
9548                                ",\"ax\",@progbits");
9549     }
9550 
9551   /* Override section callbacks to keep track of `avr_need_clear_bss_p'
9552      resp. `avr_need_copy_data_p'.  */
9553 
9554   readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
9555   data_section->unnamed.callback = avr_output_data_section_asm_op;
9556   bss_section->unnamed.callback = avr_output_bss_section_asm_op;
9557 }
9558 
9559 
9560 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'.  */
9561 
static section*
avr_asm_function_rodata_section (tree decl)
{
  /* If a function is unused and optimized out by -ffunction-sections
     and --gc-sections, ensure that the same will happen for its jump
     tables by putting them into individual sections.  */

  unsigned int flags;
  section * frodata;

  /* Get the frodata section from the default function in varasm.c
     but treat function-associated data-like jump tables as code
     rather than as user defined data.  AVR has no constant pools.
     Temporarily forcing flag_data_sections to flag_function_sections
     makes the default hook create a per-function section iff the
     function itself got one.  */
  {
    int fdata = flag_data_sections;

    flag_data_sections = flag_function_sections;
    frodata = default_function_rodata_section (decl);
    flag_data_sections = fdata;
    flags = frodata->common.flags;
  }

  if (frodata != readonly_data_section
      && flags & SECTION_NAMED)
    {
      /* Adjust section flags and replace section name prefix.  */

      unsigned int i;

      /* Pairs of (old_prefix, new_prefix): rodata names are rewritten
         into the progmem switch-table namespace.  */
      static const char* const prefix[] =
        {
          ".rodata",          ".progmem.gcc_sw_table",
          ".gnu.linkonce.r.", ".gnu.linkonce.t."
        };

      for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
        {
          const char * old_prefix = prefix[i];
          const char * new_prefix = prefix[i+1];
          const char * name = frodata->named.name;

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *rname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              /* Without JMP/CALL the table is fetched with RJMP/IJMP and
                 must be in a code section.  */
              flags &= ~SECTION_CODE;
              flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;

              return get_section (rname, flags, frodata->named.decl);
            }
        }
    }

  /* Fallback: the shared switch-table section set up in
     avr_asm_init_sections.  */
  return progmem_swtable_section;
}
9617 
9618 
9619 /* Implement `TARGET_ASM_NAMED_SECTION'.  */
9620 /* Track need of __do_clear_bss, __do_copy_data for named sections.  */
9621 
9622 static void
9623 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
9624 {
9625   if (flags & AVR_SECTION_PROGMEM)
9626     {
9627       addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
9628       const char *old_prefix = ".rodata";
9629       const char *new_prefix = avr_addrspace[as].section_name;
9630 
9631       if (STR_PREFIX_P (name, old_prefix))
9632         {
9633           const char *sname = ACONCAT ((new_prefix,
9634                                         name + strlen (old_prefix), NULL));
9635           default_elf_asm_named_section (sname, flags, decl);
9636           return;
9637         }
9638 
9639       default_elf_asm_named_section (new_prefix, flags, decl);
9640       return;
9641     }
9642 
9643   if (!avr_need_copy_data_p)
9644     avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
9645                             || STR_PREFIX_P (name, ".rodata")
9646                             || STR_PREFIX_P (name, ".gnu.linkonce.d"));
9647 
9648   if (!avr_need_clear_bss_p)
9649     avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
9650 
9651   default_elf_asm_named_section (name, flags, decl);
9652 }
9653 
9654 
9655 /* Implement `TARGET_SECTION_TYPE_FLAGS'.  */
9656 
9657 static unsigned int
9658 avr_section_type_flags (tree decl, const char *name, int reloc)
9659 {
9660   unsigned int flags = default_section_type_flags (decl, name, reloc);
9661 
9662   if (STR_PREFIX_P (name, ".noinit"))
9663     {
9664       if (decl && TREE_CODE (decl) == VAR_DECL
9665 	  && DECL_INITIAL (decl) == NULL_TREE)
9666 	flags |= SECTION_BSS;  /* @nobits */
9667       else
9668 	warning (0, "only uninitialized variables can be placed in the "
9669 		 ".noinit section");
9670     }
9671 
9672   if (decl && DECL_P (decl)
9673       && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
9674     {
9675       addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
9676 
9677       /* Attribute progmem puts data in generic address space.
9678          Set section flags as if it was in __flash to get the right
9679          section prefix in the remainder.  */
9680 
9681       if (ADDR_SPACE_GENERIC_P (as))
9682         as = ADDR_SPACE_FLASH;
9683 
9684       flags |= as * SECTION_MACH_DEP;
9685       flags &= ~SECTION_WRITE;
9686       flags &= ~SECTION_BSS;
9687     }
9688 
9689   return flags;
9690 }
9691 
9692 
/* Implement `TARGET_ENCODE_SECTION_INFO'.  */
/* Decorate DECL's rtl RTL:  diagnose bad progmem usage, patch the
   address space of flash-resident symbols and translate the `io',
   `io_low' and `address' attributes into SYMBOL_REF flags.
   NEW_DECL_P is nonzero on the first invocation for DECL.  */

static void
avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
{
  /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
     readily available, see PR34734.  So we postpone the warning
     about uninitialized data in program memory section until here.  */

  if (new_decl_p
      && decl && DECL_P (decl)
      && !DECL_EXTERNAL (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      if (!TREE_READONLY (decl))
        {
          // This might happen with C++ if stuff needs constructing.
          error ("variable %q+D with dynamic initialization put "
                 "into program memory area", decl);
        }
      else if (NULL_TREE == DECL_INITIAL (decl))
        {
          // Don't warn for (implicit) aliases like in PR80462.
          tree asmname = DECL_ASSEMBLER_NAME (decl);
          varpool_node *node = varpool_node::get_for_asmname (asmname);
          bool alias_p = node && node->alias;

          if (!alias_p)
            warning (OPT_Wuninitialized, "uninitialized variable %q+D put "
                     "into program memory area", decl);
        }
    }

  default_encode_section_info (decl, rtl, new_decl_p);

  /* For non-function decls whose rtl is (mem (symbol_ref ...)),
     record AVR-specific information on the symbol itself.  */

  if (decl && DECL_P (decl)
      && TREE_CODE (decl) != FUNCTION_DECL
      && MEM_P (rtl)
      && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
   {
      rtx sym = XEXP (rtl, 0);
      tree type = TREE_TYPE (decl);
      tree attr = DECL_ATTRIBUTES (decl);
      /* Erroneous types get no symbol decoration; the default hook
         above has already run at this point.  */
      if (type == error_mark_node)
	return;

      addr_space_t as = TYPE_ADDR_SPACE (type);

      /* PSTR strings are in generic space but located in flash:
         patch address space.  */

      if (-1 == avr_progmem_p (decl, attr))
        as = ADDR_SPACE_FLASH;

      AVR_SYMBOL_SET_ADDR_SPACE (sym, as);

      /* Pick the attribute (if any) that carries the address value.  */

      tree io_low_attr = lookup_attribute ("io_low", attr);
      tree io_attr = lookup_attribute ("io", attr);
      tree addr_attr;
      if (io_low_attr
	  && TREE_VALUE (io_low_attr) && TREE_VALUE (TREE_VALUE (io_low_attr)))
	/* NOTE(review): the condition tests io_low_attr but io_attr is
	   assigned.  When only `io_low (addr)' is present, io_attr is
	   NULL, so addr_attr stays NULL and SYMBOL_FLAG_ADDRESS below is
	   never set.  Looks like `io_low_attr' was intended -- verify
	   against avr_eval_addr_attrib before changing.  */
	addr_attr = io_attr;
      else if (io_attr
	       && TREE_VALUE (io_attr) && TREE_VALUE (TREE_VALUE (io_attr)))
	addr_attr = io_attr;
      else
	addr_attr = lookup_attribute ("address", attr);
      /* io_low always maps to the low I/O range; a plain io attribute
         does so only if its address operand is in the low range.  */
      if (io_low_attr
	  || (io_attr && addr_attr
              && low_io_address_operand
                  (GEN_INT (TREE_INT_CST_LOW
                            (TREE_VALUE (TREE_VALUE (addr_attr)))), QImode)))
	SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO_LOW;
      if (io_attr || io_low_attr)
	SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO;
      /* If we have an (io) address attribute specification, but the variable
	 is external, treat the address as only a tentative definition
	 to be used to determine if an io port is in the lower range, but
	 don't use the exact value for constant propagation.  */
      if (addr_attr && !DECL_EXTERNAL (decl))
	SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_ADDRESS;
    }
}
9776 
9777 
9778 /* Implement `TARGET_ASM_SELECT_SECTION' */
9779 
9780 static section *
9781 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
9782 {
9783   section * sect = default_elf_select_section (decl, reloc, align);
9784 
9785   if (decl && DECL_P (decl)
9786       && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
9787     {
9788       addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
9789 
9790       /* __progmem__ goes in generic space but shall be allocated to
9791          .progmem.data  */
9792 
9793       if (ADDR_SPACE_GENERIC_P (as))
9794         as = ADDR_SPACE_FLASH;
9795 
9796       if (sect->common.flags & SECTION_NAMED)
9797         {
9798           const char * name = sect->named.name;
9799           const char * old_prefix = ".rodata";
9800           const char * new_prefix = avr_addrspace[as].section_name;
9801 
9802           if (STR_PREFIX_P (name, old_prefix))
9803             {
9804               const char *sname = ACONCAT ((new_prefix,
9805                                             name + strlen (old_prefix), NULL));
9806               return get_section (sname, sect->common.flags, sect->named.decl);
9807             }
9808         }
9809 
9810       if (!progmem_section[as])
9811         {
9812           progmem_section[as]
9813             = get_unnamed_section (0, avr_output_progmem_section_asm_op,
9814                                    avr_addrspace[as].section_name);
9815         }
9816 
9817       return progmem_section[as];
9818     }
9819 
9820   return sect;
9821 }
9822 
9823 /* Implement `TARGET_ASM_FILE_START'.  */
9824 /* Outputs some text at the start of each assembler file.  */
9825 
9826 static void
9827 avr_file_start (void)
9828 {
9829   int sfr_offset = avr_arch->sfr_offset;
9830 
9831   if (avr_arch->asm_only)
9832     error ("architecture %qs supported for assembler only", avr_mmcu);
9833 
9834   default_file_start ();
9835 
9836   /* Print I/O addresses of some SFRs used with IN and OUT.  */
9837 
9838   if (AVR_HAVE_SPH)
9839     fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
9840 
9841   fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
9842   fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
9843   if (AVR_HAVE_RAMPZ)
9844     fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
9845   if (AVR_HAVE_RAMPY)
9846     fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
9847   if (AVR_HAVE_RAMPX)
9848     fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
9849   if (AVR_HAVE_RAMPD)
9850     fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
9851   if (AVR_XMEGA || AVR_TINY)
9852     fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
9853   fprintf (asm_out_file, "__tmp_reg__ = %d\n", AVR_TMP_REGNO);
9854   fprintf (asm_out_file, "__zero_reg__ = %d\n", AVR_ZERO_REGNO);
9855 }
9856 
9857 
9858 /* Implement `TARGET_ASM_FILE_END'.  */
9859 /* Outputs to the stdio stream FILE some
9860    appropriate text to go at the end of an assembler file.  */
9861 
9862 static void
9863 avr_file_end (void)
9864 {
9865   /* Output these only if there is anything in the
9866      .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
9867      input section(s) - some code size can be saved by not
9868      linking in the initialization code from libgcc if resp.
9869      sections are empty, see PR18145.  */
9870 
9871   if (avr_need_copy_data_p)
9872     fputs (".global __do_copy_data\n", asm_out_file);
9873 
9874   if (avr_need_clear_bss_p)
9875     fputs (".global __do_clear_bss\n", asm_out_file);
9876 }
9877 
9878 
/* Worker function for `ADJUST_REG_ALLOC_ORDER'.  */
/* Choose the order in which to allocate hard registers for
   pseudo-registers local to a basic block.

   Store the desired register order in the array `reg_alloc_order'.
   Element 0 should be the register to allocate first; element 1, the
   next register; and so on.  */

void
avr_adjust_reg_alloc_order (void)
{
  unsigned int i;
  /* All four tables below list the same 36 register numbers (GPRs 0-31
     plus 32-35, presumably the fake arg/frame pointer registers -- see
     the register definitions); only the order differs.  The copy loop
     at the bottom uses ARRAY_SIZE (order_0), so the tables must all
     stay the same length.  */
  static const int order_0[] =
    {
      24, 25,
      18, 19, 20, 21, 22, 23,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
  };
  static const int tiny_order_0[] = {
    20, 21,
    22, 23,
    24, 25,
    30, 31,
    26, 27,
    28, 29,
    19, 18,
    16, 17,
    32, 33, 34, 35,
    15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
  };
  static const int order_1[] =
    {
      18, 19, 20, 21, 22, 23, 24, 25,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
  };
  static const int tiny_order_1[] = {
    22, 23,
    24, 25,
    30, 31,
    26, 27,
    28, 29,
    21, 20, 19, 18,
    16, 17,
    32, 33, 34, 35,
    15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
  };
  static const int order_2[] =
    {
      25, 24, 23, 22, 21, 20, 19, 18,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      1, 0,
      32, 33, 34, 35
  };

  /* Select specific register allocation order.
     Tiny Core (ATtiny4/5/9/10/20/40) devices have only 16 registers,
     so different allocation order should be used.  */

  /* NOTE: there is no tiny variant of order_2; for -morder2 on tiny
     cores tiny_order_0 is used instead -- presumably intentional.  */
  const int *order = (TARGET_ORDER_1 ? (AVR_TINY ? tiny_order_1 : order_1)
                      : TARGET_ORDER_2 ? (AVR_TINY ? tiny_order_0 : order_2)
                      : (AVR_TINY ? tiny_order_0 : order_0));

  for (i = 0; i < ARRAY_SIZE (order_0); ++i)
      reg_alloc_order[i] = order[i];
}
9954 
9955 
9956 /* Implement `TARGET_REGISTER_MOVE_COST' */
9957 
9958 static int
9959 avr_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
9960                         reg_class_t from, reg_class_t to)
9961 {
9962   return (from == STACK_REG ? 6
9963           : to == STACK_REG ? 12
9964           : 2);
9965 }
9966 
9967 
9968 /* Implement `TARGET_MEMORY_MOVE_COST' */
9969 
9970 static int
9971 avr_memory_move_cost (machine_mode mode,
9972                       reg_class_t rclass ATTRIBUTE_UNUSED,
9973                       bool in ATTRIBUTE_UNUSED)
9974 {
9975   return (mode == QImode ? 2
9976           : mode == HImode ? 4
9977           : mode == SImode ? 8
9978           : mode == SFmode ? 8
9979           : 16);
9980 }
9981 
9982 
9983 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
9984    cost of an RTX operand given its context.  X is the rtx of the
9985    operand, MODE is its mode, and OUTER is the rtx_code of this
9986    operand's parent operator.  */
9987 
9988 static int
9989 avr_operand_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer,
9990 		      int opno, bool speed)
9991 {
9992   enum rtx_code code = GET_CODE (x);
9993   int total;
9994 
9995   switch (code)
9996     {
9997     case REG:
9998     case SUBREG:
9999       return 0;
10000 
10001     case CONST_INT:
10002     case CONST_FIXED:
10003     case CONST_DOUBLE:
10004       return COSTS_N_INSNS (GET_MODE_SIZE (mode));
10005 
10006     default:
10007       break;
10008     }
10009 
10010   total = 0;
10011   avr_rtx_costs (x, code, outer, opno, &total, speed);
10012   return total;
10013 }
10014 
10015 /* Worker function for AVR backend's rtx_cost function.
10016    X is rtx expression whose cost is to be calculated.
10017    Return true if the complete cost has been computed.
10018    Return false if subexpressions should be scanned.
10019    In either case, *TOTAL contains the cost result.  */
10020 
10021 static bool
10022 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
10023                  int opno ATTRIBUTE_UNUSED, int *total, bool speed)
10024 {
10025   enum rtx_code code = (enum rtx_code) codearg;
10026   machine_mode mode = GET_MODE (x);
10027   HOST_WIDE_INT val;
10028 
10029   switch (code)
10030     {
10031     case CONST_INT:
10032     case CONST_FIXED:
10033     case CONST_DOUBLE:
10034     case SYMBOL_REF:
10035     case CONST:
10036     case LABEL_REF:
10037       /* Immediate constants are as cheap as registers.  */
10038       *total = 0;
10039       return true;
10040 
10041     case MEM:
10042       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10043       return true;
10044 
10045     case NEG:
10046       switch (mode)
10047 	{
10048 	case QImode:
10049 	case SFmode:
10050 	  *total = COSTS_N_INSNS (1);
10051 	  break;
10052 
10053         case HImode:
10054         case PSImode:
10055         case SImode:
10056           *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
10057           break;
10058 
10059 	default:
10060 	  return false;
10061 	}
10062       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10063       return true;
10064 
10065     case ABS:
10066       switch (mode)
10067 	{
10068 	case QImode:
10069 	case SFmode:
10070 	  *total = COSTS_N_INSNS (1);
10071 	  break;
10072 
10073 	default:
10074 	  return false;
10075 	}
10076       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10077       return true;
10078 
10079     case NOT:
10080       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10081       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10082       return true;
10083 
10084     case ZERO_EXTEND:
10085       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
10086 			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
10087       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10088       return true;
10089 
10090     case SIGN_EXTEND:
10091       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
10092 			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
10093       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10094       return true;
10095 
10096     case PLUS:
10097       switch (mode)
10098 	{
10099 	case QImode:
10100           if (AVR_HAVE_MUL
10101               && MULT == GET_CODE (XEXP (x, 0))
10102               && register_operand (XEXP (x, 1), QImode))
10103             {
10104               /* multiply-add */
10105               *total = COSTS_N_INSNS (speed ? 4 : 3);
10106               /* multiply-add with constant: will be split and load constant. */
10107               if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
10108                 *total = COSTS_N_INSNS (1) + *total;
10109               return true;
10110             }
10111 	  *total = COSTS_N_INSNS (1);
10112 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10113 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10114 	  break;
10115 
10116 	case HImode:
10117           if (AVR_HAVE_MUL
10118               && (MULT == GET_CODE (XEXP (x, 0))
10119                   || ASHIFT == GET_CODE (XEXP (x, 0)))
10120               && register_operand (XEXP (x, 1), HImode)
10121               && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
10122                   || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
10123             {
10124               /* multiply-add */
10125               *total = COSTS_N_INSNS (speed ? 5 : 4);
10126               /* multiply-add with constant: will be split and load constant. */
10127               if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
10128                 *total = COSTS_N_INSNS (1) + *total;
10129               return true;
10130             }
10131 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10132 	    {
10133 	      *total = COSTS_N_INSNS (2);
10134 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10135 					      speed);
10136 	    }
10137 	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
10138 	    *total = COSTS_N_INSNS (1);
10139 	  else
10140 	    *total = COSTS_N_INSNS (2);
10141 	  break;
10142 
10143         case PSImode:
10144           if (!CONST_INT_P (XEXP (x, 1)))
10145             {
10146               *total = COSTS_N_INSNS (3);
10147               *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10148                                               speed);
10149             }
10150           else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
10151             *total = COSTS_N_INSNS (2);
10152           else
10153             *total = COSTS_N_INSNS (3);
10154           break;
10155 
10156 	case SImode:
10157 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10158 	    {
10159 	      *total = COSTS_N_INSNS (4);
10160 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10161 					      speed);
10162 	    }
10163 	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
10164 	    *total = COSTS_N_INSNS (1);
10165 	  else
10166 	    *total = COSTS_N_INSNS (4);
10167 	  break;
10168 
10169 	default:
10170 	  return false;
10171 	}
10172       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10173       return true;
10174 
10175     case MINUS:
10176       if (AVR_HAVE_MUL
10177           && QImode == mode
10178           && register_operand (XEXP (x, 0), QImode)
10179           && MULT == GET_CODE (XEXP (x, 1)))
10180         {
10181           /* multiply-sub */
10182           *total = COSTS_N_INSNS (speed ? 4 : 3);
10183           /* multiply-sub with constant: will be split and load constant. */
10184           if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
10185             *total = COSTS_N_INSNS (1) + *total;
10186           return true;
10187         }
10188       if (AVR_HAVE_MUL
10189           && HImode == mode
10190           && register_operand (XEXP (x, 0), HImode)
10191           && (MULT == GET_CODE (XEXP (x, 1))
10192               || ASHIFT == GET_CODE (XEXP (x, 1)))
10193           && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
10194               || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
10195         {
10196           /* multiply-sub */
10197           *total = COSTS_N_INSNS (speed ? 5 : 4);
10198           /* multiply-sub with constant: will be split and load constant. */
10199           if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
10200             *total = COSTS_N_INSNS (1) + *total;
10201           return true;
10202         }
10203       /* FALLTHRU */
10204     case AND:
10205     case IOR:
10206       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10207       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10208       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10209 	*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10210       return true;
10211 
10212     case XOR:
10213       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
10214       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10215       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10216       return true;
10217 
10218     case MULT:
10219       switch (mode)
10220 	{
10221 	case QImode:
10222 	  if (AVR_HAVE_MUL)
10223 	    *total = COSTS_N_INSNS (!speed ? 3 : 4);
10224 	  else if (!speed)
10225 	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10226 	  else
10227 	    return false;
10228 	  break;
10229 
10230 	case HImode:
10231 	  if (AVR_HAVE_MUL)
10232             {
10233               rtx op0 = XEXP (x, 0);
10234               rtx op1 = XEXP (x, 1);
10235               enum rtx_code code0 = GET_CODE (op0);
10236               enum rtx_code code1 = GET_CODE (op1);
10237               bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
10238               bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
10239 
10240               if (ex0
10241                   && (u8_operand (op1, HImode)
10242                       || s8_operand (op1, HImode)))
10243                 {
10244                   *total = COSTS_N_INSNS (!speed ? 4 : 6);
10245                   return true;
10246                 }
10247               if (ex0
10248                   && register_operand (op1, HImode))
10249                 {
10250                   *total = COSTS_N_INSNS (!speed ? 5 : 8);
10251                   return true;
10252                 }
10253               else if (ex0 || ex1)
10254                 {
10255                   *total = COSTS_N_INSNS (!speed ? 3 : 5);
10256                   return true;
10257                 }
10258               else if (register_operand (op0, HImode)
10259                        && (u8_operand (op1, HImode)
10260                            || s8_operand (op1, HImode)))
10261                 {
10262                   *total = COSTS_N_INSNS (!speed ? 6 : 9);
10263                   return true;
10264                 }
10265               else
10266                 *total = COSTS_N_INSNS (!speed ? 7 : 10);
10267             }
10268 	  else if (!speed)
10269 	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10270 	  else
10271 	    return false;
10272 	  break;
10273 
10274         case PSImode:
10275           if (!speed)
10276             *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10277           else
10278             *total = 10;
10279           break;
10280 
10281 	case SImode:
10282 	case DImode:
10283 	  if (AVR_HAVE_MUL)
10284             {
10285               if (!speed)
10286                 {
10287                   /* Add some additional costs besides CALL like moves etc.  */
10288 
10289                   *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
10290                 }
10291               else
10292                 {
10293                   /* Just a rough estimate.  Even with -O2 we don't want bulky
10294                      code expanded inline.  */
10295 
10296                   *total = COSTS_N_INSNS (25);
10297                 }
10298             }
10299           else
10300             {
10301               if (speed)
10302                 *total = COSTS_N_INSNS (300);
10303               else
10304                 /* Add some additional costs besides CALL like moves etc.  */
10305                 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
10306             }
10307 
10308 	   if (mode == DImode)
10309 	     *total *= 2;
10310 
10311 	   return true;
10312 
10313 	default:
10314 	  return false;
10315 	}
10316       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10317       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10318       return true;
10319 
10320     case DIV:
10321     case MOD:
10322     case UDIV:
10323     case UMOD:
10324       if (!speed)
10325         *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
10326       else
10327         *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
10328       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10329       /* For div/mod with const-int divisor we have at least the cost of
10330          loading the divisor. */
10331       if (CONST_INT_P (XEXP (x, 1)))
10332         *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
10333       /* Add some overall penaly for clobbering and moving around registers */
10334       *total += COSTS_N_INSNS (2);
10335       return true;
10336 
10337     case ROTATE:
10338       switch (mode)
10339 	{
10340 	case QImode:
10341 	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
10342 	    *total = COSTS_N_INSNS (1);
10343 
10344 	  break;
10345 
10346 	case HImode:
10347 	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
10348 	    *total = COSTS_N_INSNS (3);
10349 
10350 	  break;
10351 
10352 	case SImode:
10353 	  if (CONST_INT_P (XEXP (x, 1)))
10354 	    switch (INTVAL (XEXP (x, 1)))
10355 	      {
10356 	      case 8:
10357 	      case 24:
10358 		*total = COSTS_N_INSNS (5);
10359 		break;
10360 	      case 16:
10361 		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
10362 		break;
10363 	      }
10364 	  break;
10365 
10366 	default:
10367 	  return false;
10368 	}
10369       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10370       return true;
10371 
10372     case ASHIFT:
10373       switch (mode)
10374 	{
10375 	case QImode:
10376 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10377 	    {
10378 	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
10379 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10380 					      speed);
10381 	    }
10382 	  else
10383 	    {
10384 	      val = INTVAL (XEXP (x, 1));
10385 	      if (val == 7)
10386 		*total = COSTS_N_INSNS (3);
10387 	      else if (val >= 0 && val <= 7)
10388 		*total = COSTS_N_INSNS (val);
10389 	      else
10390 		*total = COSTS_N_INSNS (1);
10391 	    }
10392 	  break;
10393 
10394 	case HImode:
10395           if (AVR_HAVE_MUL)
10396             {
10397               if (const_2_to_7_operand (XEXP (x, 1), HImode)
10398                   && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
10399                       || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
10400                 {
10401                   *total = COSTS_N_INSNS (!speed ? 4 : 6);
10402                   return true;
10403                 }
10404             }
10405 
10406           if (const1_rtx == (XEXP (x, 1))
10407               && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
10408             {
10409               *total = COSTS_N_INSNS (2);
10410               return true;
10411             }
10412 
10413 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10414 	    {
10415 	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
10416 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10417 					      speed);
10418 	    }
10419 	  else
10420 	    switch (INTVAL (XEXP (x, 1)))
10421 	      {
10422 	      case 0:
10423 		*total = 0;
10424 		break;
10425 	      case 1:
10426 	      case 8:
10427 		*total = COSTS_N_INSNS (2);
10428 		break;
10429 	      case 9:
10430 		*total = COSTS_N_INSNS (3);
10431 		break;
10432 	      case 2:
10433 	      case 3:
10434 	      case 10:
10435 	      case 15:
10436 		*total = COSTS_N_INSNS (4);
10437 		break;
10438 	      case 7:
10439 	      case 11:
10440 	      case 12:
10441 		*total = COSTS_N_INSNS (5);
10442 		break;
10443 	      case 4:
10444 		*total = COSTS_N_INSNS (!speed ? 5 : 8);
10445 		break;
10446 	      case 6:
10447 		*total = COSTS_N_INSNS (!speed ? 5 : 9);
10448 		break;
10449 	      case 5:
10450 		*total = COSTS_N_INSNS (!speed ? 5 : 10);
10451 		break;
10452 	      default:
10453 	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
10454 	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10455 						speed);
10456 	      }
10457 	  break;
10458 
10459         case PSImode:
10460           if (!CONST_INT_P (XEXP (x, 1)))
10461             {
10462               *total = COSTS_N_INSNS (!speed ? 6 : 73);
10463             }
10464           else
10465             switch (INTVAL (XEXP (x, 1)))
10466               {
10467               case 0:
10468                 *total = 0;
10469                 break;
10470               case 1:
10471               case 8:
10472               case 16:
10473                 *total = COSTS_N_INSNS (3);
10474                 break;
10475               case 23:
10476                 *total = COSTS_N_INSNS (5);
10477                 break;
10478               default:
10479                 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
10480                 break;
10481               }
10482           break;
10483 
10484 	case SImode:
10485 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10486 	    {
10487 	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
10488 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10489 					      speed);
10490 	    }
10491 	  else
10492 	    switch (INTVAL (XEXP (x, 1)))
10493 	      {
10494 	      case 0:
10495 		*total = 0;
10496 		break;
10497 	      case 24:
10498 		*total = COSTS_N_INSNS (3);
10499 		break;
10500 	      case 1:
10501 	      case 8:
10502 	      case 16:
10503 		*total = COSTS_N_INSNS (4);
10504 		break;
10505 	      case 31:
10506 		*total = COSTS_N_INSNS (6);
10507 		break;
10508 	      case 2:
10509 		*total = COSTS_N_INSNS (!speed ? 7 : 8);
10510 		break;
10511 	      default:
10512 		*total = COSTS_N_INSNS (!speed ? 7 : 113);
10513 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10514 						speed);
10515 	      }
10516 	  break;
10517 
10518 	default:
10519 	  return false;
10520 	}
10521       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10522       return true;
10523 
10524     case ASHIFTRT:
10525       switch (mode)
10526 	{
10527 	case QImode:
10528 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10529 	    {
10530 	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
10531 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10532 					      speed);
10533 	    }
10534 	  else
10535 	    {
10536 	      val = INTVAL (XEXP (x, 1));
10537 	      if (val == 6)
10538 		*total = COSTS_N_INSNS (4);
10539 	      else if (val == 7)
10540 		*total = COSTS_N_INSNS (2);
10541 	      else if (val >= 0 && val <= 7)
10542 		*total = COSTS_N_INSNS (val);
10543 	      else
10544 		*total = COSTS_N_INSNS (1);
10545 	    }
10546 	  break;
10547 
10548 	case HImode:
10549 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10550 	    {
10551 	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
10552 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10553 					      speed);
10554 	    }
10555 	  else
10556 	    switch (INTVAL (XEXP (x, 1)))
10557 	      {
10558 	      case 0:
10559 		*total = 0;
10560 		break;
10561 	      case 1:
10562 		*total = COSTS_N_INSNS (2);
10563 		break;
10564 	      case 15:
10565 		*total = COSTS_N_INSNS (3);
10566 		break;
10567 	      case 2:
10568 	      case 7:
10569               case 8:
10570               case 9:
10571 		*total = COSTS_N_INSNS (4);
10572 		break;
10573               case 10:
10574 	      case 14:
10575 		*total = COSTS_N_INSNS (5);
10576 		break;
10577               case 11:
10578                 *total = COSTS_N_INSNS (!speed ? 5 : 6);
10579 		break;
10580               case 12:
10581                 *total = COSTS_N_INSNS (!speed ? 5 : 7);
10582 		break;
10583               case 6:
10584 	      case 13:
10585                 *total = COSTS_N_INSNS (!speed ? 5 : 8);
10586 		break;
10587 	      default:
10588 	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
10589 	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10590 						speed);
10591 	      }
10592 	  break;
10593 
10594         case PSImode:
10595           if (!CONST_INT_P (XEXP (x, 1)))
10596             {
10597               *total = COSTS_N_INSNS (!speed ? 6 : 73);
10598             }
10599           else
10600             switch (INTVAL (XEXP (x, 1)))
10601               {
10602               case 0:
10603                 *total = 0;
10604                 break;
10605               case 1:
10606                 *total = COSTS_N_INSNS (3);
10607                 break;
10608               case 16:
10609               case 8:
10610                 *total = COSTS_N_INSNS (5);
10611                 break;
10612               case 23:
10613                 *total = COSTS_N_INSNS (4);
10614                 break;
10615               default:
10616                 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
10617                 break;
10618               }
10619           break;
10620 
10621 	case SImode:
10622 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10623 	    {
10624 	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
10625 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10626 					      speed);
10627 	    }
10628 	  else
10629 	    switch (INTVAL (XEXP (x, 1)))
10630 	      {
10631 	      case 0:
10632 		*total = 0;
10633 		break;
10634 	      case 1:
10635 		*total = COSTS_N_INSNS (4);
10636 		break;
10637 	      case 8:
10638 	      case 16:
10639 	      case 24:
10640 		*total = COSTS_N_INSNS (6);
10641 		break;
10642 	      case 2:
10643 		*total = COSTS_N_INSNS (!speed ? 7 : 8);
10644 		break;
10645 	      case 31:
10646 		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
10647 		break;
10648 	      default:
10649 		*total = COSTS_N_INSNS (!speed ? 7 : 113);
10650 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10651 						speed);
10652 	      }
10653 	  break;
10654 
10655 	default:
10656 	  return false;
10657 	}
10658       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10659       return true;
10660 
10661     case LSHIFTRT:
10662       switch (mode)
10663 	{
10664 	case QImode:
10665 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10666 	    {
10667 	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
10668 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10669 					      speed);
10670 	    }
10671 	  else
10672 	    {
10673 	      val = INTVAL (XEXP (x, 1));
10674 	      if (val == 7)
10675 		*total = COSTS_N_INSNS (3);
10676 	      else if (val >= 0 && val <= 7)
10677 		*total = COSTS_N_INSNS (val);
10678 	      else
10679 		*total = COSTS_N_INSNS (1);
10680 	    }
10681 	  break;
10682 
10683 	case HImode:
10684 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10685 	    {
10686 	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
10687 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10688 					      speed);
10689 	    }
10690 	  else
10691 	    switch (INTVAL (XEXP (x, 1)))
10692 	      {
10693 	      case 0:
10694 		*total = 0;
10695 		break;
10696 	      case 1:
10697 	      case 8:
10698 		*total = COSTS_N_INSNS (2);
10699 		break;
10700 	      case 9:
10701 		*total = COSTS_N_INSNS (3);
10702 		break;
10703 	      case 2:
10704 	      case 10:
10705 	      case 15:
10706 		*total = COSTS_N_INSNS (4);
10707 		break;
10708 	      case 7:
10709               case 11:
10710 		*total = COSTS_N_INSNS (5);
10711 		break;
10712 	      case 3:
10713 	      case 12:
10714 	      case 13:
10715 	      case 14:
10716 		*total = COSTS_N_INSNS (!speed ? 5 : 6);
10717 		break;
10718 	      case 4:
10719 		*total = COSTS_N_INSNS (!speed ? 5 : 7);
10720 		break;
10721 	      case 5:
10722 	      case 6:
10723 		*total = COSTS_N_INSNS (!speed ? 5 : 9);
10724 		break;
10725 	      default:
10726 	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
10727 	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10728 						speed);
10729 	      }
10730 	  break;
10731 
10732         case PSImode:
10733           if (!CONST_INT_P (XEXP (x, 1)))
10734             {
10735               *total = COSTS_N_INSNS (!speed ? 6 : 73);
10736             }
10737           else
10738             switch (INTVAL (XEXP (x, 1)))
10739               {
10740               case 0:
10741                 *total = 0;
10742                 break;
10743               case 1:
10744               case 8:
10745               case 16:
10746                 *total = COSTS_N_INSNS (3);
10747                 break;
10748               case 23:
10749                 *total = COSTS_N_INSNS (5);
10750                 break;
10751               default:
10752                 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
10753                 break;
10754               }
10755           break;
10756 
10757 	case SImode:
10758 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10759 	    {
10760 	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
10761 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10762 					      speed);
10763 	    }
10764 	  else
10765 	    switch (INTVAL (XEXP (x, 1)))
10766 	      {
10767 	      case 0:
10768 		*total = 0;
10769 		break;
10770 	      case 1:
10771 		*total = COSTS_N_INSNS (4);
10772 		break;
10773 	      case 2:
10774 		*total = COSTS_N_INSNS (!speed ? 7 : 8);
10775 		break;
10776 	      case 8:
10777 	      case 16:
10778 	      case 24:
10779 		*total = COSTS_N_INSNS (4);
10780 		break;
10781 	      case 31:
10782 		*total = COSTS_N_INSNS (6);
10783 		break;
10784 	      default:
10785 		*total = COSTS_N_INSNS (!speed ? 7 : 113);
10786 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
10787 						speed);
10788 	      }
10789 	  break;
10790 
10791 	default:
10792 	  return false;
10793 	}
10794       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10795       return true;
10796 
10797     case COMPARE:
10798       switch (GET_MODE (XEXP (x, 0)))
10799 	{
10800 	case QImode:
10801 	  *total = COSTS_N_INSNS (1);
10802 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10803 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10804 	  break;
10805 
10806         case HImode:
10807 	  *total = COSTS_N_INSNS (2);
10808 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10809             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10810 	  else if (INTVAL (XEXP (x, 1)) != 0)
10811 	    *total += COSTS_N_INSNS (1);
10812           break;
10813 
10814         case PSImode:
10815           *total = COSTS_N_INSNS (3);
10816           if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
10817             *total += COSTS_N_INSNS (2);
10818           break;
10819 
10820         case SImode:
10821           *total = COSTS_N_INSNS (4);
10822           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
10823             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
10824 	  else if (INTVAL (XEXP (x, 1)) != 0)
10825 	    *total += COSTS_N_INSNS (3);
10826           break;
10827 
10828 	default:
10829 	  return false;
10830 	}
10831       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
10832       return true;
10833 
10834     case TRUNCATE:
10835       if (AVR_HAVE_MUL
10836           && LSHIFTRT == GET_CODE (XEXP (x, 0))
10837           && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
10838           && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
10839         {
10840           if (QImode == mode || HImode == mode)
10841             {
10842               *total = COSTS_N_INSNS (2);
10843               return true;
10844             }
10845         }
10846       break;
10847 
10848     default:
10849       break;
10850     }
10851   return false;
10852 }
10853 
10854 
10855 /* Implement `TARGET_RTX_COSTS'.  */
10856 
10857 static bool
10858 avr_rtx_costs (rtx x, int codearg, int outer_code,
10859 	       int opno, int *total, bool speed)
10860 {
10861   bool done = avr_rtx_costs_1 (x, codearg, outer_code,
10862                                opno, total, speed);
10863 
10864   if (avr_log.rtx_costs)
10865     {
10866       avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
10867                  done, speed ? "speed" : "size", *total, outer_code, x);
10868     }
10869 
10870   return done;
10871 }
10872 
10873 
10874 /* Implement `TARGET_ADDRESS_COST'.  */
10875 
10876 static int
10877 avr_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
10878                   addr_space_t as ATTRIBUTE_UNUSED,
10879                   bool speed ATTRIBUTE_UNUSED)
10880 {
10881   int cost = 4;
10882 
10883   if (GET_CODE (x) == PLUS
10884       && CONST_INT_P (XEXP (x, 1))
10885       && (REG_P (XEXP (x, 0))
10886           || GET_CODE (XEXP (x, 0)) == SUBREG))
10887     {
10888       if (INTVAL (XEXP (x, 1)) > MAX_LD_OFFSET(mode))
10889         cost = 18;
10890     }
10891   else if (CONSTANT_ADDRESS_P (x))
10892     {
10893       if (io_address_operand (x, QImode))
10894         cost = 2;
10895     }
10896 
10897   if (avr_log.address_cost)
10898     avr_edump ("\n%?: %d = %r\n", cost, x);
10899 
10900   return cost;
10901 }
10902 
10903 /* Test for extra memory constraint 'Q'.
10904    It's a memory address based on Y or Z pointer with valid displacement.  */
10905 
10906 int
10907 extra_constraint_Q (rtx x)
10908 {
10909   int ok = 0;
10910 
10911   if (GET_CODE (XEXP (x,0)) == PLUS
10912       && REG_P (XEXP (XEXP (x,0), 0))
10913       && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
10914       && (INTVAL (XEXP (XEXP (x,0), 1))
10915 	  <= MAX_LD_OFFSET (GET_MODE (x))))
10916     {
10917       rtx xx = XEXP (XEXP (x,0), 0);
10918       int regno = REGNO (xx);
10919 
10920       ok = (/* allocate pseudos */
10921             regno >= FIRST_PSEUDO_REGISTER
10922             /* strictly check */
10923             || regno == REG_Z || regno == REG_Y
10924             /* XXX frame & arg pointer checks */
10925             || xx == frame_pointer_rtx
10926             || xx == arg_pointer_rtx);
10927 
10928       if (avr_log.constraints)
10929         avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
10930                    ok, reload_completed, reload_in_progress, x);
10931     }
10932 
10933   return ok;
10934 }
10935 
10936 /* Convert condition code CONDITION to the valid AVR condition code.  */
10937 
10938 RTX_CODE
10939 avr_normalize_condition (RTX_CODE condition)
10940 {
10941   switch (condition)
10942     {
10943     case GT:
10944       return GE;
10945     case GTU:
10946       return GEU;
10947     case LE:
10948       return LT;
10949     case LEU:
10950       return LTU;
10951     default:
10952       gcc_unreachable ();
10953     }
10954 }
10955 
10956 /* Helper function for `avr_reorg'.  */
10957 
10958 static rtx
10959 avr_compare_pattern (rtx_insn *insn)
10960 {
10961   rtx pattern = single_set (insn);
10962 
10963   if (pattern
10964       && NONJUMP_INSN_P (insn)
10965       && SET_DEST (pattern) == cc0_rtx
10966       && GET_CODE (SET_SRC (pattern)) == COMPARE)
10967     {
10968       machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
10969       machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
10970 
10971       /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
10972          They must not be swapped, thus skip them.  */
10973 
10974       if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
10975           && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
10976         return pattern;
10977     }
10978 
10979   return NULL_RTX;
10980 }
10981 
10982 /* Helper function for `avr_reorg'.  */
10983 
10984 /* Expansion of switch/case decision trees leads to code like
10985 
10986        cc0 = compare (Reg, Num)
10987        if (cc0 == 0)
10988          goto L1
10989 
10990        cc0 = compare (Reg, Num)
10991        if (cc0 > 0)
10992          goto L2
10993 
10994    The second comparison is superfluous and can be deleted.
10995    The second jump condition can be transformed from a
10996    "difficult" one to a "simple" one because "cc0 > 0" and
10997    "cc0 >= 0" will have the same effect here.
10998 
10999    This function relies on the way switch/case is being expaned
11000    as binary decision tree.  For example code see PR 49903.
11001 
11002    Return TRUE if optimization performed.
11003    Return FALSE if nothing changed.
11004 
11005    INSN1 is a comparison, i.e. avr_compare_pattern != 0.
11006 
11007    We don't want to do this in text peephole because it is
11008    tedious to work out jump offsets there and the second comparison
11009    might have been transormed by `avr_reorg'.
11010 
11011    RTL peephole won't do because peephole2 does not scan across
11012    basic blocks.  */
11013 
static bool
avr_reorg_remove_redundant_compare (rtx_insn *insn1)
{
  rtx comp1, ifelse1, xcond1;
  rtx_insn *branch1;
  rtx comp2, ifelse2, xcond2;
  rtx_insn *branch2, *insn2;
  enum rtx_code code;
  rtx_insn *jump;
  rtx target, cond;

  /* Look out for:  compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  /* Both compares must be identical cc0-setters and both branches must
     be plain (set (pc) (if_then_else ...)) jumps -- otherwise leave the
     sequence alone.  */

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  /* The first branch must test cc0 == 0 (the EQ case of the decision
     tree); the second may use any cc0 comparison.  Both must jump to a
     label with the fall-through as the else arm, and the comparison
     must be register-against-constant.  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL)   goto L1;
        if (x > VAL)    goto L2;

     with easy

         if (x == VAL)   goto L1;
         if (x >= VAL)   goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT:  case LTU:
    case GE:  case GEU:
      break;

    case LE:  case LEU:
    case GT:  case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  /* Re-emit the first branch as branch_unspec right after INSN1, then
     the second (with the possibly normalized CODE) right after INSN2.
     Keep the JUMP_LABELs so the CFG bookkeeping stays correct.  */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}
11153 
11154 
11155 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'.  */
11156 /* Optimize conditional jumps.  */
11157 
static void
avr_reorg (void)
{
  rtx_insn *insn = get_insns();

  /* Walk all "real" insns; we only care about cc0-setting compares.  */

  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
    {
      rtx pattern = avr_compare_pattern (insn);

      if (!pattern)
        continue;

      /* First try to delete a redundant duplicate of this compare
         (the switch/case decision-tree pattern, see above).  */

      if (optimize
          && avr_reorg_remove_redundant_compare (insn))
        {
          continue;
        }

      if (compare_diff_p (insn))
	{
          /* Now we work under compare insn with difficult branch.  */

          rtx next = next_real_insn (insn);
          rtx pat = PATTERN (next);

          pattern = SET_SRC (pattern);

          if (true_regnum (XEXP (pattern, 0)) >= 0
              && true_regnum (XEXP (pattern, 1)) >= 0)
            {
              /* Both operands are registers: swap them in the compare
                 and invert the branch condition correspondingly, then
                 force re-recognition of the branch.  */
              rtx x = XEXP (pattern, 0);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 0) = XEXP (pattern, 1);
              XEXP (pattern, 1) = x;
              INSN_CODE (next) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && XEXP (pattern, 1) == const0_rtx)
            {
              /* This is a tst insn, we can reverse it.  */
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);

              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 1) = XEXP (pattern, 0);
              XEXP (pattern, 0) = const0_rtx;
              INSN_CODE (next) = -1;
              INSN_CODE (insn) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && CONST_INT_P (XEXP (pattern, 1)))
            {
              /* Register compared against a constant: when profitable,
                 bump the constant by one and normalize the condition
                 (e.g. "x > CST" becomes "x >= CST+1").  */
              rtx x = XEXP (pattern, 1);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              machine_mode mode = GET_MODE (XEXP (pattern, 0));

              if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
                {
                  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
                  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
                  INSN_CODE (next) = -1;
                  INSN_CODE (insn) = -1;
                }
            }
        }
    }
}
11228 
11229 /* Returns register number for function return value.*/
11230 
static inline unsigned int
avr_ret_register (void)
{
  /* The lowest register of the return-value register range is R24.  */
  const unsigned int first_return_reg = 24;

  return first_return_reg;
}
11236 
11237 
11238 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'.  */
11239 
11240 static bool
11241 avr_function_value_regno_p (const unsigned int regno)
11242 {
11243   return (regno == avr_ret_register ());
11244 }
11245 
11246 
11247 /* Implement `TARGET_LIBCALL_VALUE'.  */
11248 /* Create an RTX representing the place where a
11249    library function returns a value of mode MODE.  */
11250 
11251 static rtx
11252 avr_libcall_value (machine_mode mode,
11253 		   const_rtx func ATTRIBUTE_UNUSED)
11254 {
11255   int offs = GET_MODE_SIZE (mode);
11256 
11257   if (offs <= 4)
11258     offs = (offs + 1) & ~1;
11259 
11260   return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
11261 }
11262 
11263 
11264 /* Implement `TARGET_FUNCTION_VALUE'.  */
11265 /* Create an RTX representing the place where a
11266    function returns a value of data type VALTYPE.  */
11267 
11268 static rtx
11269 avr_function_value (const_tree type,
11270                     const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
11271                     bool outgoing ATTRIBUTE_UNUSED)
11272 {
11273   unsigned int offs;
11274 
11275   if (TYPE_MODE (type) != BLKmode)
11276     return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
11277 
11278   offs = int_size_in_bytes (type);
11279   if (offs < 2)
11280     offs = 2;
11281   if (offs > 2 && offs < GET_MODE_SIZE (SImode))
11282     offs = GET_MODE_SIZE (SImode);
11283   else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
11284     offs = GET_MODE_SIZE (DImode);
11285 
11286   return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
11287 }
11288 
11289 int
11290 test_hard_reg_class (enum reg_class rclass, rtx x)
11291 {
11292   int regno = true_regnum (x);
11293   if (regno < 0)
11294     return 0;
11295 
11296   if (TEST_HARD_REG_CLASS (rclass, regno))
11297     return 1;
11298 
11299   return 0;
11300 }
11301 
11302 
11303 /* Helper for jump_over_one_insn_p:  Test if INSN is a 2-word instruction
11304    and thus is suitable to be skipped by CPSE, SBRC, etc.  */
11305 
11306 static bool
11307 avr_2word_insn_p (rtx_insn *insn)
11308 {
11309   if (TARGET_SKIP_BUG
11310       || !insn
11311       || 2 != get_attr_length (insn))
11312     {
11313       return false;
11314     }
11315 
11316   switch (INSN_CODE (insn))
11317     {
11318     default:
11319       return false;
11320 
11321     case CODE_FOR_movqi_insn:
11322     case CODE_FOR_movuqq_insn:
11323     case CODE_FOR_movqq_insn:
11324       {
11325         rtx set  = single_set (insn);
11326         rtx src  = SET_SRC (set);
11327         rtx dest = SET_DEST (set);
11328 
11329         /* Factor out LDS and STS from movqi_insn.  */
11330 
11331         if (MEM_P (dest)
11332             && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
11333           {
11334             return CONSTANT_ADDRESS_P (XEXP (dest, 0));
11335           }
11336         else if (REG_P (dest)
11337                  && MEM_P (src))
11338           {
11339             return CONSTANT_ADDRESS_P (XEXP (src, 0));
11340           }
11341 
11342         return false;
11343       }
11344 
11345     case CODE_FOR_call_insn:
11346     case CODE_FOR_call_value_insn:
11347       return true;
11348     }
11349 }
11350 
11351 
11352 int
11353 jump_over_one_insn_p (rtx_insn *insn, rtx dest)
11354 {
11355   int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
11356 		      ? XEXP (dest, 0)
11357 		      : dest);
11358   int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
11359   int dest_addr = INSN_ADDRESSES (uid);
11360   int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
11361 
11362   return (jump_offset == 1
11363           || (jump_offset == 2
11364               && avr_2word_insn_p (next_active_insn (insn))));
11365 }
11366 
11367 
11368 /* Worker function for `HARD_REGNO_MODE_OK'.  */
11369 /* Returns 1 if a value of mode MODE can be stored starting with hard
11370    register number REGNO.  On the enhanced core, anything larger than
11371    1 byte must start in even numbered register for "movw" to work
11372    (this way we don't have to check for odd registers everywhere).  */
11373 
11374 int
11375 avr_hard_regno_mode_ok (int regno, machine_mode mode)
11376 {
11377   /* NOTE: 8-bit values must not be disallowed for R28 or R29.
11378         Disallowing QI et al. in these regs might lead to code like
11379             (set (subreg:QI (reg:HI 28) n) ...)
11380         which will result in wrong code because reload does not
11381         handle SUBREGs of hard regsisters like this.
11382         This could be fixed in reload.  However, it appears
11383         that fixing reload is not wanted by reload people.  */
11384 
11385   /* Any GENERAL_REGS register can hold 8-bit values.  */
11386 
11387   if (GET_MODE_SIZE (mode) == 1)
11388     return 1;
11389 
11390   /* FIXME: Ideally, the following test is not needed.
11391         However, it turned out that it can reduce the number
11392         of spill fails.  AVR and it's poor endowment with
11393         address registers is extreme stress test for reload.  */
11394 
11395   if (GET_MODE_SIZE (mode) >= 4
11396       && regno >= REG_X)
11397     return 0;
11398 
11399   /* All modes larger than 8 bits should start in an even register.  */
11400 
11401   return !(regno & 1);
11402 }
11403 
11404 
11405 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'.  */
11406 
11407 int
11408 avr_hard_regno_call_part_clobbered (unsigned regno, machine_mode mode)
11409 {
11410   /* FIXME: This hook gets called with MODE:REGNO combinations that don't
11411         represent valid hard registers like, e.g. HI:29.  Returning TRUE
11412         for such registers can lead to performance degradation as mentioned
11413         in PR53595.  Thus, report invalid hard registers as FALSE.  */
11414 
11415   if (!avr_hard_regno_mode_ok (regno, mode))
11416     return 0;
11417 
11418   /* Return true if any of the following boundaries is crossed:
11419      17/18, 27/28 and 29/30.  */
11420 
11421   return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
11422           || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
11423           || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
11424 }
11425 
11426 
11427 /* Implement `MODE_CODE_BASE_REG_CLASS'.  */
11428 
11429 enum reg_class
11430 avr_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
11431                               addr_space_t as, RTX_CODE outer_code,
11432                               RTX_CODE index_code ATTRIBUTE_UNUSED)
11433 {
11434   if (!ADDR_SPACE_GENERIC_P (as))
11435     {
11436       return POINTER_Z_REGS;
11437     }
11438 
11439   if (!avr_strict_X)
11440     return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
11441 
11442   return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
11443 }
11444 
11445 
11446 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'.  */
11447 
11448 bool
11449 avr_regno_mode_code_ok_for_base_p (int regno,
11450                                    machine_mode mode ATTRIBUTE_UNUSED,
11451                                    addr_space_t as ATTRIBUTE_UNUSED,
11452                                    RTX_CODE outer_code,
11453                                    RTX_CODE index_code ATTRIBUTE_UNUSED)
11454 {
11455   bool ok = false;
11456 
11457   if (!ADDR_SPACE_GENERIC_P (as))
11458     {
11459       if (regno < FIRST_PSEUDO_REGISTER
11460           && regno == REG_Z)
11461         {
11462           return true;
11463         }
11464 
11465       if (reg_renumber)
11466         {
11467           regno = reg_renumber[regno];
11468 
11469           if (regno == REG_Z)
11470             {
11471               return true;
11472             }
11473         }
11474 
11475       return false;
11476     }
11477 
11478   if (regno < FIRST_PSEUDO_REGISTER
11479       && (regno == REG_X
11480           || regno == REG_Y
11481           || regno == REG_Z
11482           || regno == ARG_POINTER_REGNUM))
11483     {
11484       ok = true;
11485     }
11486   else if (reg_renumber)
11487     {
11488       regno = reg_renumber[regno];
11489 
11490       if (regno == REG_X
11491           || regno == REG_Y
11492           || regno == REG_Z
11493           || regno == ARG_POINTER_REGNUM)
11494         {
11495           ok = true;
11496         }
11497     }
11498 
11499   if (avr_strict_X
11500       && PLUS == outer_code
11501       && regno == REG_X)
11502     {
11503       ok = false;
11504     }
11505 
11506   return ok;
11507 }
11508 
11509 
11510 /* A helper for `output_reload_insisf' and `output_reload_inhi'.  */
11511 /* Set 32-bit register OP[0] to compile-time constant OP[1].
11512    CLOBBER_REG is a QI clobber register or NULL_RTX.
11513    LEN == NULL: output instructions.
11514    LEN != NULL: set *LEN to the length of the instruction sequence
11515                 (in words) printed with LEN = NULL.
11516    If CLEAR_P is true, OP[0] had been cleard to Zero already.
11517    If CLEAR_P is false, nothing is known about OP[0].
11518 
11519    The effect on cc0 is as follows:
11520 
11521    Load 0 to any register except ZERO_REG : NONE
11522    Load ld register with any value        : NONE
11523    Anything else:                         : CLOBBER  */
11524 
static void
output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
{
  rtx src = op[1];
  rtx dest = op[0];
  rtx xval, xdest[4];
  int ival[4];
  /* Byte value currently held in CLOBBER_REG.  1234 is outside any 8-bit
     range and thus acts as "nothing cached yet" sentinel.  */
  int clobber_val = 1234;
  /* True iff we temporarily saved a register to __tmp_reg__ in order to
     use it as clobber; must be restored before returning.  */
  bool cooked_clobber_p = false;
  /* True once the T flag has been set by a SET instruction.  */
  bool set_p = false;
  machine_mode mode = GET_MODE (dest);
  int n, n_bytes = GET_MODE_SIZE (mode);

  gcc_assert (REG_P (dest)
              && CONSTANT_P (src));

  if (len)
    *len = 0;

  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */

  if (REGNO (dest) < 16
      && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
    {
      clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
    }

  /* We might need a clobber reg but don't have one.  Look at the value to
     be loaded more closely.  A clobber is only needed if it is a symbol
     or contains a byte that is neither 0, -1 or a power of 2.  */

  if (NULL_RTX == clobber_reg
      && !test_hard_reg_class (LD_REGS, dest)
      && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
          || !avr_popcount_each_byte (src, n_bytes,
                                      (1 << 0) | (1 << 1) | (1 << 8))))
    {
      /* We have no clobber register but need one.  Cook one up.
         That's cheaper than loading from constant pool.  */

      cooked_clobber_p = true;
      clobber_reg = all_regs_rtx[REG_Z + 1];
      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
    }

  /* Now start filling DEST from LSB to MSB.  */

  for (n = 0; n < n_bytes; n++)
    {
      int ldreg_p;
      bool done_byte = false;
      int j;
      rtx xop[3];

      /* Crop the n-th destination byte.  */

      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
      ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);

      /* Symbolic constant: emit per-byte LDI with the lo8/hi8/hlo8/hhi8
         link-time relocation selecting byte N.  LD regs take one LDI;
         other regs need an LDI into the clobber plus a MOV (2 words).  */

      if (!CONST_INT_P (src)
          && !CONST_FIXED_P (src)
          && !CONST_DOUBLE_P (src))
        {
          static const char* const asm_code[][2] =
            {
              { "ldi %2,lo8(%1)"  CR_TAB "mov %0,%2",    "ldi %0,lo8(%1)"  },
              { "ldi %2,hi8(%1)"  CR_TAB "mov %0,%2",    "ldi %0,hi8(%1)"  },
              { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2",    "ldi %0,hlo8(%1)" },
              { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2",    "ldi %0,hhi8(%1)" }
            };

          xop[0] = xdest[n];
          xop[1] = src;
          xop[2] = clobber_reg;

          avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);

          continue;
        }

      /* Crop the n-th source byte.  */

      xval = simplify_gen_subreg (QImode, src, mode, n);
      ival[n] = INTVAL (xval);

      /* Look if we can reuse the low word by means of MOVW.  */

      if (n == 2
          && n_bytes >= 4
          && AVR_HAVE_MOVW)
        {
          rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
          rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);

          if (INTVAL (lo16) == INTVAL (hi16))
            {
              /* If the value is 0 and the register is already cleared
                 (CLEAR_P) the MOVW can be omitted entirely.  */
              if (0 != INTVAL (lo16)
                  || !clear_p)
                {
                  avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
                }

              break;
            }
        }

      /* Don't use CLR so that cc0 is set as expected.  */

      if (ival[n] == 0)
        {
          if (!clear_p)
            avr_asm_len (ldreg_p ? "ldi %0,0"
                         : AVR_ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
                         : "mov %0,__zero_reg__",
                         &xdest[n], len, 1);
          continue;
        }

      /* The clobber reg itself is a destination byte and already holds
         the wanted value: nothing to do for this byte.  */

      if (clobber_val == ival[n]
          && REGNO (clobber_reg) == REGNO (xdest[n]))
        {
          continue;
        }

      /* LD_REGS can use LDI to move a constant value */

      if (ldreg_p)
        {
          xop[0] = xdest[n];
          xop[1] = xval;
          avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
          continue;
        }

      /* Try to reuse value already loaded in some lower byte. */

      for (j = 0; j < n; j++)
        if (ival[j] == ival[n])
          {
            xop[0] = xdest[n];
            xop[1] = xdest[j];

            avr_asm_len ("mov %0,%1", xop, len, 1);
            done_byte = true;
            break;
          }

      if (done_byte)
        continue;

      /* Need no clobber reg for -1: Use CLR/DEC */

      if (-1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("dec %0", &xdest[n], len, 1);
          continue;
        }
      else if (1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("inc %0", &xdest[n], len, 1);
          continue;
        }

      /* Use T flag or INC to manage powers of 2 if we have
         no clobber reg.  */

      if (NULL_RTX == clobber_reg
          && single_one_operand (xval, QImode))
        {
          /* SET once, then BLD the single 1-bit into position.  */
          xop[0] = xdest[n];
          xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));

          gcc_assert (constm1_rtx != xop[1]);

          if (!set_p)
            {
              set_p = true;
              avr_asm_len ("set", xop, len, 1);
            }

          if (!clear_p)
            avr_asm_len ("clr %0", xop, len, 1);

          avr_asm_len ("bld %0,%1", xop, len, 1);
          continue;
        }

      /* We actually need the LD_REGS clobber reg.  */

      gcc_assert (NULL_RTX != clobber_reg);

      /* Fallback: LDI into the clobber, MOV to destination; remember the
         value so following identical bytes can reuse the clobber.  */

      xop[0] = xdest[n];
      xop[1] = xval;
      xop[2] = clobber_reg;
      clobber_val = ival[n];

      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
                   "mov %0,%2", xop, len, 2);
    }

  /* If we cooked up a clobber reg above, restore it.  */

  if (cooked_clobber_p)
    {
      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
    }
}
11739 
11740 
11741 /* Reload the constant OP[1] into the HI register OP[0].
11742    CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
11743    into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
11744    need a clobber reg or have to cook one up.
11745 
11746    PLEN == NULL: Output instructions.
11747    PLEN != NULL: Output nothing.  Set *PLEN to number of words occupied
11748                  by the insns printed.
11749 
11750    Return "".  */
11751 
11752 const char*
11753 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
11754 {
11755   output_reload_in_const (op, clobber_reg, plen, false);
11756   return "";
11757 }
11758 
11759 
11760 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
11761    CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
11762    into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
11763    need a clobber reg or have to cook one up.
11764 
11765    LEN == NULL: Output instructions.
11766 
11767    LEN != NULL: Output nothing.  Set *LEN to number of words occupied
11768                 by the insns printed.
11769 
11770    Return "".  */
11771 
const char *
output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
{
  if (AVR_HAVE_MOVW
      && !test_hard_reg_class (LD_REGS, op[0])
      && (CONST_INT_P (op[1])
          || CONST_FIXED_P (op[1])
          || CONST_DOUBLE_P (op[1])))
    {
      int len_clr, len_noclr;

      /* In some cases it is better to clear the destination beforehand, e.g.

             CLR R2   CLR R3   MOVW R4,R2   INC R2

         is shorter than

             CLR R2   INC R2   CLR  R3      CLR R4   CLR R5

         We find it too tedious to work that out in the print function.
         Instead, we call the print function twice to get the lengths of
         both methods and use the shortest one.  */

      /* Dry-run both variants: a non-NULL length pointer makes the
         worker only count words without printing anything.  */

      output_reload_in_const (op, clobber_reg, &len_clr, true);
      output_reload_in_const (op, clobber_reg, &len_noclr, false);

      if (len_noclr - len_clr == 4)
        {
          /* Default needs 4 CLR instructions: clear register beforehand.  */

          /* Pre-clearing costs only 3 words thanks to MOVW.  */

          avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
                       "mov %B0,__zero_reg__" CR_TAB
                       "movw %C0,%A0", &op[0], len, 3);

          output_reload_in_const (op, clobber_reg, len, true);

          if (len)
            *len += 3;

          return "";
        }
    }

  /* Default: destination not pre-cleared.  */

  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}
11820 
11821 const char*
11822 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
11823 {
11824   output_reload_in_const (op, clobber_reg, len, false);
11825   return "";
11826 }
11827 
11828 
11829 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'.  */
11830 
11831 void
11832 avr_output_addr_vec_elt (FILE *stream, int value)
11833 {
11834   if (AVR_HAVE_JMP_CALL)
11835     fprintf (stream, "\t.word gs(.L%d)\n", value);
11836   else
11837     fprintf (stream, "\trjmp .L%d\n", value);
11838 }
11839 
11840 static void
11841 avr_conditional_register_usage(void)
11842 {
11843   if (AVR_TINY)
11844     {
11845       unsigned int i;
11846 
11847       const int tiny_reg_alloc_order[] = {
11848         24, 25,
11849         22, 23,
11850         30, 31,
11851         26, 27,
11852         28, 29,
11853         21, 20, 19, 18,
11854         16, 17,
11855         32, 33, 34, 35,
11856         15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
11857       };
11858 
11859       /* Set R0-R17 as fixed registers. Reset R0-R17 in call used register list
11860          - R0-R15 are not available in Tiny Core devices
11861          - R16 and R17 are fixed registers.  */
11862 
11863       for (i = 0;  i <= 17;  i++)
11864         {
11865           fixed_regs[i] = 1;
11866           call_used_regs[i] = 1;
11867         }
11868 
11869       /* Set R18 to R21 as callee saved registers
11870          - R18, R19, R20 and R21 are the callee saved registers in
11871            Tiny Core devices  */
11872 
11873       for (i = 18; i <= LAST_CALLEE_SAVED_REG; i++)
11874         {
11875           call_used_regs[i] = 0;
11876         }
11877 
11878       /* Update register allocation order for Tiny Core devices */
11879 
11880       for (i = 0; i < ARRAY_SIZE (tiny_reg_alloc_order); i++)
11881         {
11882           reg_alloc_order[i] = tiny_reg_alloc_order[i];
11883         }
11884 
11885       CLEAR_HARD_REG_SET (reg_class_contents[(int) ADDW_REGS]);
11886       CLEAR_HARD_REG_SET (reg_class_contents[(int) NO_LD_REGS]);
11887     }
11888 }
11889 
11890 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'.  */
11891 /* Returns true if SCRATCH are safe to be allocated as a scratch
11892    registers (for a define_peephole2) in the current function.  */
11893 
11894 static bool
11895 avr_hard_regno_scratch_ok (unsigned int regno)
11896 {
11897   /* Interrupt functions can only use registers that have already been saved
11898      by the prologue, even if they would normally be call-clobbered.  */
11899 
11900   if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
11901       && !df_regs_ever_live_p (regno))
11902     return false;
11903 
11904   /* Don't allow hard registers that might be part of the frame pointer.
11905      Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
11906      and don't care for a frame pointer that spans more than one register.  */
11907 
11908   if ((!reload_completed || frame_pointer_needed)
11909       && (regno == REG_Y || regno == REG_Y + 1))
11910     {
11911       return false;
11912     }
11913 
11914   return true;
11915 }
11916 
11917 
11918 /* Worker function for `HARD_REGNO_RENAME_OK'.  */
11919 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */
11920 
11921 int
11922 avr_hard_regno_rename_ok (unsigned int old_reg,
11923 			  unsigned int new_reg)
11924 {
11925   /* Interrupt functions can only use registers that have already been
11926      saved by the prologue, even if they would normally be
11927      call-clobbered.  */
11928 
11929   if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
11930       && !df_regs_ever_live_p (new_reg))
11931     return 0;
11932 
11933   /* Don't allow hard registers that might be part of the frame pointer.
11934      Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
11935      and don't care for a frame pointer that spans more than one register.  */
11936 
11937   if ((!reload_completed || frame_pointer_needed)
11938       && (old_reg == REG_Y || old_reg == REG_Y + 1
11939           || new_reg == REG_Y || new_reg == REG_Y + 1))
11940     {
11941       return 0;
11942     }
11943 
11944   return 1;
11945 }
11946 
11947 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
11948    or memory location in the I/O space (QImode only).
11949 
11950    Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
11951    Operand 1: register operand to test, or CONST_INT memory address.
11952    Operand 2: bit number.
11953    Operand 3: label to jump to if the test is true.  */
11954 
const char*
avr_out_sbxx_branch (rtx_insn *insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  /* An insn of 4 or more words cannot reach the target with the short
     form; emit JMP behind an inverted skip instead.  */
  bool long_jump = get_attr_length (insn) >= 4;
  /* Invert the condition when the skip instruction jumps straight over
     the branch, either for the long form or because the jump would only
     skip a single insn anyway.  */
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* GE / LT against zero examine just the sign bit, so they reduce to
     bit-clear / bit-set tests.  */

  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:
    case CONST:
    case SYMBOL_REF:

      /* Bit test on an I/O location.  SBIS/SBIC only reach the low I/O
         range; for higher I/O addresses read the port into __tmp_reg__
         first.  %i prints the operand as an I/O address.  */

      if (low_io_address_operand (operands[1], QImode))
        {
          if (comp == EQ)
            output_asm_insn ("sbis %i1,%2", operands);
          else
            output_asm_insn ("sbic %i1,%2", operands);
        }
      else
        {
	  gcc_assert (io_address_operand (operands[1], QImode));
          output_asm_insn ("in __tmp_reg__,%i1", operands);
          if (comp == EQ)
            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
          else
            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
        }

      break; /* CONST_INT */

    case REG:

      /* Bit test on a register; %T1%T2 prints the register / bit-number
         pair of operands 1 and 2.  */

      if (comp == EQ)
        output_asm_insn ("sbrs %T1%T2", operands);
      else
        output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    }        /* switch */

  /* The skip emitted above jumps over the branch printed here.  */

  if (long_jump)
    return ("rjmp .+4" CR_TAB
            "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  return "";
}
12017 
12018 /* Worker function for `TARGET_ASM_CONSTRUCTOR'.  */
12019 
12020 static void
12021 avr_asm_out_ctor (rtx symbol, int priority)
12022 {
12023   fputs ("\t.global __do_global_ctors\n", asm_out_file);
12024   default_ctor_section_asm_out_constructor (symbol, priority);
12025 }
12026 
12027 
12028 /* Worker function for `TARGET_ASM_DESTRUCTOR'.  */
12029 
12030 static void
12031 avr_asm_out_dtor (rtx symbol, int priority)
12032 {
12033   fputs ("\t.global __do_global_dtors\n", asm_out_file);
12034   default_dtor_section_asm_out_destructor (symbol, priority);
12035 }
12036 
12037 
12038 /* Worker function for `TARGET_RETURN_IN_MEMORY'.  */
12039 
12040 static bool
12041 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
12042 {
12043   HOST_WIDE_INT size = int_size_in_bytes (type);
12044   HOST_WIDE_INT ret_size_limit = AVR_TINY ? 4 : 8;
12045 
12046   /* In avr, there are 8 return registers. But, for Tiny Core
12047      (ATtiny4/5/9/10/20/40) devices, only 4 registers are available.
12048      Return true if size is unknown or greater than the limit.  */
12049 
12050   if (size == -1 || size > ret_size_limit)
12051     {
12052       return true;
12053     }
12054   else
12055     {
12056       return false;
12057     }
12058 }
12059 
12060 
12061 /* Implement `CASE_VALUES_THRESHOLD'.  */
12062 /* Supply the default for --param case-values-threshold=0  */
12063 
static unsigned int
avr_case_values_threshold (void)
{
  /* Where exactly the break-even point between a jump table and an
     if-else tree lies depends on information not available here: whether
     8-bit comparisons can be used, the range of the case values, whether
     a case value can be reused, register allocation, and so on.
     Empirically, 7 is a good compromise.  */

  return 7;
}
12075 
12076 
12077 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'.  */
12078 
12079 static machine_mode
12080 avr_addr_space_address_mode (addr_space_t as)
12081 {
12082   return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
12083 }
12084 
12085 
12086 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'.  */
12087 
12088 static machine_mode
12089 avr_addr_space_pointer_mode (addr_space_t as)
12090 {
12091   return avr_addr_space_address_mode (as);
12092 }
12093 
12094 
12095 /* Helper for following function.  */
12096 
12097 static bool
12098 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
12099 {
12100   gcc_assert (REG_P (reg));
12101 
12102   if (strict)
12103     {
12104       return REGNO (reg) == REG_Z;
12105     }
12106 
12107   /* Avoid combine to propagate hard regs.  */
12108 
12109   if (can_create_pseudo_p()
12110       && REGNO (reg) < REG_Z)
12111     {
12112       return false;
12113     }
12114 
12115   return true;
12116 }
12117 
12118 
12119 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.  */
12120 
static bool
avr_addr_space_legitimate_address_p (machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      /* RAM: use the ordinary legitimate-address check.  */
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      /* Flash is addressed by a plain register or register
         post-increment; only Z qualifies under strict checking.  */

      switch (GET_CODE (x))
        {
        case REG:
          ok = avr_reg_ok_for_pgm_addr (x, strict);
          break;

        case POST_INC:
          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
          break;

        default:
          break;
        }

      break; /* FLASH */

    case ADDR_SPACE_MEMX:
      if (REG_P (x))
        ok = (!strict
              && can_create_pseudo_p());

      /* A 24-bit __memx address is (lo_sum hi8-register Z).  */

      if (LO_SUM == GET_CODE (x))
        {
          rtx hi = XEXP (x, 0);
          rtx lo = XEXP (x, 1);

          ok = (REG_P (hi)
                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
                && REG_P (lo)
                && REGNO (lo) == REG_Z);
        }

      break; /* MEMX */
    }

  /* Optional debug dump controlled by -mlog=legitimate_address_p.  */

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
12199 
12200 
12201 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'.  */
12202 
12203 static rtx
12204 avr_addr_space_legitimize_address (rtx x, rtx old_x,
12205                                    machine_mode mode, addr_space_t as)
12206 {
12207   if (ADDR_SPACE_GENERIC_P (as))
12208     return avr_legitimize_address (x, old_x, mode);
12209 
12210   if (avr_log.legitimize_address)
12211     {
12212       avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
12213     }
12214 
12215   return old_x;
12216 }
12217 
12218 
12219 /* Implement `TARGET_ADDR_SPACE_CONVERT'.  */
12220 
static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      /* Strip CONST / PLUS wrappers to get at a possible SYMBOL_REF.  */

      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g. PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF == GET_CODE (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory: RAM has bit 23 set.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      /* Extend the 16-bit address to 24 bits: zero-extend when the high
         byte is 0, otherwise splice in the segment / RAM marker byte.  */

      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  /* Same pointer width on both sides: no conversion needed.  */

  return src;
}
12286 
12287 
12288 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'.  */
12289 
12290 static bool
12291 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
12292                          addr_space_t superset ATTRIBUTE_UNUSED)
12293 {
12294   /* Allow any kind of pointer mess.  */
12295 
12296   return true;
12297 }
12298 
12299 
12300 /* Implement `TARGET_CONVERT_TO_TYPE'.  */
12301 
static tree
avr_convert_to_type (tree type, tree expr)
{
  /* Print a diagnose for pointer conversion that changes the address
     space of the pointer target to a non-enclosing address space,
     provided -Waddr-space-convert is on.

     FIXME: Filter out cases where the target object is known to
            be located in the right memory, like in

                (const __flash*) PSTR ("text")

            Also try to distinguish between explicit casts requested by
            the user and implicit casts like

                void f (const __flash char*);

                void g (const char *p)
                {
                    f ((const __flash*) p);
                }

            under the assumption that an explicit casts means that the user
            knows what he is doing, e.g. interface with PSTR or old style
            code with progmem and pgm_read_xxx.
  */

  if (avr_warn_addr_space_convert
      && expr != error_mark_node
      && POINTER_TYPE_P (type)
      && POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      /* Address spaces of the pointer targets before / after the cast.  */
      addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
      addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));

      if (avr_log.progmem)
        avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);

      /* Casting to __memx is always safe since it encloses all other
         spaces; only warn for other space-changing conversions.  */

      if (as_new != ADDR_SPACE_MEMX
          && as_new != as_old)
        {
          location_t loc = EXPR_LOCATION (expr);
          const char *name_old = avr_addrspace[as_old].name;
          const char *name_new = avr_addrspace[as_new].name;

          warning (OPT_Waddr_space_convert,
                   "conversion from address space %qs to address space %qs",
                   ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
                   ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);

          /* Build the conversion explicitly so the warning is only
             emitted once for this expression.  */

          return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
        }
    }

  /* NULL_TREE requests the default conversion behavior.  */

  return NULL_TREE;
}
12358 
12359 
12360 /* PR63633: The middle-end might come up with hard regs as input operands.
12361 
12362    RMASK is a bit mask representing a subset of hard registers R0...R31:
12363    Rn is an element of that set iff bit n of RMASK is set.
12364    OPMASK describes a subset of OP[]:  If bit n of OPMASK is 1 then
12365    OP[n] has to be fixed; otherwise OP[n] is left alone.
12366 
12367    For each element of OPMASK which is a hard register overlapping RMASK,
12368    replace OP[n] with a newly created pseudo register
12369 
12370    HREG == 0:  Also emit a move insn that copies the contents of that
12371                hard register into the new pseudo.
12372 
12373    HREG != 0:  Also set HREG[n] to the hard register.  */
12374 
static void
avr_fix_operands (rtx *op, rtx *hreg, unsigned opmask, unsigned rmask)
{
  /* Walk OP[] under control of OPMASK, LSB first.  */

  for (; opmask; opmask >>= 1, op++)
    {
      rtx reg = *op;

      if (hreg)
        *hreg = NULL_RTX;

      if ((opmask & 1)
          && REG_P (reg)
          && REGNO (reg) < FIRST_PSEUDO_REGISTER
          // This hard-reg overlaps other prohibited hard regs?
          && (rmask & regmask (GET_MODE (reg), REGNO (reg))))
        {
          /* Replace the offending hard reg by a fresh pseudo.  For
             inputs (HREG == NULL) copy the old value in now; for
             outputs just record the hard reg so the caller can copy
             back after the insn.  */

          *op = gen_reg_rtx (GET_MODE (reg));
          if (hreg == NULL)
            emit_move_insn (*op, reg);
          else
            *hreg = reg;
        }

      if (hreg)
        hreg++;
    }
}
12402 
12403 
12404 void
12405 avr_fix_inputs (rtx *op, unsigned opmask, unsigned rmask)
12406 {
12407   avr_fix_operands (op, NULL, opmask, rmask);
12408 }
12409 
12410 
12411 /* Helper for the function below:  If bit n of MASK is set and
12412    HREG[n] != NULL, then emit a move insn to copy OP[n] to HREG[n].
12413    Otherwise do nothing for that n.  Return TRUE.  */
12414 
12415 static bool
12416 avr_move_fixed_operands (rtx *op, rtx *hreg, unsigned mask)
12417 {
12418   for (; mask; mask >>= 1, op++, hreg++)
12419     if ((mask & 1)
12420         && *hreg)
12421       emit_move_insn (*hreg, *op);
12422 
12423   return true;
12424 }
12425 
12426 
12427 /* PR63633: The middle-end might come up with hard regs as output operands.
12428 
12429    GEN is a sequence generating function like gen_mulsi3 with 3 operands OP[].
12430    RMASK is a bit mask representing a subset of hard registers R0...R31:
12431    Rn is an element of that set iff bit n of RMASK is set.
12432    OPMASK describes a subset of OP[]:  If bit n of OPMASK is 1 then
12433    OP[n] has to be fixed; otherwise OP[n] is left alone.
12434 
12435    Emit the insn sequence as generated by GEN() with all elements of OPMASK
12436    which are hard registers overlapping RMASK replaced by newly created
12437    pseudo registers.  After the sequence has been emitted, emit insns that
12438    move the contents of respective pseudos to their hard regs.  */
12439 
bool
avr_emit3_fix_outputs (rtx (*gen)(rtx,rtx,rtx), rtx *op,
                       unsigned opmask, unsigned rmask)
{
  const int n = 3;
  rtx hreg[n];

  /* It is legitimate for GEN to call this function, and in order not to
     get self-recursive we use the following static kludge.  This is the
     only way not to duplicate all expanders and to avoid ugly and
     hard-to-maintain C-code instead of the much more appreciated RTL
     representation as supplied by define_expand.  */
  static bool lock = false;

  gcc_assert (opmask < (1u << n));

  if (lock)
    return false;

  /* Replace offending hard-reg outputs with pseudos; the original hard
     regs are recorded in HREG[].  */

  avr_fix_operands (op, hreg, opmask, rmask);

  lock = true;
  emit_insn (gen (op[0], op[1], op[2]));
  lock = false;

  /* Copy the results from the pseudos back into the hard regs.  */

  return avr_move_fixed_operands (op, hreg, opmask);
}
12467 
12468 
12469 /* Worker function for movmemhi expander.
12470    XOP[0]  Destination as MEM:BLK
12471    XOP[1]  Source      "     "
12472    XOP[2]  # Bytes to copy
12473 
12474    Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand combination is not supported.  */
12476 
bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  /* Flash is read-only: we cannot copy into it.  */

  if (avr_mem_flash_p (xop[0]))
    return false;

  /* Only fixed-size copies are handled here.  */

  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src  = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      /* 24-bit __memx source: split the address into a 16-bit low part
         and the hi8 segment byte.  */

      gcc_assert (as == ADDR_SPACE_MEMX);

      /* NOTE(review): strict '<' here vs. '<=' in the non-MEMX branch
         below — presumably intentional (the two copy loops count
         differently); confirm against the movmemx patterns.  */
      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      /* Multi-segment flash: preset RAMPZ with the segment number.  */

      if (segment
          && avr_n_flash > 1)
        {
          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
        on its own.  Thus, we allocate the pointer registers by hand:
        Z = source address
        X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
        register(s) inside the loop leading to additional move instruction
        to/from stack which could clobber tmp_reg.  Thus, do *not* emit
        load and store as separate insns.  Instead, we perform the copy
        by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      /* __memx: the load insn is picked at run time depending on the
         segment byte, which is passed in R23.  */

      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  /* Attach the address space to the source MEM of the copy insn.  */

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
12574 
12575 
12576 /* Print assembler for movmem_qi, movmem_hi insns...
12577        $0     : Address Space
12578        $1, $2 : Loop register
12579        Z      : Source address
12580        X      : Destination address
12581 */
12582 
12583 const char*
12584 avr_out_movmem (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
12585 {
12586   addr_space_t as = (addr_space_t) INTVAL (op[0]);
12587   machine_mode loop_mode = GET_MODE (op[1]);
12588   bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
12589   rtx xop[3];
12590 
12591   if (plen)
12592     *plen = 0;
12593 
12594   xop[0] = op[0];
12595   xop[1] = op[1];
12596   xop[2] = tmp_reg_rtx;
12597 
12598   /* Loop label */
12599 
12600   avr_asm_len ("0:", xop, plen, 0);
12601 
12602   /* Load with post-increment */
12603 
12604   switch (as)
12605     {
12606     default:
12607       gcc_unreachable();
12608 
12609     case ADDR_SPACE_GENERIC:
12610 
12611       avr_asm_len ("ld %2,Z+", xop, plen, 1);
12612       break;
12613 
12614     case ADDR_SPACE_FLASH:
12615 
12616       if (AVR_HAVE_LPMX)
12617         avr_asm_len ("lpm %2,Z+", xop, plen, 1);
12618       else
12619         avr_asm_len ("lpm" CR_TAB
12620                      "adiw r30,1", xop, plen, 2);
12621       break;
12622 
12623     case ADDR_SPACE_FLASH1:
12624     case ADDR_SPACE_FLASH2:
12625     case ADDR_SPACE_FLASH3:
12626     case ADDR_SPACE_FLASH4:
12627     case ADDR_SPACE_FLASH5:
12628 
12629       if (AVR_HAVE_ELPMX)
12630         avr_asm_len ("elpm %2,Z+", xop, plen, 1);
12631       else
12632         avr_asm_len ("elpm" CR_TAB
12633                      "adiw r30,1", xop, plen, 2);
12634       break;
12635     }
12636 
12637   /* Store with post-increment */
12638 
12639   avr_asm_len ("st X+,%2", xop, plen, 1);
12640 
12641   /* Decrement loop-counter and set Z-flag */
12642 
12643   if (QImode == loop_mode)
12644     {
12645       avr_asm_len ("dec %1", xop, plen, 1);
12646     }
12647   else if (sbiw_p)
12648     {
12649       avr_asm_len ("sbiw %1,1", xop, plen, 1);
12650     }
12651   else
12652     {
12653       avr_asm_len ("subi %A1,1" CR_TAB
12654                    "sbci %B1,0", xop, plen, 2);
12655     }
12656 
12657   /* Loop until zero */
12658 
12659   return avr_asm_len ("brne 0b", xop, plen, 1);
12660 }
12661 
12662 
12663 
12664 /* Helper for __builtin_avr_delay_cycles */
12665 
12666 static rtx
12667 avr_mem_clobber (void)
12668 {
12669   rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
12670   MEM_VOLATILE_P (mem) = 1;
12671   return mem;
12672 }
12673 
12674 static void
12675 avr_expand_delay_cycles (rtx operands0)
12676 {
12677   unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
12678   unsigned HOST_WIDE_INT cycles_used;
12679   unsigned HOST_WIDE_INT loop_count;
12680 
12681   if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
12682     {
12683       loop_count = ((cycles - 9) / 6) + 1;
12684       cycles_used = ((loop_count - 1) * 6) + 9;
12685       emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
12686                                      avr_mem_clobber()));
12687       cycles -= cycles_used;
12688     }
12689 
12690   if (IN_RANGE (cycles, 262145, 83886081))
12691     {
12692       loop_count = ((cycles - 7) / 5) + 1;
12693       if (loop_count > 0xFFFFFF)
12694         loop_count = 0xFFFFFF;
12695       cycles_used = ((loop_count - 1) * 5) + 7;
12696       emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
12697                                      avr_mem_clobber()));
12698       cycles -= cycles_used;
12699     }
12700 
12701   if (IN_RANGE (cycles, 768, 262144))
12702     {
12703       loop_count = ((cycles - 5) / 4) + 1;
12704       if (loop_count > 0xFFFF)
12705         loop_count = 0xFFFF;
12706       cycles_used = ((loop_count - 1) * 4) + 5;
12707       emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
12708                                      avr_mem_clobber()));
12709       cycles -= cycles_used;
12710     }
12711 
12712   if (IN_RANGE (cycles, 6, 767))
12713     {
12714       loop_count = cycles / 3;
12715       if (loop_count > 255)
12716         loop_count = 255;
12717       cycles_used = loop_count * 3;
12718       emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
12719                                      avr_mem_clobber()));
12720       cycles -= cycles_used;
12721       }
12722 
12723   while (cycles >= 2)
12724     {
12725       emit_insn (gen_nopv (GEN_INT(2)));
12726       cycles -= 2;
12727     }
12728 
12729   if (cycles == 1)
12730     {
12731       emit_insn (gen_nopv (GEN_INT(1)));
12732       cycles--;
12733     }
12734 }
12735 
12736 
12737 /* Compute the image of x under f, i.e. perform   x --> f(x)    */
12738 
/* Compute the image of X under the nibble-map F, i.e. return nibble
   no. X of F:  perform  x --> f(x).  For X outside { 0 ... 7 } the
   result is 0.  The lower bound is checked explicitly because a
   negative X would otherwise yield a negative shift count, which is
   undefined behavior (original code only tested  x < 8).  */

static int
avr_map (unsigned int f, int x)
{
  return (x >= 0 && x < 8) ? (f >> (4 * x)) & 0xf : 0;
}
12744 
12745 
12746 /* Return some metrics of map A.  */
12747 
/* Selector for avr_map_metric: which statistic of a nibble-map to
   compute.  A point i in { 0 ... 7 } is "fixed" iff the map sends
   it to itself.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };
12765 
12766 static unsigned
12767 avr_map_metric (unsigned int a, int mode)
12768 {
12769   unsigned i, metric = 0;
12770 
12771   for (i = 0; i < 8; i++)
12772     {
12773       unsigned ai = avr_map (a, i);
12774 
12775       if (mode == MAP_FIXED_0_7)
12776         metric += ai == i;
12777       else if (mode == MAP_NONFIXED_0_7)
12778         metric += ai < 8 && ai != i;
12779       else if (mode == MAP_MASK_FIXED_0_7)
12780         metric |= ((unsigned) (ai == i)) << i;
12781       else if (mode == MAP_PREIMAGE_0_7)
12782         metric += ai < 8;
12783       else if (mode == MAP_MASK_PREIMAGE_F)
12784         metric |= ((unsigned) (ai == 0xf)) << i;
12785       else
12786         gcc_unreachable();
12787     }
12788 
12789   return metric;
12790 }
12791 
12792 
12793 /* Return true if IVAL has a 0xf in its hexadecimal representation
12794    and false, otherwise.  Only nibbles 0..7 are taken into account.
12795    Used as constraint helper for C0f and Cxf.  */
12796 
12797 bool
12798 avr_has_nibble_0xf (rtx ival)
12799 {
12800   unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
12801   return 0 != avr_map_metric (map, MAP_MASK_PREIMAGE_F);
12802 }
12803 
12804 
12805 /* We have a set of bits that are mapped by a function F.
12806    Try to decompose F by means of a second function G so that
12807 
12808       F = F o G^-1 o G
12809 
12810    and
12811 
12812       cost (F o G^-1) + cost (G)  <  cost (F)
12813 
12814    Example:  Suppose builtin insert_bits supplies us with the map
12815    F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
12816    nibble of the result, we can just as well rotate the bits before inserting
12817    them and use the map 0x7654ffff which is cheaper than the original map.
12818    For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */
12819 
typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  unsigned int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;
12840 
/* Candidate functions G for the decomposition F = (F o G^-1) o G:
   the 8 rotations of the identity map 0x76543210 plus a few shifts.
   Initializer order: { code, arg, ginv, cost, map, str }; the map
   field is a placeholder filled in by avr_map_decompose.  */

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
  };
12859 
12860 
12861 /* Try to decompose F as F = (F o G^-1) o G as described above.
12862    The result is a struct representing F o G^-1 and G.
12863    If result.cost < 0 then such a decomposition does not exist.  */
12864 
static avr_map_op_t
avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
{
  int i;

  /* Whether F maps some nibble to 0xf, i.e. whether the value operand
     XOP[3] of the insertion is used at all.  */
  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);

  /* Result starts as a copy of *G; .cost and .map are computed below.  */
  avr_map_op_t f_ginv = *g;
  unsigned int ginv = g->ginv;

  /* cost = -1 means "no decomposition found" until proven otherwise.  */
  f_ginv.cost = -1;

  /* Step 1:  Computing F o G^-1  */

  for (i = 7; i >= 0; i--)
    {
      int x = avr_map (f, i);

      if (x <= 7)
        {
          x = avr_map (ginv, x);

          /* The bit is no element of the image of G: no avail (cost = -1)  */

          if (x > 7)
            return f_ginv;
        }

      /* Accumulate nibble X.  After all 8 iterations the value inherited
         from *G has been shifted out of the 32-bit map completely.  */

      f_ginv.map = (f_ginv.map << 4) + x;
    }

  /* Step 2:  Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
      the cost of the insertion plus the cost of the operation.  */

  /* Step 2a:  Compute cost of F o G^-1  */

  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
    {
      /* The mapping consists only of fixed points and can be folded
         to AND/OR logic in the remainder.  Reasonable cost is 3. */

      f_ginv.cost = 2 + (val_used_p && !val_const_p);
    }
  else
    {
      rtx xop[4];

      /* Get the cost of the insn by calling the output worker with some
         fake values.  Mimic effect of reloading xop[3]: Unused operands
         are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (f_ginv.map, SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      avr_out_insert_bits (xop, &f_ginv.cost);

      /* A used constant value needs one extra load into a register.  */

      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
    }

  /* Step 2b:  Add cost of G  */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);

  return f_ginv;
}
12934 
12935 
12936 /* Insert bits from XOP[1] into XOP[0] according to MAP.
12937    XOP[0] and XOP[1] don't overlap.
12938    If FIXP_P = true:  Move all bits according to MAP using BLD/BST sequences.
12939    If FIXP_P = false: Just move the bit if its position in the destination
12940    is different to its source position.  */
12941 
12942 static void
12943 avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
12944 {
12945   int bit_dest, b;
12946 
12947   /* T-flag contains this bit of the source, i.e. of XOP[1]  */
12948   int t_bit_src = -1;
12949 
12950   /* We order the operations according to the requested source bit b.  */
12951 
12952   for (b = 0; b < 8; b++)
12953     for (bit_dest = 0; bit_dest < 8; bit_dest++)
12954       {
12955         int bit_src = avr_map (map, bit_dest);
12956 
12957         if (b != bit_src
12958             || bit_src >= 8
12959             /* Same position: No need to copy as requested by FIXP_P.  */
12960             || (bit_dest == bit_src && !fixp_p))
12961           continue;
12962 
12963         if (t_bit_src != bit_src)
12964           {
12965             /* Source bit is not yet in T: Store it to T.  */
12966 
12967             t_bit_src = bit_src;
12968 
12969             xop[3] = GEN_INT (bit_src);
12970             avr_asm_len ("bst %T1%T3", xop, plen, 1);
12971           }
12972 
12973         /* Load destination bit with T.  */
12974 
12975         xop[3] = GEN_INT (bit_dest);
12976         avr_asm_len ("bld %T0%T3", xop, plen, 1);
12977       }
12978 }
12979 
12980 
12981 /* PLEN == 0: Print assembler code for `insert_bits'.
12982    PLEN != 0: Compute code length in bytes.
12983 
12984    OP[0]:  Result
12985    OP[1]:  The mapping composed of nibbles. If nibble no. N is
12986            0:   Bit N of result is copied from bit OP[2].0
12987            ...  ...
12988            7:   Bit N of result is copied from bit OP[2].7
12989            0xf: Bit N of result is copied from bit OP[3].N
12990    OP[2]:  Bits to be inserted
12991    OP[3]:  Target value  */
12992 
const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  /* Rearrange operands for avr_move_bits:
     xop[0] = result, xop[1] = bits to insert, xop[2] = target value;
     xop[3] is scratch for the bit numbers used below.  */

  xop[0] = op[0];
  xop[1] = op[2];
  xop[2] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
      fixp_p = false;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      /* If skipping the fixed points saves more than 3 instructions,
         preset the fixed bits with the EOR/ANDI/EOR masking trick and
         only move the non-fixed bits afterwards.  */

      if (fixp_p && n_fix - n_nofix > 3)
        {
          xop[3] = gen_int_mode (~mask_fixed, QImode);

          avr_asm_len ("eor %0,%1"   CR_TAB
                       "andi %0,%3"  CR_TAB
                       "eor %0,%1", xop, plen, 3);
          fixp_p = false;
        }
    }
  else
    {
      /* XOP[2] is unused */

      /* All bits come from XOP[1]; if there are fixed points, a plain
         MOV presets them and only the moved bits remain.  */

      if (fixp_p && mask_fixed)
        {
          avr_asm_len ("mov %0,%1", xop, plen, 1);
          fixp_p = false;
        }
    }

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}
13067 
13068 
13069 /* IDs for all the AVR builtins.  */
13070 
enum avr_builtin_id
  {
    /* One enumerator AVR_BUILTIN_<NAME> per DEF_BUILTIN entry of
       builtins.def, in file order.  */
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)  \
    AVR_BUILTIN_ ## NAME,
#include "builtins.def"
#undef DEF_BUILTIN

    AVR_BUILTIN_COUNT
  };
13080 
struct GTY(()) avr_builtin_description
{
  /* Insn code of the expander pattern, or CODE_FOR_nothing if the
     built-in has no insn and is expanded as a libgcc call.  */
  enum insn_code icode;

  /* Number of arguments the built-in takes.  */
  int n_args;

  /* The FUNCTION_DECL registered for the built-in; set by
     avr_init_builtins.  */
  tree fndecl;
};
13087 
13088 
13089 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
13090    that a built-in's ID can be used to access the built-in by means of
13091    avr_bdesc[ID]  */
13092 
/* Each fndecl starts out NULL_TREE and is filled in later by
   avr_init_builtins.  */

static GTY(()) struct avr_builtin_description
avr_bdesc[AVR_BUILTIN_COUNT] =
  {
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME)         \
    { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
#include "builtins.def"
#undef DEF_BUILTIN
  };
13101 
13102 
13103 /* Implement `TARGET_BUILTIN_DECL'.  */
13104 
13105 static tree
13106 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
13107 {
13108   if (id < AVR_BUILTIN_COUNT)
13109     return avr_bdesc[id].fndecl;
13110 
13111   return error_mark_node;
13112 }
13113 
13114 
13115 static void
13116 avr_init_builtin_int24 (void)
13117 {
13118   tree int24_type  = make_signed_type (GET_MODE_BITSIZE (PSImode));
13119   tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
13120 
13121   lang_hooks.types.register_builtin_type (int24_type, "__int24");
13122   lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
13123 }
13124 
13125 
13126 /* Implement `TARGET_INIT_BUILTINS' */
13127 /* Set up all builtin functions for this target.  */
13128 
static void
avr_init_builtins (void)
{
  /* Function-type nodes referenced by the TYPE arguments of the
     DEF_BUILTIN entries in builtins.def; the names follow the
     "<return>_ftype_<args>" convention.  Do not rename them.  */

  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                char_type_node,
                                NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
                                NULL_TREE);

  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
                                long_unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);

  /* "const __memx void*" and a function type taking it:  pointers into
     the 24-bit __memx address space use PSImode.  */

  tree const_memx_void_node
    = build_qualified_type (void_type_node,
                            TYPE_QUAL_CONST
                            | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
                                const_memx_ptr_type_node,
                                NULL);

  /* ITYP (T): the integer type with the same precision and signedness
     as fixed-point type T.  */

#define ITYP(T)                                                         \
  lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))

  /* The following helper macros declare, for a fixed-point type suffix
     FX, function types for both the fract (r) and accum (k) flavor.  */

#define FX_FTYPE_FX(fx)                                                 \
  tree fx##r_ftype_##fx##r                                              \
    = build_function_type_list (node_##fx##r, node_##fx##r, NULL);      \
  tree fx##k_ftype_##fx##k                                              \
    = build_function_type_list (node_##fx##k, node_##fx##k, NULL)

#define FX_FTYPE_FX_INT(fx)                                             \
  tree fx##r_ftype_##fx##r_int                                          \
    = build_function_type_list (node_##fx##r, node_##fx##r,             \
                                integer_type_node, NULL);               \
  tree fx##k_ftype_##fx##k_int                                          \
    = build_function_type_list (node_##fx##k, node_##fx##k,             \
                                integer_type_node, NULL)

#define INT_FTYPE_FX(fx)                                                \
  tree int_ftype_##fx##r                                                \
    = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
  tree int_ftype_##fx##k                                                \
    = build_function_type_list (integer_type_node, node_##fx##k, NULL)

#define INTX_FTYPE_FX(fx)                                               \
  tree int##fx##r_ftype_##fx##r                                         \
    = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
  tree int##fx##k_ftype_##fx##k                                         \
    = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)

#define FX_FTYPE_INTX(fx)                                               \
  tree fx##r_ftype_int##fx##r                                           \
    = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
  tree fx##k_ftype_int##fx##k                                           \
    = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)

  /* Shorthands for the fixed-point type nodes consumed by the macros:
     h = short, n = plain, l = long, ll = long long; a leading u marks
     the unsigned variant.  */

  tree node_hr = short_fract_type_node;
  tree node_nr = fract_type_node;
  tree node_lr = long_fract_type_node;
  tree node_llr = long_long_fract_type_node;

  tree node_uhr = unsigned_short_fract_type_node;
  tree node_unr = unsigned_fract_type_node;
  tree node_ulr = unsigned_long_fract_type_node;
  tree node_ullr = unsigned_long_long_fract_type_node;

  tree node_hk = short_accum_type_node;
  tree node_nk = accum_type_node;
  tree node_lk = long_accum_type_node;
  tree node_llk = long_long_accum_type_node;

  tree node_uhk = unsigned_short_accum_type_node;
  tree node_unk = unsigned_accum_type_node;
  tree node_ulk = unsigned_long_accum_type_node;
  tree node_ullk = unsigned_long_long_accum_type_node;


  /* For absfx builtins.  */

  FX_FTYPE_FX (h);
  FX_FTYPE_FX (n);
  FX_FTYPE_FX (l);
  FX_FTYPE_FX (ll);

  /* For roundfx builtins.  */

  FX_FTYPE_FX_INT (h);
  FX_FTYPE_FX_INT (n);
  FX_FTYPE_FX_INT (l);
  FX_FTYPE_FX_INT (ll);

  FX_FTYPE_FX_INT (uh);
  FX_FTYPE_FX_INT (un);
  FX_FTYPE_FX_INT (ul);
  FX_FTYPE_FX_INT (ull);

  /* For countlsfx builtins.  */

  INT_FTYPE_FX (h);
  INT_FTYPE_FX (n);
  INT_FTYPE_FX (l);
  INT_FTYPE_FX (ll);

  INT_FTYPE_FX (uh);
  INT_FTYPE_FX (un);
  INT_FTYPE_FX (ul);
  INT_FTYPE_FX (ull);

  /* For bitsfx builtins.  */

  INTX_FTYPE_FX (h);
  INTX_FTYPE_FX (n);
  INTX_FTYPE_FX (l);
  INTX_FTYPE_FX (ll);

  INTX_FTYPE_FX (uh);
  INTX_FTYPE_FX (un);
  INTX_FTYPE_FX (ul);
  INTX_FTYPE_FX (ull);

  /* For fxbits builtins.  */

  FX_FTYPE_INTX (h);
  FX_FTYPE_INTX (n);
  FX_FTYPE_INTX (l);
  FX_FTYPE_INTX (ll);

  FX_FTYPE_INTX (uh);
  FX_FTYPE_INTX (un);
  FX_FTYPE_INTX (ul);
  FX_FTYPE_INTX (ull);


  /* Register every DEF_BUILTIN from builtins.def under its
     "__builtin_avr_" name (lower-cased via avr_tolower into the
     alloca'd buffer) and record the decl in avr_bdesc[].  */

#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)                  \
  {                                                                     \
    int id = AVR_BUILTIN_ ## NAME;                                      \
    const char *Name = "__builtin_avr_" #NAME;                          \
    char *name = (char*) alloca (1 + strlen (Name));                    \
                                                                        \
    gcc_assert (id < AVR_BUILTIN_COUNT);                                \
    avr_bdesc[id].fndecl                                                \
      = add_builtin_function (avr_tolower (name, Name), TYPE, id,       \
                              BUILT_IN_MD, LIBNAME, NULL_TREE);         \
  }
#include "builtins.def"
#undef DEF_BUILTIN

  avr_init_builtin_int24 ();
}
13306 
13307 
13308 /* Subroutine of avr_expand_builtin to expand vanilla builtins
13309    with non-void result and 1 ... 3 arguments.  */
13310 
13311 static rtx
13312 avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
13313 {
13314   rtx pat, xop[3];
13315   int n, n_args = call_expr_nargs (exp);
13316   machine_mode tmode = insn_data[icode].operand[0].mode;
13317 
13318   gcc_assert (n_args >= 1 && n_args <= 3);
13319 
13320   if (target == NULL_RTX
13321       || GET_MODE (target) != tmode
13322       || !insn_data[icode].operand[0].predicate (target, tmode))
13323     {
13324       target = gen_reg_rtx (tmode);
13325     }
13326 
13327   for (n = 0; n < n_args; n++)
13328     {
13329       tree arg = CALL_EXPR_ARG (exp, n);
13330       rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
13331       machine_mode opmode = GET_MODE (op);
13332       machine_mode mode = insn_data[icode].operand[n+1].mode;
13333 
13334       if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
13335         {
13336           opmode = HImode;
13337           op = gen_lowpart (HImode, op);
13338         }
13339 
13340       /* In case the insn wants input operands in modes different from
13341          the result, abort.  */
13342 
13343       gcc_assert (opmode == mode || opmode == VOIDmode);
13344 
13345       if (!insn_data[icode].operand[n+1].predicate (op, mode))
13346         op = copy_to_mode_reg (mode, op);
13347 
13348       xop[n] = op;
13349     }
13350 
13351   switch (n_args)
13352     {
13353     case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
13354     case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
13355     case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;
13356 
13357     default:
13358       gcc_unreachable();
13359     }
13360 
13361   if (pat == NULL_RTX)
13362     return NULL_RTX;
13363 
13364   emit_insn (pat);
13365 
13366   return target;
13367 }
13368 
13369 
13370 /* Implement `TARGET_EXPAND_BUILTIN'.  */
13371 /* Expand an expression EXP that calls a built-in function,
13372    with result going to TARGET if that's convenient
13373    (and in mode MODE if that's convenient).
13374    SUBTARGET may be used as the target for computing one of EXP's operands.
13375    IGNORE is nonzero if the value is to be ignored.  */
13376 
static rtx
avr_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
  unsigned int id = DECL_FUNCTION_CODE (fndecl);
  const struct avr_builtin_description *d = &avr_bdesc[id];
  tree arg0;
  rtx op0;

  gcc_assert (id < AVR_BUILTIN_COUNT);

  /* Built-ins that need special treatment are handled in this switch;
     all others fall through to the generic expansion below.  */

  switch (id)
    {
    case AVR_BUILTIN_NOP:
      emit_insn (gen_nopv (GEN_INT(1)));
      return 0;

    case AVR_BUILTIN_DELAY_CYCLES:
      {
        /* The cycle count must be a compile-time constant.  */

        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          error ("%s expects a compile time integer constant", bname);
        else
          avr_expand_delay_cycles (op0);

        return NULL_RTX;
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        /* The map (first argument) must be a compile-time constant;
           the actual expansion happens via the generic path below.  */

        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          {
            error ("%s expects a compile time long integer constant"
                   " as first argument", bname);
            return target;
          }

        break;
      }

    case AVR_BUILTIN_ROUNDHR:   case AVR_BUILTIN_ROUNDUHR:
    case AVR_BUILTIN_ROUNDR:    case AVR_BUILTIN_ROUNDUR:
    case AVR_BUILTIN_ROUNDLR:   case AVR_BUILTIN_ROUNDULR:
    case AVR_BUILTIN_ROUNDLLR:  case AVR_BUILTIN_ROUNDULLR:

    case AVR_BUILTIN_ROUNDHK:   case AVR_BUILTIN_ROUNDUHK:
    case AVR_BUILTIN_ROUNDK:    case AVR_BUILTIN_ROUNDUK:
    case AVR_BUILTIN_ROUNDLK:   case AVR_BUILTIN_ROUNDULK:
    case AVR_BUILTIN_ROUNDLLK:  case AVR_BUILTIN_ROUNDULLK:

      /* Warn about odd rounding.  Rounding points >= FBIT will have
         no effect.  */

      if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
        break;

      int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));

      if (rbit >= (int) GET_MODE_FBIT (mode))
        {
          warning (OPT_Wextra, "rounding to %d bits has no effect for "
                   "fixed-point value with %d fractional bits",
                   rbit, GET_MODE_FBIT (mode));

          /* Rounding is a no-op: just return the first argument.  */

          return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
                              EXPAND_NORMAL);
        }
      else if (rbit <= - (int) GET_MODE_IBIT (mode))
        {
          warning (0, "rounding result will always be 0");
          return CONST0_RTX (mode);
        }

      /* The rounding point RP satisfies now:  -IBIT < RP < FBIT.

         TR 18037 only specifies results for  RP > 0.  However, the
         remaining cases of  -IBIT < RP <= 0  can easily be supported
         without any additional overhead.  */

      break; /* round */
    }

  /* No fold found and no insn:  Call support function from libgcc.  */

  if (d->icode == CODE_FOR_nothing
      && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
    {
      return expand_call (exp, target, ignore);
    }

  /* No special treatment needed: vanilla expand.  */

  gcc_assert (d->icode != CODE_FOR_nothing);
  gcc_assert (d->n_args == call_expr_nargs (exp));

  if (d->n_args == 0)
    {
      emit_insn ((GEN_FCN (d->icode)) (target));
      return NULL_RTX;
    }

  return avr_default_expand_builtin (d->icode, exp, target);
}
13489 
13490 
13491 /* Helper for `avr_fold_builtin' that folds  absfx (FIXED_CST).  */
13492 
13493 static tree
13494 avr_fold_absfx (tree tval)
13495 {
13496   if (FIXED_CST != TREE_CODE (tval))
13497     return NULL_TREE;
13498 
13499   /* Our fixed-points have no padding:  Use double_int payload directly.  */
13500 
13501   FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
13502   unsigned int bits = GET_MODE_BITSIZE (fval.mode);
13503   double_int ival = fval.data.sext (bits);
13504 
13505   if (!ival.is_negative())
13506     return tval;
13507 
13508   /* ISO/IEC TR 18037, 7.18a.6.2:  The absfx functions are saturating.  */
13509 
13510   fval.data = (ival == double_int::min_value (bits, false).sext (bits))
13511     ? double_int::max_value (bits, false)
13512     : -ival;
13513 
13514   return build_fixed (TREE_TYPE (tval), fval);
13515 }
13516 
13517 
13518 /* Implement `TARGET_FOLD_BUILTIN'.  */
13519 
static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  /* Fold AVR built-in calls at tree level where possible:  SWAP folds
     to a rotate, the absfx family is folded by hand, the bitsfx/fxbits
     conversions fold to a bit-pattern reinterpretation, and INSERT_BITS
     is simplified or decomposed depending on its (constant) map.
     Returns the folded tree, or NULL_TREE if no folding applies.  */

  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  /* Type of the built-in's return value.  */
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  /* Don't bother folding when not optimizing.  */

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        /* Nibble-swap of a byte is a rotate-left by 4 bits.  */
        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_ABSHR:
    case AVR_BUILTIN_ABSR:
    case AVR_BUILTIN_ABSLR:
    case AVR_BUILTIN_ABSLLR:

    case AVR_BUILTIN_ABSHK:
    case AVR_BUILTIN_ABSK:
    case AVR_BUILTIN_ABSLK:
    case AVR_BUILTIN_ABSLLK:
      /* GCC is not good with folding ABS for fixed-point.  Do it by hand.  */

      return avr_fold_absfx (arg[0]);

    case AVR_BUILTIN_BITSHR:    case AVR_BUILTIN_HRBITS:
    case AVR_BUILTIN_BITSHK:    case AVR_BUILTIN_HKBITS:
    case AVR_BUILTIN_BITSUHR:   case AVR_BUILTIN_UHRBITS:
    case AVR_BUILTIN_BITSUHK:   case AVR_BUILTIN_UHKBITS:

    case AVR_BUILTIN_BITSR:     case AVR_BUILTIN_RBITS:
    case AVR_BUILTIN_BITSK:     case AVR_BUILTIN_KBITS:
    case AVR_BUILTIN_BITSUR:    case AVR_BUILTIN_URBITS:
    case AVR_BUILTIN_BITSUK:    case AVR_BUILTIN_UKBITS:

    case AVR_BUILTIN_BITSLR:    case AVR_BUILTIN_LRBITS:
    case AVR_BUILTIN_BITSLK:    case AVR_BUILTIN_LKBITS:
    case AVR_BUILTIN_BITSULR:   case AVR_BUILTIN_ULRBITS:
    case AVR_BUILTIN_BITSULK:   case AVR_BUILTIN_ULKBITS:

    case AVR_BUILTIN_BITSLLR:   case AVR_BUILTIN_LLRBITS:
    case AVR_BUILTIN_BITSLLK:   case AVR_BUILTIN_LLKBITS:
    case AVR_BUILTIN_BITSULLR:  case AVR_BUILTIN_ULLRBITS:
    case AVR_BUILTIN_BITSULLK:  case AVR_BUILTIN_ULLKBITS:

      /* The bitsfx / fxbits conversions just reinterpret the bit pattern
         between an integer and a fixed-point type of equal precision,
         hence fold to VIEW_CONVERT_EXPR.  */

      gcc_assert (TYPE_PRECISION (val_type)
                  == TYPE_PRECISION (TREE_TYPE (arg[0])));

      return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);

    case AVR_BUILTIN_INSERT_BITS:
      {
        /* __builtin_avr_insert_bits (map, bits, val):  arg[0] is the
           compile-time constant nibble map, arg[1] supplies the bits to
           insert, arg[2] is the byte receiving them.  */
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        unsigned int map;
        bool changed = false;
        unsigned i;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin.  */

            break;
          }

        tmap = wide_int_to_tree (map_type, arg[0]);
        map = TREE_INT_CST_LOW (tmap);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted. If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                /* Map entries < 8 select a bit of BITS for result bit I;
                   turn a set source bit into an OR-mask bit and a clear
                   one into an AND-mask hole.  */
                if (mi < 8)
                  {
                    if (bits & (1 << mi))     mask_ior |=  (1 << i);
                    else                      mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            /* (BITS ^ VAL) & MASK ^ VAL selects BITS where MASK is set
               and VAL elsewhere.  */
            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try decomposing the map to reduce the overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %x\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        /* Scan all candidate operations G for the cheapest decomposition
           of MAP; negative cost means the candidate is not applicable.  */
        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found.  */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = wide_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
13711 
13712 
13713 
/* Initialize the GCC target structure.  */

/* Assembler output: data directives and file/function framing.  */

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef  TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef  TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

/* Calling conventions: return values and argument passing.  */

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

/* Attributes and section handling.  */

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

/* Costs and machine-dependent passes.  */

#undef  TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef  TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

/* Register usage, frame layout and elimination.  */

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE avr_conditional_register_usage

#undef  TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef  TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef  TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef  TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef  TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

/* Built-in functions (see avr_init_builtins etc. above).  */

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef  TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef  TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef  TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

#undef  TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef  TARGET_CONVERT_TO_TYPE
#define TARGET_CONVERT_TO_TYPE avr_convert_to_type

/* Named address spaces (__flash, __memx, ...).  */

#undef  TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef  TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef  TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef  TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef  TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P  \
  avr_addr_space_legitimate_address_p

#undef  TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

#undef  TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD avr_secondary_reload

/* Operand printing.  */

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

/* The target hook vector, filled from the macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
13877 
13878 
13879 #include "gt-avr.h"
13880