xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/config/avr/avr.c (revision cef8759bd76c1b621f8eab8faa6f208faabc2e15)
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2    Copyright (C) 1998-2017 Free Software Foundation, Inc.
3    Contributed by Denis Chertykov (chertykov@gmail.com)
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify
8    it under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3, or (at your option)
10    any later version.
11 
12    GCC is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15    GNU General Public License for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING3.  If not see
19    <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "intl.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "cgraph.h"
30 #include "c-family/c-common.h"
31 #include "cfghooks.h"
32 #include "df.h"
33 #include "memmodel.h"
34 #include "tm_p.h"
35 #include "optabs.h"
36 #include "regs.h"
37 #include "emit-rtl.h"
38 #include "recog.h"
39 #include "conditions.h"
40 #include "insn-attr.h"
41 #include "reload.h"
42 #include "varasm.h"
43 #include "calls.h"
44 #include "stor-layout.h"
45 #include "output.h"
46 #include "explow.h"
47 #include "expr.h"
48 #include "langhooks.h"
49 #include "cfgrtl.h"
50 #include "params.h"
51 #include "builtins.h"
52 #include "context.h"
53 #include "tree-pass.h"
54 #include "print-rtl.h"
55 #include "rtl-iter.h"
56 
57 /* This file should be included last.  */
58 #include "target-def.h"
59 
60 /* Maximal allowed offset for an address in the LD command */
61 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
62 
63 /* Return true if STR starts with PREFIX and false, otherwise.  */
64 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
65 
66 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
67    address space where data is to be located.
68    As the only non-generic address spaces are all located in flash,
69    this can be used to test if data shall go into some .progmem* section.
70    This must be the rightmost field of machine dependent section flags.  */
71 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
72 
73 /* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
74 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
75 
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Fixed to actually use the macro parameter SYM: the old body referenced
   a variable literally named `sym', which only expanded correctly when
   the caller's argument happened to have exactly that name.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
88 
89 /* (AVR_TINY only): Symbol has attribute progmem */
90 #define AVR_SYMBOL_FLAG_TINY_PM \
91   (SYMBOL_FLAG_MACH_DEP << 7)
92 
93 /* (AVR_TINY only): Symbol has attribute absdata */
94 #define AVR_SYMBOL_FLAG_TINY_ABSDATA \
95   (SYMBOL_FLAG_MACH_DEP << 8)
96 
97 #define TINY_ADIW(REG1, REG2, I)                                \
98     "subi " #REG1 ",lo8(-(" #I "))" CR_TAB                      \
99     "sbci " #REG2 ",hi8(-(" #I "))"
100 
101 #define TINY_SBIW(REG1, REG2, I)                                \
102     "subi " #REG1 ",lo8((" #I "))" CR_TAB                       \
103     "sbci " #REG2 ",hi8((" #I "))"
104 
105 #define AVR_TMP_REGNO (AVR_TINY ? TMP_REGNO_TINY : TMP_REGNO)
106 #define AVR_ZERO_REGNO (AVR_TINY ? ZERO_REGNO_TINY : ZERO_REGNO)
107 
108 /* Known address spaces.  The order must be the same as in the respective
109    enum from avr.h (or designated initialized must be used).  */
110 const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
111 {
112   { ADDR_SPACE_RAM,  0, 2, "", 0, NULL },
113   { ADDR_SPACE_FLASH,  1, 2, "__flash",   0, ".progmem.data" },
114   { ADDR_SPACE_FLASH1, 1, 2, "__flash1",  1, ".progmem1.data" },
115   { ADDR_SPACE_FLASH2, 1, 2, "__flash2",  2, ".progmem2.data" },
116   { ADDR_SPACE_FLASH3, 1, 2, "__flash3",  3, ".progmem3.data" },
117   { ADDR_SPACE_FLASH4, 1, 2, "__flash4",  4, ".progmem4.data" },
118   { ADDR_SPACE_FLASH5, 1, 2, "__flash5",  5, ".progmem5.data" },
119   { ADDR_SPACE_MEMX, 1, 3, "__memx",  0, ".progmemx.data" },
120 };
121 
122 
123 /* Holding RAM addresses of some SFRs used by the compiler and that
124    are unique over all devices in an architecture like 'avr4'.  */
125 
126 typedef struct
127 {
128   /* SREG: The processor status */
129   int sreg;
130 
131   /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
132   int ccp;
133   int rampd;
134   int rampx;
135   int rampy;
136 
137   /* RAMPZ: The high byte of 24-bit address used with ELPM */
138   int rampz;
139 
140   /* SP: The stack pointer and its low and high byte */
141   int sp_l;
142   int sp_h;
143 } avr_addr_t;
144 
145 static avr_addr_t avr_addr;
146 
147 
148 /* Prototypes for local helper functions.  */
149 
150 static const char* out_movqi_r_mr (rtx_insn *, rtx[], int*);
151 static const char* out_movhi_r_mr (rtx_insn *, rtx[], int*);
152 static const char* out_movsi_r_mr (rtx_insn *, rtx[], int*);
153 static const char* out_movqi_mr_r (rtx_insn *, rtx[], int*);
154 static const char* out_movhi_mr_r (rtx_insn *, rtx[], int*);
155 static const char* out_movsi_mr_r (rtx_insn *, rtx[], int*);
156 
157 static int get_sequence_length (rtx_insn *insns);
158 static int sequent_regs_live (void);
159 static const char *ptrreg_to_str (int);
160 static const char *cond_string (enum rtx_code);
161 static int avr_num_arg_regs (machine_mode, const_tree);
162 static int avr_operand_rtx_cost (rtx, machine_mode, enum rtx_code,
163                                  int, bool);
164 static void output_reload_in_const (rtx*, rtx, int*, bool);
165 static struct machine_function * avr_init_machine_status (void);
166 
167 
168 /* Prototypes for hook implementors if needed before their implementation.  */
169 
170 static bool avr_rtx_costs (rtx, machine_mode, int, int, int*, bool);
171 
172 
173 /* Allocate registers from r25 to r8 for parameters for function calls.  */
174 #define FIRST_CUM_REG 26
175 
176 /* Last call saved register */
177 #define LAST_CALLEE_SAVED_REG (AVR_TINY ? 19 : 17)
178 
179 /* Implicit target register of LPM instruction (R0) */
180 extern GTY(()) rtx lpm_reg_rtx;
181 rtx lpm_reg_rtx;
182 
183 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
184 extern GTY(()) rtx lpm_addr_reg_rtx;
185 rtx lpm_addr_reg_rtx;
186 
187 /* Temporary register RTX (reg:QI TMP_REGNO) */
188 extern GTY(()) rtx tmp_reg_rtx;
189 rtx tmp_reg_rtx;
190 
191 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
192 extern GTY(()) rtx zero_reg_rtx;
193 rtx zero_reg_rtx;
194 
195 /* RTXs for all general purpose registers as QImode */
196 extern GTY(()) rtx all_regs_rtx[32];
197 rtx all_regs_rtx[32];
198 
199 /* SREG, the processor status */
200 extern GTY(()) rtx sreg_rtx;
201 rtx sreg_rtx;
202 
203 /* RAMP* special function registers */
204 extern GTY(()) rtx rampd_rtx;
205 extern GTY(()) rtx rampx_rtx;
206 extern GTY(()) rtx rampy_rtx;
207 extern GTY(()) rtx rampz_rtx;
208 rtx rampd_rtx;
209 rtx rampx_rtx;
210 rtx rampy_rtx;
211 rtx rampz_rtx;
212 
213 /* RTX containing the strings "" and "e", respectively */
214 static GTY(()) rtx xstring_empty;
215 static GTY(()) rtx xstring_e;
216 
217 /* Current architecture.  */
218 const avr_arch_t *avr_arch;
219 
220 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
221    or to address space __flash* or __memx.  Only used as singletons inside
222    avr_asm_select_section, but it must not be local there because of GTY.  */
223 static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];
224 
225 /* Condition for insns/expanders from avr-dimode.md.  */
226 bool avr_have_dimode = true;
227 
228 /* To track if code will use .bss and/or .data.  */
229 bool avr_need_clear_bss_p = false;
230 bool avr_need_copy_data_p = false;
231 
232 
233 /* Transform UP into lowercase and write the result to LO.
234    You must provide enough space for LO.  Return LO.  */
235 
236 static char*
237 avr_tolower (char *lo, const char *up)
238 {
239   char *lo0 = lo;
240 
241   for (; *up; up++, lo++)
242     *lo = TOLOWER (*up);
243 
244   *lo = '\0';
245 
246   return lo0;
247 }
248 
249 
250 /* Constraint helper function.  XVAL is a CONST_INT or a CONST_DOUBLE.
251    Return true if the least significant N_BYTES bytes of XVAL all have a
252    popcount in POP_MASK and false, otherwise.  POP_MASK represents a subset
253    of integers which contains an integer N iff bit N of POP_MASK is set.  */
254 
255 bool
256 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
257 {
258   machine_mode mode = GET_MODE (xval);
259 
260   if (VOIDmode == mode)
261     mode = SImode;
262 
263   for (int i = 0; i < n_bytes; i++)
264     {
265       rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
266       unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
267 
268       if (0 == (pop_mask & (1 << popcount_hwi (val8))))
269         return false;
270     }
271 
272   return true;
273 }
274 
275 
276 /* Access some RTX as INT_MODE.  If X is a CONST_FIXED we can get
277    the bit representation of X by "casting" it to CONST_INT.  */
278 
279 rtx
280 avr_to_int_mode (rtx x)
281 {
282   machine_mode mode = GET_MODE (x);
283 
284   return VOIDmode == mode
285     ? x
286     : simplify_gen_subreg (int_mode_for_mode (mode), x, mode, 0);
287 }
288 
289 namespace {
290 
// Metadata for the note-recomputing pass below.  The empty "name" is
// patched by the factory make_avr_pass_recompute_notes.
291 static const pass_data avr_pass_data_recompute_notes =
292 {
293   RTL_PASS,      // type
294   "",            // name (will be patched)
295   OPTGROUP_NONE, // optinfo_flags
296   TV_DF_SCAN,    // tv_id
297   0,             // properties_required
298   0,             // properties_provided
299   0,             // properties_destroyed
300   0,             // todo_flags_start
301   TODO_df_finish | TODO_df_verify // todo_flags_finish
302 };
303 
304 
// RTL pass that adds the df note problem and re-runs the dataflow
// analysis; instantiated via make_avr_pass_recompute_notes.
305 class avr_pass_recompute_notes : public rtl_opt_pass
306 {
307 public:
308   avr_pass_recompute_notes (gcc::context *ctxt, const char *name)
309     : rtl_opt_pass (avr_pass_data_recompute_notes, ctxt)
310   {
311     this->name = name;
312   }
313 
  // Request the df note problem and analyze; returns 0 (no extra TODOs).
314   virtual unsigned int execute (function*)
315   {
316     df_note_add_problem ();
317     df_analyze ();
318 
319     return 0;
320   }
321 }; // avr_pass_recompute_notes
322 
// Metadata for the casesi pass below.  The empty "name" is patched by
// the factory make_avr_pass_casesi.
323 static const pass_data avr_pass_data_casesi =
324 {
325   RTL_PASS,      // type
326   "",            // name (will be patched)
327   OPTGROUP_NONE, // optinfo_flags
328   TV_DF_SCAN,    // tv_id
329   0,             // properties_required
330   0,             // properties_provided
331   0,             // properties_destroyed
332   0,             // todo_flags_start
333   0              // todo_flags_finish
334 };
335 
336 
// RTL pass that rewrites casesi insn sequences to use the original
// (narrower) switch value; see avr_rest_of_handle_casesi.
337 class avr_pass_casesi : public rtl_opt_pass
338 {
339 public:
340   avr_pass_casesi (gcc::context *ctxt, const char *name)
341     : rtl_opt_pass (avr_pass_data_casesi, ctxt)
342   {
343     this->name = name;
344   }
345 
346   void avr_rest_of_handle_casesi (function*);
347 
  // Only worthwhile (and only run) when optimizing.
348   virtual bool gate (function*) { return optimize > 0; }
349 
350   virtual unsigned int execute (function *func)
351   {
352     avr_rest_of_handle_casesi (func);
353 
354     return 0;
355   }
356 }; // avr_pass_casesi
357 
358 } // anon namespace
359 
360 rtl_opt_pass*
361 make_avr_pass_recompute_notes (gcc::context *ctxt)
362 {
363   return new avr_pass_recompute_notes (ctxt, "avr-notes-free-cfg");
364 }
365 
366 rtl_opt_pass*
367 make_avr_pass_casesi (gcc::context *ctxt)
368 {
369   return new avr_pass_casesi (ctxt, "avr-casesi");
370 }
371 
372 
373 /* Make one parallel insn with all the patterns from insns i[0]..i[5].  */
374 
375 static rtx_insn*
376 avr_parallel_insn_from_insns (rtx_insn *i[6])
377 {
378   rtvec vec = gen_rtvec (6, PATTERN (i[0]), PATTERN (i[1]), PATTERN (i[2]),
379                          PATTERN (i[3]), PATTERN (i[4]), PATTERN (i[5]));
380   start_sequence();
381   emit (gen_rtx_PARALLEL (VOIDmode, vec));
382   rtx_insn *insn = get_insns();
383   end_sequence();
384 
385   return insn;
386 }
387 
388 
389 /* Return true if we see an insn stream generated by casesi expander together
390    with an extension to SImode of the switch value.
391 
392    If this is the case, fill in the insns from casesi to INSNS[1..5] and
393    the SImode extension to INSNS[0].  Moreover, extract the operands of
394    pattern casesi_<mode>_sequence forged from the sequence to recog_data.  */
395 
396 static bool
397 avr_is_casesi_sequence (basic_block bb, rtx_insn *insn, rtx_insn *insns[6])
398 {
399   rtx set_5, set_0;
400 
401   /* A first and quick test for a casesi sequences.  As a side effect of
402      the test, harvest respective insns to INSNS[0..5].  */
403 
  // Note: prev_real_insn may return NULL near the start of the stream;
  // the && chain then short-circuits and we return false.
404   if (!(JUMP_P (insns[5] = insn)
405         // casesi is the only insn that comes up with UNSPEC_INDEX_JMP,
406         // hence the following test ensures that we are actually dealing
407         // with code from casesi.
408         && (set_5 = single_set (insns[5]))
409         && UNSPEC == GET_CODE (SET_SRC (set_5))
410         && UNSPEC_INDEX_JMP == XINT (SET_SRC (set_5), 1)
411 
412         && (insns[4] = prev_real_insn (insns[5]))
413         && (insns[3] = prev_real_insn (insns[4]))
414         && (insns[2] = prev_real_insn (insns[3]))
415         && (insns[1] = prev_real_insn (insns[2]))
416 
417         // Insn prior to casesi.
418         && (insns[0] = prev_real_insn (insns[1]))
419         && (set_0 = single_set (insns[0]))
420         && extend_operator (SET_SRC (set_0), SImode)))
421     {
422       return false;
423     }
424 
425   if (dump_file)
426     {
427       fprintf (dump_file, ";; Sequence from casesi in "
428                "[bb %d]:\n\n", bb->index);
429       for (int i = 0; i < 6; i++)
430         print_rtl_single (dump_file, insns[i]);
431     }
432 
433   /* We have to deal with quite some operands.  Extracting them by hand
434      would be tedious, therefore wrap the insn patterns into a parallel,
435      run recog against it and then use insn extract to get the operands. */
436 
437   rtx_insn *xinsn = avr_parallel_insn_from_insns (insns);
438 
  // Try to recognize the wrapped parallel; store the result so that
  // extract_insn below can use it.
439   INSN_CODE (xinsn) = recog (PATTERN (xinsn), xinsn, NULL /* num_clobbers */);
440 
441   /* Failing to recognize means that someone changed the casesi expander or
442      that some passes prior to this one performed some unexpected changes.
443      Gracefully drop such situations instead of aborting.  */
444 
445   if (INSN_CODE (xinsn) < 0)
446     {
447       if (dump_file)
448         fprintf (dump_file, ";; Sequence not recognized, giving up.\n\n");
449 
450       return false;
451     }
452 
  // Recognition succeeded; it must be one of the two
  // casesi_<mode>_sequence patterns.
453   gcc_assert (CODE_FOR_casesi_qi_sequence == INSN_CODE (xinsn)
454               || CODE_FOR_casesi_hi_sequence == INSN_CODE (xinsn));
455 
456   extract_insn (xinsn);
457 
458   // Assert on the anatomy of xinsn's operands we are going to work with.
459 
460   gcc_assert (11 == recog_data.n_operands);
461   gcc_assert (4 == recog_data.n_dups);
462 
463   if (dump_file)
464     {
465       fprintf (dump_file, ";; Operands extracted:\n");
466       for (int i = 0; i < recog_data.n_operands; i++)
467         avr_fdump (dump_file, ";; $%d = %r\n", i, recog_data.operand[i]);
468       fprintf (dump_file, "\n");
469     }
470 
471   return true;
472 }
473 
474 
475 /* Perform some extra checks on operands of casesi_<mode>_sequence.
476    Not all operand dependencies can be described by means of predicates.
477    This function performs left over checks and should always return true.
478    Returning false means that someone changed the casesi expander but did
479    not adjust casesi_<mode>_sequence.  */
480 
481 bool
482 avr_casei_sequence_check_operands (rtx *xop)
483 {
484   rtx sub_5 = NULL_RTX;
485 
486   if (AVR_HAVE_EIJMP_EICALL
487       // The last clobber op of the tablejump.
488       && xop[8] == all_regs_rtx[24])
489     {
490       // $6 is: (subreg:SI ($5) 0)
491       sub_5 = xop[6];
492     }
493 
494   if (!AVR_HAVE_EIJMP_EICALL
495       // $6 is: (plus:HI (subreg:SI ($5) 0)
496       //                 (label_ref ($3)))
497       && PLUS == GET_CODE (xop[6])
498       && LABEL_REF == GET_CODE (XEXP (xop[6], 1))
499       && rtx_equal_p (xop[3], XEXP (XEXP (xop[6], 1), 0))
500       // The last clobber op of the tablejump.
501       && xop[8] == const0_rtx)
502     {
503       sub_5 = XEXP (xop[6], 0);
504     }
505 
506   if (sub_5
507       && SUBREG_P (sub_5)
508       && 0 == SUBREG_BYTE (sub_5)
509       && rtx_equal_p (xop[5], SUBREG_REG (sub_5)))
510     return true;
511 
512   if (dump_file)
513     fprintf (dump_file, "\n;; Failed condition for casesi_<mode>_sequence\n\n");
514 
515   return false;
516 }
517 
518 
519 /* INSNS[1..5] is a sequence as generated by casesi and INSNS[0] is an
520    extension of an 8-bit or 16-bit integer to SImode.  XOP contains the
521    operands of INSNS as extracted by insn_extract from pattern
522    casesi_<mode>_sequence:
523 
524       $0: SImode reg switch value as result of $9.
525       $1: Negative of smallest index in switch.
526       $2: Number of entries in switch.
527       $3: Label to table.
528       $4: Label if out-of-bounds.
529       $5: $0 + $1.
530       $6: 3-byte PC: subreg:HI ($5) + label_ref ($3)
531           2-byte PC: subreg:HI ($5)
532       $7: HI reg index into table (Z or pseudo)
533       $8: R24 or const0_rtx (to be clobbered)
534       $9: Extension to SImode of an 8-bit or 16-bit integer register $10.
535       $10: QImode or HImode register input of $9.
536 
537    Try to optimize this sequence, i.e. use the original HImode / QImode
538    switch value instead of SImode.  */
539 
540 static void
541 avr_optimize_casesi (rtx_insn *insns[6], rtx *xop)
542 {
543   // Original mode of the switch value; this is QImode or HImode.
544   machine_mode mode = GET_MODE (xop[10]);
545 
546   // How the original switch value was extended to SImode; this is
547   // SIGN_EXTEND or ZERO_EXTEND.
548   enum rtx_code code = GET_CODE (xop[9]);
549 
550   // Lower index, upper index (plus one) and range of case values.
551   HOST_WIDE_INT low_idx = -INTVAL (xop[1]);
552   HOST_WIDE_INT num_idx = INTVAL (xop[2]);
553   HOST_WIDE_INT hig_idx = low_idx + num_idx;
554 
555   // Maximum ranges of (un)signed QImode resp. HImode.
556   unsigned umax = QImode == mode ? 0xff : 0xffff;
557   int imax = QImode == mode ? 0x7f : 0x7fff;
558   int imin = -imax - 1;
559 
560   // Testing the case range and whether it fits into the range of the
561   // (un)signed mode.  This test should actually always pass because it
562   // makes no sense to have case values outside the mode range.  Notice
563   // that case labels which are unreachable because they are outside the
564   // mode of the switch value (e.g. "case -1" for uint8_t) have already
565   // been thrown away by the middle-end.
566 
567   if (SIGN_EXTEND == code
568       && low_idx >= imin
569       && hig_idx <= imax)
570     {
571       // ok
572     }
573   else if (ZERO_EXTEND == code
574            && low_idx >= 0
575            && (unsigned) hig_idx <= umax)
576     {
577       // ok
578     }
579   else
580     {
581       if (dump_file)
582         fprintf (dump_file, ";; Case ranges too big, giving up.\n\n");
583       return;
584     }
585 
586   // Do normalization of switch value $10 and out-of-bound check in its
587   // original mode instead of in SImode.  Use a newly created pseudo.
588   // This will replace insns[1..2].
589 
590   start_sequence();
591 
592   rtx_insn *seq1, *seq2, *last1, *last2;
593 
  // Work on a fresh pseudo so the register holding $10 stays intact.
594   rtx reg = copy_to_mode_reg (mode, xop[10]);
595 
596   rtx (*gen_add)(rtx,rtx,rtx) = QImode == mode ? gen_addqi3 : gen_addhi3;
597   rtx (*gen_cmp)(rtx,rtx) = QImode == mode ? gen_cmpqi3 : gen_cmphi3;
598 
599   emit_insn (gen_add (reg, reg, gen_int_mode (-low_idx, mode)));
600   emit_insn (gen_cmp (reg, gen_int_mode (num_idx, mode)));
601 
602   seq1 = get_insns();
603   last1 = get_last_insn();
604   end_sequence();
605 
606   emit_insn_before (seq1, insns[1]);
607 
608   // After the out-of-bounds test and corresponding branch, use a
609   // 16-bit index.  If QImode is used, extend it to HImode first.
610   // This will replace insns[4].
611 
612   start_sequence();
613 
614   if (QImode == mode)
615     reg = force_reg (HImode, gen_rtx_fmt_e (code, HImode, reg));
616 
  // 3-byte PC: plain table index; 2-byte PC: index plus table label.
617   rtx pat_4 = AVR_3_BYTE_PC
618     ? gen_movhi (xop[7], reg)
619     : gen_addhi3 (xop[7], reg, gen_rtx_LABEL_REF (VOIDmode, xop[3]));
620 
621   emit_insn (pat_4);
622 
623   seq2 = get_insns();
624   last2 = get_last_insn();
625   end_sequence();
626 
627   emit_insn_after (seq2, insns[4]);
628 
629   if (dump_file)
630     {
631       fprintf (dump_file, ";; New insns: ");
632 
633       for (rtx_insn *insn = seq1; ; insn = NEXT_INSN (insn))
634         {
635           fprintf (dump_file, "%d, ", INSN_UID (insn));
636           if (insn == last1)
637             break;
638         }
639       for (rtx_insn *insn = seq2; ; insn = NEXT_INSN (insn))
640         {
641           fprintf (dump_file, "%d%s", INSN_UID (insn),
642                    insn == last2 ? ".\n\n" : ", ");
643           if (insn == last2)
644             break;
645         }
646 
647       fprintf (dump_file, ";; Deleting insns: %d, %d, %d.\n\n",
648                INSN_UID (insns[1]), INSN_UID (insns[2]), INSN_UID (insns[4]));
649     }
650 
651   // Pseudodelete the SImode and subreg of SImode insns.  We don't care
652   // about the extension insns[0]: Its result is now unused and other
653   // passes will clean it up.
654 
655   SET_INSN_DELETED (insns[1]);
656   SET_INSN_DELETED (insns[2]);
657   SET_INSN_DELETED (insns[4]);
658 }
659 
660 
661 void
662 avr_pass_casesi::avr_rest_of_handle_casesi (function *func)
663 {
664   basic_block bb;
665 
666   FOR_EACH_BB_FN (bb, func)
667     {
668       rtx_insn *insn, *insns[6];
669 
670       FOR_BB_INSNS (bb, insn)
671         {
672           if (avr_is_casesi_sequence (bb, insn, insns))
673             {
674               avr_optimize_casesi (insns, recog_data.operand);
675             }
676         }
677     }
678 }
679 
680 
681 /* Set `avr_arch' as specified by `-mmcu='.
682    Return true on success.  */
683 
684 static bool
685 avr_set_core_architecture (void)
686 {
687   /* Search for mcu core architecture.  */
688 
689   if (!avr_mmcu)
690     avr_mmcu = AVR_MMCU_DEFAULT;
691 
692   avr_arch = &avr_arch_types[0];
693 
694   for (const avr_mcu_t *mcu = avr_mcu_types; ; mcu++)
695     {
696       if (NULL == mcu->name)
697         {
698           /* Reached the end of `avr_mcu_types'.  This should actually never
699              happen as options are provided by device-specs.  It could be a
700              typo in a device-specs or calling the compiler proper directly
701              with -mmcu=<device>. */
702 
703           error ("unknown core architecture %qs specified with %qs",
704                  avr_mmcu, "-mmcu=");
705           avr_inform_core_architectures ();
706           break;
707         }
708       else if (0 == strcmp (mcu->name, avr_mmcu)
709                // Is this a proper architecture ?
710                && NULL == mcu->macro)
711         {
712           avr_arch = &avr_arch_types[mcu->arch_id];
713           if (avr_n_flash < 0)
714             avr_n_flash = 1 + (mcu->flash_size - 1) / 0x10000;
715 
716           return true;
717         }
718     }
719 
720   return false;
721 }
722 
723 
724 /* Implement `TARGET_OPTION_OVERRIDE'.  */
725 
726 static void
727 avr_option_override (void)
728 {
729   /* Disable -fdelete-null-pointer-checks option for AVR target.
730      This option compiler assumes that dereferencing of a null pointer
731      would halt the program.  For AVR this assumption is not true and
732      programs can safely dereference null pointers.  Changes made by this
733      option may not work properly for AVR.  So disable this option. */
734 
735   flag_delete_null_pointer_checks = 0;
736 
737   /* caller-save.c looks for call-clobbered hard registers that are assigned
738      to pseudos that cross calls and tries so save-restore them around calls
739      in order to reduce the number of stack slots needed.
740 
741      This might lead to situations where reload is no more able to cope
742      with the challenge of AVR's very few address registers and fails to
743      perform the requested spills.  */
744 
745   if (avr_strict_X)
746     flag_caller_saves = 0;
747 
748   /* Allow optimizer to introduce store data races. This used to be the
749      default - it was changed because bigger targets did not see any
750      performance decrease. For the AVR though, disallowing data races
751      introduces additional code in LIM and increases reg pressure.  */
752 
753   maybe_set_param_value (PARAM_ALLOW_STORE_DATA_RACES, 1,
754                          global_options.x_param_values,
755                          global_options_set.x_param_values);
756 
757   /* Unwind tables currently require a frame pointer for correctness,
758      see toplev.c:process_options().  */
759 
760   if ((flag_unwind_tables
761        || flag_non_call_exceptions
762        || flag_asynchronous_unwind_tables)
763       && !ACCUMULATE_OUTGOING_ARGS)
764     {
765       flag_omit_frame_pointer = 0;
766     }
767 
  /* Position-independent code is not supported on AVR; warn but do not
     error out.  */
768   if (flag_pic == 1)
769     warning (OPT_fpic, "-fpic is not supported");
770   if (flag_pic == 2)
771     warning (OPT_fPIC, "-fPIC is not supported");
772   if (flag_pie == 1)
773     warning (OPT_fpie, "-fpie is not supported");
774   if (flag_pie == 2)
775     warning (OPT_fPIE, "-fPIE is not supported");
776 
  /* Bail out if -mmcu= did not name a known architecture; an error has
     already been emitted by avr_set_core_architecture in that case.  */
777   if (!avr_set_core_architecture())
778     return;
779 
780   /* RAM addresses of some SFRs common to all devices in respective arch. */
781 
782   /* SREG: Status Register containing flags like I (global IRQ) */
783   avr_addr.sreg = 0x3F + avr_arch->sfr_offset;
784 
785   /* RAMPZ: Address' high part when loading via ELPM */
786   avr_addr.rampz = 0x3B + avr_arch->sfr_offset;
787 
  /* RAMPX, RAMPY, RAMPD and CCP (see avr_addr_t above).  */
788   avr_addr.rampy = 0x3A + avr_arch->sfr_offset;
789   avr_addr.rampx = 0x39 + avr_arch->sfr_offset;
790   avr_addr.rampd = 0x38 + avr_arch->sfr_offset;
791   avr_addr.ccp = (AVR_TINY ? 0x3C : 0x34) + avr_arch->sfr_offset;
792 
793   /* SP: Stack Pointer (SP_H:SP_L) */
794   avr_addr.sp_l = 0x3D + avr_arch->sfr_offset;
795   avr_addr.sp_h = avr_addr.sp_l + 1;
796 
797   init_machine_status = avr_init_machine_status;
798 
799   avr_log_set_avr_log();
800 }
801 
802 /* Function to set up the backend function structure.  */
803 
804 static struct machine_function *
805 avr_init_machine_status (void)
806 {
807   return ggc_cleared_alloc<machine_function> ();
808 }
809 
810 
811 /* Implement `INIT_EXPANDERS'.  */
812 /* The function works like a singleton.  */
813 
814 void
815 avr_init_expanders (void)
816 {
817   for (int regno = 0; regno < 32; regno ++)
818     all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
819 
820   lpm_reg_rtx  = all_regs_rtx[LPM_REGNO];
821   tmp_reg_rtx  = all_regs_rtx[AVR_TMP_REGNO];
822   zero_reg_rtx = all_regs_rtx[AVR_ZERO_REGNO];
823 
824   lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
825 
826   sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
827   rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
828   rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
829   rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
830   rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
831 
832   xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
833   xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
834 
835   /* TINY core does not have regs r10-r16, but avr-dimode.md expects them
836      to be present */
837   if (AVR_TINY)
838     avr_have_dimode = false;
839 }
840 
841 
842 /* Implement `REGNO_REG_CLASS'.  */
843 /* Return register class for register R.  */
844 
845 enum reg_class
846 avr_regno_reg_class (int r)
847 {
848   static const enum reg_class reg_class_tab[] =
849     {
850       R0_REG,
851       /* r1 - r15 */
852       NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
853       NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
854       NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
855       NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
856       /* r16 - r23 */
857       SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
858       SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
859       /* r24, r25 */
860       ADDW_REGS, ADDW_REGS,
861       /* X: r26, 27 */
862       POINTER_X_REGS, POINTER_X_REGS,
863       /* Y: r28, r29 */
864       POINTER_Y_REGS, POINTER_Y_REGS,
865       /* Z: r30, r31 */
866       POINTER_Z_REGS, POINTER_Z_REGS,
867       /* SP: SPL, SPH */
868       STACK_REG, STACK_REG
869     };
870 
871   if (r <= 33)
872     return reg_class_tab[r];
873 
874   return ALL_REGS;
875 }
876 
877 
878 /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'.  */
879 
880 static bool
881 avr_scalar_mode_supported_p (machine_mode mode)
882 {
883   if (ALL_FIXED_POINT_MODE_P (mode))
884     return true;
885 
886   if (PSImode == mode)
887     return true;
888 
889   return default_scalar_mode_supported_p (mode);
890 }
891 
892 
893 /* Return TRUE if DECL is a VAR_DECL located in flash and FALSE, otherwise.  */
894 
895 static bool
896 avr_decl_flash_p (tree decl)
897 {
898   if (TREE_CODE (decl) != VAR_DECL
899       || TREE_TYPE (decl) == error_mark_node)
900     {
901       return false;
902     }
903 
904   return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
905 }
906 
907 
908 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit flash
909    address space and FALSE, otherwise.  */
910 
911 static bool
912 avr_decl_memx_p (tree decl)
913 {
914   if (TREE_CODE (decl) != VAR_DECL
915       || TREE_TYPE (decl) == error_mark_node)
916     {
917       return false;
918     }
919 
920   return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
921 }
922 
923 
924 /* Return TRUE if X is a MEM rtx located in flash and FALSE, otherwise.  */
925 
926 bool
927 avr_mem_flash_p (rtx x)
928 {
929   return (MEM_P (x)
930           && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
931 }
932 
933 
934 /* Return TRUE if X is a MEM rtx located in the 24-bit flash
935    address space and FALSE, otherwise.  */
936 
937 bool
938 avr_mem_memx_p (rtx x)
939 {
940   return (MEM_P (x)
941           && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
942 }
943 
944 
945 /* A helper for the subsequent function attribute used to dig for
946    attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
947 
948 static inline int
949 avr_lookup_function_attribute1 (const_tree func, const char *name)
950 {
951   if (FUNCTION_DECL == TREE_CODE (func))
952     {
953       if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
954         {
955           return true;
956         }
957 
958       func = TREE_TYPE (func);
959     }
960 
961   gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
962               || TREE_CODE (func) == METHOD_TYPE);
963 
964   return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
965 }
966 
/* Return nonzero if FUNC is a naked function.  */

static int
avr_naked_function_p (tree func)
{
  /* "naked" may be attached to the decl or to its type.  */
  return avr_lookup_function_attribute1 (func, "naked");
}
974 
/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
avr_interrupt_function_p (tree func)
{
  /* Checks both DECL_ATTRIBUTES and TYPE_ATTRIBUTES of FUNC.  */
  return avr_lookup_function_attribute1 (func, "interrupt");
}
983 
/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
avr_signal_function_p (tree func)
{
  /* Checks both DECL_ATTRIBUTES and TYPE_ATTRIBUTES of FUNC.  */
  return avr_lookup_function_attribute1 (func, "signal");
}
992 
/* Return nonzero if FUNC is an OS_task function.  */

static int
avr_OS_task_function_p (tree func)
{
  /* Checks both DECL_ATTRIBUTES and TYPE_ATTRIBUTES of FUNC.  */
  return avr_lookup_function_attribute1 (func, "OS_task");
}
1000 
/* Return nonzero if FUNC is an OS_main function.  */

static int
avr_OS_main_function_p (tree func)
{
  /* Checks both DECL_ATTRIBUTES and TYPE_ATTRIBUTES of FUNC.  */
  return avr_lookup_function_attribute1 (func, "OS_main");
}
1008 
1009 
/* Implement `TARGET_SET_CURRENT_FUNCTION'.  */
/* Sanity checking for the above function attributes.  Caches the
   attribute bits in cfun->machine and diagnoses conflicting or
   suspicious attribute combinations exactly once per function.  */

static void
avr_set_current_function (tree decl)
{
  location_t loc;
  const char *isr;

  if (decl == NULL_TREE
      || current_function_decl == NULL_TREE
      || current_function_decl == error_mark_node
      || ! cfun->machine
      || cfun->machine->attributes_checked_p)
    return;

  loc = DECL_SOURCE_LOCATION (decl);

  cfun->machine->is_naked = avr_naked_function_p (decl);
  cfun->machine->is_signal = avr_signal_function_p (decl);
  cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (decl);

  isr = cfun->machine->is_interrupt ? "interrupt" : "signal";

  /* Too many attributes make no sense as they request conflicting features. */

  if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
      + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
    error_at (loc, "function attributes %qs, %qs and %qs are mutually"
              " exclusive", "OS_task", "OS_main", isr);

  /* 'naked' will hide effects of 'OS_task' and 'OS_main'.  */

  if (cfun->machine->is_naked
      && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
                " no effect on %qs function", "OS_task", "OS_main", "naked");

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
      tree ret = TREE_TYPE (TREE_TYPE (decl));
      const char *name;

      name = DECL_ASSEMBLER_NAME_SET_P (decl)
        ? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
        : IDENTIFIER_POINTER (DECL_NAME (decl));

      /* Skip a leading '*' that might still prefix the assembler name,
         e.g. in non-LTO runs.  */

      name = default_strip_name_encoding (name);

      /* Interrupt handlers must be  void __vector (void)  functions.  */

      if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
        error_at (loc, "%qs function cannot have arguments", isr);

      if (TREE_CODE (ret) != VOID_TYPE)
        error_at (loc, "%qs function cannot return a value", isr);

#if defined WITH_AVRLIBC
      /* Silently ignore 'signal' if 'interrupt' is present.  AVR-LibC started
         using this when it switched from SIGNAL and INTERRUPT to ISR.  */

      if (cfun->machine->is_interrupt)
        cfun->machine->is_signal = 0;

      /* If the function has the 'signal' or 'interrupt' attribute, ensure
         that the name of the function is "__vector_NN" so as to catch
         when the user misspells the vector name.  */

      if (!STR_PREFIX_P (name, "__vector"))
        warning_at (loc, OPT_Wmisspelled_isr, "%qs appears to be a misspelled "
                    "%qs handler, missing %<__vector%> prefix", name, isr);
#endif // AVR-LibC naming conventions
    }

#if defined WITH_AVRLIBC
  // Common problem is using "ISR" without first including avr/interrupt.h.
  const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
  name = default_strip_name_encoding (name);
  if (0 == strcmp ("ISR", name)
      || 0 == strcmp ("INTERRUPT", name)
      || 0 == strcmp ("SIGNAL", name))
    {
      warning_at (loc, OPT_Wmisspelled_isr, "%qs is a reserved identifier"
                  " in AVR-LibC.  Consider %<#include <avr/interrupt.h>%>"
                  " before using the %qs macro", name, name);
    }
#endif // AVR-LibC naming conventions

  /* Don't print the above diagnostics more than once.  */

  cfun->machine->attributes_checked_p = 1;
}
1108 
1109 
1110 /* Implement `ACCUMULATE_OUTGOING_ARGS'.  */
1111 
1112 int
1113 avr_accumulate_outgoing_args (void)
1114 {
1115   if (!cfun)
1116     return TARGET_ACCUMULATE_OUTGOING_ARGS;
1117 
1118   /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
1119         what offset is correct.  In some cases it is relative to
1120         virtual_outgoing_args_rtx and in others it is relative to
1121         virtual_stack_vars_rtx.  For example code see
1122             gcc.c-torture/execute/built-in-setjmp.c
1123             gcc.c-torture/execute/builtins/sprintf-chk.c   */
1124 
1125   return (TARGET_ACCUMULATE_OUTGOING_ARGS
1126           && !(cfun->calls_setjmp
1127                || cfun->has_nonlocal_label));
1128 }
1129 
1130 
1131 /* Report contribution of accumulated outgoing arguments to stack size.  */
1132 
1133 static inline int
1134 avr_outgoing_args_size (void)
1135 {
1136   return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
1137 }
1138 
1139 
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  */

int
avr_starting_frame_offset (void)
{
  /* Accumulated outgoing arguments sit below the frame variables.  */
  return avr_outgoing_args_size () + 1;
}
1149 
1150 
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int count;
  int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */

  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (int reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */

      if (fixed_regs[reg])
        continue;

      /* A non-leaf ISR must also save all call-used registers, since
         called functions may clobber them.  */

      if ((int_or_sig_p && !crtl->is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* Don't record frame pointer registers here.  They are treated
                 individually in prologue.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == REG_Y + 1))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
1195 
1196 
1197 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
1198 
1199 static bool
1200 avr_allocate_stack_slots_for_args (void)
1201 {
1202   return !cfun->machine->is_naked;
1203 }
1204 
1205 
1206 /* Return true if register FROM can be eliminated via register TO.  */
1207 
1208 static bool
1209 avr_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1210 {
1211   return ((frame_pointer_needed && to == FRAME_POINTER_REGNUM)
1212           || !frame_pointer_needed);
1213 }
1214 
1215 
1216 /* Implement `TARGET_WARN_FUNC_RETURN'.  */
1217 
1218 static bool
1219 avr_warn_func_return (tree decl)
1220 {
1221   /* Naked functions are implemented entirely in assembly, including the
1222      return sequence, so suppress warnings about this.  */
1223 
1224   return !avr_naked_function_p (decl);
1225 }
1226 
1227 /* Compute offset between arg_pointer and frame_pointer.  */
1228 
1229 int
1230 avr_initial_elimination_offset (int from, int to)
1231 {
1232   if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1233     return 0;
1234   else
1235     {
1236       int offset = frame_pointer_needed ? 2 : 0;
1237       int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
1238 
1239       offset += avr_regs_to_save (NULL);
1240       return (get_frame_size () + avr_outgoing_args_size()
1241               + avr_pc_size + 1 + offset);
1242     }
1243 }
1244 
1245 
1246 /* Helper for the function below.  */
1247 
1248 static void
1249 avr_adjust_type_node (tree *node, machine_mode mode, int sat_p)
1250 {
1251   *node = make_node (FIXED_POINT_TYPE);
1252   TYPE_SATURATING (*node) = sat_p;
1253   TYPE_UNSIGNED (*node) = UNSIGNED_FIXED_POINT_MODE_P (mode);
1254   TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
1255   TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
1256   TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
1257   SET_TYPE_ALIGN (*node, 8);
1258   SET_TYPE_MODE (*node, mode);
1259 
1260   layout_type (*node);
1261 }
1262 
1263 
/* Implement `TARGET_BUILD_BUILTIN_VA_LIST'.  */

static tree
avr_build_builtin_va_list (void)
{
  /* avr-modes.def adjusts [U]TA to be 64-bit modes with 48 fractional bits.
     This is more appropriate for the 8-bit machine AVR than 128-bit modes.
     The ADJUST_IBIT/FBIT are handled in toplev:init_adjust_machine_modes()
     which is auto-generated by genmodes, but the compiler assigns [U]DAmode
     to the long long accum modes instead of the desired [U]TAmode.

     Fix this now, right after node setup in tree.c:build_common_tree_nodes().
     This must run before c-cppbuiltin.c:builtin_define_fixed_point_constants()
     which built-in defines macros like __ULLACCUM_FBIT__ that are used by
     libgcc to detect IBIT and FBIT.  */

  avr_adjust_type_node (&ta_type_node, TAmode, 0);
  avr_adjust_type_node (&uta_type_node, UTAmode, 0);
  avr_adjust_type_node (&sat_ta_type_node, TAmode, 1);
  avr_adjust_type_node (&sat_uta_type_node, UTAmode, 1);

  /* Make the front end use the adjusted nodes for the long long
     accum types.  */

  unsigned_long_long_accum_type_node = uta_type_node;
  long_long_accum_type_node = ta_type_node;
  sat_unsigned_long_long_accum_type_node = sat_uta_type_node;
  sat_long_long_accum_type_node = sat_ta_type_node;

  /* Dispatch to the default handler.  */

  return std_build_builtin_va_list ();
}
1294 
1295 
1296 /* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'.  */
1297 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
1298    frame pointer by +STARTING_FRAME_OFFSET.
1299    Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
1300    avoids creating add/sub of offset in nonlocal goto and setjmp.  */
1301 
1302 static rtx
1303 avr_builtin_setjmp_frame_value (void)
1304 {
1305   rtx xval = gen_reg_rtx (Pmode);
1306   emit_insn (gen_subhi3 (xval, virtual_stack_vars_rtx,
1307                          gen_int_mode (STARTING_FRAME_OFFSET, Pmode)));
1308   return xval;
1309 }
1310 
1311 
/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
   This is return address of function.  TEM is the base (frame pointer)
   rtx; COUNT must be 0 as only the current frame is supported.  */

rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address. Others not supported.  */
  if (count)
    return NULL;

  if (AVR_3_BYTE_PC)
    {
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "%<builtin_return_address%> contains only 2 bytes"
               " of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  /* Rotating a HImode value by 8 swaps its two bytes.  */
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return r;
}
1338 
1339 /* Return 1 if the function epilogue is just a single "ret".  */
1340 
1341 int
1342 avr_simple_epilogue (void)
1343 {
1344   return (! frame_pointer_needed
1345           && get_frame_size () == 0
1346           && avr_outgoing_args_size() == 0
1347           && avr_regs_to_save (NULL) == 0
1348           && ! cfun->machine->is_interrupt
1349           && ! cfun->machine->is_signal
1350           && ! cfun->machine->is_naked
1351           && ! TREE_THIS_VOLATILE (current_function_decl));
1352 }
1353 
/* This function checks sequence of live registers.  LIVE_SEQ counts all
   live callee-saved registers, CUR_SEQ the length of the trailing
   contiguous run; they are equal exactly when the live registers form
   one uninterrupted sequence ending at REG_Y/REG_Y+1.  Return that
   sequence length, or 0 if the live registers are not contiguous.  */

static int
sequent_regs_live (void)
{
  int live_seq = 0;
  int cur_seq = 0;

  for (int reg = 0; reg <= LAST_CALLEE_SAVED_REG; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;
        }
    }

  if (!frame_pointer_needed)
    {
      /* Y (r28/r29) participates in the sequence like any other
         callee-saved register when it is not the frame pointer.  */

      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y + 1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      /* The frame pointer registers always count as live.  */

      cur_seq += 2;
      live_seq += 2;
    }
  return (cur_seq == live_seq) ? live_seq : 0;
}
1412 
1413 /* Obtain the length sequence of insns.  */
1414 
1415 int
1416 get_sequence_length (rtx_insn *insns)
1417 {
1418   int length = 0;
1419 
1420   for (rtx_insn *insn = insns; insn; insn = NEXT_INSN (insn))
1421     length += get_attr_length (insn);
1422 
1423   return length;
1424 }
1425 
1426 
1427 /*  Implement `INCOMING_RETURN_ADDR_RTX'.  */
1428 
1429 rtx
1430 avr_incoming_return_addr_rtx (void)
1431 {
1432   /* The return address is at the top of the stack.  Note that the push
1433      was via post-decrement, which means the actual address is off by one.  */
1434   return gen_frame_mem (HImode, plus_constant (Pmode, stack_pointer_rtx, 1));
1435 }
1436 
1437 /*  Helper for expand_prologue.  Emit a push of a byte register.  */
1438 
1439 static void
1440 emit_push_byte (unsigned regno, bool frame_related_p)
1441 {
1442   rtx mem, reg;
1443   rtx_insn *insn;
1444 
1445   mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
1446   mem = gen_frame_mem (QImode, mem);
1447   reg = gen_rtx_REG (QImode, regno);
1448 
1449   insn = emit_insn (gen_rtx_SET (mem, reg));
1450   if (frame_related_p)
1451     RTX_FRAME_RELATED_P (insn) = 1;
1452 
1453   cfun->machine->stack_usage++;
1454 }
1455 
1456 
1457 /*  Helper for expand_prologue.  Emit a push of a SFR via tmp_reg.
1458     SFR is a MEM representing the memory location of the SFR.
1459     If CLR_P then clear the SFR after the push using zero_reg.  */
1460 
1461 static void
1462 emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
1463 {
1464   rtx_insn *insn;
1465 
1466   gcc_assert (MEM_P (sfr));
1467 
1468   /* IN __tmp_reg__, IO(SFR) */
1469   insn = emit_move_insn (tmp_reg_rtx, sfr);
1470   if (frame_related_p)
1471     RTX_FRAME_RELATED_P (insn) = 1;
1472 
1473   /* PUSH __tmp_reg__ */
1474   emit_push_byte (AVR_TMP_REGNO, frame_related_p);
1475 
1476   if (clr_p)
1477     {
1478       /* OUT IO(SFR), __zero_reg__ */
1479       insn = emit_move_insn (sfr, const0_rtx);
1480       if (frame_related_p)
1481         RTX_FRAME_RELATED_P (insn) = 1;
1482     }
1483 }
1484 
/* Helper for avr_expand_prologue.  Emit the insns that set up a stack
   frame of SIZE bytes, saving the registers in SET.  Either uses the
   __prologue_saves__ library sequence (TARGET_CALL_PROLOGUES) or emits
   individual pushes followed by a frame-pointer or stack-pointer
   adjustment, whichever is shorter.  */

static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx_insn *insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  HOST_WIDE_INT size_max
    = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);

  bool minimize = (TARGET_CALL_PROLOGUES
                   && size < size_max
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main
                   && !AVR_TINY);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET ((frame_pointer_needed
				  ? frame_pointer_rtx
				  : stack_pointer_rtx),
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = (LAST_CALLEE_SAVED_REG + 1) - (live_seq - 2);

      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? LAST_CALLEE_SAVED_REG : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (Pmode, stack_pointer_rtx,
                                                  offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      for (int reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /*  Creating a frame can be done by direct manipulation of the
              stack or via the frame pointer. These two methods are:
                  fp =  sp
                  fp -= size
                  sp =  fp
              or
                  sp -= size
                  fp =  sp    (*)
              the optimum method depends on function type, stack and
              frame size.  To avoid a complex logic, both methods are
              tested and shortest is selected.

              There is also the case where SIZE != 0 and no frame pointer is
              needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
              In that case, insn (*) is not needed.
              We use the X register as scratch.  This is safe because X
              is call-clobbered.
                 In an interrupt routine, the case of SIZE != 0 together with
              !frame_pointer_needed can only occur if the function is not a
              leaf function and thus X has already been saved.  */

          int irq_state = -1;
          HOST_WIDE_INT size_cfa = size, neg_size;
          rtx_insn *fp_plus_insns;
          rtx fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !crtl->is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /* Cut down size and avoid size = 0 so that we don't run
             into ICE like PR52488 in the remainder.  */

          if (size > size_max)
            {
              /* Don't error so that insane code from newlib still compiles
                 and does not break building newlib.  As PR51345 is implemented
                 now, there are multilib variants with -msp8.

                 If user wants sanity checks he can use -Wstack-usage=
                 or similar options.

                 For CFA we emit the original, non-saturated size so that
                 the generic machinery is aware of the real stack usage and
                 will print the above diagnostic as expected.  */

              size = size_max;
            }

          size = trunc_int_for_mode (size, GET_MODE (my_fp));
          neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp),
                                                       my_fp, neg_size));

          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (fp, plus_constant (Pmode, fp,
							    -size_cfa)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          int n_rcall = size / (AVR_3_BYTE_PC ? 3 : 2);

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode)
              // Don't use more than 3 RCALLs.
              && n_rcall <= 3)
            {
              rtx_insn *sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (Pmode, stack_pointer_rtx,
                                                    -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (stack_pointer_rtx,
                                         plus_constant (Pmode,
                                                        stack_pointer_rtx,
                                                        -size_cfa)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************ Use shortest method  ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          cfun->machine->stack_usage += size_cfa;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
1754 
1755 
/*  Expand the function prologue as RTL.  For interrupt/signal functions
    this saves and clears the scratch machinery (zero_reg, tmp_reg, SREG
    and the RAMP registers) before setting up the frame.  */

void
avr_expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  size = get_frame_size() + avr_outgoing_args_size();

  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    {
      return;
    }

  avr_regs_to_save (&set);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (AVR_ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (AVR_TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_push_sfr (sreg_rtx, false, false /* clr */);

      /* Clear zero reg.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);

      /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.  */

      if (AVR_HAVE_RAMPD)
        emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
        }
    }  /* is_interrupt is_signal */

  avr_prologue_setup_frame (size, set);

  /* Report the accumulated stack usage to -fstack-usage.  */

  if (flag_stack_usage_info)
    current_function_static_stack_size
      = cfun->machine->stack_usage + INCOMING_FRAME_SP_OFFSET;
}
1833 
1834 
1835 /* Implement `TARGET_ASM_FUNCTION_END_PROLOGUE'.  */
1836 /* Output summary at end of function prologue.  */
1837 
1838 static void
1839 avr_asm_function_end_prologue (FILE *file)
1840 {
1841   if (cfun->machine->is_naked)
1842     {
1843       fputs ("/* prologue: naked */\n", file);
1844     }
1845   else
1846     {
1847       if (cfun->machine->is_interrupt)
1848         {
1849           fputs ("/* prologue: Interrupt */\n", file);
1850         }
1851       else if (cfun->machine->is_signal)
1852         {
1853           fputs ("/* prologue: Signal */\n", file);
1854         }
1855       else
1856         fputs ("/* prologue: function */\n", file);
1857     }
1858 
1859   if (ACCUMULATE_OUTGOING_ARGS)
1860     fprintf (file, "/* outgoing args size = %d */\n",
1861              avr_outgoing_args_size());
1862 
1863   fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1864            get_frame_size());
1865   fprintf (file, "/* stack size = %d */\n",
1866            cfun->machine->stack_usage);
1867   /* Create symbol stack offset here so all functions have it. Add 1 to stack
1868      usage for offset so that SP + .L__stack_offset = return address.  */
1869   fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1870 }
1871 
1872 
1873 /* Implement `EPILOGUE_USES'.  */
1874 
1875 int
1876 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1877 {
1878   if (reload_completed
1879       && cfun->machine
1880       && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1881     return 1;
1882   return 0;
1883 }
1884 
1885 /*  Helper for avr_expand_epilogue.  Emit a pop of a byte register.  */
1886 
1887 static void
1888 emit_pop_byte (unsigned regno)
1889 {
1890   rtx mem, reg;
1891 
1892   mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1893   mem = gen_frame_mem (QImode, mem);
1894   reg = gen_rtx_REG (QImode, regno);
1895 
1896   emit_insn (gen_rtx_SET (reg, mem));
1897 }
1898 
1899 /*  Output RTL epilogue.  */
1900 
void
avr_expand_epilogue (bool sibcall_p)
{
  /* Emit the RTL epilogue.  SIBCALL_P is true when expanding the
     epilogue of a sibling call, in which case no return insn is
     emitted at the end.  */

  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  /* Total frame to deallocate: local frame plus outgoing args area.  */
  size = get_frame_size() + avr_outgoing_args_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();

  /* Whether we may use the __epilogue_restores__ library sequence
     (-mcall-prologues); not usable for ISRs, OS_task/OS_main or tiny.  */
  minimize = (TARGET_CALL_PROLOGUES
              && live_seq
              && !isr_p
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main
              && !AVR_TINY);

  if (minimize
      && (live_seq > 4
          || frame_pointer_needed
          || size))
    {
      /*  Get rid of frame.  */

      if (!frame_pointer_needed)
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      if (size)
        {
          emit_move_insn (frame_pointer_rtx,
                          plus_constant (Pmode, frame_pointer_rtx, size));
        }

      /* Tail-call the library restore sequence which also returns.  */
      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (size)
    {
      /* Try two methods to adjust stack and select shortest.  */

      int irq_state = -1;
      rtx fp, my_fp;
      rtx_insn *fp_plus_insns;
      HOST_WIDE_INT size_max;

      gcc_assert (frame_pointer_needed
                  || !isr_p
                  || !crtl->is_leaf);

      fp = my_fp = (frame_pointer_needed
                    ? frame_pointer_rtx
                    : gen_rtx_REG (Pmode, REG_X));

      if (AVR_HAVE_8BIT_SP)
        {
          /* The high byte (r29) does not change:
             Prefer SUBI (1 cycle) over SBIW (2 cycles).  */

          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
        }

      /* For rationale see comment in prologue generation.  */

      /* Clamp SIZE to what the chosen pointer mode can represent.  */
      size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
      if (size > size_max)
        size = size_max;
      size = trunc_int_for_mode (size, GET_MODE (my_fp));

      /********** Method 1: Adjust fp register  **********/

      start_sequence ();

      if (!frame_pointer_needed)
        emit_move_insn (fp, stack_pointer_rtx);

      emit_move_insn (my_fp, plus_constant (GET_MODE (my_fp), my_fp, size));

      /* Copy to stack pointer.  */

      if (TARGET_NO_INTERRUPTS)
        irq_state = 0;

      if (AVR_HAVE_8BIT_SP)
        irq_state = 2;

      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
                                 GEN_INT (irq_state)));

      fp_plus_insns = get_insns ();
      end_sequence ();

      /********** Method 2: Adjust Stack pointer  **********/

      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
        {
          rtx_insn *sp_plus_insns;

          start_sequence ();

          emit_move_insn (stack_pointer_rtx,
                          plus_constant (Pmode, stack_pointer_rtx, size));

          sp_plus_insns = get_insns ();
          end_sequence ();

          /************ Use shortest method  ************/

          emit_insn (get_sequence_length (sp_plus_insns)
                     < get_sequence_length (fp_plus_insns)
                     ? sp_plus_insns
                     : fp_plus_insns);
        }
      else
        emit_insn (fp_plus_insns);
    } /* size != 0 */

  if (frame_pointer_needed
      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    {
      /* Restore previous frame_pointer.  See avr_expand_prologue for
         rationale for not using pophi.  */

      emit_pop_byte (REG_Y + 1);
      emit_pop_byte (REG_Y);
    }

  /* Restore used registers.  */

  for (int reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (isr_p)
    {
      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
         The conditions to restore them must be the same as in prologue.  */

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampz_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampy_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampx_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPD)
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampd_rtx, tmp_reg_rtx);
        }

      /* Restore SREG using tmp_reg as scratch.  */

      emit_pop_byte (AVR_TMP_REGNO);
      emit_move_insn (sreg_rtx, tmp_reg_rtx);

      /* Restore tmp REG.  */
      emit_pop_byte (AVR_TMP_REGNO);

      /* Restore zero REG.  */
      emit_pop_byte (AVR_ZERO_REGNO);
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}
2100 
2101 
2102 /* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */
2103 
static void
avr_asm_function_begin_epilogue (FILE *file)
{
  /* Mark the start of the epilogue in the assembler output.  */
  fputs ("/* epilogue start */\n", file);
}
2109 
2110 
/* Implement `TARGET_CANNOT_MODIFY_JUMPS_P'.  */
2112 
2113 static bool
2114 avr_cannot_modify_jumps_p (void)
2115 {
2116   /* Naked Functions must not have any instructions after
2117      their epilogue, see PR42240 */
2118 
2119   if (reload_completed
2120       && cfun->machine
2121       && cfun->machine->is_naked)
2122     {
2123       return true;
2124     }
2125 
2126   return false;
2127 }
2128 
2129 
2130 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'.  */
2131 
2132 static bool
2133 avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
2134 {
2135   /* FIXME:  Non-generic addresses are not mode-dependent in themselves.
2136        This hook just serves to hack around PR rtl-optimization/52543 by
2137        claiming that non-generic addresses were mode-dependent so that
2138        lower-subreg.c will skip these addresses.  lower-subreg.c sets up fake
2139        RTXes to probe SET and MEM costs and assumes that MEM is always in the
2140        generic address space which is not true.  */
2141 
2142   return !ADDR_SPACE_GENERIC_P (as);
2143 }
2144 
2145 
2146 /* Return true if rtx X is a CONST_INT, CONST or SYMBOL_REF
2147    address with the `absdata' variable attribute, i.e. respective
2148    data can be read / written by LDS / STS instruction.
2149    This is used only for AVR_TINY.  */
2150 
2151 static bool
2152 avr_address_tiny_absdata_p (rtx x, machine_mode mode)
2153 {
2154   if (CONST == GET_CODE (x))
2155     x = XEXP (XEXP (x, 0), 0);
2156 
2157   if (SYMBOL_REF_P (x))
2158     return SYMBOL_REF_FLAGS (x) & AVR_SYMBOL_FLAG_TINY_ABSDATA;
2159 
2160   if (CONST_INT_P (x)
2161       && IN_RANGE (INTVAL (x), 0, 0xc0 - GET_MODE_SIZE (mode)))
2162     return true;
2163 
2164   return false;
2165 }
2166 
2167 
2168 /* Helper function for `avr_legitimate_address_p'.  */
2169 
2170 static inline bool
2171 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
2172                        RTX_CODE outer_code, bool strict)
2173 {
2174   return (REG_P (reg)
2175           && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
2176                                                  as, outer_code, UNKNOWN)
2177               || (!strict
2178                   && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
2179 }
2180 
2181 
2182 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
2183    machine for a memory operand of mode MODE.  */
2184 
static bool
avr_legitimate_address_p (machine_mode mode, rtx x, bool strict)
{
  /* Implement `TARGET_LEGITIMATE_ADDRESS_P' for the generic address
     space.  Constant addresses are legitimate by default; the switch
     below refines the answer for register-based forms.  */

  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      /* NOTE(review): X as base for accesses wider than 4 bytes is
         rejected in strict mode — presumably because X has no
         displacement addressing; confirm against the AVR ISA.  */
      if (strict
          && GET_MODE_SIZE (mode) > 4
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        /* Only (reg + const_int) with a non-negative offset may be
           a valid base+displacement address.  */
        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            /* Offset must fit the LDD/STD displacement range.  */
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                /* Frame / arg pointer based addresses are always ok;
                   they will be eliminated to a hard base register.  */
                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  if (AVR_TINY
      && CONSTANT_ADDRESS_P (x))
    {
      /* avrtiny's load / store instructions only cover addresses 0..0xbf:
         IN / OUT range is 0..0x3f and LDS / STS can access 0x40..0xbf.  */

      ok = avr_address_tiny_absdata_p (x, mode);
    }

  /* Optional debug dump controlled by -mlog=legitimate_address_p.  */
  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
2277 
2278 
2279 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
2280    now only a helper for avr_addr_space_legitimize_address.  */
2281 /* Attempts to replace X with a valid
2282    memory address for an operand of mode MODE  */
2283 
static rtx
avr_legitimize_address (rtx x, rtx oldx, machine_mode mode)
{
  /* Try to turn OLDX into a legitimate address for MODE; return the
     (possibly unchanged) replacement.  The incoming X is ignored and
     recomputed from OLDX.  */

  bool big_offset_p = false;

  x = oldx;

  if (AVR_TINY)
    {
      /* Constant addresses outside the LDS/STS range must go through
         a pointer register on avrtiny.  */
      if (CONSTANT_ADDRESS_P (x)
          && ! avr_address_tiny_absdata_p (x, mode))
        {
          x = force_reg (Pmode, x);
        }
    }

  if (GET_CODE (oldx) == PLUS
      && REG_P (XEXP (oldx, 0)))
    {
      /* (reg + reg) has no direct addressing mode: compute the sum.  */
      if (REG_P (XEXP (oldx, 1)))
        x = force_reg (GET_MODE (oldx), oldx);
      else if (CONST_INT_P (XEXP (oldx, 1)))
        {
          /* (reg + big const) exceeds the LDD/STD displacement range;
             frame-pointer based addresses are left for elimination.  */
          int offs = INTVAL (XEXP (oldx, 1));
          if (frame_pointer_rtx != XEXP (oldx, 0)
              && offs > MAX_LD_OFFSET (mode))
            {
              big_offset_p = true;
              x = force_reg (GET_MODE (oldx), oldx);
            }
        }
    }

  /* Optional debug dump controlled by -mlog=legitimize_address.  */
  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);

      if (x != oldx)
        avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
    }

  return x;
}
2327 
2328 
2329 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'.  */
2330 /* This will allow register R26/27 to be used where it is no worse than normal
2331    base pointers R28/29 or R30/31.  For example, if base offset is greater
2332    than 63 bytes or for R++ or --R addressing.  */
2333 
2334 rtx
2335 avr_legitimize_reload_address (rtx *px, machine_mode mode,
2336                                int opnum, int type, int addr_type,
2337                                int ind_levels ATTRIBUTE_UNUSED,
2338                                rtx (*mk_memloc)(rtx,int))
2339 {
2340   rtx x = *px;
2341 
2342   if (avr_log.legitimize_reload_address)
2343     avr_edump ("\n%?:%m %r\n", mode, x);
2344 
2345   if (1 && (GET_CODE (x) == POST_INC
2346             || GET_CODE (x) == PRE_DEC))
2347     {
2348       push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
2349                    POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
2350                    opnum, RELOAD_OTHER);
2351 
2352       if (avr_log.legitimize_reload_address)
2353         avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
2354                    POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
2355 
2356       return x;
2357     }
2358 
2359   if (GET_CODE (x) == PLUS
2360       && REG_P (XEXP (x, 0))
2361       && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
2362       && CONST_INT_P (XEXP (x, 1))
2363       && INTVAL (XEXP (x, 1)) >= 1)
2364     {
2365       bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
2366 
2367       if (fit)
2368         {
2369           if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
2370             {
2371               int regno = REGNO (XEXP (x, 0));
2372               rtx mem = mk_memloc (x, regno);
2373 
2374               push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
2375                            POINTER_REGS, Pmode, VOIDmode, 0, 0,
2376                            1, (enum reload_type) addr_type);
2377 
2378               if (avr_log.legitimize_reload_address)
2379                 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2380                            POINTER_REGS, XEXP (mem, 0), NULL_RTX);
2381 
2382               push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
2383                            BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
2384                            opnum, (enum reload_type) type);
2385 
2386               if (avr_log.legitimize_reload_address)
2387                 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
2388                            BASE_POINTER_REGS, mem, NULL_RTX);
2389 
2390               return x;
2391             }
2392         }
2393       else if (! (frame_pointer_needed
2394                   && XEXP (x, 0) == frame_pointer_rtx))
2395         {
2396           push_reload (x, NULL_RTX, px, NULL,
2397                        POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
2398                        opnum, (enum reload_type) type);
2399 
2400           if (avr_log.legitimize_reload_address)
2401             avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
2402                        POINTER_REGS, x, NULL_RTX);
2403 
2404           return x;
2405         }
2406     }
2407 
2408   return NULL_RTX;
2409 }
2410 
2411 
2412 /* Helper function to print assembler resp. track instruction
2413    sequence lengths.  Always return "".
2414 
2415    If PLEN == NULL:
2416        Output assembler code from template TPL with operands supplied
2417        by OPERANDS.  This is just forwarding to output_asm_insn.
2418 
2419    If PLEN != NULL:
2420        If N_WORDS >= 0  Add N_WORDS to *PLEN.
2421        If N_WORDS < 0   Set *PLEN to -N_WORDS.
2422        Don't output anything.
2423 */
2424 
2425 static const char*
2426 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
2427 {
2428   if (NULL == plen)
2429     {
2430       output_asm_insn (tpl, operands);
2431     }
2432   else
2433     {
2434       if (n_words < 0)
2435         *plen = -n_words;
2436       else
2437         *plen += n_words;
2438     }
2439 
2440   return "";
2441 }
2442 
2443 
2444 /* Return a pointer register name as a string.  */
2445 
2446 static const char*
2447 ptrreg_to_str (int regno)
2448 {
2449   switch (regno)
2450     {
2451     case REG_X: return "X";
2452     case REG_Y: return "Y";
2453     case REG_Z: return "Z";
2454     default:
2455       output_operand_lossage ("address operand requires constraint for"
2456                               " X, Y, or Z register");
2457     }
2458   return NULL;
2459 }
2460 
2461 /* Return the condition name as a string.
2462    Used in conditional jump constructing  */
2463 
2464 static const char*
2465 cond_string (enum rtx_code code)
2466 {
2467   switch (code)
2468     {
2469     case NE:
2470       return "ne";
2471     case EQ:
2472       return "eq";
2473     case GE:
2474       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2475         return "pl";
2476       else
2477         return "ge";
2478     case LT:
2479       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2480         return "mi";
2481       else
2482         return "lt";
2483     case GEU:
2484       return "sh";
2485     case LTU:
2486       return "lo";
2487     default:
2488       gcc_unreachable ();
2489     }
2490 
2491   return "";
2492 }
2493 
2494 
2495 /* Return true if rtx X is a CONST or SYMBOL_REF with progmem.
2496    This must be used for AVR_TINY only because on other cores
2497    the flash memory is not visible in the RAM address range and
2498    cannot be read by, say,  LD instruction.  */
2499 
2500 static bool
2501 avr_address_tiny_pm_p (rtx x)
2502 {
2503   if (CONST == GET_CODE (x))
2504     x = XEXP (XEXP (x, 0), 0);
2505 
2506   if (SYMBOL_REF_P (x))
2507     return SYMBOL_REF_FLAGS (x) & AVR_SYMBOL_FLAG_TINY_PM;
2508 
2509   return false;
2510 }
2511 
2512 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'.  */
2513 /* Output ADDR to FILE as address.  */
2514 
static void
avr_print_operand_address (FILE *file, machine_mode /*mode*/, rtx addr)
{
  /* Print ADDR to FILE as an assembler address operand: a pointer
     register name (X / Y / Z, possibly with -/+ modifier) or a
     constant address, wrapping program-memory symbols in gs().  */

  if (AVR_TINY
      && avr_address_tiny_pm_p (addr))
    {
      /* On avrtiny, flash is mirrored into the RAM address range at
         AVR_TINY_PM_OFFSET; bias the address accordingly.  */
      addr = plus_constant (Pmode, addr, AVR_TINY_PM_OFFSET);
    }

  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "%s", ptrreg_to_str (REGNO (addr)));
      break;

    case PRE_DEC:
      fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    case POST_INC:
      fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    default:
      if (CONSTANT_ADDRESS_P (addr)
          && text_segment_operand (addr, VOIDmode))
        {
          rtx x = addr;
          if (GET_CODE (x) == CONST)
            x = XEXP (x, 0);
          if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
            {
              /* Assembler gs() will implant word address.  Make offset
                 a byte offset inside gs() for assembler.  This is
                 needed because the more logical (constant+gs(sym)) is not
                 accepted by gas.  For 128K and smaller devices this is ok.
                 For large devices it will create a trampoline to offset
                 from symbol which may not be what the user really wanted.  */

              fprintf (file, "gs(");
              output_addr_const (file, XEXP (x, 0));
              fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
                       2 * INTVAL (XEXP (x, 1)));
              if (AVR_3_BYTE_PC)
                if (warning (0, "pointer offset from symbol maybe incorrect"))
                  {
                    output_addr_const (stderr, addr);
                    fprintf (stderr, "\n");
                  }
            }
          else
            {
              /* Plain code-segment symbol: wrap in gs() so the linker
                 resolves it to a word address.  */
              fprintf (file, "gs(");
              output_addr_const (file, addr);
              fprintf (file, ")");
            }
        }
      else
        output_addr_const (file, addr);
    }
}
2576 
2577 
2578 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */
2579 
static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  /* Only `~' and `!' are valid punctuation codes;
     see avr_print_operand for their meaning.  */
  switch (code)
    {
    case '~':
    case '!':
      return true;

    default:
      return false;
    }
}
2585 
2586 
2587 /* Implement `TARGET_PRINT_OPERAND'.  */
2588 /* Output X as assembler operand to file FILE.
2589    For a description of supported %-codes, see top of avr.md.  */
2590 
static void
avr_print_operand (FILE *file, rtx x, int code)
{
  /* Print operand X to FILE according to %-code CODE.
     See the top of avr.md for the list of supported codes.  */

  /* Register-offset applied by codes A..D, E/F and I/J:
     %B prints the next-higher byte register, etc.  */
  int abcd = 0, ef = 0, ij = 0;

  if (code >= 'A' && code <= 'D')
    abcd = code - 'A';
  else if (code == 'E' || code == 'F')
    ef = code - 'E';
  else if (code == 'I' || code == 'J')
    ij = code - 'I';

  if (code == '~')
    {
      /* Use relative jump/call (rjmp/rcall) on devices without JMP/CALL.  */
      if (!AVR_HAVE_JMP_CALL)
        fputc ('r', file);
    }
  else if (code == '!')
    {
      /* Use extended indirect jump/call (eijmp/eicall) when available.  */
      if (AVR_HAVE_EIJMP_EICALL)
        fputc ('e', file);
    }
  else if (code == 't'
           || code == 'T')
    {
      /* %T / %t work in pairs: a first %T with a REG records the
         register, a following %T/%t with a CONST_INT bit position
         prints "reg,bit" (%T) resp. "reg" (%t) and resets the state.  */
      static int t_regno = -1;
      static int t_nbits = -1;

      if (REG_P (x) && t_regno < 0 && code == 'T')
        {
          t_regno = REGNO (x);
          t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
        }
      else if (CONST_INT_P (x) && t_regno >= 0
               && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
        {
          int bpos = INTVAL (x);

          fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
          if (code == 'T')
            fprintf (file, ",%d", bpos % 8);

          t_regno = -1;
        }
      else
        fatal_insn ("operands to %T/%t must be reg + const_int:", x);
    }
  else if (code == 'E' || code == 'F')
    {
      /* Print low (E) / high (F) byte register of an inner operand.  */
      rtx op = XEXP (x, 0);
      fprintf (file, "%s", reg_names[REGNO (op) + ef]);
    }
  else if (code == 'I' || code == 'J')
    {
      /* Like E/F but one level deeper in the RTX.  */
      rtx op = XEXP (XEXP (x, 0), 0);
      fprintf (file, "%s", reg_names[REGNO (op) + ij]);
    }
  else if (REG_P (x))
    {
      if (x == zero_reg_rtx)
        fprintf (file, "__zero_reg__");
      else if (code == 'r' && REGNO (x) < 32)
        /* %r prints the raw register number instead of its name.  */
        fprintf (file, "%d", (int) REGNO (x));
      else
        fprintf (file, "%s", reg_names[REGNO (x) + abcd]);
    }
  else if (CONST_INT_P (x))
    {
      HOST_WIDE_INT ival = INTVAL (x);

      if ('i' != code)
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
      else if (low_io_address_operand (x, VOIDmode)
               || high_io_address_operand (x, VOIDmode))
        {
          /* %i on an I/O address: prefer the well-known SFR name.  */
          if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
            fprintf (file, "__RAMPZ__");
          else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
            fprintf (file, "__RAMPY__");
          else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
            fprintf (file, "__RAMPX__");
          else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
            fprintf (file, "__RAMPD__");
          else if ((AVR_XMEGA || AVR_TINY) && ival == avr_addr.ccp)
            fprintf (file, "__CCP__");
          else if (ival == avr_addr.sreg)   fprintf (file, "__SREG__");
          else if (ival == avr_addr.sp_l)   fprintf (file, "__SP_L__");
          else if (ival == avr_addr.sp_h)   fprintf (file, "__SP_H__");
          else
            {
              /* Unknown SFR: print the I/O address (RAM address minus
                 the architecture's SFR offset) in hex.  */
              fprintf (file, HOST_WIDE_INT_PRINT_HEX,
                       ival - avr_arch->sfr_offset);
            }
        }
      else
        fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (MEM_P (x))
    {
      rtx addr = XEXP (x, 0);

      if (code == 'm')
        {
          if (!CONSTANT_P (addr))
            fatal_insn ("bad address, not a constant:", addr);
          /* Assembler template with m-code is data - not progmem section */
          if (text_segment_operand (addr, VOIDmode))
            if (warning (0, "accessing data memory with"
                         " program memory address"))
              {
                output_addr_const (stderr, addr);
                fprintf(stderr,"\n");
              }
          output_addr_const (file, addr);
        }
      else if (code == 'i')
        {
          /* %i on a MEM: treat its address as an I/O address.  */
          avr_print_operand (file, addr, 'i');
        }
      else if (code == 'o')
        {
          /* %o prints the displacement part of (reg+disp).  */
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand (file, XEXP (addr, 1), 0);
        }
      else if (code == 'b')
        {
          /* %b prints the base register of (reg+disp).  */
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand_address (file, VOIDmode, XEXP (addr, 0));
        }
      else if (code == 'p' || code == 'r')
        {
          if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
            fatal_insn ("bad address, not post_inc or pre_dec:", addr);

          if (code == 'p')
            /* X, Y, Z */
            avr_print_operand_address (file, VOIDmode, XEXP (addr, 0));
          else
            avr_print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
        }
      else if (GET_CODE (addr) == PLUS)
        {
          avr_print_operand_address (file, VOIDmode, XEXP (addr, 0));
          /* X does not support base+displacement addressing.  */
          if (REGNO (XEXP (addr, 0)) == REG_X)
            fatal_insn ("internal compiler error.  Bad address:"
                        ,addr);
          fputc ('+', file);
          avr_print_operand (file, XEXP (addr, 1), code);
        }
      else
        avr_print_operand_address (file, VOIDmode, addr);
    }
  else if (code == 'i')
    {
      /* %i on a symbol flagged as I/O: print it biased by the
         SFR offset so it becomes a valid IN/OUT operand.  */
      if (SYMBOL_REF_P (x) && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO))
	avr_print_operand_address
	  (file, VOIDmode, plus_constant (HImode, x, -avr_arch->sfr_offset));
      else
	fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (code == 'x')
    {
      /* Constant progmem address - like used in jmp or call */
      if (0 == text_segment_operand (x, VOIDmode))
        if (warning (0, "accessing program memory"
                     " with data memory address"))
          {
            output_addr_const (stderr, x);
            fprintf(stderr,"\n");
          }
      /* Use normal symbol for direct address no linker trampoline needed */
      output_addr_const (file, x);
    }
  else if (CONST_FIXED_P (x))
    {
      /* Fixed-point constants are printed via their integer image.  */
      HOST_WIDE_INT ival = INTVAL (avr_to_int_mode (x));
      if (code != 0)
        output_operand_lossage ("Unsupported code '%c' for fixed-point:",
                                code);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival);
    }
  else if (CONST_DOUBLE_P (x))
    {
      /* Only SFmode float constants are supported; print the
         target representation as hex.  */
      long val;
      if (GET_MODE (x) != SFmode)
        fatal_insn ("internal compiler error.  Unknown mode:", x);
      REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), val);
      fprintf (file, "0x%lx", val);
    }
  else if (GET_CODE (x) == CONST_STRING)
    fputs (XSTR (x, 0), file);
  else if (code == 'j')
    /* %j / %k print the (reversed) branch condition suffix.  */
    fputs (cond_string (GET_CODE (x)), file);
  else if (code == 'k')
    fputs (cond_string (reverse_condition (GET_CODE (x))), file);
  else
    avr_print_operand_address (file, VOIDmode, x);
}
2793 
2794 
2795 /* Implement TARGET_USE_BY_PIECES_INFRASTRUCTURE_P.  */
2796 
2797 /* Prefer sequence of loads/stores for moves of size upto
2798    two - two pairs of load/store instructions are always better
2799    than the 5 instruction sequence for a loop (1 instruction
2800    for loop counter setup, and 4 for the body of the loop). */
2801 
2802 static bool
2803 avr_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
2804                                     unsigned int align ATTRIBUTE_UNUSED,
2805                                     enum by_pieces_operation op,
2806                                     bool speed_p)
2807 {
2808   if (op != MOVE_BY_PIECES
2809       || (speed_p && size > MOVE_MAX_PIECES))
2810     return default_use_by_pieces_infrastructure_p (size, align, op, speed_p);
2811 
2812   return size <= MOVE_MAX_PIECES;
2813 }
2814 
2815 
2816 /* Worker function for `NOTICE_UPDATE_CC'.  */
2817 /* Update the condition code in the INSN.  */
2818 
void
avr_notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx_insn *insn)
{
  rtx set;
  enum attr_cc cc = get_attr_cc (insn);

  /* First pass: Map the operand-dependent attribute values CC_PLUS and
     CC_LDI to one of the standard CC_* values handled by the second
     switch below.  */

  switch (cc)
    {
    default:
      break;

    case CC_PLUS:
    case CC_LDI:
      {
        rtx *op = recog_data.operand;
        int len_dummy, icc;

        /* Extract insn's operands.  */
        extract_constrain_insn_cached (insn);

        switch (cc)
          {
          default:
            gcc_unreachable();

          case CC_PLUS:
            /* avr_out_plus computes the effective condition code as a
               side effect; the returned length is not needed here.  */
            avr_out_plus (insn, op, &len_dummy, &icc);
            cc = (enum attr_cc) icc;
            break;

          case CC_LDI:

            cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
                  && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
              /* Loading zero-reg with 0 uses CLR and thus clobbers cc0.  */
              ? CC_CLOBBER
              /* Any other "r,rL" combination does not alter cc0.  */
              : CC_NONE;

            break;
          } /* inner switch */

        break;
      }
    } /* outer switch */

  /* Second pass: Update cc_status according to the (now standard)
     CC_* value.  */

  switch (cc)
    {
    default:
      /* Special values like CC_OUT_PLUS from above have been
         mapped to "standard" CC_* values so we never come here.  */

      gcc_unreachable();
      break;

    case CC_NONE:
      /* Insn does not affect CC at all, but it might set some registers
         that are stored in cc_status.  If such a register is affected by
         the current insn, for example by means of a SET or a CLOBBER,
         then we must reset cc_status; cf. PR77326.

         Unfortunately, set_of cannot be used as reg_overlap_mentioned_p
         will abort on COMPARE (which might be found in cc_status.value1/2).
         Thus work out the registers set by the insn and regs mentioned
         in cc_status.value1/2.  */

      if (cc_status.value1
          || cc_status.value2)
        {
          HARD_REG_SET regs_used;
          HARD_REG_SET regs_set;
          CLEAR_HARD_REG_SET (regs_used);

          if (cc_status.value1
              && !CONSTANT_P (cc_status.value1))
            {
              find_all_hard_regs (cc_status.value1, &regs_used);
            }

          if (cc_status.value2
              && !CONSTANT_P (cc_status.value2))
            {
              find_all_hard_regs (cc_status.value2, &regs_used);
            }

          find_all_hard_reg_sets (insn, &regs_set, false);

          if (hard_reg_set_intersect_p (regs_used, regs_set))
            {
              CC_STATUS_INIT;
            }
        }

      break; // CC_NONE

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_VZN:
      /* Insn like INC, DEC, NEG that set Z,N,V.  We currently don't make use
         of this combination, cf. also PR61055.  */
      CC_STATUS_INIT;
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;
    }
}
2960 
2961 /* Choose mode for jump insn:
2962    1 - relative jump in range -63 <= x <= 62 ;
2963    2 - relative jump in range -2046 <= x <= 2045 ;
2964    3 - absolute jump (only for ATmega[16]03).  */
2965 
2966 int
2967 avr_jump_mode (rtx x, rtx_insn *insn)
2968 {
2969   int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2970                                             ? XEXP (x, 0) : x));
2971   int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2972   int jump_distance = cur_addr - dest_addr;
2973 
2974   if (IN_RANGE (jump_distance, -63, 62))
2975     return 1;
2976   else if (IN_RANGE (jump_distance, -2046, 2045))
2977     return 2;
2978   else if (AVR_HAVE_JMP_CALL)
2979     return 3;
2980 
2981   return 2;
2982 }
2983 
2984 /* Return an AVR condition jump commands.
2985    X is a comparison RTX.
2986    LEN is a number returned by avr_jump_mode function.
2987    If REVERSE nonzero then condition code in X must be reversed.  */
2988 
const char*
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  /* GT/GTU/LE/LEU have no single AVR branch instruction; they are
     synthesized from BREQ plus a signedness-appropriate branch.  LEN
     selects 1 = short branch, 2 = RJMP, otherwise JMP (cf. the comment
     on avr_jump_mode above).  */

  switch (cond)
    {
    case GT:
      /* When V is unusable we can only test the N flag directly.  */
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
	return (len == 1 ? ("breq .+2" CR_TAB
			    "brpl %0") :
		len == 2 ? ("breq .+4" CR_TAB
			    "brmi .+2" CR_TAB
			    "rjmp %0") :
		("breq .+6" CR_TAB
		 "brmi .+4" CR_TAB
		 "jmp %0"));

      else
	return (len == 1 ? ("breq .+2" CR_TAB
			    "brge %0") :
		len == 2 ? ("breq .+4" CR_TAB
			    "brlt .+2" CR_TAB
			    "rjmp %0") :
		("breq .+6" CR_TAB
		 "brlt .+4" CR_TAB
		 "jmp %0"));
    case GTU:
      return (len == 1 ? ("breq .+2" CR_TAB
                          "brsh %0") :
              len == 2 ? ("breq .+4" CR_TAB
                          "brlo .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+6" CR_TAB
               "brlo .+4" CR_TAB
               "jmp %0"));
    case LE:
      /* LE branches on equal OR less; when V is unusable, test N.  */
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
	return (len == 1 ? ("breq %0" CR_TAB
			    "brmi %0") :
		len == 2 ? ("breq .+2" CR_TAB
			    "brpl .+2" CR_TAB
			    "rjmp %0") :
		("breq .+2" CR_TAB
		 "brpl .+4" CR_TAB
		 "jmp %0"));
      else
	return (len == 1 ? ("breq %0" CR_TAB
			    "brlt %0") :
		len == 2 ? ("breq .+2" CR_TAB
			    "brge .+2" CR_TAB
			    "rjmp %0") :
		("breq .+2" CR_TAB
		 "brge .+4" CR_TAB
		 "jmp %0"));
    case LEU:
      return (len == 1 ? ("breq %0" CR_TAB
                          "brlo %0") :
              len == 2 ? ("breq .+2" CR_TAB
                          "brsh .+2" CR_TAB
			  "rjmp %0") :
              ("breq .+2" CR_TAB
               "brsh .+4" CR_TAB
	       "jmp %0"));
    default:
      /* All remaining conditions map 1:1 to an AVR branch; %j1 / %k1
         print the (reversed) condition from operand 1.  */
      if (reverse)
	{
	  switch (len)
	    {
	    case 1:
	      return "br%k1 %0";
	    case 2:
	      return ("br%j1 .+2" CR_TAB
		      "rjmp %0");
	    default:
	      return ("br%j1 .+4" CR_TAB
		      "jmp %0");
	    }
	}
      else
        {
          switch (len)
            {
            case 1:
              return "br%j1 %0";
            case 2:
              return ("br%k1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%k1 .+4" CR_TAB
                      "jmp %0");
            }
        }
    }
  /* Not reached; every case above returns.  */
  return "";
}
3085 
3086 
3087 /* Worker function for `FINAL_PRESCAN_INSN'.  */
3088 /* Output insn cost for next insn.  */
3089 
3090 void
3091 avr_final_prescan_insn (rtx_insn *insn, rtx *operand ATTRIBUTE_UNUSED,
3092                         int num_operands ATTRIBUTE_UNUSED)
3093 {
3094   if (avr_log.rtx_costs)
3095     {
3096       rtx set = single_set (insn);
3097 
3098       if (set)
3099         fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
3100                  set_src_cost (SET_SRC (set), GET_MODE (SET_DEST (set)),
3101 			       optimize_insn_for_speed_p ()));
3102       else
3103         fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d.  */\n",
3104                  rtx_cost (PATTERN (insn), VOIDmode, INSN, 0,
3105                            optimize_insn_for_speed_p()));
3106     }
3107 }
3108 
3109 /* Return 0 if undefined, 1 if always true or always false.  */
3110 
3111 int
3112 avr_simplify_comparison_p (machine_mode mode, RTX_CODE op, rtx x)
3113 {
3114   unsigned int max = (mode == QImode ? 0xff :
3115                       mode == HImode ? 0xffff :
3116                       mode == PSImode ? 0xffffff :
3117                       mode == SImode ? 0xffffffff : 0);
3118   if (max && op && CONST_INT_P (x))
3119     {
3120       if (unsigned_condition (op) != op)
3121         max >>= 1;
3122 
3123       if (max != (INTVAL (x) & max)
3124           && INTVAL (x) != 0xff)
3125         return 1;
3126     }
3127   return 0;
3128 }
3129 
3130 
3131 /* Worker function for `FUNCTION_ARG_REGNO_P'.  */
3132 /* Returns nonzero if REGNO is the number of a hard
3133    register in which function arguments are sometimes passed.  */
3134 
3135 int
3136 avr_function_arg_regno_p (int r)
3137 {
3138   return AVR_TINY ? IN_RANGE (r, 20, 25) : IN_RANGE (r, 8, 25);
3139 }
3140 
3141 
3142 /* Worker function for `INIT_CUMULATIVE_ARGS'.  */
3143 /* Initializing the variable cum for the state at the beginning
3144    of the argument list.  */
3145 
void
avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                          tree fndecl ATTRIBUTE_UNUSED)
{
  /* Bytes still available for passing in registers: AVR_TINY uses 6
     (R20..R25), all other devices 18 (R8..R25); cf.
     avr_function_arg_regno_p.  */
  cum->nregs = AVR_TINY ? 6 : 18;
  cum->regno = FIRST_CUM_REG;
  /* Variadic functions pass everything on the stack.  Libcalls (LIBNAME
     set) have no fntype to inspect, so skip the stdarg check for them.  */
  if (!libname && stdarg_p (fntype))
    cum->nregs = 0;

  /* Assume the callee may be tail called.  */

  cfun->machine->sibcall_fails = 0;
}
3159 
3160 /* Returns the number of registers to allocate for a function argument.  */
3161 
3162 static int
3163 avr_num_arg_regs (machine_mode mode, const_tree type)
3164 {
3165   int size;
3166 
3167   if (mode == BLKmode)
3168     size = int_size_in_bytes (type);
3169   else
3170     size = GET_MODE_SIZE (mode);
3171 
3172   /* Align all function arguments to start in even-numbered registers.
3173      Odd-sized arguments leave holes above them.  */
3174 
3175   return (size + 1) & ~1;
3176 }
3177 
3178 
3179 /* Implement `TARGET_FUNCTION_ARG'.  */
3180 /* Controls whether a function argument is passed
3181    in a register, and which register.  */
3182 
3183 static rtx
3184 avr_function_arg (cumulative_args_t cum_v, machine_mode mode,
3185                   const_tree type, bool named ATTRIBUTE_UNUSED)
3186 {
3187   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3188   int bytes = avr_num_arg_regs (mode, type);
3189 
3190   if (cum->nregs && bytes <= cum->nregs)
3191     return gen_rtx_REG (mode, cum->regno - bytes);
3192 
3193   return NULL_RTX;
3194 }
3195 
3196 
3197 /* Implement `TARGET_FUNCTION_ARG_ADVANCE'.  */
3198 /* Update the summarizer variable CUM to advance past an argument
3199    in the argument list.  */
3200 
static void
avr_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  /* Registers are consumed downwards; after this, cum->regno is the
     first register of the argument just passed.  */
  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register.  As the original
     contents of these regs has to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called.  */

  if (cum->regno >= 8
      && cum->nregs >= 0
      && !call_used_regs[cum->regno])
    {
      /* FIXME: We ship info on failing tail-call in struct machine_function.
         This uses internals of calls.c:expand_call() and the way args_so_far
         is used.  targetm.function_ok_for_sibcall() needs to be extended to
         pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
         dependent so that such an extension is not wanted.  */

      cfun->machine->sibcall_fails = 1;
    }

  /* Test if all registers needed by the ABI are actually available.  If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call will clobber that fixed register.  See PR45099 for an example.  */

  if (cum->regno >= 8
      && cum->nregs >= 0)
    {
      /* Warn once per register of the argument just passed.  */
      for (int regno = cum->regno; regno < cum->regno + bytes; regno++)
        if (fixed_regs[regno])
          warning (0, "fixed register %s used to pass parameter to function",
                   reg_names[regno]);
    }

  /* Once registers run out, all remaining arguments go on the stack;
     reset to a sane state for any following (unused) queries.  */
  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}
3248 
3249 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
3250 /* Decide whether we can make a sibling call to a function.  DECL is the
3251    declaration of the function being targeted by the call and EXP is the
3252    CALL_EXPR representing the call.  */
3253 
static bool
avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
{
  tree fntype_callee;

  /* Tail-calling must fail if callee-saved regs are used to pass
     function args.  We must not tail-call when `epilogue_restores'
     is used.  Unfortunately, we cannot tell at this point if that
     actually will happen or not, and we cannot step back from
     tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */

  if (cfun->machine->sibcall_fails
      || TARGET_CALL_PROLOGUES)
    {
      return false;
    }

  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));

  if (decl_callee)
    {
      decl_callee = TREE_TYPE (decl_callee);
    }
  else
    {
      /* Indirect call: strip pointer/array levels until we reach the
         function or method type itself.  */
      decl_callee = fntype_callee;

      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
             && METHOD_TYPE != TREE_CODE (decl_callee))
        {
          decl_callee = TREE_TYPE (decl_callee);
        }
    }

  /* Ensure that caller and callee have compatible epilogues */

  if (cfun->machine->is_interrupt
      || cfun->machine->is_signal
      || cfun->machine->is_naked
      || avr_naked_function_p (decl_callee)
      /* FIXME: For OS_task and OS_main, this might be over-conservative.  */
      || (avr_OS_task_function_p (decl_callee)
          != cfun->machine->is_OS_task)
      || (avr_OS_main_function_p (decl_callee)
          != cfun->machine->is_OS_main))
    {
      return false;
    }

  return true;
}
3305 
3306 /***********************************************************************
3307   Functions for outputting various mov's for a various modes
3308 ************************************************************************/
3309 
3310 /* Return true if a value of mode MODE is read from flash by
3311    __load_* function from libgcc.  */
3312 
3313 bool
3314 avr_load_libgcc_p (rtx op)
3315 {
3316   machine_mode mode = GET_MODE (op);
3317   int n_bytes = GET_MODE_SIZE (mode);
3318 
3319   return (n_bytes > 2
3320           && !AVR_HAVE_LPMX
3321           && avr_mem_flash_p (op));
3322 }
3323 
3324 /* Return true if a value of mode MODE is read by __xload_* function.  */
3325 
3326 bool
3327 avr_xload_libgcc_p (machine_mode mode)
3328 {
3329   int n_bytes = GET_MODE_SIZE (mode);
3330 
3331   return (n_bytes > 1
3332           || avr_n_flash > 1);
3333 }
3334 
3335 
/* Fixme: This is a hack because secondary reloads don't work as expected.
3337 
3338    Find an unused d-register to be used as scratch in INSN.
3339    EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
3340    is a register, skip all possible return values that overlap EXCLUDE.
3341    The policy for the returned register is similar to that of
3342    `reg_unused_after', i.e. the returned register may overlap the SET_DEST
3343    of INSN.
3344 
3345    Return a QImode d-register or NULL_RTX if nothing found.  */
3346 
static rtx
avr_find_unused_d_reg (rtx_insn *insn, rtx exclude)
{
  /* In an ISR even call-used registers must be preserved, which
     restricts the registers we may pick below.  */
  bool isr_p = (avr_interrupt_function_p (current_function_decl)
                || avr_signal_function_p (current_function_decl));

  /* Scan the d-registers R16..R31.  */
  for (int regno = 16; regno < 32; regno++)
    {
      rtx reg = all_regs_rtx[regno];

      /* Skip registers overlapping EXCLUDE and user-fixed registers.  */
      if ((exclude
           && reg_overlap_mentioned_p (exclude, reg))
          || fixed_regs[regno])
        {
          continue;
        }

      /* Try non-live register */

      if (!df_regs_ever_live_p (regno)
          && (TREE_THIS_VOLATILE (current_function_decl)
              || cfun->machine->is_OS_task
              || cfun->machine->is_OS_main
              || (!isr_p && call_used_regs[regno])))
        {
          return reg;
        }

      /* Any live register can be used if it is unused after.
         Prologue/epilogue will care for it as needed.  */

      if (df_regs_ever_live_p (regno)
          && reg_unused_after (insn, reg))
        {
          return reg;
        }
    }

  return NULL_RTX;
}
3387 
3388 
3389 /* Helper function for the next function in the case where only restricted
3390    version of LPM instruction is available.  */
3391 
static const char*
avr_out_lpm_no_lpmx (rtx_insn *insn, rtx *xop, int *plen)
{
  rtx dest = xop[0];
  rtx addr = xop[1];
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int regno_dest;

  regno_dest = REGNO (dest);

  /* The implicit target register of LPM.  */
  xop[3] = lpm_reg_rtx;

  /* Operand layout (set up by avr_out_lpm): %2 = Z, %4 = "e" prefix for
     ELPM or the empty string.  Plain LPM always loads into R0 (%3), so
     each byte has to be moved to its destination afterwards.  */

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          avr_asm_len ("%4lpm", xop, plen, 1);

          if (regno_dest != LPM_REGNO)
            avr_asm_len ("mov %0,%3", xop, plen, 1);

          return "";

        case 2:
          /* Destination overlaps Z: save the low byte on the stack
             until Z is no longer needed as address.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm"      CR_TAB
                                "push %3"    CR_TAB
                                "adiw %2,1"  CR_TAB
                                "%4lpm"      CR_TAB
                                "mov %B0,%3" CR_TAB
                                "pop %A0", xop, plen, 6);

          avr_asm_len ("%4lpm"      CR_TAB
                       "mov %A0,%3" CR_TAB
                       "adiw %2,1"  CR_TAB
                       "%4lpm"      CR_TAB
                       "mov %B0,%3", xop, plen, 5);

          /* Restore Z if it is still needed after this insn.  */
          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,1", xop, plen, 1);

          break; /* 2 */
        }

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      if (regno_dest == LPM_REGNO)
        avr_asm_len ("%4lpm"      CR_TAB
                     "adiw %2,1", xop, plen, 2);
      else
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %A0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 2)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %B0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 3)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %C0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 4)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %D0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  return "";
}
3484 
3485 
/* If PLEN == NULL: Output instructions to load a value from a memory location
3487    OP[1] in AS1 to register OP[0].
3488    If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
3489    Return "".  */
3490 
const char*
avr_out_lpm (rtx_insn *insn, rtx *op, int *plen)
{
  rtx xop[7];
  rtx dest = op[0];
  rtx src = SET_SRC (single_set (insn));
  rtx addr;
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int segment;
  RTX_CODE code;
  addr_space_t as = MEM_ADDR_SPACE (src);

  if (plen)
    *plen = 0;

  /* Flash is read-only: a store to a flash address space cannot be
     carried out.  */
  if (MEM_P (dest))
    {
      warning (0, "writing to address space %qs not supported",
               avr_addrspace[MEM_ADDR_SPACE (dest)].name);

      return "";
    }

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  /* Operand layout used by the templates below (and by
     avr_out_lpm_no_lpmx):
       %0 destination, %1 address, %2 Z register,
       %3 scratch d-register (only set when a RAMPZ value is needed),
       %4 "e" prefix for ELPM resp. empty string for LPM,
       %5 tmp register (R0), %6 RAMPZ address.  */
  xop[0] = dest;
  xop[1] = addr;
  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;
  xop[6] = XEXP (rampz_rtx, 0);

  segment = avr_addrspace[as].segment;

  /* Set RAMPZ as needed.  */

  if (segment)
    {
      xop[4] = GEN_INT (segment);
      xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx);

      if (xop[3] != NULL_RTX)
        {
          avr_asm_len ("ldi %3,%4" CR_TAB
                       "out %i6,%3", xop, plen, 2);
        }
      else if (segment == 1)
        {
          /* No free d-register, but segment 1 can be synthesized in the
             zero-initialized tmp register with CLR + INC.  */
          avr_asm_len ("clr %5" CR_TAB
                       "inc %5" CR_TAB
                       "out %i6,%5", xop, plen, 3);
        }
      else
        {
          /* No free d-register: temporarily borrow ZL (%2) and restore
             it afterwards via the tmp register.  */
          avr_asm_len ("mov %5,%2"   CR_TAB
                       "ldi %2,%4"   CR_TAB
                       "out %i6,%2"  CR_TAB
                       "mov %2,%5", xop, plen, 4);
        }

      xop[4] = xstring_e;

      if (!AVR_HAVE_ELPMX)
        return avr_out_lpm_no_lpmx (insn, xop, plen);
    }
  else if (!AVR_HAVE_LPMX)
    {
      return avr_out_lpm_no_lpmx (insn, xop, plen);
    }

  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);

        case 2:
          /* Destination overlaps Z: go through the tmp register so the
             low byte does not clobber the address too early.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %B0,%a2" CR_TAB
                                "mov %A0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                           "%4lpm %B0,%a2", xop, plen, 2);

              /* Restore Z if still needed after this insn.  */
              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,1", xop, plen, 1);
            }

          break; /* 2 */

        case 3:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+" CR_TAB
                       "%4lpm %C0,%a2", xop, plen, 3);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,2", xop, plen, 1);

          break; /* 3 */

        case 4:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+", xop, plen, 2);

          /* Upper word of destination overlaps Z: use the tmp register
             for the third byte.  */
          if (REGNO (dest) == REG_Z - 2)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %C0,%a2" CR_TAB
                                "mov %D0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
                           "%4lpm %D0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,3", xop, plen, 1);
            }

          break; /* 4 */
        } /* n_bytes */

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      avr_asm_len                    ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2)  avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3)  avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4)  avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
    {
      /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM.  */

      xop[0] = zero_reg_rtx;
      avr_asm_len ("out %i6,%0", xop, plen, 1);
    }

  return "";
}
3658 
3659 
3660 /* Worker function for xload_8 insn.  */
3661 
const char*
avr_out_xload (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  rtx xop[4];

  /* %0 destination, %1 16-bit address (high byte selects RAM vs flash),
     %2 Z register, %3 register actually loaded into.  */
  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = lpm_addr_reg_rtx;
  xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;

  /* Read the byte from flash; without LPMX the result lands in R0.  */
  avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, -1);

  /* SBRC skips the LD unless bit 7 of %1 is set; in that case re-load
     the byte from RAM, overriding the flash read.  */
  avr_asm_len ("sbrc %1,7" CR_TAB
               "ld %3,%a2", xop, plen, 2);

  /* Without LPMX the value sits in R0 and must be moved to %0.  */
  if (REGNO (xop[0]) != REGNO (xop[3]))
    avr_asm_len ("mov %0,%3", xop, plen, 1);

  return "";
}
3682 
3683 
/* Output a QImode (1 byte) move DEST := SRC given in OPERANDS[0/1].
   If PLEN != NULL, set *PLEN to the length of the sequence in words
   instead of emitting it.  Returns "".  */

const char*
output_movqi (rtx_insn *insn, rtx operands[], int *plen)
{
  rtx dest = operands[0];
  rtx src = operands[1];

  /* Reads from flash go through [E]LPM.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, plen);
    }

  gcc_assert (1 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          /* SP is an I/O register and needs OUT/IN instead of MOV.  */
          if (test_hard_reg_class (STACK_REG, dest))
            return avr_asm_len ("out %0,%1", operands, plen, -1);
          else if (test_hard_reg_class (STACK_REG, src))
            return avr_asm_len ("in %0,%1", operands, plen, -1);

          return avr_asm_len ("mov %0,%1", operands, plen, -1);
        }
      else if (CONSTANT_P (src))
        {
          output_reload_in_const (operands, NULL_RTX, plen, false);
          return "";
        }
      else if (MEM_P (src))
        return out_movqi_r_mr (insn, operands, plen); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      /* Store constant 0 straight from the fixed zero register.  */
      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movqi_mr_r (insn, xop, plen);
    }

  return "";
}
3729 
3730 
/* Output a HImode (2 byte) move DEST := SRC given in XOP[0/1].
   If PLEN != NULL, set *PLEN to the length of the sequence in words
   instead of emitting it.  Returns "".  */

const char *
output_movhi (rtx_insn *insn, rtx xop[], int *plen)
{
  rtx dest = xop[0];
  rtx src = xop[1];

  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);

  /* Reads from flash go through [E]LPM.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, xop, plen);
    }

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            {
              /* Writing SP: the exact sequence depends on the device.  */
              if (AVR_HAVE_8BIT_SP)
                return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);

              /* XMEGA updates SP atomically in hardware.  */
              if (AVR_XMEGA)
                return avr_asm_len ("out __SP_L__,%A1" CR_TAB
                                    "out __SP_H__,%B1", xop, plen, -2);

              /* Use simple load of SP if no interrupts are  used.  */

              return TARGET_NO_INTERRUPTS
                ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -2)
                : avr_asm_len ("in __tmp_reg__,__SREG__"  CR_TAB
                               "cli"                      CR_TAB
                               "out __SP_H__,%B1"         CR_TAB
                               "out __SREG__,__tmp_reg__" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -5);
            }
          else if (test_hard_reg_class (STACK_REG, src))
            {
              /* Reading SP: devices without SPH get a zero high byte.  */
              return !AVR_HAVE_SPH
                ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "clr %B0", xop, plen, -2)

                : avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "in %B0,__SP_H__", xop, plen, -2);
            }

          return AVR_HAVE_MOVW
            ? avr_asm_len ("movw %0,%1", xop, plen, -1)

            : avr_asm_len ("mov %A0,%A1" CR_TAB
                           "mov %B0,%B1", xop, plen, -2);
        } /* REG_P (src) */
      else if (CONSTANT_P (src))
        {
          return output_reload_inhi (xop, NULL, plen);
        }
      else if (MEM_P (src))
        {
          return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
        }
    }
  else if (MEM_P (dest))
    {
      /* Deliberately shadows the XOP parameter: store constant 0
         straight from the fixed zero register.  */
      rtx xop[2];

      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return out_movhi_mr_r (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);

  return "";
}
3808 
3809 
3810 /* Same as out_movqi_r_mr, but TINY does not have ADIW or SBIW */
3811 
static const char*
avr_out_movqi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  /* X is the PLUS address; XEXP (x, 0) is its base register.  */
  rtx x = XEXP (src, 0);

  /* Add the displacement to the base register, then load.  */
  avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
               "ld %0,%b1" , op, plen, -3);

  /* Undo the displacement unless the base register dies here or was
     overwritten by the load itself.  */
  if (!reg_overlap_mentioned_p (dest, XEXP (x, 0))
      && !reg_unused_after (insn, XEXP (x, 0)))
    avr_asm_len (TINY_SBIW (%I1, %J1, %o1), op, plen, 2);

  return "";
}
3828 
/* Output a QImode load from memory SRC = OP[1] into register
   DEST = OP[0].  If PLEN != NULL, only compute the length.  */

static const char*
out_movqi_r_mr (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* LDS is a 1-word instruction on TINY, 2 words elsewhere.  */
      int n_words = AVR_TINY ? 1 : 2;
      return io_address_operand (x, QImode)
        ? avr_asm_len ("in %0,%i1", op, plen, -1)
        : avr_asm_len ("lds %0,%m1", op, plen, -n_words);
    }

  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (AVR_TINY)
        return avr_out_movqi_r_mr_reg_disp_tiny (insn, op, plen);

      /* Displacement exceeds the 6-bit range of LDD: adjust the base
         register, load, and restore the base.  */
      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
        {
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
                                "ldd %0,Y+63"     CR_TAB
                                "sbiw r28,%o1-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %0,Y"            CR_TAB
                              "subi r28,lo8(%o1)"  CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal optimizing options.  */

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %0,X", op, plen, -2);

          /* Restore X unless it dies here or was the load target.  */
          if (!reg_overlap_mentioned_p (dest, XEXP (x, 0))
              && !reg_unused_after (insn, XEXP (x, 0)))
            {
              avr_asm_len ("sbiw r26,%o1", op, plen, 1);
            }

          return "";
        }

      return avr_asm_len ("ldd %0,%1", op, plen, -1);
    }

  return avr_asm_len ("ld %0,%1", op, plen, -1);
}
3893 
3894 
3895 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
3896 
3897 static const char*
3898 avr_out_movhi_r_mr_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
3899 {
3900   rtx dest = op[0];
3901   rtx src = op[1];
3902   rtx base = XEXP (src, 0);
3903 
3904   int reg_dest = true_regnum (dest);
3905   int reg_base = true_regnum (base);
3906 
3907   if (reg_dest == reg_base)         /* R = (R) */
3908     return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3909 			"ld %B0,%1"          CR_TAB
3910 			"mov %A0,__tmp_reg__", op, plen, -3);
3911 
3912   avr_asm_len ("ld %A0,%1+" CR_TAB
3913                "ld %B0,%1", op, plen, -2);
3914 
3915   if (!reg_unused_after (insn, base))
3916     avr_asm_len (TINY_SBIW (%E1, %F1, 1), op, plen, 2);
3917 
3918   return "";
3919 }
3920 
3921 
3922 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
3923 
static const char*
avr_out_movhi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  /* Load HImode DEST from (BASE + disp) on TINY, which has no LDD:
     adjust the pointer register, LD twice, and restore it if needed.  */

  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (XEXP (base, 0));

  if (reg_base == reg_dest)
    {
      /* Destination overlaps the pointer: buffer the low byte so the
         second load still sees a valid pointer.  No restore is needed
         because the pointer register pair is overwritten anyway.  */
      return avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                          "ld __tmp_reg__,%b1+"     CR_TAB
                          "ld %B0,%b1"              CR_TAB
                          "mov %A0,__tmp_reg__", op, plen, -5);
    }
  else
    {
      avr_asm_len (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                   "ld %A0,%b1+"             CR_TAB
                   "ld %B0,%b1", op, plen, -4);

      /* Pointer ended at disp+1; subtract that back if it is still live.  */
      if (!reg_unused_after (insn, XEXP (base, 0)))
        avr_asm_len (TINY_SBIW (%I1, %J1, %o1+1), op, plen, 2);

      return "";
    }
}
3953 
3954 
3955 /* Same as movhi_r_mr, but TINY does not have ADIW, SBIW and LDD */
3956 
3957 static const char*
3958 avr_out_movhi_r_mr_pre_dec_tiny (rtx_insn *insn, rtx op[], int *plen)
3959 {
3960   int mem_volatile_p = 0;
3961   rtx dest = op[0];
3962   rtx src = op[1];
3963   rtx base = XEXP (src, 0);
3964 
3965   /* "volatile" forces reading low byte first, even if less efficient,
3966      for correct operation with 16-bit I/O registers.  */
3967   mem_volatile_p = MEM_VOLATILE_P (src);
3968 
3969   if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3970     fatal_insn ("incorrect insn:", insn);
3971 
3972   if (!mem_volatile_p)
3973     return avr_asm_len ("ld %B0,%1" CR_TAB
3974                         "ld %A0,%1", op, plen, -2);
3975 
3976   return avr_asm_len (TINY_SBIW (%I1, %J1, 2)  CR_TAB
3977                       "ld %A0,%p1+"            CR_TAB
3978                       "ld %B0,%p1"             CR_TAB
3979                       TINY_SBIW (%I1, %J1, 1), op, plen, -6);
3980 }
3981 
3982 
/* Output instructions to load the HImode value at memory SRC = op[1] into
   register pair DEST = op[0].  INSN is the move insn; PLEN works as in
   avr_asm_len (NULL = print, otherwise count only).  Dispatches on the
   address form of SRC: plain register, reg+disp, pre-dec, post-inc, or
   constant address.  */

static const char*
out_movhi_r_mr (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movhi_r_mr_reg_no_disp_tiny (insn, op, plen);

      if (reg_dest == reg_base)         /* R = (R) */
        /* Buffer the low byte: the first load clobbers the pointer.  */
        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                            "ld %B0,%1"          CR_TAB
                            "mov %A0,__tmp_reg__", op, plen, -3);

      if (reg_base != REG_X)
        return avr_asm_len ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1", op, plen, -2);

      /* X has no LDD: post-increment, then restore if X is still live.  */
      avr_asm_len ("ld %A0,X+" CR_TAB
                   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
        avr_asm_len ("sbiw r26,1", op, plen, 1);

      return "";
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      /* Shadows the outer reg_base: here the pointer is inside the PLUS.  */
      int reg_base = true_regnum (XEXP (base, 0));

      if (AVR_TINY)
        return avr_out_movhi_r_mr_reg_disp_tiny (insn, op, plen);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Out of LDD range: only Y may be adjusted in place here.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
                           "ldd %A0,Y+62"    CR_TAB
                           "ldd %B0,Y+63"    CR_TAB
                           "sbiw r28,%o1-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                           "sbci r29,hi8(-%o1)" CR_TAB
                           "ld %A0,Y"           CR_TAB
                           "ldd %B0,Y+1"        CR_TAB
                           "subi r28,lo8(%o1)"  CR_TAB
                           "sbci r29,hi8(%o1)", op, plen, -6);
        }

      /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
         it but I have this situation with extremal
         optimization options.  */

      if (reg_base == REG_X)
        {
          if (reg_base == reg_dest)
            return avr_asm_len ("adiw r26,%o1"      CR_TAB
                                "ld __tmp_reg__,X+" CR_TAB
                                "ld %B0,X"          CR_TAB
                                "mov %A0,__tmp_reg__", op, plen, -4);

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %A0,X+"    CR_TAB
                       "ld %B0,X", op, plen, -3);

          /* X ended at disp+1; restore it if still live.  */
          if (!reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o1+1", op, plen, 1);

          return "";
        }

      return reg_base == reg_dest
        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
                       "ldd %B0,%B1"         CR_TAB
                       "mov %A0,__tmp_reg__", op, plen, -3)

        : avr_asm_len ("ldd %A0,%A1" CR_TAB
                       "ldd %B0,%B1", op, plen, -2);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (AVR_TINY)
	return avr_out_movhi_r_mr_pre_dec_tiny (insn, op, plen);

      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      /* Non-volatile: reading high byte first lets the two pre-decrements
         land on the right addresses with no extra pointer arithmetic.  */
      if (!mem_volatile_p)
        return avr_asm_len ("ld %B0,%1" CR_TAB
                            "ld %A0,%1", op, plen, -2);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2"  CR_TAB
                       "ld %A0,X+"   CR_TAB
                       "ld %B0,X"    CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r1,2"  CR_TAB
                       "ld %A0,%p1"  CR_TAB
                       "ldd %B0,%p1+1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1"  CR_TAB
                          "ld %B0,%1", op, plen, -2);
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* NOTE(review): presumably LDS is 1 word on TINY and 2 elsewhere,
         hence 2 vs 4 total — confirm against the ISA.  */
      int n_words = AVR_TINY ? 2 : 4;
      return io_address_operand (base, HImode)
        ? avr_asm_len ("in %A0,%i1" CR_TAB
                       "in %B0,%i1+1", op, plen, -2)

        : avr_asm_len ("lds %A0,%m1" CR_TAB
                       "lds %B0,%m1+1", op, plen, -n_words);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4118 
/* TINY variant of SImode load with register-indirect address: no LDD, so
   everything is done with LD / LD+ plus explicit pointer adjustment.
   *L receives the instruction count; the template string is returned.  */

static const char*
avr_out_movsi_r_mr_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_dest == reg_base)
    {
      /* "ld r26,-X" is undefined */
      /* Destination overlaps the pointer pair: load top-down, buffering
         byte B, and reload the low byte last via a fresh pointer value.  */
      return *l = 9, (TINY_ADIW (%E1, %F1, 3) CR_TAB
		      "ld %D0,%1"             CR_TAB
		      "ld %C0,-%1"            CR_TAB
		      "ld __tmp_reg__,-%1"    CR_TAB
		      TINY_SBIW (%E1, %F1, 1) CR_TAB
		      "ld %A0,%1"             CR_TAB
		      "mov %B0,__tmp_reg__");
    }
  else if (reg_dest == reg_base - 2)
    {
      /* Pointer pair overlaps bytes C/D of the destination: buffer C so
         the final load of D still uses a valid pointer.  */
      return *l = 5, ("ld %A0,%1+"            CR_TAB
		      "ld %B0,%1+"            CR_TAB
		      "ld __tmp_reg__,%1+"    CR_TAB
		      "ld %D0,%1"             CR_TAB
		      "mov %C0,__tmp_reg__");
    }
  else if (reg_unused_after (insn, base))
    {
      return *l = 4, ("ld %A0,%1+"    CR_TAB
		      "ld %B0,%1+"    CR_TAB
		      "ld %C0,%1+"    CR_TAB
		      "ld %D0,%1");
    }
  else
    {
      /* Pointer still live afterwards: undo the three post-increments.  */
      return *l = 6, ("ld %A0,%1+"    CR_TAB
		      "ld %B0,%1+"    CR_TAB
		      "ld %C0,%1+"    CR_TAB
		      "ld %D0,%1"     CR_TAB
		      TINY_SBIW (%E1, %F1, 3));
    }
}
4163 
4164 
static const char*
avr_out_movsi_r_mr_reg_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  /* TINY SImode load from (BASE + disp): adjust the pointer with
     TINY_ADIW, load the four bytes, and restore the pointer if needed.
     *L receives the instruction count.  */
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (XEXP (base, 0));

  if (reg_dest == reg_base)
    {
      /* "ld r26,-X" is undefined */
      /* Destination overlaps the pointer pair: load top-down, buffer
         byte B, and fetch the low byte last.  No restore needed since
         the pointer is overwritten anyway.  */
      return *l = 9, (TINY_ADIW (%I1, %J1, %o1+3) CR_TAB
                      "ld %D0,%b1"                CR_TAB
                      "ld %C0,-%b1"               CR_TAB
                      "ld __tmp_reg__,-%b1"       CR_TAB
                      TINY_SBIW (%I1, %J1, 1)     CR_TAB
                      "ld %A0,%b1"                CR_TAB
                      "mov %B0,__tmp_reg__");
    }
  else if (reg_dest == reg_base - 2)
    {
      /* Pointer pair overlaps bytes C/D of the destination: buffer C.  */
      return *l = 7, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+"             CR_TAB
                      "ld %B0,%b1+"             CR_TAB
                      "ld __tmp_reg__,%b1+"     CR_TAB
                      "ld %D0,%b1"              CR_TAB
                      "mov %C0,__tmp_reg__");
    }
  else if (reg_unused_after (insn, XEXP (base, 0)))
    {
      return *l = 6, (TINY_ADIW (%I1, %J1, %o1) CR_TAB
                      "ld %A0,%b1+"             CR_TAB
                      "ld %B0,%b1+"             CR_TAB
                      "ld %C0,%b1+"             CR_TAB
                      "ld %D0,%b1");
    }
  else
    {
      /* Pointer still live: subtract disp+3 to restore it.  */
      return *l = 8, (TINY_ADIW (%I1, %J1, %o1)  CR_TAB
                      "ld %A0,%b1+"              CR_TAB
                      "ld %B0,%b1+"              CR_TAB
                      "ld %C0,%b1+"              CR_TAB
                      "ld %D0,%b1"               CR_TAB
                      TINY_SBIW (%I1, %J1, %o1+3));
    }
}
4212 
/* Output instructions to load the SImode value at memory SRC = op[1] into
   register DEST = op[0].  INSN is the move insn.  *L, if non-NULL,
   receives the instruction count of the returned template.  Dispatches on
   the address form: register-indirect, reg+disp, pre-dec, post-inc, or
   constant address.  */

static const char*
out_movsi_r_mr (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_no_disp_tiny (insn, op, l);

      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
	    /* "ld r26,-X" is undefined */
	    /* Loading into X itself: go top-down, buffer r27's byte in
	       __tmp_reg__, and load r26 last.  */
	    return *l=7, ("adiw r26,3"        CR_TAB
			  "ld r29,X"          CR_TAB
			  "ld r28,-X"         CR_TAB
			  "ld __tmp_reg__,-X" CR_TAB
			  "sbiw r26,1"        CR_TAB
			  "ld r26,X"          CR_TAB
			  "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            /* Bytes C/D of DEST are X itself: buffer C.  */
            return *l=5, ("ld %A0,X+"          CR_TAB
                          "ld %B0,X+"          CR_TAB
                          "ld __tmp_reg__,X+"  CR_TAB
                          "ld %D0,X"           CR_TAB
                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            return  *l=4, ("ld %A0,X+" CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X+" CR_TAB
                           "ld %D0,X");
          else
            /* X still live: undo the three post-increments.  */
            return  *l=5, ("ld %A0,X+" CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X+" CR_TAB
                           "ld %D0,X"  CR_TAB
                           "sbiw r26,3");
        }
      else
        {
          if (reg_dest == reg_base)
            /* DEST overlaps the pointer: load top-down, buffer byte B.  */
            return *l=5, ("ldd %D0,%1+3" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd __tmp_reg__,%1+1"  CR_TAB
                          "ld %A0,%1"  CR_TAB
                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            /* Pointer is bytes C/D of DEST: buffer C.  */
            return *l=5, ("ld %A0,%1"             CR_TAB
                          "ldd %B0,%1+1"          CR_TAB
                          "ldd __tmp_reg__,%1+2"  CR_TAB
                          "ldd %D0,%1+3"          CR_TAB
                          "mov %C0,__tmp_reg__");
          else
            return *l=4, ("ld %A0,%1"    CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd %D0,%1+3");
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movsi_r_mr_reg_disp_tiny (insn, op, l);

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
	{
	  /* Out of LDD range: only Y may be adjusted in place here.  */
	  if (REGNO (XEXP (base, 0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
	    return *l = 6, ("adiw r28,%o1-60" CR_TAB
			    "ldd %A0,Y+60"    CR_TAB
			    "ldd %B0,Y+61"    CR_TAB
			    "ldd %C0,Y+62"    CR_TAB
			    "ldd %D0,Y+63"    CR_TAB
			    "sbiw r28,%o1-60");

	  return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
			  "sbci r29,hi8(-%o1)" CR_TAB
			  "ld %A0,Y"           CR_TAB
			  "ldd %B0,Y+1"        CR_TAB
			  "ldd %C0,Y+2"        CR_TAB
			  "ldd %D0,Y+3"        CR_TAB
			  "subi r28,lo8(%o1)"  CR_TAB
			  "sbci r29,hi8(%o1)");
	}

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
	{
	  /* R = (X + d) */
	  if (reg_dest == REG_X)
	    {
	      *l = 7;
	      /* "ld r26,-X" is undefined */
	      return ("adiw r26,%o1+3"    CR_TAB
		      "ld r29,X"          CR_TAB
		      "ld r28,-X"         CR_TAB
		      "ld __tmp_reg__,-X" CR_TAB
		      "sbiw r26,1"        CR_TAB
		      "ld r26,X"          CR_TAB
		      "mov r27,__tmp_reg__");
	    }
	  *l = 6;
	  if (reg_dest == REG_X - 2)
	    /* DEST is r24..r27, so its top bytes are X: buffer r26.  */
	    return ("adiw r26,%o1"      CR_TAB
		    "ld r24,X+"         CR_TAB
		    "ld r25,X+"         CR_TAB
		    "ld __tmp_reg__,X+" CR_TAB
		    "ld r27,X"          CR_TAB
		    "mov r26,__tmp_reg__");

	  return ("adiw r26,%o1" CR_TAB
		  "ld %A0,X+"    CR_TAB
		  "ld %B0,X+"    CR_TAB
		  "ld %C0,X+"    CR_TAB
		  "ld %D0,X"     CR_TAB
		  "sbiw r26,%o1+3");
	}
      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1"          CR_TAB
                      "ldd %C0,%C1"          CR_TAB
                      "ldd __tmp_reg__,%B1"  CR_TAB
                      "ldd %A0,%A1"          CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1"          CR_TAB
                      "ldd %B0,%B1"          CR_TAB
                      "ldd __tmp_reg__,%C1"  CR_TAB
                      "ldd %D0,%D1"          CR_TAB
                      "mov %C0,__tmp_reg__");
      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB
                    "ldd %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB
		  "ld %C0,%1" CR_TAB
		  "ld %B0,%1" CR_TAB
		  "ld %A0,%1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB
		  "ld %B0,%1" CR_TAB
		  "ld %C0,%1" CR_TAB
		  "ld %D0,%1");
  else if (CONSTANT_ADDRESS_P (base))
    {
      if (io_address_operand (base, SImode))
        {
          *l = 4;
          return ("in %A0,%i1"   CR_TAB
                  "in %B0,%i1+1" CR_TAB
                  "in %C0,%i1+2" CR_TAB
                  "in %D0,%i1+3");
        }
      else
        {
          /* NOTE(review): presumably LDS is 1 word on TINY, 2 elsewhere;
             confirm against the ISA.  */
          *l = AVR_TINY ? 4 : 8;
          return ("lds %A0,%m1"   CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB
                  "lds %D0,%m1+3");
        }
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4393 
/* TINY variant of SImode store with register-indirect address DEST = op[0],
   source registers SRC = op[1].  No ADIW/SBIW on TINY, so pointer
   adjustment uses the TINY_ADIW/TINY_SBIW sequences.  *L receives the
   instruction count; the template string is returned.  */

static const char*
avr_out_movsi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  if (reg_base == reg_src)
    {
      /* "ld r26,-X" is undefined */
      /* Source overlaps the pointer pair: store byte A before the pointer
         is advanced, with byte B buffered in __tmp_reg__.  */
      if (reg_unused_after (insn, base))
        {
          return *l = 7, ("mov __tmp_reg__, %B1"  CR_TAB
			  "st %0,%A1"             CR_TAB
			  TINY_ADIW (%E0, %F0, 1) CR_TAB
			  "st %0+,__tmp_reg__"    CR_TAB
			  "st %0+,%C1"            CR_TAB
			  "st %0+,%D1");
        }
      else
        {
          /* Same, but restore the pointer because it stays live.  */
          return *l = 9, ("mov __tmp_reg__, %B1"  CR_TAB
			  "st %0,%A1"             CR_TAB
			  TINY_ADIW (%E0, %F0, 1) CR_TAB
			  "st %0+,__tmp_reg__"    CR_TAB
			  "st %0+,%C1"            CR_TAB
			  "st %0+,%D1"            CR_TAB
			  TINY_SBIW (%E0, %F0, 3));
        }
    }
  else if (reg_base == reg_src + 2)
    {
      /* Bytes C/D of the source are the pointer pair: stash them in
         __zero_reg__/__tmp_reg__ first, and clear __zero_reg__ after.  */
      if (reg_unused_after (insn, base))
	return *l = 7, ("mov __zero_reg__,%C1" CR_TAB
                        "mov __tmp_reg__,%D1"  CR_TAB
                        "st %0+,%A1"           CR_TAB
                        "st %0+,%B1"           CR_TAB
                        "st %0+,__zero_reg__"  CR_TAB
                        "st %0,__tmp_reg__"    CR_TAB
                        "clr __zero_reg__");
      else
	return *l = 9, ("mov __zero_reg__,%C1" CR_TAB
			"mov __tmp_reg__,%D1"  CR_TAB
			"st %0+,%A1"           CR_TAB
			"st %0+,%B1"           CR_TAB
			"st %0+,__zero_reg__"  CR_TAB
			"st %0,__tmp_reg__"    CR_TAB
			"clr __zero_reg__"     CR_TAB
			TINY_SBIW (%E0, %F0, 3));
    }

  /* No overlap: four post-increment stores, then restore the pointer.  */
  return *l = 6, ("st %0+,%A1" CR_TAB
		  "st %0+,%B1" CR_TAB
		  "st %0+,%C1" CR_TAB
		  "st %0,%D1"  CR_TAB
		  TINY_SBIW (%E0, %F0, 3));
}
4453 
4454 static const char*
4455 avr_out_movsi_mr_r_reg_disp_tiny (rtx op[], int *l)
4456 {
4457   rtx dest = op[0];
4458   rtx src = op[1];
4459   rtx base = XEXP (dest, 0);
4460   int reg_base = REGNO (XEXP (base, 0));
4461   int reg_src =true_regnum (src);
4462 
4463   if (reg_base == reg_src)
4464     {
4465       *l = 11;
4466       return ("mov __tmp_reg__,%A2"        CR_TAB
4467               "mov __zero_reg__,%B2"       CR_TAB
4468               TINY_ADIW (%I0, %J0, %o0)    CR_TAB
4469               "st %b0+,__tmp_reg__"        CR_TAB
4470               "st %b0+,__zero_reg__"       CR_TAB
4471               "st %b0+,%C2"                CR_TAB
4472               "st %b0,%D2"                 CR_TAB
4473               "clr __zero_reg__"           CR_TAB
4474               TINY_SBIW (%I0, %J0, %o0+3));
4475     }
4476   else if (reg_src == reg_base - 2)
4477     {
4478       *l = 11;
4479       return ("mov __tmp_reg__,%C2"         CR_TAB
4480               "mov __zero_reg__,%D2"        CR_TAB
4481               TINY_ADIW (%I0, %J0, %o0)     CR_TAB
4482               "st %b0+,%A0"                 CR_TAB
4483               "st %b0+,%B0"                 CR_TAB
4484               "st %b0+,__tmp_reg__"         CR_TAB
4485               "st %b0,__zero_reg__"         CR_TAB
4486               "clr __zero_reg__"            CR_TAB
4487               TINY_SBIW (%I0, %J0, %o0+3));
4488     }
4489   *l = 8;
4490   return (TINY_ADIW (%I0, %J0, %o0)     CR_TAB
4491           "st %b0+,%A1"                 CR_TAB
4492           "st %b0+,%B1"                 CR_TAB
4493           "st %b0+,%C1"                 CR_TAB
4494           "st %b0,%D1"                  CR_TAB
4495           TINY_SBIW (%I0, %J0, %o0+3));
4496 }
4497 
/* Output instructions to store SImode register SRC = op[1] to memory
   DEST = op[0].  INSN is the move insn.  *L, if non-NULL, receives the
   instruction count of the returned template.  Dispatches on the address
   form of DEST: constant address, register-indirect, reg+disp, pre-dec,
   or post-inc.  */

static const char*
out_movsi_mr_r (rtx_insn *insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    {
      if (io_address_operand (base, SImode))
        {
          return *l=4,("out %i0, %A1"  CR_TAB
                       "out %i0+1,%B1" CR_TAB
                       "out %i0+2,%C1" CR_TAB
                       "out %i0+3,%D1");
        }
      else
        {
          /* NOTE(review): presumably STS is 1 word on TINY, 2 elsewhere;
             confirm against the ISA.  */
          *l = AVR_TINY ? 4 : 8;
          return ("sts %m0,%A1"   CR_TAB
                  "sts %m0+1,%B1" CR_TAB
                  "sts %m0+2,%C1" CR_TAB
                  "sts %m0+3,%D1");
        }
    }

  if (reg_base > 0)                 /* (r) */
    {
      if (AVR_TINY)
        return avr_out_movsi_mr_r_reg_no_disp_tiny (insn, op, l);

      if (reg_base == REG_X)                /* (R26) */
        {
          if (reg_src == REG_X)
            {
	      /* "st X+,r26" is undefined */
	      /* Source is X itself: store r26 before advancing X, with
	         r27 buffered in __tmp_reg__.  */
              if (reg_unused_after (insn, base))
		return *l=6, ("mov __tmp_reg__,r27" CR_TAB
			      "st X,r26"            CR_TAB
			      "adiw r26,1"          CR_TAB
			      "st X+,__tmp_reg__"   CR_TAB
			      "st X+,r28"           CR_TAB
			      "st X,r29");
              else
                return *l=7, ("mov __tmp_reg__,r27" CR_TAB
			      "st X,r26"            CR_TAB
			      "adiw r26,1"          CR_TAB
			      "st X+,__tmp_reg__"   CR_TAB
			      "st X+,r28"           CR_TAB
			      "st X,r29"            CR_TAB
			      "sbiw r26,3");
            }
          else if (reg_base == reg_src + 2)
            {
              /* Bytes C/D of the source are X: stash them in
                 __zero_reg__/__tmp_reg__, clear __zero_reg__ after.  */
              if (reg_unused_after (insn, base))
                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1"  CR_TAB
                              "st %0+,%A1"           CR_TAB
                              "st %0+,%B1"           CR_TAB
                              "st %0+,__zero_reg__"  CR_TAB
                              "st %0,__tmp_reg__"    CR_TAB
                              "clr __zero_reg__");
              else
                return *l=8, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1"  CR_TAB
                              "st %0+,%A1"           CR_TAB
                              "st %0+,%B1"           CR_TAB
                              "st %0+,__zero_reg__"  CR_TAB
                              "st %0,__tmp_reg__"    CR_TAB
                              "clr __zero_reg__"     CR_TAB
                              "sbiw r26,3");
            }
          return *l=5, ("st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,%C1" CR_TAB
                        "st %0,%D1"  CR_TAB
                        "sbiw r26,3");
        }
      else
        return *l=4, ("st %0,%A1"    CR_TAB
		      "std %0+1,%B1" CR_TAB
		      "std %0+2,%C1" CR_TAB
		      "std %0+3,%D1");
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movsi_mr_r_reg_disp_tiny (op, l);

      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
	{
	  /* Out of STD range: only Y may be adjusted in place here.  */
	  if (reg_base != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 6, ("adiw r28,%o0-60" CR_TAB
			    "std Y+60,%A1"    CR_TAB
			    "std Y+61,%B1"    CR_TAB
			    "std Y+62,%C1"    CR_TAB
			    "std Y+63,%D1"    CR_TAB
			    "sbiw r28,%o0-60");

	  return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
			  "sbci r29,hi8(-%o0)" CR_TAB
			  "st Y,%A1"           CR_TAB
			  "std Y+1,%B1"        CR_TAB
			  "std Y+2,%C1"        CR_TAB
			  "std Y+3,%D1"        CR_TAB
			  "subi r28,lo8(%o0)"  CR_TAB
			  "sbci r29,hi8(%o0)");
	}
      if (reg_base == REG_X)
	{
	  /* (X + d) = R */
	  if (reg_src == REG_X)
	    {
	      /* Source is X: stash both halves before adjusting X.  */
	      *l = 9;
	      return ("mov __tmp_reg__,r26"  CR_TAB
		      "mov __zero_reg__,r27" CR_TAB
		      "adiw r26,%o0"         CR_TAB
		      "st X+,__tmp_reg__"    CR_TAB
		      "st X+,__zero_reg__"   CR_TAB
		      "st X+,r28"            CR_TAB
		      "st X,r29"             CR_TAB
		      "clr __zero_reg__"     CR_TAB
		      "sbiw r26,%o0+3");
	    }
	  else if (reg_src == REG_X - 2)
	    {
	      /* Source is r24..r27, so its top bytes are X: stash them.  */
	      *l = 9;
	      return ("mov __tmp_reg__,r26"  CR_TAB
		      "mov __zero_reg__,r27" CR_TAB
		      "adiw r26,%o0"         CR_TAB
		      "st X+,r24"            CR_TAB
		      "st X+,r25"            CR_TAB
		      "st X+,__tmp_reg__"    CR_TAB
		      "st X,__zero_reg__"    CR_TAB
		      "clr __zero_reg__"     CR_TAB
		      "sbiw r26,%o0+3");
	    }
	  *l = 6;
	  return ("adiw r26,%o0" CR_TAB
		  "st X+,%A1"    CR_TAB
		  "st X+,%B1"    CR_TAB
		  "st X+,%C1"    CR_TAB
		  "st X,%D1"     CR_TAB
		  "sbiw r26,%o0+3");
	}
      return *l=4, ("std %A0,%A1" CR_TAB
		    "std %B0,%B1" CR_TAB
		    "std %C0,%C1" CR_TAB
		    "std %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("st %0,%D1" CR_TAB
		  "st %0,%C1" CR_TAB
		  "st %0,%B1" CR_TAB
		  "st %0,%A1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("st %0,%A1" CR_TAB
		  "st %0,%B1" CR_TAB
		  "st %0,%C1" CR_TAB
		  "st %0,%D1");
  fatal_insn ("unknown move insn:",insn);
  return "";
}
4673 
/* Output a 4-byte (SImode/SFmode) move INSN with OPERANDS[0] = dest and
   OPERANDS[1] = src.  *L, if non-NULL, receives the instruction count
   instead of printing.  Handles flash reads via avr_out_lpm, reg-reg
   moves (MOVW when available), constant loads via output_reload_insisf,
   and memory loads/stores via out_movsi_r_mr / out_movsi_mr_r.  */

const char *
output_movsisf (rtx_insn *insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  /* Reads from program memory are handled by the LPM output routine.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, real_l);
    }

  if (!l)
    l = &dummy;

  gcc_assert (4 == GET_MODE_SIZE (GET_MODE (dest)));

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
	{
	  /* Copy in an order that never overwrites a source byte before
	     it is read, depending on which regnum is higher.  */
	  if (true_regnum (dest) > true_regnum (src))
	    {
	      if (AVR_HAVE_MOVW)
		{
		  *l = 2;
		  return ("movw %C0,%C1" CR_TAB
			  "movw %A0,%A1");
		}
	      *l = 4;
	      return ("mov %D0,%D1" CR_TAB
		      "mov %C0,%C1" CR_TAB
		      "mov %B0,%B1" CR_TAB
		      "mov %A0,%A1");
	    }
	  else
	    {
	      if (AVR_HAVE_MOVW)
		{
		  *l = 2;
		  return ("movw %A0,%A1" CR_TAB
			  "movw %C0,%C1");
		}
	      *l = 4;
	      return ("mov %A0,%A1" CR_TAB
		      "mov %B0,%B1" CR_TAB
		      "mov %C0,%C1" CR_TAB
		      "mov %D0,%D1");
	    }
	}
      else if (CONSTANT_P (src))
	{
          return output_reload_insisf (operands, NULL_RTX, real_l);
        }
      else if (MEM_P (src))
	return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      const char *templ;

      /* Storing zero: substitute __zero_reg__ as the source register,
         and restore operands[1] afterwards.  */
      if (src == CONST0_RTX (GET_MODE (dest)))
        operands[1] = zero_reg_rtx;

      templ = out_movsi_mr_r (insn, operands, real_l);

      if (!real_l)
	output_asm_insn (templ, operands);

      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
4751 
4752 
4753 /* Handle loads of 24-bit types from memory to register.  */
4754 
static const char*
avr_out_load_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  /* TINY 24-bit load with register-indirect address: no LDD, so use
     LD with explicit pointer adjustment.  PLEN as in avr_asm_len.  */
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base == reg_dest)
    {
      /* Destination overlaps the pointer pair: load top-down with byte B
         buffered in __tmp_reg__, low byte last.  */
      return avr_asm_len (TINY_ADIW (%E1, %F1, 2)   CR_TAB
                          "ld %C0,%1"               CR_TAB
                          "ld __tmp_reg__,-%1"      CR_TAB
                          TINY_SBIW (%E1, %F1, 1)   CR_TAB
                          "ld %A0,%1"               CR_TAB
                          "mov %B0,__tmp_reg__", op, plen, -8);
    }
  else
    {
      avr_asm_len ("ld %A0,%1+"  CR_TAB
		   "ld %B0,%1+"  CR_TAB
		   "ld %C0,%1", op, plen, -3);

      /* Restore the pointer unless the load overwrote it (its pair holds
         bytes C and the one above) or it is dead after this insn.  */
      if (reg_dest != reg_base - 2
          && !reg_unused_after (insn, base))
        {
          avr_asm_len (TINY_SBIW (%E1, %F1, 2), op, plen, 2);
        }
      return "";
    }
}
4787 
4788 static const char*
4789 avr_out_load_psi_reg_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
4790 {
4791   rtx dest = op[0];
4792   rtx src = op[1];
4793   rtx base = XEXP (src, 0);
4794   int reg_dest = true_regnum (dest);
4795   int reg_base = true_regnum (base);
4796 
4797   reg_base = true_regnum (XEXP (base, 0));
4798   if (reg_base == reg_dest)
4799     {
4800       return avr_asm_len (TINY_ADIW (%I1, %J1, %o1+2) CR_TAB
4801                           "ld %C0,%b1"                CR_TAB
4802                           "ld __tmp_reg__,-%b1"       CR_TAB
4803                           TINY_SBIW (%I1, %J1, 1)     CR_TAB
4804                           "ld %A0,%b1"                CR_TAB
4805                           "mov %B0,__tmp_reg__", op, plen, -8);
4806     }
4807   else
4808     {
4809       avr_asm_len (TINY_ADIW (%I1, %J1, %o1)   CR_TAB
4810                    "ld %A0,%b1+"               CR_TAB
4811                    "ld %B0,%b1+"               CR_TAB
4812                    "ld %C0,%b1", op, plen, -5);
4813 
4814       if (reg_dest != reg_base - 2
4815           && !reg_unused_after (insn, XEXP (base, 0)))
4816         avr_asm_len (TINY_SBIW (%I1, %J1, %o1+2), op, plen, 2);
4817 
4818       return "";
4819     }
4820 }
4821 
/* Output a load of a 24-bit (PSImode) value from memory to register.

   INSN is the load insn, OP[0] the destination register, OP[1] the source
   MEM.  Dispatches on the address form of OP[1]: plain register, reg+disp,
   pre-decrement, post-increment, or constant address.  PLEN == NULL:
   output instructions.  PLEN != NULL: set *PLEN to the length of the
   sequence in words and output nothing.  Return "".  */

static const char*
avr_out_load_psi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_load_psi_reg_no_disp_tiny (insn, op, plen);

      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            /* Load high byte into r28 first, keep middle byte in
               __tmp_reg__, and only overwrite X (r26/r27) at the end.  */
            return avr_asm_len ("adiw r26,2"        CR_TAB
                                "ld r28,X"          CR_TAB
                                "ld __tmp_reg__,-X" CR_TAB
                                "sbiw r26,1"        CR_TAB
                                "ld r26,X"          CR_TAB
                                "mov r27,__tmp_reg__", op, plen, -6);
          else
            {
              avr_asm_len ("ld %A0,X+" CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X", op, plen, -3);

              /* Restore X unless it dies here, or the final load already
                 overwrote r26 (the reg_dest == REG_X - 2 case).  */
              if (reg_dest != REG_X - 2
                  && !reg_unused_after (insn, base))
                {
                  avr_asm_len ("sbiw r26,2", op, plen, 1);
                }

              return "";
            }
        }
      else /* reg_base != REG_X */
        {
          if (reg_dest == reg_base)
            /* Overlap: load downwards via __tmp_reg__ so the pointer
               survives until the low byte is read.  */
            return avr_asm_len ("ldd %C0,%1+2"          CR_TAB
                                "ldd __tmp_reg__,%1+1"  CR_TAB
                                "ld  %A0,%1"            CR_TAB
                                "mov %B0,__tmp_reg__", op, plen, -4);
          else
            return avr_asm_len ("ld  %A0,%1"    CR_TAB
                                "ldd %B0,%1+1"  CR_TAB
                                "ldd %C0,%1+2", op, plen, -3);
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_load_psi_reg_disp_tiny (insn, op, plen);

      /* Displacement too large for LDD: adjust the pointer (must be Y)
         around the access.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
                                "ldd %A0,Y+61"    CR_TAB
                                "ldd %B0,Y+62"    CR_TAB
                                "ldd %C0,Y+63"    CR_TAB
                                "sbiw r28,%o1-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld  %A0,Y"          CR_TAB
                              "ldd %B0,Y+1"        CR_TAB
                              "ldd %C0,Y+2"        CR_TAB
                              "subi r28,lo8(%o1)"  CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -7);
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              /* "ld r26,-X" is undefined */
              return avr_asm_len ("adiw r26,%o1+2"     CR_TAB
                                  "ld  r28,X"          CR_TAB
                                  "ld  __tmp_reg__,-X" CR_TAB
                                  "sbiw r26,1"         CR_TAB
                                  "ld  r26,X"          CR_TAB
                                  "mov r27,__tmp_reg__", op, plen, -6);
            }

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %A0,X+"    CR_TAB
                       "ld %B0,X+"    CR_TAB
                       "ld %C0,X", op, plen, -4);

          /* Restore X unless it dies here or the destination ends in
             r26 anyway (reg_dest == REG_W, i.e. r24..r26).  */
          if (reg_dest != REG_W
              && !reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);

          return "";
        }

      if (reg_dest == reg_base)
        /* Overlap with the pointer: load downwards via __tmp_reg__.  */
        return avr_asm_len ("ldd %C0,%C1" CR_TAB
                            "ldd __tmp_reg__,%B1"  CR_TAB
                            "ldd %A0,%A1" CR_TAB
                            "mov %B0,__tmp_reg__", op, plen, -4);

      return avr_asm_len ("ldd %A0,%A1" CR_TAB
                          "ldd %B0,%B1" CR_TAB
                          "ldd %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("ld %C0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("ld %A0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %C0,%1", op, plen, -3);

  else if (CONSTANT_ADDRESS_P (base))
    {
      /* LDS is 1 word on AVR_TINY, 2 words otherwise.  */
      int n_words = AVR_TINY ? 3 : 6;
      return avr_asm_len ("lds %A0,%m1" CR_TAB
                          "lds %B0,%m1+1" CR_TAB
                          "lds %C0,%m1+2", op, plen , -n_words);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
4959 
4960 
4961 static const char*
4962 avr_out_store_psi_reg_no_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
4963 {
4964   rtx dest = op[0];
4965   rtx src = op[1];
4966   rtx base = XEXP (dest, 0);
4967   int reg_base = true_regnum (base);
4968   int reg_src = true_regnum (src);
4969 
4970   if (reg_base == reg_src)
4971     {
4972       avr_asm_len ("st %0,%A1"              CR_TAB
4973                    "mov __tmp_reg__,%B1"    CR_TAB
4974                    TINY_ADIW (%E0, %F0, 1)  CR_TAB /* st X+, r27 is undefined */
4975                    "st %0+,__tmp_reg__"     CR_TAB
4976                    "st %0,%C1", op, plen, -6);
4977 
4978     }
4979   else if (reg_src == reg_base - 2)
4980     {
4981       avr_asm_len ("st %0,%A1"              CR_TAB
4982                    "mov __tmp_reg__,%C1"    CR_TAB
4983                    TINY_ADIW (%E0, %F0, 1)  CR_TAB
4984                    "st %0+,%B1"             CR_TAB
4985                    "st %0,__tmp_reg__", op, plen, 6);
4986     }
4987   else
4988     {
4989       avr_asm_len ("st %0+,%A1"  CR_TAB
4990                    "st %0+,%B1" CR_TAB
4991                    "st %0,%C1", op, plen, -3);
4992     }
4993 
4994   if (!reg_unused_after (insn, base))
4995     avr_asm_len (TINY_SBIW (%E0, %F0, 2), op, plen, 2);
4996 
4997   return "";
4998 }
4999 
/* Handle store of a 24-bit type from register to memory for AVR_TINY,
   where the destination address is (PLUS pointer-reg disp).  INSN is the
   store insn, OP[0] the destination MEM, OP[1] the source register.
   PLEN == NULL: output instructions.  PLEN != NULL: set *PLEN to the
   length of the sequence in words and output nothing.  Return "".  */

static const char*
avr_out_store_psi_reg_disp_tiny (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  if (reg_src == reg_base)
    /* Source overlaps the pointer: copy its low and middle bytes to
       __tmp_reg__/__zero_reg__ before the pointer is adjusted, and
       re-clear __zero_reg__ afterwards.  */
    avr_asm_len ("mov __tmp_reg__,%A1"          CR_TAB
                 "mov __zero_reg__,%B1"         CR_TAB
                 TINY_ADIW (%I0, %J0, %o0)      CR_TAB
                 "st %b0+,__tmp_reg__"          CR_TAB
                 "st %b0+,__zero_reg__"         CR_TAB
                 "st %b0,%C1"                   CR_TAB
                 "clr __zero_reg__", op, plen, -8);
  else if (reg_src == reg_base - 2)
    /* The high source byte is the pointer's low register: save it in
       __tmp_reg__ before adjusting the pointer.  */
    avr_asm_len ("mov __tmp_reg__,%C1"          CR_TAB
                 TINY_ADIW (%I0, %J0, %o0)      CR_TAB
                 "st %b0+,%A1"                  CR_TAB
                 "st %b0+,%B1"                  CR_TAB
                 "st %b0,__tmp_reg__", op, plen, -6);
  else
    avr_asm_len (TINY_ADIW (%I0, %J0, %o0)      CR_TAB
                 "st %b0+,%A1"                  CR_TAB
                 "st %b0+,%B1"                  CR_TAB
                 "st %b0,%C1", op, plen, -5);

  /* Restore the pointer if it is still live after this insn.  */
  if (!reg_unused_after (insn, XEXP (base, 0)))
    avr_asm_len (TINY_SBIW (%I0, %J0, %o0+2), op, plen, 2);

  return "";
}
5034 
/* Handle store of 24-bit type from register or zero to memory.

   INSN is the store insn, OP[0] the destination MEM, OP[1] the source
   register.  Dispatches on the address form of OP[0]: constant address,
   plain register, reg+disp, pre-decrement, or post-increment.
   PLEN == NULL: output instructions.  PLEN != NULL: set *PLEN to the
   length of the sequence in words and output nothing.  Return "".  */

static const char*
avr_out_store_psi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* STS is 1 word on AVR_TINY, 2 words otherwise.  */
      int n_words = AVR_TINY ? 3 : 6;
      return avr_asm_len ("sts %m0,%A1"   CR_TAB
                          "sts %m0+1,%B1" CR_TAB
                          "sts %m0+2,%C1", op, plen, -n_words);
    }

  if (reg_base > 0)                 /* (r) */
    {
      if (AVR_TINY)
        return avr_out_store_psi_reg_no_disp_tiny (insn, op, plen);

      if (reg_base == REG_X)        /* (R26) */
        {
          gcc_assert (!reg_overlap_mentioned_p (base, src));

          avr_asm_len ("st %0+,%A1"  CR_TAB
                       "st %0+,%B1" CR_TAB
                       "st %0,%C1", op, plen, -3);

          /* Undo the post-increments if X is still live.  */
          if (!reg_unused_after (insn, base))
            avr_asm_len ("sbiw r26,2", op, plen, 1);

          return "";
        }
      else
        return avr_asm_len ("st %0,%A1"    CR_TAB
                            "std %0+1,%B1" CR_TAB
                            "std %0+2,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_store_psi_reg_disp_tiny (insn, op, plen);

      reg_base = REGNO (XEXP (base, 0));

      /* Displacement too large for STD: adjust the pointer (must be Y)
         around the access.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-61" CR_TAB
                                "std Y+61,%A1"    CR_TAB
                                "std Y+62,%B1"    CR_TAB
                                "std Y+63,%C1"    CR_TAB
                                "sbiw r28,%o0-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%A1"           CR_TAB
                              "std Y+1,%B1"        CR_TAB
                              "std Y+2,%C1"        CR_TAB
                              "subi r28,lo8(%o0)"  CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -7);
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));

          avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1"    CR_TAB
                       "st X+,%B1"    CR_TAB
                       "st X,%C1", op, plen, -4);

          /* Restore X if it is still live.  */
          if (!reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %A0,%A1" CR_TAB
                          "std %B0,%B1" CR_TAB
                          "std %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("st %0,%C1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%A1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("st %0,%A1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%C1", op, plen, -3);

  fatal_insn ("unknown move insn:",insn);
  return "";
}
5137 
5138 
/* Output a 24-bit (PSImode) move: register-register, constant-to-register,
   load from memory, or store to memory (including store of zero via
   __zero_reg__).  INSN is the move insn, OP[0]/OP[1] destination and
   source.  PLEN == NULL: output instructions.  PLEN != NULL: set *PLEN
   to the length of the sequence in words and output nothing.
   Return "".  */

const char *
avr_out_movpsi (rtx_insn *insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, op, plen);
    }

  if (register_operand (dest, VOIDmode))
    {
      if (register_operand (src, VOIDmode)) /* mov r,r */
        {
          /* Copy in the direction that does not clobber not-yet-moved
             source bytes when the register ranges overlap.  */
          if (true_regnum (dest) > true_regnum (src))
            {
              avr_asm_len ("mov %C0,%C1", op, plen, -1);

              if (AVR_HAVE_MOVW)
                return avr_asm_len ("movw %A0,%A1", op, plen, 1);
              else
                return avr_asm_len ("mov %B0,%B1"  CR_TAB
                                    "mov %A0,%A1", op, plen, 2);
            }
          else
            {
              if (AVR_HAVE_MOVW)
                avr_asm_len ("movw %A0,%A1", op, plen, -1);
              else
                avr_asm_len ("mov %A0,%A1"  CR_TAB
                             "mov %B0,%B1", op, plen, -2);

              return avr_asm_len ("mov %C0,%C1", op, plen, 1);
            }
        }
      else if (CONSTANT_P (src))
        {
          return avr_out_reload_inpsi (op, NULL_RTX, plen);
        }
      else if (MEM_P (src))
        return avr_out_load_psi (insn, op, plen); /* mov r,m */
    }
  else if (MEM_P (dest))
    {
      rtx xop[2];

      /* Store of literal zero goes through __zero_reg__.  */
      xop[0] = dest;
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;

      return avr_out_store_psi (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);
  return "";
}
5198 
/* Handle an 8-bit store to a reg+displacement address for AVR_TINY,
   where STD is not available.  INSN is the store insn, OP[0] the
   destination MEM whose address is (PLUS pointer-reg disp), OP[1] the
   source register.  PLEN == NULL: output instructions.  PLEN != NULL:
   set *PLEN to the length in words and output nothing.  Return "".  */

static const char*
avr_out_movqi_mr_r_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
    {
      /* Source overlaps the pointer: copy it to __tmp_reg__ before the
         pointer adjustment clobbers it.  */
      avr_asm_len ("mov __tmp_reg__,%1"      CR_TAB
                   TINY_ADIW (%I0, %J0, %o0) CR_TAB
                   "st %b0,__tmp_reg__", op, plen, -4);
    }
  else
    {
      avr_asm_len (TINY_ADIW (%I0, %J0, %o0) CR_TAB
                   "st %b0,%1", op, plen, -3);
    }

  /* Restore the pointer if it is still live after this insn.  */
  if (!reg_unused_after (insn, XEXP (x, 0)))
    avr_asm_len (TINY_SBIW (%I0, %J0, %o0), op, plen, 2);

  return "";
}
5223 
/* Output an 8-bit (QImode) store from register to memory.  INSN is the
   store insn, OP[0] the destination MEM, OP[1] the source register.
   Dispatches on the address form: constant/IO address, reg+disp, or any
   other (plain register, pre-dec, post-inc) handled by a single "st".
   PLEN == NULL: output instructions.  PLEN != NULL: set *PLEN to the
   length in words and output nothing.  Return "".  */

static const char*
out_movqi_mr_r (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* STS is 1 word on AVR_TINY, 2 words otherwise.  */
      int n_words = AVR_TINY ? 1 : 2;
      return io_address_operand (x, QImode)
        ? avr_asm_len ("out %i0,%1", op, plen, -1)
        : avr_asm_len ("sts %m0,%1", op, plen, -n_words);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (AVR_TINY)
        return avr_out_movqi_mr_r_reg_disp_tiny (insn, op, plen);

      /* Displacement too large for STD: adjust the pointer (must be Y)
         around the access.  */
      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
        {
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
                                "std Y+63,%1"     CR_TAB
                                "sbiw r28,%o0-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%1"            CR_TAB
                              "subi r28,lo8(%o0)"  CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* No displacement addressing with X: adjust X around the
             access; save the source in __tmp_reg__ first if it overlaps
             the pointer.  */
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                           "adiw r26,%o0"       CR_TAB
                           "st X,__tmp_reg__", op, plen, -3);
            }
          else
            {
              avr_asm_len ("adiw r26,%o0" CR_TAB
                           "st X,%1", op, plen, -2);
            }

          if (!reg_unused_after (insn, XEXP (x, 0)))
            avr_asm_len ("sbiw r26,%o0", op, plen, 1);

          return "";
        }

      return avr_asm_len ("std %0,%1", op, plen, -1);
    }

  return avr_asm_len ("st %0,%1", op, plen, -1);
}
5290 

/* Helper for the next function for XMEGA.  It does the same
   but with low byte first.  INSN is the store insn, OP[0] the
   destination MEM, OP[1] the source register.  PLEN == NULL: output
   instructions.  PLEN != NULL: set *PLEN to the length in words and
   output nothing.  Return "".  */

static const char*
avr_out_movhi_mr_r_xmega (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      return io_address_operand (base, HImode)
        ? avr_asm_len ("out %i0,%A1" CR_TAB
                       "out %i0+1,%B1", op, plen, -2)

        : avr_asm_len ("sts %m0,%A1" CR_TAB
                       "sts %m0+1,%B1", op, plen, -4);
    }

  if (reg_base > 0)
    {
      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                     "st X,r26"            CR_TAB
                     "adiw r26,1"          CR_TAB
                     "st X,__tmp_reg__", op, plen, -4);
      else
        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      /* Restore X if it is still live after this insn.  */
      return reg_unused_after (insn, base)
        ? ""
        : avr_asm_len ("sbiw r26,1", op, plen, 1);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      /* Displacement too large for STD: adjust the pointer (must be Y)
         around the access.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1"    CR_TAB
                           "std Y+63,%B1"    CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "st Y,%A1"           CR_TAB
                           "std Y+1,%B1"        CR_TAB
                           "subi r28,lo8(%o0)"  CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26"  CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0"         CR_TAB
                       "st X+,__tmp_reg__"    CR_TAB
                       "st X,__zero_reg__"    CR_TAB
                       "clr __zero_reg__"     CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1"    CR_TAB
                       "st X,%B1"     CR_TAB
                       "sbiw r26,%o0+1", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      /* Volatile: rewrite the two pre-decrements so the low byte is
         written first.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2"  CR_TAB
                       "st X+,%A1"   CR_TAB
                       "st X,%B1"    CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r0,2"  CR_TAB
                       "st %p0,%A1"  CR_TAB
                       "std %p0+1,%B1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      return avr_asm_len ("st %0,%A1"  CR_TAB
                          "st %0,%B1", op, plen, -2);

    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
5404 
/* Handle a 16-bit store to a plain pointer-register address for
   AVR_TINY.  INSN is the store insn, OP[0] the destination MEM, OP[1]
   the source register.  PLEN == NULL: output instructions.
   PLEN != NULL: set *PLEN to the length in words and output nothing.
   Return "".  */

static const char*
avr_out_movhi_mr_r_reg_no_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  if (reg_base == reg_src)
    {
      /* Source overlaps the pointer: save the high byte in __tmp_reg__
         before the pointer is modified.  The longer form writes the low
         byte first (volatile or pointer still live afterwards).  */
      return !mem_volatile_p && reg_unused_after (insn, src)
        ? avr_asm_len ("mov __tmp_reg__,%B1"   CR_TAB
                       "st %0,%A1"             CR_TAB
                       TINY_ADIW (%E0, %F0, 1) CR_TAB
                       "st %0,__tmp_reg__", op, plen, -5)
        : avr_asm_len ("mov __tmp_reg__,%B1"   CR_TAB
                       TINY_ADIW (%E0, %F0, 1) CR_TAB
                       "st %0,__tmp_reg__"     CR_TAB
                       TINY_SBIW (%E0, %F0, 1) CR_TAB
                       "st %0, %A1", op, plen, -7);
    }

  /* Non-overlapping: post-increment form when the pointer dies here,
     otherwise write high byte first and leave the pointer unchanged.  */
  return !mem_volatile_p && reg_unused_after (insn, base)
    ? avr_asm_len ("st %0+,%A1" CR_TAB
                   "st %0,%B1", op, plen, -2)
    : avr_asm_len (TINY_ADIW (%E0, %F0, 1) CR_TAB
                   "st %0,%B1"             CR_TAB
                   "st -%0,%A1", op, plen, -4);
}
5436 
/* Handle a 16-bit store to a reg+displacement address for AVR_TINY,
   where STD is not available.  INSN is the store insn, OP[0] the
   destination MEM whose address is (PLUS pointer-reg disp), OP[1] the
   source register.  PLEN == NULL: output instructions.  PLEN != NULL:
   set *PLEN to the length in words and output nothing.  Return "".  */

static const char*
avr_out_movhi_mr_r_reg_disp_tiny (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = REGNO (XEXP (base, 0));
  int reg_src = true_regnum (src);

  if (reg_src == reg_base)
    /* Source overlaps the pointer: copy it to __tmp_reg__/__zero_reg__
       before the pointer is adjusted; re-clear __zero_reg__ at the end.  */
    avr_asm_len ("mov __tmp_reg__,%A1"          CR_TAB
                 "mov __zero_reg__,%B1"         CR_TAB
                 TINY_ADIW (%I0, %J0, %o0+1)    CR_TAB
                 "st %b0,__zero_reg__"          CR_TAB
                 "st -%b0,__tmp_reg__"          CR_TAB
                 "clr __zero_reg__", op, plen, -7);
  else
    avr_asm_len (TINY_ADIW (%I0, %J0, %o0+1) CR_TAB
                 "st %b0,%B1"                CR_TAB
                 "st -%b0,%A1", op, plen, -4);

  /* Restore the pointer if it is still live after this insn.  */
  if (!reg_unused_after (insn, XEXP (base, 0)))
    avr_asm_len (TINY_SBIW (%I0, %J0, %o0), op, plen, 2);

  return "";
}
5463 
/* Handle a volatile 16-bit store through a post-increment pointer for
   AVR_TINY: write the high byte first (step forward one, store, step
   back), then advance the pointer past both bytes.  OP[0] is the
   destination MEM, OP[1] the source register; PLEN as elsewhere.  */

static const char*
avr_out_movhi_mr_r_post_inc_tiny (rtx op[], int *plen)
{
  return avr_asm_len (TINY_ADIW (%I0, %J0, 1)  CR_TAB
                      "st %p0,%B1"    CR_TAB
                      "st -%p0,%A1"   CR_TAB
                      TINY_ADIW (%I0, %J0, 2), op, plen, -6);
}
5472 
/* Output a 16-bit (HImode) store from register to memory.  INSN is the
   store insn, OP[0] the destination MEM, OP[1] the source register.
   Dispatches on the address form of OP[0].  PLEN == NULL: output
   instructions.  PLEN != NULL: set *PLEN to the length in words and
   output nothing.  Return "".  */

static const char*
out_movhi_mr_r (rtx_insn *insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p;

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like SP.  */

  if (AVR_XMEGA)
    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  if (CONSTANT_ADDRESS_P (base))
    {
      /* STS is 1 word on AVR_TINY, 2 words otherwise.  */
      int n_words = AVR_TINY ? 2 : 4;
      return io_address_operand (base, HImode)
        ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                       "out %i0,%A1", op, plen, -2)

        : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                       "sts %m0,%A1", op, plen, -n_words);
    }

  if (reg_base > 0)
    {
      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_no_disp_tiny (insn, op, plen);

      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        /* Save r27 in __tmp_reg__ before X gets modified; the second
           form keeps high-byte-first order for the volatile case.  */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,r26"            CR_TAB
                         "adiw r26,1"          CR_TAB
                         "st X,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "adiw r26,1"          CR_TAB
                         "st X,__tmp_reg__"    CR_TAB
                         "sbiw r26,1"          CR_TAB
                         "st X,r26", op, plen, -5);

      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1"   CR_TAB
                       "st -X,%A1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));

      if (AVR_TINY)
        return avr_out_movhi_mr_r_reg_disp_tiny (insn, op, plen);

      reg_base = REGNO (XEXP (base, 0));
      /* Displacement too large for STD: adjust the pointer (must be Y)
         around the access.  */
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1"    CR_TAB
                           "std Y+62,%A1"    CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1"        CR_TAB
                           "st Y,%A1"           CR_TAB
                           "subi r28,lo8(%o0)"  CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);
      /* (X + d) = R */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26"  CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1"       CR_TAB
                       "st X,__zero_reg__"    CR_TAB
                       "st -X,__tmp_reg__"    CR_TAB
                       "clr __zero_reg__"     CR_TAB
                       "sbiw r26,%o0", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                       "st X,%B1"       CR_TAB
                       "st -X,%A1"      CR_TAB
                       "sbiw r26,%o0", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1"  CR_TAB
                            "st %0,%B1", op, plen, -2);

      if (AVR_TINY)
        return avr_out_movhi_mr_r_post_inc_tiny (op, plen);

      /* Volatile: rewrite so the high byte is written first.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1"  CR_TAB
                       "st X,%B1"    CR_TAB
                       "st -X,%A1"   CR_TAB
                       "adiw r26,2", op, plen, -4)

        : avr_asm_len ("std %p0+1,%B1" CR_TAB
                       "st %p0,%A1"    CR_TAB
                       "adiw %r0,2", op, plen, -3);
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
5605 
5606 /* Return 1 if frame pointer for current function required.  */
5607 
5608 static bool
5609 avr_frame_pointer_required_p (void)
5610 {
5611   return (cfun->calls_alloca
5612           || cfun->calls_setjmp
5613           || cfun->has_nonlocal_label
5614           || crtl->args.info.nregs == 0
5615           || get_frame_size () > 0);
5616 }
5617 
5618 /* Returns the condition of compare insn INSN, or UNKNOWN.  */
5619 
5620 static RTX_CODE
5621 compare_condition (rtx_insn *insn)
5622 {
5623   rtx_insn *next = next_real_insn (insn);
5624 
5625   if (next && JUMP_P (next))
5626     {
5627       rtx pat = PATTERN (next);
5628       rtx src = SET_SRC (pat);
5629 
5630       if (IF_THEN_ELSE == GET_CODE (src))
5631         return GET_CODE (XEXP (src, 0));
5632     }
5633 
5634   return UNKNOWN;
5635 }
5636 
5637 
5638 /* Returns true iff INSN is a tst insn that only tests the sign.  */
5639 
5640 static bool
5641 compare_sign_p (rtx_insn *insn)
5642 {
5643   RTX_CODE cond = compare_condition (insn);
5644   return (cond == GE || cond == LT);
5645 }
5646 
5647 
5648 /* Returns true iff the next insn is a JUMP_INSN with a condition
5649    that needs to be swapped (GT, GTU, LE, LEU).  */
5650 
5651 static bool
5652 compare_diff_p (rtx_insn *insn)
5653 {
5654   RTX_CODE cond = compare_condition (insn);
5655   return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
5656 }
5657 
5658 /* Returns true iff INSN is a compare insn with the EQ or NE condition.  */
5659 
5660 static bool
5661 compare_eq_p (rtx_insn *insn)
5662 {
5663   RTX_CODE cond = compare_condition (insn);
5664   return (cond == EQ || cond == NE);
5665 }
5666 
5667 
/* Output compare instruction

      compare (XOP[0], XOP[1])

   for a register XOP[0] and a compile-time constant XOP[1].  Return "".
   XOP[2] is an 8-bit scratch register as needed.

   PLEN == NULL:  Output instructions.
   PLEN != NULL:  Set *PLEN to the length (in words) of the sequence.
                  Don't output anything.  */

const char*
avr_out_compare (rtx_insn *insn, rtx *xop, int *plen)
{
  /* Register to compare and value to compare against. */
  rtx xreg = xop[0];
  rtx xval = xop[1];

  /* MODE of the comparison.  */
  machine_mode mode;

  /* Number of bytes to operate on.  */
  int n_bytes = GET_MODE_SIZE (GET_MODE (xreg));

  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown.  */
  int clobber_val = -1;

  /* Map fixed mode operands to integer operands with the same binary
     representation.  They are easier to handle in the remainder.  */

  if (CONST_FIXED_P (xval))
    {
      xreg = avr_to_int_mode (xop[0]);
      xval = avr_to_int_mode (xop[1]);
    }

  mode = GET_MODE (xreg);

  gcc_assert (REG_P (xreg));
  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));

  if (plen)
    *plen = 0;

  /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
     against 0 by ORing the bytes.  This is one instruction shorter.
     Notice that 64-bit comparisons are always against reg:ALL8 18 (ACC_A)
     and therefore don't use this.  */

  if (!test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && reg_unused_after (insn, xreg))
    {
      if (xval == const1_rtx)
        {
          /* DEC turns the == 1 test into a test against 0, then fold
             the remaining bytes in with OR.  */
          avr_asm_len ("dec %A0" CR_TAB
                       "or %A0,%B0", xop, plen, 2);

          if (n_bytes >= 3)
            avr_asm_len ("or %A0,%C0", xop, plen, 1);

          if (n_bytes >= 4)
            avr_asm_len ("or %A0,%D0", xop, plen, 1);

          return "";
        }
      else if (xval == constm1_rtx)
        {
          /* All bytes must be 0xff:  AND them together and COM the
             result so that == -1 becomes a test against 0.  */
          if (n_bytes >= 4)
            avr_asm_len ("and %A0,%D0", xop, plen, 1);

          if (n_bytes >= 3)
            avr_asm_len ("and %A0,%C0", xop, plen, 1);

          return avr_asm_len ("and %A0,%B0" CR_TAB
                              "com %A0", xop, plen, 2);
        }
    }

  /* Comparisons == -1 and != -1 of a d-register that's used after the
     comparison.  (If it's unused after we use CPI / SBCI or ADIW sequence
     from below.)  Instead of  CPI Rlo,-1 / LDI Rx,-1 / CPC Rhi,Rx  we can
     use  CPI Rlo,-1 / CPC Rhi,Rlo  which is 1 instruction shorter:
     If CPI is true then Rlo contains -1 and we can use Rlo instead of Rx
     when CPC'ing the high part.  If CPI is false then CPC cannot render
     the result to true.  This also works for the more generic case where
     the constant is of the form 0xabab.  */

  if (n_bytes == 2
      && xval != const0_rtx
      && test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && !reg_unused_after (insn, xreg))
    {
      rtx xlo8 = simplify_gen_subreg (QImode, xval, mode, 0);
      rtx xhi8 = simplify_gen_subreg (QImode, xval, mode, 1);

      /* Only applicable when both constant bytes are equal (0xabab).  */
      if (INTVAL (xlo8) == INTVAL (xhi8))
        {
          xop[0] = xreg;
          xop[1] = xlo8;

          return avr_asm_len ("cpi %A0,%1"  CR_TAB
                              "cpc %B0,%A0", xop, plen, 2);
        }
    }

  /* Default: compare byte per byte using CP/CPC resp. CPI/SBCI,
     loading the constant into the scratch register as needed.  */

  for (int i = 0; i < n_bytes; i++)
    {
      /* We compare byte-wise.  */
      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to compare with this byte.  */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      xop[0] = reg8;
      xop[1] = gen_int_mode (val8, QImode);

      /* Word registers >= R24 can use SBIW/ADIW with 0..63.  */

      if (i == 0
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);

          if (IN_RANGE (val16, 0, 63)
              && (val8 == 0
                  || reg_unused_after (insn, xreg)))
            {
              /* SBIW handles two bytes at once; skip the next byte.  */
              if (AVR_TINY)
                avr_asm_len (TINY_SBIW (%A0, %B0, %1), xop, plen, 2);
              else
                avr_asm_len ("sbiw %0,%1", xop, plen, 1);

              i++;
              continue;
            }

          if (n_bytes == 2
              && IN_RANGE (val16, -63, -1)
              && compare_eq_p (insn)
              && reg_unused_after (insn, xreg))
            {
              /* For EQ/NE only the Z flag matters, so ADIW with the
                 negated constant works just as well as SBIW.  */
              return AVR_TINY
                ? avr_asm_len (TINY_ADIW (%A0, %B0, %n1), xop, plen, 2)
                : avr_asm_len ("adiw %0,%n1", xop, plen, 1);
            }
        }

      /* Comparing against 0 is easy.  */

      if (val8 == 0)
        {
          avr_asm_len (i == 0
                       ? "cp %0,__zero_reg__"
                       : "cpc %0,__zero_reg__", xop, plen, 1);
          continue;
        }

      /* Upper registers can compare and subtract-with-carry immediates.
         Notice that compare instructions do the same as respective subtract
         instruction; the only difference is that comparisons don't write
         the result back to the target register.  */

      if (ld_reg_p)
        {
          if (i == 0)
            {
              avr_asm_len ("cpi %0,%1", xop, plen, 1);
              continue;
            }
          else if (reg_unused_after (insn, xreg))
            {
              /* SBCI destroys the register -- only legal if it's dead.  */
              avr_asm_len ("sbci %0,%1", xop, plen, 1);
              continue;
            }
        }

      /* Must load the value into the scratch register.  */

      gcc_assert (REG_P (xop[2]));

      /* Skip the LDI if the scratch already holds this byte value.  */
      if (clobber_val != (int) val8)
        avr_asm_len ("ldi %2,%1", xop, plen, 1);
      clobber_val = (int) val8;

      avr_asm_len (i == 0
                   ? "cp %0,%2"
                   : "cpc %0,%2", xop, plen, 1);
    }

  return "";
}
5866 
5867 
5868 /* Prepare operands of compare_const_di2 to be used with avr_out_compare.  */
5869 
5870 const char*
5871 avr_out_compare64 (rtx_insn *insn, rtx *op, int *plen)
5872 {
5873   rtx xop[3];
5874 
5875   xop[0] = gen_rtx_REG (DImode, 18);
5876   xop[1] = op[0];
5877   xop[2] = op[1];
5878 
5879   return avr_out_compare (insn, xop, plen);
5880 }
5881 
5882 /* Output test instruction for HImode.  */
5883 
5884 const char*
5885 avr_out_tsthi (rtx_insn *insn, rtx *op, int *plen)
5886 {
5887   if (compare_sign_p (insn))
5888     {
5889       avr_asm_len ("tst %B0", op, plen, -1);
5890     }
5891   else if (reg_unused_after (insn, op[0])
5892            && compare_eq_p (insn))
5893     {
5894       /* Faster than sbiw if we can clobber the operand.  */
5895       avr_asm_len ("or %A0,%B0", op, plen, -1);
5896     }
5897   else
5898     {
5899       avr_out_compare (insn, op, plen);
5900     }
5901 
5902   return "";
5903 }
5904 
5905 
5906 /* Output test instruction for PSImode.  */
5907 
5908 const char*
5909 avr_out_tstpsi (rtx_insn *insn, rtx *op, int *plen)
5910 {
5911   if (compare_sign_p (insn))
5912     {
5913       avr_asm_len ("tst %C0", op, plen, -1);
5914     }
5915   else if (reg_unused_after (insn, op[0])
5916            && compare_eq_p (insn))
5917     {
5918       /* Faster than sbiw if we can clobber the operand.  */
5919       avr_asm_len ("or %A0,%B0" CR_TAB
5920                    "or %A0,%C0", op, plen, -2);
5921     }
5922   else
5923     {
5924       avr_out_compare (insn, op, plen);
5925     }
5926 
5927   return "";
5928 }
5929 
5930 
5931 /* Output test instruction for SImode.  */
5932 
5933 const char*
5934 avr_out_tstsi (rtx_insn *insn, rtx *op, int *plen)
5935 {
5936   if (compare_sign_p (insn))
5937     {
5938       avr_asm_len ("tst %D0", op, plen, -1);
5939     }
5940   else if (reg_unused_after (insn, op[0])
5941            && compare_eq_p (insn))
5942     {
5943       /* Faster than sbiw if we can clobber the operand.  */
5944       avr_asm_len ("or %A0,%B0" CR_TAB
5945                    "or %A0,%C0" CR_TAB
5946                    "or %A0,%D0", op, plen, -3);
5947     }
5948   else
5949     {
5950       avr_out_compare (insn, op, plen);
5951     }
5952 
5953   return "";
5954 }
5955 
5956 
/* Generate asm equivalent for various shifts.  This only handles cases
   that are not already carefully hand-optimized in ?sh??i3_out.

   OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
   OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
   OPERANDS[3] is a QImode scratch register from LD regs if
               available and SCRATCH, otherwise (no scratch available)

   TEMPL is an assembler template that shifts by one position.
   T_LEN is the length of this template.  */

void
out_shift_with_cnt (const char *templ, rtx_insn *insn, rtx operands[],
		    int *plen, int t_len)
{
  /* If true, the loop is entered at label "2:" so the count is tested
     before the first shift (count may be 0 at runtime).  */
  bool second_label = true;
  /* If true, the original value of the borrowed LD reg was stashed in
     __tmp_reg__ and must be restored after the loop.  */
  bool saved_in_tmp = false;
  /* If true, __zero_reg__ is (ab)used as the loop counter.  */
  bool use_zero_reg = false;
  rtx op[5];

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (plen)
    *plen = 0;

  if (CONST_INT_P (operands[2]))
    {
      /* A scratch register is only there if the insn pattern carries
         a clobber and register allocation supplied a real register.  */
      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
        return;

      if (count < 8 && !scratch)
        use_zero_reg = true;

      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster.  */

          while (count-- > 0)
            avr_asm_len (templ, op, plen, t_len);

          return;
        }

      if (scratch)
        {
          avr_asm_len ("ldi %3,%2", op, plen, 1);
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);
        }

      /* A compile-time count is known to be > 0 here, so the loop body
         can be executed before the counter is tested.  */
      second_label = false;
    }
  else if (MEM_P (op[2]))
    {
      rtx op_mov[2];

      /* Load the runtime count from memory into __tmp_reg__.  */
      op_mov[0] = op[3] = tmp_reg_rtx;
      op_mov[1] = op[2];

      out_movqi_r_mr (insn, op_mov, plen);
    }
  else if (register_operand (op[2], QImode))
    {
      op[3] = op[2];

      /* Copy the count if decrementing it in place would clobber a
         live register or one of the shifted registers.  */
      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  if (second_label)
    avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

  if (second_label)
    avr_asm_len ("2:", op, plen, 0);

  /* With __zero_reg__ as counter the single set bit is shifted right
     until it drops out (BRPL); otherwise a plain DEC/BRNE loop.  */
  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

  if (saved_in_tmp)
    avr_asm_len ("mov %3,%4", op, plen, 1);
}
6078 
6079 
/* 8bit shift left ((char)x << i)

   Output the shift for a compile-time constant count with hand-tuned
   sequences, falling back to out_shift_with_cnt for the general case.
   LEN, if non-NULL, receives the length in words instead of output.  */

const char *
ashlqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;

      /* Use a dummy length sink so the cases below can set *len
	 unconditionally.  */
      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* Shifting by >= 8 clears the byte entirely.  */
	  *len = 1;
	  return "clr %0";

	case 1:
	  *len = 1;
	  return "lsl %0";

	case 2:
	  *len = 2;
	  return ("lsl %0" CR_TAB
		  "lsl %0");

	case 3:
	  *len = 3;
	  return ("lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0");

	case 4:
	  /* SWAP exchanges nibbles; mask with ANDI when the register
	     accepts immediates (LD_REGS).  */
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 2;
	      return ("swap %0" CR_TAB
		      "andi %0,0xf0");
	    }
	  *len = 4;
	  return ("lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0");

	case 5:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 3;
	      return ("swap %0" CR_TAB
		      "lsl %0"  CR_TAB
		      "andi %0,0xe0");
	    }
	  *len = 5;
	  return ("lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0");

	case 6:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 4;
	      return ("swap %0" CR_TAB
		      "lsl %0"  CR_TAB
		      "lsl %0"  CR_TAB
		      "andi %0,0xc0");
	    }
	  *len = 6;
	  return ("lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0" CR_TAB
		  "lsl %0");

	case 7:
	  /* Rotate bit 0 into carry, clear, rotate carry into bit 7.  */
	  *len = 3;
	  return ("ror %0" CR_TAB
		  "clr %0" CR_TAB
		  "ror %0");
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);
  return "";
}
6175 
6176 
/* 16bit shift left ((short)x << i)

   Hand-tuned sequences for constant shift counts; the general case is
   handled by out_shift_with_cnt.  LEN, if non-NULL, receives the length
   in words instead of output.  */

const char *
ashlhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      /* Remember the original LEN (possibly NULL) so that a `break'
	 out of the switch hands the untouched pointer on to
	 out_shift_with_cnt below.  */
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 16)
	    break;

	  /* Shifting by >= 16 clears the word entirely.  */
	  *len = 2;
	  return ("clr %B0" CR_TAB
		  "clr %A0");

	case 4:
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (ldi_ok)
	    {
	      *len = 6;
	      return ("swap %A0"      CR_TAB
		      "swap %B0"      CR_TAB
		      "andi %B0,0xf0" CR_TAB
		      "eor %B0,%A0"   CR_TAB
		      "andi %A0,0xf0" CR_TAB
		      "eor %B0,%A0");
	    }
	  if (scratch)
	    {
	      *len = 7;
	      return ("swap %A0"    CR_TAB
		      "swap %B0"    CR_TAB
		      "ldi %3,0xf0" CR_TAB
		      "and %B0,%3"  CR_TAB
		      "eor %B0,%A0" CR_TAB
		      "and %A0,%3"  CR_TAB
		      "eor %B0,%A0");
	    }
	  break;  /* optimize_size ? 6 : 8 */

	case 5:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  if (ldi_ok)
	    {
	      *len = 8;
	      return ("lsl %A0"       CR_TAB
		      "rol %B0"       CR_TAB
		      "swap %A0"      CR_TAB
		      "swap %B0"      CR_TAB
		      "andi %B0,0xf0" CR_TAB
		      "eor %B0,%A0"   CR_TAB
		      "andi %A0,0xf0" CR_TAB
		      "eor %B0,%A0");
	    }
	  if (scratch)
	    {
	      *len = 9;
	      return ("lsl %A0"     CR_TAB
		      "rol %B0"     CR_TAB
		      "swap %A0"    CR_TAB
		      "swap %B0"    CR_TAB
		      "ldi %3,0xf0" CR_TAB
		      "and %B0,%3"  CR_TAB
		      "eor %B0,%A0" CR_TAB
		      "and %A0,%3"  CR_TAB
		      "eor %B0,%A0");
	    }
	  break;  /* 10 */

	case 6:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  /* Implement << 6 as >> 2 into __tmp_reg__ plus byte moves.  */
	  *len = 9;
	  return ("clr __tmp_reg__" CR_TAB
		  "lsr %B0"         CR_TAB
		  "ror %A0"         CR_TAB
		  "ror __tmp_reg__" CR_TAB
		  "lsr %B0"         CR_TAB
		  "ror %A0"         CR_TAB
		  "ror __tmp_reg__" CR_TAB
		  "mov %B0,%A0"     CR_TAB
		  "mov %A0,__tmp_reg__");

	case 7:
	  *len = 5;
	  return ("lsr %B0"     CR_TAB
		  "mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "ror %B0"     CR_TAB
		  "ror %A0");

	case 8:
	  /* May read the low byte of the distinct source operand %1.  */
	  return *len = 2, ("mov %B0,%A1" CR_TAB
			    "clr %A0");

	case 9:
	  *len = 3;
	  return ("mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "lsl %B0");

	case 10:
	  *len = 4;
	  return ("mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0");

	case 11:
	  *len = 5;
	  return ("mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0");

	case 12:
	  if (ldi_ok)
	    {
	      *len = 4;
	      return ("mov %B0,%A0" CR_TAB
		      "clr %A0"     CR_TAB
		      "swap %B0"    CR_TAB
		      "andi %B0,0xf0");
	    }
	  if (scratch)
	    {
	      *len = 5;
	      return ("mov %B0,%A0" CR_TAB
		      "clr %A0"     CR_TAB
		      "swap %B0"    CR_TAB
		      "ldi %3,0xf0" CR_TAB
		      "and %B0,%3");
	    }
	  *len = 6;
	  return ("mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0");

	case 13:
	  if (ldi_ok)
	    {
	      *len = 5;
	      return ("mov %B0,%A0" CR_TAB
		      "clr %A0"     CR_TAB
		      "swap %B0"    CR_TAB
		      "lsl %B0"     CR_TAB
		      "andi %B0,0xe0");
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      /* MUL by 0x20 leaves the wanted byte in r0; r1 must be
		 re-cleared afterwards as it is the fixed zero reg.  */
	      *len = 5;
	      return ("ldi %3,0x20" CR_TAB
		      "mul %A0,%3"  CR_TAB
		      "mov %B0,r0"  CR_TAB
		      "clr %A0"     CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (scratch)
	    {
	      *len = 6;
	      return ("mov %B0,%A0" CR_TAB
		      "clr %A0"     CR_TAB
		      "swap %B0"    CR_TAB
		      "lsl %B0"     CR_TAB
		      "ldi %3,0xe0" CR_TAB
		      "and %B0,%3");
	    }
	  if (AVR_HAVE_MUL)
	    {
	      /* Build the multiplier 0x20 in r1 itself via SET/BLD.  */
	      *len = 6;
	      return ("set"        CR_TAB
		      "bld r1,5"   CR_TAB
		      "mul %A0,r1" CR_TAB
		      "mov %B0,r0" CR_TAB
		      "clr %A0"    CR_TAB
		      "clr __zero_reg__");
	    }
	  *len = 7;
	  return ("mov %B0,%A0" CR_TAB
		  "clr %A0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0"     CR_TAB
		  "lsl %B0");

	case 14:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return ("ldi %B0,0x40" CR_TAB
		      "mul %A0,%B0"  CR_TAB
		      "mov %B0,r0"   CR_TAB
		      "clr %A0"      CR_TAB
		      "clr __zero_reg__");
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      *len = 5;
	      return ("ldi %3,0x40" CR_TAB
		      "mul %A0,%3"  CR_TAB
		      "mov %B0,r0"  CR_TAB
		      "clr %A0"     CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && ldi_ok)
	    {
	      /* Small loop using the (now dead) low byte as counter.  */
	      *len = 5;
	      return ("mov %B0,%A0" CR_TAB
		      "ldi %A0,6" "\n1:\t"
		      "lsl %B0"     CR_TAB
		      "dec %A0"     CR_TAB
		      "brne 1b");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 6;
	  return ("clr %B0" CR_TAB
		  "lsr %A0" CR_TAB
		  "ror %B0" CR_TAB
		  "lsr %A0" CR_TAB
		  "ror %B0" CR_TAB
		  "clr %A0");

	case 15:
	  *len = 4;
	  return ("clr %B0" CR_TAB
		  "lsr %A0" CR_TAB
		  "ror %B0" CR_TAB
		  "clr %A0");
	}
      /* Restore the original (possibly NULL) length pointer.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
  return "";
}
6432 
6433 
/* 24-bit shift left

   OP[0] is the destination, OP[1] the source, OP[2] the shift count.
   PLEN == NULL:  output instructions;  PLEN != NULL:  only compute the
   length in words.  Hand-tuned cases below, general case via
   out_shift_with_cnt.  */

const char*
avr_out_ashlpsi3 (rtx_insn *insn, rtx *op, int *plen)
{
  if (plen)
    *plen = 0;

  if (CONST_INT_P (op[2]))
    {
      switch (INTVAL (op[2]))
        {
        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* Shifting by >= 24 clears all three bytes.  */
          return avr_asm_len ("clr %A0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %C0", op, plen, 3);

        case 8:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* Order the byte moves so overlapping source bytes are
               read before they are overwritten.  */
            if (reg0 >= reg1)
              return avr_asm_len ("mov %C0,%B1"  CR_TAB
                                  "mov %B0,%A1"  CR_TAB
                                  "clr %A0", op, plen, 3);
            else
              return avr_asm_len ("clr %A0"      CR_TAB
                                  "mov %B0,%A1"  CR_TAB
                                  "mov %C0,%B1", op, plen, 3);
          }

        case 16:
          {
            int reg0 = REGNO (op[0]);
            int reg1 = REGNO (op[1]);

            /* If %C0 does not already alias %A1, copy the byte.  */
            if (reg0 + 2 != reg1)
              avr_asm_len ("mov %C0,%A0", op, plen, 1);

            return avr_asm_len ("clr %B0"  CR_TAB
                                "clr %A0", op, plen, 2);
          }

        case 23:
          /* Only bit 0 survives; shift it into the top bit of %C0.  */
          return avr_asm_len ("clr %C0" CR_TAB
                              "lsr %A0" CR_TAB
                              "ror %C0" CR_TAB
                              "clr %B0" CR_TAB
                              "clr %A0", op, plen, 5);
        }
    }

  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0", insn, op, plen, 3);
  return "";
}
6495 
6496 
/* 32bit shift left ((long)x << i)

   Hand-tuned sequences for constant counts, general case via
   out_shift_with_cnt.  LEN, if non-NULL, receives the length in words
   instead of output.  */

const char *
ashlsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;
      /* Remember the original LEN (possibly NULL) so that a `break'
	 out of the switch hands the untouched pointer on to
	 out_shift_with_cnt below.  */
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* Shifting by >= 32 clears all four bytes.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 3, ("clr %D0" CR_TAB
			      "clr %C0" CR_TAB
			      "movw %A0,%C0");
	  *len = 4;
	  return ("clr %D0" CR_TAB
		  "clr %C0" CR_TAB
		  "clr %B0" CR_TAB
		  "clr %A0");

	case 8:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len = 4;
	    /* Order the byte moves so overlapping source bytes are
	       read before they are overwritten.  */
	    if (reg0 >= reg1)
	      return ("mov %D0,%C1"  CR_TAB
		      "mov %C0,%B1"  CR_TAB
		      "mov %B0,%A1"  CR_TAB
		      "clr %A0");
	    else
	      return ("clr %A0"      CR_TAB
		      "mov %B0,%A1"  CR_TAB
		      "mov %C0,%B1"  CR_TAB
		      "mov %D0,%C1");
	  }

	case 16:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    /* High word already in place when %C0/%D0 alias %A1/%B1.  */
	    if (reg0 + 2 == reg1)
	      return *len = 2, ("clr %B0"      CR_TAB
				"clr %A0");
	    if (AVR_HAVE_MOVW)
	      return *len = 3, ("movw %C0,%A1" CR_TAB
				"clr %B0"      CR_TAB
				"clr %A0");
	    else
	      return *len = 4, ("mov %C0,%A1"  CR_TAB
				"mov %D0,%B1"  CR_TAB
				"clr %B0"      CR_TAB
				"clr %A0");
	  }

	case 24:
	  *len = 4;
	  return ("mov %D0,%A1"  CR_TAB
		  "clr %C0"      CR_TAB
		  "clr %B0"      CR_TAB
		  "clr %A0");

	case 31:
	  /* Only bit 0 survives; shift it into the top bit of %D0.  */
	  *len = 6;
	  return ("clr %D0" CR_TAB
		  "lsr %A0" CR_TAB
		  "ror %D0" CR_TAB
		  "clr %C0" CR_TAB
		  "clr %B0" CR_TAB
		  "clr %A0");
	}
      /* Restore the original (possibly NULL) length pointer.  */
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
  return "";
}
6585 
/* 8bit arithmetic shift right  ((signed char)x >> i)

   Hand-tuned sequences for constant counts, general case via
   out_shift_with_cnt.  LEN, if non-NULL, receives the length in words
   instead of output.  */

const char *
ashrqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;

      /* Use a dummy length sink so the cases below can set *len
	 unconditionally.  */
      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	case 1:
	  *len = 1;
	  return "asr %0";

	case 2:
	  *len = 2;
	  return ("asr %0" CR_TAB
		  "asr %0");

	case 3:
	  *len = 3;
	  return ("asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0");

	case 4:
	  *len = 4;
	  return ("asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0");

	case 5:
	  *len = 5;
	  return ("asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0" CR_TAB
		  "asr %0");

	case 6:
	  /* Save bit 6, replicate the sign via SBC, restore bit 6
	     as the new bit 0.  */
	  *len = 4;
	  return ("bst %0,6"  CR_TAB
		  "lsl %0"    CR_TAB
		  "sbc %0,%0" CR_TAB
		  "bld %0,0");

	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* fall through */

	case 7:
	  /* Shift the sign into carry, then SBC replicates it into
	     every bit.  Counts >= 8 reduce to the same result.  */
	  *len = 2;
	  return ("lsl %0" CR_TAB
		  "sbc %0,%0");
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt ("asr %0",
                      insn, operands, len, 1);
  return "";
}
6656 
6657 
/* 16bit arithmetic shift right  ((signed short)x >> i)

   Hand-tuned sequences for constant counts, general case via
   out_shift_with_cnt.  LEN, if non-NULL, receives the length in words
   instead of output.  */

const char *
ashrhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      /* Remember the original LEN (possibly NULL) so that a `break'
	 out of the switch hands the untouched pointer on to
	 out_shift_with_cnt below.  */
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	case 4:
	case 5:
	  /* XXX try to optimize this too? */
	  break;

	case 6:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  /* Implement >> 6 as << 2 with sign extension via SBC.  */
	  *len = 8;
	  return ("mov __tmp_reg__,%A0" CR_TAB
		  "mov %A0,%B0"         CR_TAB
		  "lsl __tmp_reg__"     CR_TAB
		  "rol %A0"             CR_TAB
		  "sbc %B0,%B0"         CR_TAB
		  "lsl __tmp_reg__"     CR_TAB
		  "rol %A0"             CR_TAB
		  "rol %B0");

	case 7:
	  *len = 4;
	  return ("lsl %A0"     CR_TAB
		  "mov %A0,%B0" CR_TAB
		  "rol %A0"     CR_TAB
		  "sbc %B0,%B0");

	case 8:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    /* SBC after LSL replicates the sign bit into all of %B0.  */
	    if (reg0 == reg1)
	      return *len = 3, ("mov %A0,%B0" CR_TAB
				"lsl %B0"     CR_TAB
				"sbc %B0,%B0");
	    else
	      return *len = 4, ("mov %A0,%B1" CR_TAB
			        "clr %B0"     CR_TAB
			        "sbrc %A0,7"  CR_TAB
			        "dec %B0");
	  }

	case 9:
	  *len = 4;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0"      CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0");

	case 10:
	  *len = 5;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0"     CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0");

	case 11:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      /* Signed MUL by 0x20; r1 (__zero_reg__) is clobbered by
		 MULS and must be cleared again.  */
	      *len = 5;
	      return ("ldi %A0,0x20" CR_TAB
		      "muls %B0,%A0" CR_TAB
		      "mov %A0,r1"   CR_TAB
		      "sbc %B0,%B0"  CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 6;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0"     CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0");

	case 12:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return ("ldi %A0,0x10" CR_TAB
		      "muls %B0,%A0" CR_TAB
		      "mov %A0,r1"   CR_TAB
		      "sbc %B0,%B0"  CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 7;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0"     CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0");

	case 13:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return ("ldi %A0,0x08" CR_TAB
		      "muls %B0,%A0" CR_TAB
		      "mov %A0,r1"   CR_TAB
		      "sbc %B0,%B0"  CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size)
	    break;  /* scratch ? 5 : 7 */
	  *len = 8;
	  return ("mov %A0,%B0" CR_TAB
		  "lsl %B0"     CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0"     CR_TAB
		  "asr %A0");

	case 14:
	  *len = 5;
	  return ("lsl %B0"     CR_TAB
		  "sbc %A0,%A0" CR_TAB
		  "lsl %B0"     CR_TAB
		  "mov %B0,%A0" CR_TAB
		  "rol %A0");

	default:
	  if (INTVAL (operands[2]) < 16)
	    break;

	  /* fall through */

	case 15:
	  /* Only the sign remains; counts >= 16 give the same result.  */
	  return *len = 3, ("lsl %B0"     CR_TAB
			    "sbc %A0,%A0" CR_TAB
			    "mov %B0,%A0");
	}
      /* Restore the original (possibly NULL) length pointer.  */
      len = t;
    }
  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
6819 
6820 
6821 /* 24-bit arithmetic shift right */
6822 
6823 const char*
6824 avr_out_ashrpsi3 (rtx_insn *insn, rtx *op, int *plen)
6825 {
6826   int dest = REGNO (op[0]);
6827   int src = REGNO (op[1]);
6828 
6829   if (CONST_INT_P (op[2]))
6830     {
6831       if (plen)
6832         *plen = 0;
6833 
6834       switch (INTVAL (op[2]))
6835         {
6836         case 8:
6837           if (dest <= src)
6838             return avr_asm_len ("mov %A0,%B1" CR_TAB
6839                                 "mov %B0,%C1" CR_TAB
6840                                 "clr %C0"     CR_TAB
6841                                 "sbrc %B0,7"  CR_TAB
6842                                 "dec %C0", op, plen, 5);
6843           else
6844             return avr_asm_len ("clr %C0"     CR_TAB
6845                                 "sbrc %C1,7"  CR_TAB
6846                                 "dec %C0"     CR_TAB
6847                                 "mov %B0,%C1" CR_TAB
6848                                 "mov %A0,%B1", op, plen, 5);
6849 
6850         case 16:
6851           if (dest != src + 2)
6852             avr_asm_len ("mov %A0,%C1", op, plen, 1);
6853 
6854           return avr_asm_len ("clr %B0"     CR_TAB
6855                               "sbrc %A0,7"  CR_TAB
6856                               "com %B0"     CR_TAB
6857                               "mov %C0,%B0", op, plen, 4);
6858 
6859         default:
6860           if (INTVAL (op[2]) < 24)
6861             break;
6862 
6863           /* fall through */
6864 
6865         case 23:
6866           return avr_asm_len ("lsl %C0"     CR_TAB
6867                               "sbc %A0,%A0" CR_TAB
6868                               "mov %B0,%A0" CR_TAB
6869                               "mov %C0,%A0", op, plen, 4);
6870         } /* switch */
6871     }
6872 
6873   out_shift_with_cnt ("asr %C0" CR_TAB
6874                       "ror %B0" CR_TAB
6875                       "ror %A0", insn, op, plen, 3);
6876   return "";
6877 }
6878 
6879 
/* 32-bit arithmetic shift right  ((signed long)x >> i)

   Output the instruction sequence for an SImode arithmetic right shift
   of OPERANDS[0] (source in OPERANDS[1]) by OPERANDS[2].  INSN is the
   shift insn itself.  If LEN is non-NULL, set *LEN to the length of
   the emitted sequence in words.  Returns the assembler template, or
   "" when output already happened through a helper.  */

const char *
ashrsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;
      int *t = len;

      /* Guarantee a place to store the length even when the caller
	 passed LEN == NULL.  */
      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	case 8:
	  /* Byte-aligned shift: move bytes down one position, then
	     sign-extend into the top byte %D0.  */
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len=6;
	    /* Pick the copy order so that overlapping source bytes are
	       read before they are overwritten.  */
	    if (reg0 <= reg1)
	      return ("mov %A0,%B1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "clr %D0"     CR_TAB
		      "sbrc %C0,7"  CR_TAB
		      "dec %D0");
	    else
	      return ("clr %D0"     CR_TAB
		      "sbrc %D1,7"  CR_TAB
		      "dec %D0"     CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %A0,%B1");
	  }

	case 16:
	  /* Move the high word down and sign-extend into %C0/%D0.  */
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    /* Destination already overlaps the source's high word:
	       no copy needed, just sign-extend in place.  */
	    if (reg0 == reg1 + 2)
	      return *len = 4, ("clr %D0"     CR_TAB
				"sbrc %B0,7"  CR_TAB
				"com %D0"     CR_TAB
				"mov %C0,%D0");
	    if (AVR_HAVE_MOVW)
	      return *len = 5, ("movw %A0,%C1" CR_TAB
				"clr %D0"      CR_TAB
				"sbrc %B0,7"   CR_TAB
				"com %D0"      CR_TAB
				"mov %C0,%D0");
	    else
	      return *len = 6, ("mov %B0,%D1" CR_TAB
				"mov %A0,%C1" CR_TAB
				"clr %D0"     CR_TAB
				"sbrc %B0,7"  CR_TAB
				"com %D0"     CR_TAB
				"mov %C0,%D0");
	  }

	case 24:
	  /* Only the top byte survives; sign-extend it over the three
	     upper result bytes.  */
	  return *len = 6, ("mov %A0,%D1" CR_TAB
			    "clr %D0"     CR_TAB
			    "sbrc %A0,7"  CR_TAB
			    "com %D0"     CR_TAB
			    "mov %B0,%D0" CR_TAB
			    "mov %C0,%D0");

	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* fall through */

	case 31:
	  /* Shift by 31 (or more): the result is 0 or -1.  LSL moves
	     the sign bit into carry, SBC broadcasts it.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 4, ("lsl %D0"     CR_TAB
			      "sbc %A0,%A0" CR_TAB
			      "mov %B0,%A0" CR_TAB
			      "movw %C0,%A0");
	  else
	    return *len = 5, ("lsl %D0"     CR_TAB
			      "sbc %A0,%A0" CR_TAB
			      "mov %B0,%A0" CR_TAB
			      "mov %C0,%A0" CR_TAB
			      "mov %D0,%A0");
	}
      /* Restore the caller's LEN (may be NULL again).  */
      len = t;
    }
  /* Variable count or a constant the switch fell out of: emit the
     generic shift loop.  */
  out_shift_with_cnt ("asr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
6976 
/* 8-bit logic shift right ((unsigned char)x >> i)

   Output the instruction sequence for a QImode logical right shift of
   OPERANDS[0] by OPERANDS[2].  INSN is the shift insn.  If LEN is
   non-NULL, set *LEN to the length of the sequence in words.  */

const char *
lshrqi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;

      /* Guarantee a place to store the length.  */
      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* Shifting a byte by 8 or more clears it entirely.  */
	  *len = 1;
	  return "clr %0";

	case 1:
	  *len = 1;
	  return "lsr %0";

	case 2:
	  *len = 2;
	  return ("lsr %0" CR_TAB
		  "lsr %0");
	case 3:
	  *len = 3;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 4:
	  /* SWAP exchanges the nibbles; with the operand in an upper
	     (LD_REGS) register the result can be masked with ANDI.  */
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len=2;
	      return ("swap %0" CR_TAB
		      "andi %0,0x0f");
	    }
	  *len = 4;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 5:
	  /* Like case 4 with one extra single-bit shift.  */
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 3;
	      return ("swap %0" CR_TAB
		      "lsr %0"  CR_TAB
		      "andi %0,0x7");
	    }
	  *len = 5;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 6:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 4;
	      return ("swap %0" CR_TAB
		      "lsr %0"  CR_TAB
		      "lsr %0"  CR_TAB
		      "andi %0,0x3");
	    }
	  *len = 6;
	  return ("lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0" CR_TAB
		  "lsr %0");

	case 7:
	  /* Only the MSB survives: rotate it into carry, clear the
	     register (CLR leaves the carry untouched), then rotate the
	     carry back in as bit 0.  */
	  *len = 3;
	  return ("rol %0" CR_TAB
		  "clr %0" CR_TAB
		  "rol %0");
	}
    }
  else if (CONSTANT_P (operands[2]))
    /* Any constant shift count must be CONST_INT at this point.  */
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  /* Variable count: emit the generic shift loop.  */
  out_shift_with_cnt ("lsr %0",
                      insn, operands, len, 1);
  return "";
}
7071 
/* 16-bit logic shift right ((unsigned short)x >> i)

   Output the instruction sequence for an HImode logical right shift of
   OPERANDS[0] by OPERANDS[2].  INSN is the shift insn.  If LEN is
   non-NULL, set *LEN to the length of the sequence in words.  The
   numeric comments after `break' give the length of the generic loop
   sequence that will be emitted instead.  */

const char *
lshrhi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      /* A PARALLEL pattern carries a scratch register, addressable in
         the templates as %3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      /* Destination in r16..r31 allows immediate insns (ANDI, LDI).  */
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      /* Guarantee a place to store the length.  */
      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 16)
	    break;

	  /* Count of 16 or more: the result is zero.  */
	  *len = 2;
	  return ("clr %B0" CR_TAB
		  "clr %A0");

	case 4:
	  /* SWAP both bytes' nibbles, then merge and mask them via the
	     EOR / AND / EOR exchange trick.  */
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (ldi_ok)
	    {
	      *len = 6;
	      return ("swap %B0"      CR_TAB
		      "swap %A0"      CR_TAB
		      "andi %A0,0x0f" CR_TAB
		      "eor %A0,%B0"   CR_TAB
		      "andi %B0,0x0f" CR_TAB
		      "eor %A0,%B0");
	    }
	  if (scratch)
	    {
	      *len = 7;
	      return ("swap %B0"    CR_TAB
		      "swap %A0"    CR_TAB
		      "ldi %3,0x0f" CR_TAB
		      "and %A0,%3"  CR_TAB
		      "eor %A0,%B0" CR_TAB
		      "and %B0,%3"  CR_TAB
		      "eor %A0,%B0");
	    }
	  break;  /* optimize_size ? 6 : 8 */

	case 5:
	  /* One single-bit shift followed by the nibble trick of
	     case 4.  */
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  if (ldi_ok)
	    {
	      *len = 8;
	      return ("lsr %B0"       CR_TAB
		      "ror %A0"       CR_TAB
		      "swap %B0"      CR_TAB
		      "swap %A0"      CR_TAB
		      "andi %A0,0x0f" CR_TAB
		      "eor %A0,%B0"   CR_TAB
		      "andi %B0,0x0f" CR_TAB
		      "eor %A0,%B0");
	    }
	  if (scratch)
	    {
	      *len = 9;
	      return ("lsr %B0"     CR_TAB
		      "ror %A0"     CR_TAB
		      "swap %B0"    CR_TAB
		      "swap %A0"    CR_TAB
		      "ldi %3,0x0f" CR_TAB
		      "and %A0,%3"  CR_TAB
		      "eor %A0,%B0" CR_TAB
		      "and %B0,%3"  CR_TAB
		      "eor %A0,%B0");
	    }
	  break;  /* 10 */

	case 6:
	  /* Shift left by 2 into __tmp_reg__ as a third byte; the top
	     two bytes of that 24-bit value are x >> 6.  */
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  *len = 9;
	  return ("clr __tmp_reg__" CR_TAB
		  "lsl %A0"         CR_TAB
		  "rol %B0"         CR_TAB
		  "rol __tmp_reg__" CR_TAB
		  "lsl %A0"         CR_TAB
		  "rol %B0"         CR_TAB
		  "rol __tmp_reg__" CR_TAB
		  "mov %A0,%B0"     CR_TAB
		  "mov %B0,__tmp_reg__");

	case 7:
	  /* Shift left by 1: bits 7..15 end up in %A0, and the high
	     result byte is just bit 15, reconstructed via SBC/NEG.  */
	  *len = 5;
	  return ("lsl %A0"     CR_TAB
		  "mov %A0,%B0" CR_TAB
		  "rol %A0"     CR_TAB
		  "sbc %B0,%B0" CR_TAB
		  "neg %B0");

	case 8:
	  /* Byte-aligned shift: high source byte down, clear high.  */
	  return *len = 2, ("mov %A0,%B1" CR_TAB
			    "clr %B0");

	case 9:
	  /* Byte move plus single-bit shifts for counts 9..11.  */
	  *len = 3;
	  return ("mov %A0,%B0" CR_TAB
		  "clr %B0"     CR_TAB
		  "lsr %A0");

	case 10:
	  *len = 4;
	  return ("mov %A0,%B0" CR_TAB
		  "clr %B0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0");

	case 11:
	  *len = 5;
	  return ("mov %A0,%B0" CR_TAB
		  "clr %B0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0");

	case 12:
	  /* Byte move plus the SWAP-and-mask nibble shift.  */
	  if (ldi_ok)
	    {
	      *len = 4;
	      return ("mov %A0,%B0" CR_TAB
		      "clr %B0"     CR_TAB
		      "swap %A0"    CR_TAB
		      "andi %A0,0x0f");
	    }
	  if (scratch)
	    {
	      *len = 5;
	      return ("mov %A0,%B0" CR_TAB
		      "clr %B0"     CR_TAB
		      "swap %A0"    CR_TAB
		      "ldi %3,0x0f" CR_TAB
		      "and %A0,%3");
	    }
	  *len = 6;
	  return ("mov %A0,%B0" CR_TAB
		  "clr %B0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0");

	case 13:
	  /* Several strategies, cheapest applicable first: SWAP+mask,
	     MUL by 8 through the scratch, or MUL through r1.  */
	  if (ldi_ok)
	    {
	      *len = 5;
	      return ("mov %A0,%B0" CR_TAB
		      "clr %B0"     CR_TAB
		      "swap %A0"    CR_TAB
		      "lsr %A0"     CR_TAB
		      "andi %A0,0x07");
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      *len = 5;
	      return ("ldi %3,0x08" CR_TAB
		      "mul %B0,%3"  CR_TAB
		      "mov %A0,r1"  CR_TAB
		      "clr %B0"     CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (scratch)
	    {
	      *len = 6;
	      return ("mov %A0,%B0" CR_TAB
		      "clr %B0"     CR_TAB
		      "swap %A0"    CR_TAB
		      "lsr %A0"     CR_TAB
		      "ldi %3,0x07" CR_TAB
		      "and %A0,%3");
	    }
	  if (AVR_HAVE_MUL)
	    {
	      /* Build the constant 8 in r1 with SET/BLD, multiply, and
	         re-clear the zero register afterwards.  */
	      *len = 6;
	      return ("set"        CR_TAB
		      "bld r1,3"   CR_TAB
		      "mul %B0,r1" CR_TAB
		      "mov %A0,r1" CR_TAB
		      "clr %B0"    CR_TAB
		      "clr __zero_reg__");
	    }
	  *len = 7;
	  return ("mov %A0,%B0" CR_TAB
		  "clr %B0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0"     CR_TAB
		  "lsr %A0");

	case 14:
	  /* MUL by 4, a small count-down loop when optimizing for
	     size, or a 2-bit left shift of the high byte.  */
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return ("ldi %A0,0x04" CR_TAB
		      "mul %B0,%A0"  CR_TAB
		      "mov %A0,r1"   CR_TAB
		      "clr %B0"      CR_TAB
		      "clr __zero_reg__");
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      *len = 5;
	      return ("ldi %3,0x04" CR_TAB
		      "mul %B0,%3"  CR_TAB
		      "mov %A0,r1"  CR_TAB
		      "clr %B0"     CR_TAB
		      "clr __zero_reg__");
	    }
	  if (optimize_size && ldi_ok)
	    {
	      *len = 5;
	      return ("mov %A0,%B0" CR_TAB
		      "ldi %B0,6" "\n1:\t"
		      "lsr %A0"     CR_TAB
		      "dec %B0"     CR_TAB
		      "brne 1b");
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 6;
	  return ("clr %A0" CR_TAB
		  "lsl %B0" CR_TAB
		  "rol %A0" CR_TAB
		  "lsl %B0" CR_TAB
		  "rol %A0" CR_TAB
		  "clr %B0");

	case 15:
	  /* Only bit 15 survives: shift it through carry into %A0.  */
	  *len = 4;
	  return ("clr %A0" CR_TAB
		  "lsl %B0" CR_TAB
		  "rol %A0" CR_TAB
		  "clr %B0");
	}
      /* Restore the caller's LEN (may be NULL again).  */
      len = t;
    }
  /* Variable count or a constant the switch fell out of: emit the
     generic shift loop.  */
  out_shift_with_cnt ("lsr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
7327 
7328 
7329 /* 24-bit logic shift right */
7330 
7331 const char*
7332 avr_out_lshrpsi3 (rtx_insn *insn, rtx *op, int *plen)
7333 {
7334   int dest = REGNO (op[0]);
7335   int src = REGNO (op[1]);
7336 
7337   if (CONST_INT_P (op[2]))
7338     {
7339       if (plen)
7340         *plen = 0;
7341 
7342       switch (INTVAL (op[2]))
7343         {
7344         case 8:
7345           if (dest <= src)
7346             return avr_asm_len ("mov %A0,%B1" CR_TAB
7347                                 "mov %B0,%C1" CR_TAB
7348                                 "clr %C0", op, plen, 3);
7349           else
7350             return avr_asm_len ("clr %C0"     CR_TAB
7351                                 "mov %B0,%C1" CR_TAB
7352                                 "mov %A0,%B1", op, plen, 3);
7353 
7354         case 16:
7355           if (dest != src + 2)
7356             avr_asm_len ("mov %A0,%C1", op, plen, 1);
7357 
7358           return avr_asm_len ("clr %B0"  CR_TAB
7359                               "clr %C0", op, plen, 2);
7360 
7361         default:
7362           if (INTVAL (op[2]) < 24)
7363             break;
7364 
7365           /* fall through */
7366 
7367         case 23:
7368           return avr_asm_len ("clr %A0"    CR_TAB
7369                               "sbrc %C0,7" CR_TAB
7370                               "inc %A0"    CR_TAB
7371                               "clr %B0"    CR_TAB
7372                               "clr %C0", op, plen, 5);
7373         } /* switch */
7374     }
7375 
7376   out_shift_with_cnt ("lsr %C0" CR_TAB
7377                       "ror %B0" CR_TAB
7378                       "ror %A0", insn, op, plen, 3);
7379   return "";
7380 }
7381 
7382 
/* 32-bit logic shift right ((unsigned int)x >> i)

   Output the instruction sequence for an SImode logical right shift of
   OPERANDS[0] (source in OPERANDS[1]) by OPERANDS[2].  INSN is the
   shift insn itself.  If LEN is non-NULL, set *LEN to the length of
   the emitted sequence in words.  */

const char *
lshrsi3_out (rtx_insn *insn, rtx operands[], int *len)
{
  if (CONST_INT_P (operands[2]))
    {
      int k;
      int *t = len;

      /* Guarantee a place to store the length even when the caller
	 passed LEN == NULL.  */
      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* Count of 32 or more: the result is zero.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 3, ("clr %D0" CR_TAB
			      "clr %C0" CR_TAB
			      "movw %A0,%C0");
	  *len = 4;
	  return ("clr %D0" CR_TAB
		  "clr %C0" CR_TAB
		  "clr %B0" CR_TAB
		  "clr %A0");

	case 8:
	  /* Byte-aligned shift: move bytes down one position and zero
	     the top byte.  */
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len = 4;
	    /* Pick the copy order so that overlapping source bytes are
	       read before they are overwritten.  */
	    if (reg0 <= reg1)
	      return ("mov %A0,%B1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "clr %D0");
	    else
	      return ("clr %D0"     CR_TAB
		      "mov %C0,%D1" CR_TAB
		      "mov %B0,%C1" CR_TAB
		      "mov %A0,%B1");
	  }

	case 16:
	  /* Move the high word down and zero the high word.  */
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    /* Destination already overlaps the source's high word:
	       just clear the top two bytes.  */
	    if (reg0 == reg1 + 2)
	      return *len = 2, ("clr %C0"     CR_TAB
				"clr %D0");
	    if (AVR_HAVE_MOVW)
	      return *len = 3, ("movw %A0,%C1" CR_TAB
				"clr %C0"      CR_TAB
				"clr %D0");
	    else
	      return *len = 4, ("mov %B0,%D1" CR_TAB
				"mov %A0,%C1" CR_TAB
				"clr %C0"     CR_TAB
				"clr %D0");
	  }

	case 24:
	  /* Only the top byte survives.  */
	  return *len = 4, ("mov %A0,%D1" CR_TAB
			    "clr %B0"     CR_TAB
			    "clr %C0"     CR_TAB
			    "clr %D0");

	case 31:
	  /* Only bit 31 can survive as bit 0.  */
	  *len = 6;
	  return ("clr %A0"    CR_TAB
		  "sbrc %D0,7" CR_TAB
		  "inc %A0"    CR_TAB
		  "clr %B0"    CR_TAB
		  "clr %C0"    CR_TAB
		  "clr %D0");
	}
      /* Restore the caller's LEN (may be NULL again).  */
      len = t;
    }
  /* Variable count or a constant the switch fell out of: emit the
     generic shift loop.  */
  out_shift_with_cnt ("lsr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
7471 
7472 
7473 /* Output addition of register XOP[0] and compile time constant XOP[2].
7474    CODE == PLUS:  perform addition by using ADD instructions or
7475    CODE == MINUS: perform addition by using SUB instructions:
7476 
7477       XOP[0] = XOP[0] + XOP[2]
7478 
7479    Or perform addition/subtraction with register XOP[2] depending on CODE:
7480 
7481       XOP[0] = XOP[0] +/- XOP[2]
7482 
7483    If PLEN == NULL, print assembler instructions to perform the operation;
7484    otherwise, set *PLEN to the length of the instruction sequence (in words)
7485    printed with PLEN == NULL.  XOP[3] is an 8-bit scratch register or NULL_RTX.
7486    Set *PCC to effect on cc0 according to respective CC_* insn attribute.
7487 
7488    CODE_SAT == UNKNOWN: Perform ordinary, non-saturating operation.
7489    CODE_SAT != UNKNOWN: Perform operation and saturate according to CODE_SAT.
7490    If  CODE_SAT != UNKNOWN  then SIGN contains the sign of the summand resp.
7491    the subtrahend in the original insn, provided it is a compile time constant.
7492    In all other cases, SIGN is 0.
7493 
7494    If OUT_LABEL is true, print the final 0: label which is needed for
7495    saturated addition / subtraction.  The only case where OUT_LABEL = false
7496    is useful is for saturated addition / subtraction performed during
7497    fixed-point rounding, cf. `avr_out_round'.  */
7498 
7499 static void
7500 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc,
7501                 enum rtx_code code_sat, int sign, bool out_label)
7502 {
7503   /* MODE of the operation.  */
7504   machine_mode mode = GET_MODE (xop[0]);
7505 
7506   /* INT_MODE of the same size.  */
7507   machine_mode imode = int_mode_for_mode (mode);
7508 
7509   /* Number of bytes to operate on.  */
7510   int n_bytes = GET_MODE_SIZE (mode);
7511 
7512   /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
7513   int clobber_val = -1;
7514 
7515   /* op[0]: 8-bit destination register
7516      op[1]: 8-bit const int
7517      op[2]: 8-bit scratch register */
7518   rtx op[3];
7519 
7520   /* Started the operation?  Before starting the operation we may skip
7521      adding 0.  This is no more true after the operation started because
7522      carry must be taken into account.  */
7523   bool started = false;
7524 
7525   /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */
7526   rtx xval = xop[2];
7527 
7528   /* Output a BRVC instruction.  Only needed with saturation.  */
7529   bool out_brvc = true;
7530 
7531   if (plen)
7532     *plen = 0;
7533 
7534   if (REG_P (xop[2]))
7535     {
7536       *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_CLOBBER;
7537 
7538       for (int i = 0; i < n_bytes; i++)
7539         {
7540           /* We operate byte-wise on the destination.  */
7541           op[0] = simplify_gen_subreg (QImode, xop[0], mode, i);
7542           op[1] = simplify_gen_subreg (QImode, xop[2], mode, i);
7543 
7544           if (i == 0)
7545             avr_asm_len (code == PLUS ? "add %0,%1" : "sub %0,%1",
7546                          op, plen, 1);
7547           else
7548             avr_asm_len (code == PLUS ? "adc %0,%1" : "sbc %0,%1",
7549                          op, plen, 1);
7550         }
7551 
7552       if (reg_overlap_mentioned_p (xop[0], xop[2]))
7553         {
7554           gcc_assert (REGNO (xop[0]) == REGNO (xop[2]));
7555 
7556           if (MINUS == code)
7557             return;
7558         }
7559 
7560       goto saturate;
7561     }
7562 
7563   /* Except in the case of ADIW with 16-bit register (see below)
7564      addition does not set cc0 in a usable way.  */
7565 
7566   *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
7567 
7568   if (CONST_FIXED_P (xval))
7569     xval = avr_to_int_mode (xval);
7570 
7571   /* Adding/Subtracting zero is a no-op.  */
7572 
7573   if (xval == const0_rtx)
7574     {
7575       *pcc = CC_NONE;
7576       return;
7577     }
7578 
7579   if (MINUS == code)
7580     xval = simplify_unary_operation (NEG, imode, xval, imode);
7581 
7582   op[2] = xop[3];
7583 
7584   if (SS_PLUS == code_sat && MINUS == code
7585       && sign < 0
7586       && 0x80 == (INTVAL (simplify_gen_subreg (QImode, xval, imode, n_bytes-1))
7587                   & GET_MODE_MASK (QImode)))
7588     {
7589       /* We compute x + 0x80 by means of SUB instructions.  We negated the
7590          constant subtrahend above and are left with  x - (-128)  so that we
7591          need something like SUBI r,128 which does not exist because SUBI sets
7592          V according to the sign of the subtrahend.  Notice the only case
7593          where this must be done is when NEG overflowed in case [2s] because
7594          the V computation needs the right sign of the subtrahend.  */
7595 
7596       rtx msb = simplify_gen_subreg (QImode, xop[0], mode, n_bytes - 1);
7597 
7598       avr_asm_len ("subi %0,128" CR_TAB
7599                    "brmi 0f", &msb, plen, 2);
7600       out_brvc = false;
7601 
7602       goto saturate;
7603     }
7604 
7605   for (int i = 0; i < n_bytes; i++)
7606     {
7607       /* We operate byte-wise on the destination.  */
7608       rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
7609       rtx xval8 = simplify_gen_subreg (QImode, xval, imode, i);
7610 
7611       /* 8-bit value to operate with this byte. */
7612       unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
7613 
7614       /* Registers R16..R31 can operate with immediate.  */
7615       bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
7616 
7617       op[0] = reg8;
7618       op[1] = gen_int_mode (val8, QImode);
7619 
7620       /* To get usable cc0 no low-bytes must have been skipped.  */
7621 
7622       if (i && !started)
7623         *pcc = CC_CLOBBER;
7624 
7625       if (!started
7626           && i % 2 == 0
7627           && i + 2 <= n_bytes
7628           && test_hard_reg_class (ADDW_REGS, reg8))
7629         {
7630           rtx xval16 = simplify_gen_subreg (HImode, xval, imode, i);
7631           unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
7632 
7633           /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
7634              i.e. operate word-wise.  */
7635 
7636           if (val16 < 64)
7637             {
7638               if (val16 != 0)
7639                 {
7640                   started = true;
7641                   avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
7642                                op, plen, 1);
7643 
7644                   if (n_bytes == 2 && PLUS == code)
7645                     *pcc = CC_SET_CZN;
7646                 }
7647 
7648               i++;
7649               continue;
7650             }
7651         }
7652 
7653       if (val8 == 0)
7654         {
7655           if (started)
7656             avr_asm_len (code == PLUS
7657                          ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
7658                          op, plen, 1);
7659           continue;
7660         }
7661       else if ((val8 == 1 || val8 == 0xff)
7662                && UNKNOWN == code_sat
7663                && !started
7664                && i == n_bytes - 1)
7665         {
7666           avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
7667                        op, plen, 1);
7668           *pcc = CC_CLOBBER;
7669           break;
7670         }
7671 
7672       switch (code)
7673         {
7674         case PLUS:
7675 
7676           gcc_assert (plen != NULL || (op[2] && REG_P (op[2])));
7677 
7678           if (plen != NULL && UNKNOWN != code_sat)
7679             {
7680               /* This belongs to the x + 0x80 corner case.  The code with
7681                  ADD instruction is not smaller, thus make this case
7682                  expensive so that the caller won't pick it.  */
7683 
7684               *plen += 10;
7685               break;
7686             }
7687 
7688           if (clobber_val != (int) val8)
7689             avr_asm_len ("ldi %2,%1", op, plen, 1);
7690           clobber_val = (int) val8;
7691 
7692           avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
7693 
7694           break; /* PLUS */
7695 
7696         case MINUS:
7697 
7698           if (ld_reg_p)
7699             avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
7700           else
7701             {
7702               gcc_assert (plen != NULL || REG_P (op[2]));
7703 
7704               if (clobber_val != (int) val8)
7705                 avr_asm_len ("ldi %2,%1", op, plen, 1);
7706               clobber_val = (int) val8;
7707 
7708               avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
7709             }
7710 
7711           break; /* MINUS */
7712 
7713         default:
7714           /* Unknown code */
7715           gcc_unreachable();
7716         }
7717 
7718       started = true;
7719 
7720     } /* for all sub-bytes */
7721 
7722  saturate:
7723 
7724   if (UNKNOWN == code_sat)
7725     return;
7726 
7727   *pcc = (int) CC_CLOBBER;
7728 
7729   /* Vanilla addition/subtraction is done.  We are left with saturation.
7730 
7731      We have to compute  A = A <op> B  where  A  is a register and
7732      B is a register or a non-zero compile time constant CONST.
7733      A is register class "r" if unsigned && B is REG.  Otherwise, A is in "d".
7734      B stands for the original operand $2 in INSN.  In the case of B = CONST,
7735      SIGN in { -1, 1 } is the sign of B.  Otherwise, SIGN is 0.
7736 
7737      CODE is the instruction flavor we use in the asm sequence to perform <op>.
7738 
7739 
7740      unsigned
7741      operation        |  code |  sat if  |    b is      | sat value |  case
7742      -----------------+-------+----------+--------------+-----------+-------
7743      +  as  a + b     |  add  |  C == 1  |  const, reg  | u+ = 0xff |  [1u]
7744      +  as  a - (-b)  |  sub  |  C == 0  |  const       | u+ = 0xff |  [2u]
7745      -  as  a - b     |  sub  |  C == 1  |  const, reg  | u- = 0    |  [3u]
7746      -  as  a + (-b)  |  add  |  C == 0  |  const       | u- = 0    |  [4u]
7747 
7748 
7749      signed
7750      operation        |  code |  sat if  |    b is      | sat value |  case
7751      -----------------+-------+----------+--------------+-----------+-------
7752      +  as  a + b     |  add  |  V == 1  |  const, reg  | s+        |  [1s]
7753      +  as  a - (-b)  |  sub  |  V == 1  |  const       | s+        |  [2s]
7754      -  as  a - b     |  sub  |  V == 1  |  const, reg  | s-        |  [3s]
7755      -  as  a + (-b)  |  add  |  V == 1  |  const       | s-        |  [4s]
7756 
7757      s+  =  b < 0  ?  -0x80 :  0x7f
7758      s-  =  b < 0  ?   0x7f : -0x80
7759 
7760      The cases a - b actually perform  a - (-(-b))  if B is CONST.
7761   */
7762 
7763   op[0] = simplify_gen_subreg (QImode, xop[0], mode, n_bytes-1);
7764   op[1] = n_bytes > 1
7765     ? simplify_gen_subreg (QImode, xop[0], mode, n_bytes-2)
7766     : NULL_RTX;
7767 
7768   bool need_copy = true;
7769   int len_call = 1 + AVR_HAVE_JMP_CALL;
7770 
7771   switch (code_sat)
7772     {
7773     default:
7774       gcc_unreachable();
7775 
7776     case SS_PLUS:
7777     case SS_MINUS:
7778 
7779       if (out_brvc)
7780         avr_asm_len ("brvc 0f", op, plen, 1);
7781 
7782       if (reg_overlap_mentioned_p (xop[0], xop[2]))
7783         {
7784           /* [1s,reg] */
7785 
7786           if (n_bytes == 1)
7787             avr_asm_len ("ldi %0,0x7f" CR_TAB
7788                          "adc %0,__zero_reg__", op, plen, 2);
7789           else
7790             avr_asm_len ("ldi %0,0x7f" CR_TAB
7791                          "ldi %1,0xff" CR_TAB
7792                          "adc %1,__zero_reg__" CR_TAB
7793                          "adc %0,__zero_reg__", op, plen, 4);
7794         }
7795       else if (sign == 0 && PLUS == code)
7796         {
7797           /* [1s,reg] */
7798 
7799           op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
7800 
7801           if (n_bytes == 1)
7802             avr_asm_len ("ldi %0,0x80" CR_TAB
7803                          "sbrs %2,7"   CR_TAB
7804                          "dec %0", op, plen, 3);
7805           else
7806             avr_asm_len ("ldi %0,0x80" CR_TAB
7807                          "cp %2,%0"    CR_TAB
7808                          "sbc %1,%1"   CR_TAB
7809                          "sbci %0,0", op, plen, 4);
7810         }
7811       else if (sign == 0 && MINUS == code)
7812         {
7813           /* [3s,reg] */
7814 
7815           op[2] = simplify_gen_subreg (QImode, xop[2], mode, n_bytes-1);
7816 
7817           if (n_bytes == 1)
7818             avr_asm_len ("ldi %0,0x7f" CR_TAB
7819                          "sbrs %2,7"   CR_TAB
7820                          "inc %0", op, plen, 3);
7821           else
7822             avr_asm_len ("ldi %0,0x7f" CR_TAB
7823                          "cp %0,%2"    CR_TAB
7824                          "sbc %1,%1"   CR_TAB
7825                          "sbci %0,-1", op, plen, 4);
7826         }
7827       else if ((sign < 0) ^ (SS_MINUS == code_sat))
7828         {
7829           /* [1s,const,B < 0] [2s,B < 0] */
7830           /* [3s,const,B > 0] [4s,B > 0] */
7831 
7832           if (n_bytes == 8)
7833             {
7834               avr_asm_len ("%~call __clr_8", op, plen, len_call);
7835               need_copy = false;
7836             }
7837 
7838           avr_asm_len ("ldi %0,0x80", op, plen, 1);
7839           if (n_bytes > 1 && need_copy)
7840             avr_asm_len ("clr %1", op, plen, 1);
7841         }
7842       else if ((sign > 0) ^ (SS_MINUS == code_sat))
7843         {
7844           /* [1s,const,B > 0] [2s,B > 0] */
7845           /* [3s,const,B < 0] [4s,B < 0] */
7846 
7847           if (n_bytes == 8)
7848             {
7849               avr_asm_len ("sec" CR_TAB
7850                            "%~call __sbc_8", op, plen, 1 + len_call);
7851               need_copy = false;
7852             }
7853 
7854           avr_asm_len ("ldi %0,0x7f", op, plen, 1);
7855           if (n_bytes > 1 && need_copy)
7856             avr_asm_len ("ldi %1,0xff", op, plen, 1);
7857         }
7858       else
7859         gcc_unreachable();
7860 
7861       break;
7862 
7863     case US_PLUS:
7864       /* [1u] : [2u] */
7865 
7866       avr_asm_len (PLUS == code ? "brcc 0f" : "brcs 0f", op, plen, 1);
7867 
7868       if (n_bytes == 8)
7869         {
7870           if (MINUS == code)
7871             avr_asm_len ("sec", op, plen, 1);
7872           avr_asm_len ("%~call __sbc_8", op, plen, len_call);
7873 
7874           need_copy = false;
7875         }
7876       else
7877         {
7878           if (MINUS == code && !test_hard_reg_class (LD_REGS, op[0]))
7879             avr_asm_len ("sec" CR_TAB
7880                          "sbc %0,%0", op, plen, 2);
7881           else
7882             avr_asm_len (PLUS == code ? "sbc %0,%0" : "ldi %0,0xff",
7883                          op, plen, 1);
7884         }
7885       break; /* US_PLUS */
7886 
7887     case US_MINUS:
7888       /* [4u] : [3u] */
7889 
7890       avr_asm_len (PLUS == code ? "brcs 0f" : "brcc 0f", op, plen, 1);
7891 
7892       if (n_bytes == 8)
7893         {
7894           avr_asm_len ("%~call __clr_8", op, plen, len_call);
7895           need_copy = false;
7896         }
7897       else
7898         avr_asm_len ("clr %0", op, plen, 1);
7899 
7900       break;
7901     }
7902 
7903   /* We set the MSB in the unsigned case and the 2 MSBs in the signed case.
7904      Now copy the right value to the LSBs.  */
7905 
7906   if (need_copy && n_bytes > 1)
7907     {
7908       if (US_MINUS == code_sat || US_PLUS == code_sat)
7909         {
7910           avr_asm_len ("mov %1,%0", op, plen, 1);
7911 
7912           if (n_bytes > 2)
7913             {
7914               op[0] = xop[0];
7915               if (AVR_HAVE_MOVW)
7916                 avr_asm_len ("movw %0,%1", op, plen, 1);
7917               else
7918                 avr_asm_len ("mov %A0,%1" CR_TAB
7919                              "mov %B0,%1", op, plen, 2);
7920             }
7921         }
7922       else if (n_bytes > 2)
7923         {
7924           op[0] = xop[0];
7925           avr_asm_len ("mov %A0,%1" CR_TAB
7926                        "mov %B0,%1", op, plen, 2);
7927         }
7928     }
7929 
7930   if (need_copy && n_bytes == 8)
7931     {
7932       if (AVR_HAVE_MOVW)
7933         avr_asm_len ("movw %r0+2,%0" CR_TAB
7934                      "movw %r0+4,%0", xop, plen, 2);
7935       else
7936         avr_asm_len ("mov %r0+2,%0" CR_TAB
7937                      "mov %r0+3,%0" CR_TAB
7938                      "mov %r0+4,%0" CR_TAB
7939                      "mov %r0+5,%0", xop, plen, 4);
7940     }
7941 
7942   if (out_label)
7943     avr_asm_len ("0:", op, plen, 0);
7944 }
7945 
7946 
7947 /* Output addition/subtraction of register XOP[0] and a constant XOP[2] that
7948    is ont a compile-time constant:
7949 
7950       XOP[0] = XOP[0] +/- XOP[2]
7951 
7952    This is a helper for the function below.  The only insns that need this
7953    are additions/subtraction for pointer modes, i.e. HImode and PSImode.  */
7954 
7955 static const char*
7956 avr_out_plus_symbol (rtx *xop, enum rtx_code code, int *plen, int *pcc)
7957 {
7958   machine_mode mode = GET_MODE (xop[0]);
7959 
7960   /* Only pointer modes want to add symbols.  */
7961 
7962   gcc_assert (mode == HImode || mode == PSImode);
7963 
7964   *pcc = MINUS == code ? (int) CC_SET_CZN : (int) CC_SET_N;
7965 
7966   avr_asm_len (PLUS == code
7967                ? "subi %A0,lo8(-(%2))" CR_TAB "sbci %B0,hi8(-(%2))"
7968                : "subi %A0,lo8(%2)"    CR_TAB "sbci %B0,hi8(%2)",
7969                xop, plen, -2);
7970 
7971   if (PSImode == mode)
7972     avr_asm_len (PLUS == code
7973                  ? "sbci %C0,hlo8(-(%2))"
7974                  : "sbci %C0,hlo8(%2)", xop, plen, 1);
7975   return "";
7976 }
7977 
7978 
7979 /* Prepare operands of addition/subtraction to be used with avr_out_plus_1.
7980 
7981    INSN is a single_set insn or an insn pattern with a binary operation as
7982    SET_SRC that is one of: PLUS, SS_PLUS, US_PLUS, MINUS, SS_MINUS, US_MINUS.
7983 
7984    XOP are the operands of INSN.  In the case of 64-bit operations with
7985    constant XOP[] has just one element:  The summand/subtrahend in XOP[0].
7986    The non-saturating insns up to 32 bits may or may not supply a "d" class
7987    scratch as XOP[3].
7988 
7989    If PLEN == NULL output the instructions.
7990    If PLEN != NULL set *PLEN to the length of the sequence in words.
7991 
7992    PCC is a pointer to store the instructions' effect on cc0.
7993    PCC may be NULL.
7994 
7995    PLEN and PCC default to NULL.
7996 
7997    OUT_LABEL defaults to TRUE.  For a description, see AVR_OUT_PLUS_1.
7998 
7999    Return ""  */
8000 
const char*
avr_out_plus (rtx insn, rtx *xop, int *plen, int *pcc, bool out_label)
{
  int cc_plus, cc_minus, cc_dummy;
  int len_plus, len_minus;
  rtx op[4];
  rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
  rtx xdest = SET_DEST (xpattern);
  machine_mode mode = GET_MODE (xdest);
  machine_mode imode = int_mode_for_mode (mode);
  int n_bytes = GET_MODE_SIZE (mode);
  /* CODE_SAT distinguishes the saturating flavors; CODE is the underlying
     binary operation, i.e. either PLUS or MINUS.  */
  enum rtx_code code_sat = GET_CODE (SET_SRC (xpattern));
  enum rtx_code code
    = (PLUS == code_sat || SS_PLUS == code_sat || US_PLUS == code_sat
       ? PLUS : MINUS);

  if (!pcc)
    pcc = &cc_dummy;

  /* PLUS and MINUS don't saturate:  Use modular wrap-around.  */

  if (PLUS == code_sat || MINUS == code_sat)
    code_sat = UNKNOWN;

  /* A register operand needs no negation trickery below:
     emit the operation as is.  */

  if (n_bytes <= 4 && REG_P (xop[2]))
    {
      avr_out_plus_1 (xop, plen, code, pcc, code_sat, 0, out_label);
      return "";
    }

  if (8 == n_bytes)
    {
      /* 64-bit operations act on the register window starting at ACC_A;
         XOP[] holds just the constant summand/subtrahend in XOP[0]
         (see the function comment above).  */
      op[0] = gen_rtx_REG (DImode, ACC_A);
      op[1] = gen_rtx_REG (DImode, ACC_A);
      op[2] = avr_to_int_mode (xop[0]);
    }
  else
    {
      if (!REG_P (xop[2])
          && !CONST_INT_P (xop[2])
          && !CONST_FIXED_P (xop[2]))
        {
          /* Neither register nor compile-time constant:
             XOP[2] must be a symbolic operand.  */
          return avr_out_plus_symbol (xop, code, plen, pcc);
        }

      /* Work on the integer counterparts of the modes so that the
         constant can be manipulated easily.  */
      op[0] = avr_to_int_mode (xop[0]);
      op[1] = avr_to_int_mode (xop[1]);
      op[2] = avr_to_int_mode (xop[2]);
    }

  /* Saturations and 64-bit operations don't have a clobber operand.
     For the other cases, the caller will provide a proper XOP[3].  */

  xpattern = INSN_P (insn) ? PATTERN (insn) : insn;
  op[3] = PARALLEL == GET_CODE (xpattern) ? xop[3] : NULL_RTX;

  /* Saturation will need the sign of the original operand.  OP[2] is a
     compile-time constant here, so its MSB subreg is a CONST_INT.  */

  rtx xmsb = simplify_gen_subreg (QImode, op[2], imode, n_bytes-1);
  int sign = INTVAL (xmsb) < 0 ? -1 : 1;

  /* If we subtract and the subtrahend is a constant, then negate it
     so that avr_out_plus_1 can be used.  */

  if (MINUS == code)
    op[2] = simplify_unary_operation (NEG, imode, op[2], imode);

  /* Work out the shortest sequence:  Try both adding the constant and
     subtracting its negative and pick whichever is shorter.  */

  avr_out_plus_1 (op, &len_minus, MINUS, &cc_minus, code_sat, sign, out_label);
  avr_out_plus_1 (op, &len_plus, PLUS, &cc_plus, code_sat, sign, out_label);

  if (plen)
    {
      /* Length requested only:  Report the shorter variant's length
         together with its effect on the condition code.  */
      *plen = (len_minus <= len_plus) ? len_minus : len_plus;
      *pcc  = (len_minus <= len_plus) ? cc_minus : cc_plus;
    }
  else if (len_minus <= len_plus)
    avr_out_plus_1 (op, NULL, MINUS, pcc, code_sat, sign, out_label);
  else
    avr_out_plus_1 (op, NULL, PLUS, pcc, code_sat, sign, out_label);

  return "";
}
8085 
8086 
8087 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
8088    time constant XOP[2]:
8089 
8090       XOP[0] = XOP[0] <op> XOP[2]
8091 
8092    and return "".  If PLEN == NULL, print assembler instructions to perform the
8093    operation; otherwise, set *PLEN to the length of the instruction sequence
8094    (in words) printed with PLEN == NULL.  XOP[3] is either an 8-bit clobber
8095    register or SCRATCH if no clobber register is needed for the operation.
8096    INSN is an INSN_P or a pattern of an insn.  */
8097 
const char*
avr_out_bitop (rtx insn, rtx *xop, int *plen)
{
  /* CODE and MODE of the operation.  */
  rtx xpattern = INSN_P (insn) ? single_set (as_a <rtx_insn *> (insn)) : insn;
  enum rtx_code code = GET_CODE (SET_SRC (xpattern));
  machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int n_bytes = GET_MODE_SIZE (mode);

  /* Value of T-flag (0 or 1) or -1 if unknown.  */
  int set_t = -1;

  /* Value (0..0xff) held in clobber register op[2] or -1 if unknown.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit clobber register, SCRATCH or NULL_RTX.
     op[3]: 8-bit register containing 0xff or NULL_RTX  */
  rtx op[4];

  op[2] = QImode == mode ? NULL_RTX : xop[3];
  op[3] = NULL_RTX;

  if (plen)
    *plen = 0;

  for (int i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Number of bits set in the current byte of the constant.  */
      int pop8 = popcount_hwi (val8);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = GEN_INT (val8);

      switch (code)
        {
        case IOR:

          if (0 == pop8)
            continue;
          else if (ld_reg_p)
            avr_asm_len ("ori %0,%1", op, plen, 1);
          else if (1 == pop8)
            {
              /* Exactly one bit to set:  Route it through the T flag,
                 which is set once and then reused across bytes.  */
              if (set_t != 1)
                avr_asm_len ("set", op, plen, 1);
              set_t = 1;

              op[1] = GEN_INT (exact_log2 (val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else if (8 == pop8)
            {
              /* All 8 bits set:  Load 0xff, reusing a register that
                 already holds it if we created one before.  */
              if (op[3] != NULL_RTX)
                avr_asm_len ("mov %0,%3", op, plen, 1);
              else
                avr_asm_len ("clr %0" CR_TAB
                             "dec %0", op, plen, 2);

              op[3] = op[0];
            }
          else
            {
              /* General case:  Load the mask into the clobber register,
                 skipping the LDI if it already holds that value.  */
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("or %0,%2", op, plen, 1);
            }

          continue; /* IOR */

        case AND:

          if (8 == pop8)
            continue;
          else if (0 == pop8)
            avr_asm_len ("clr %0", op, plen, 1);
          else if (ld_reg_p)
            avr_asm_len ("andi %0,%1", op, plen, 1);
          else if (7 == pop8)
            {
              /* Exactly one bit to clear:  Route it through the (cleared)
                 T flag.  */
              if (set_t != 0)
                avr_asm_len ("clt", op, plen, 1);
              set_t = 0;

              op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("and %0,%2", op, plen, 1);
            }

          continue; /* AND */

        case XOR:

          if (0 == pop8)
            continue;
          else if (8 == pop8)
            avr_asm_len ("com %0", op, plen, 1);
          else if (ld_reg_p && val8 == (1 << 7))
            /* Toggling just bit 7 of a d-reg:  SUBI 0x80 flips the MSB
               and leaves bits 0..6 alone.  */
            avr_asm_len ("subi %0,%1", op, plen, 1);
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("eor %0,%2", op, plen, 1);
            }

          continue; /* XOR */

        default:
          /* Unknown rtx_code */
          gcc_unreachable();
        }
    } /* for all sub-bytes */

  return "";
}
8238 
8239 
8240 /* Output sign extension from XOP[1] to XOP[0] and return "".
8241    If PLEN == NULL, print assembler instructions to perform the operation;
8242    otherwise, set *PLEN to the length of the instruction sequence (in words)
8243    as printed with PLEN == NULL.  */
8244 
const char*
avr_out_sign_extend (rtx_insn *insn, rtx *xop, int *plen)
{
  // Size in bytes of source resp. destination operand.
  unsigned n_src = GET_MODE_SIZE (GET_MODE (xop[1]));
  unsigned n_dest = GET_MODE_SIZE (GET_MODE (xop[0]));
  // Register holding the most significant (sign-carrying) byte of the source.
  rtx r_msb = all_regs_rtx[REGNO (xop[1]) + n_src - 1];

  if (plen)
    *plen = 0;

  // Copy source to destination (when they live in different registers).

  if (REGNO (xop[0]) != REGNO (xop[1]))
    {
      // Only 1- and 2-byte sources are expected here.
      gcc_assert (n_src <= 2);

      if (n_src == 2)
        avr_asm_len (AVR_HAVE_MOVW
                     ? "movw %0,%1"
                     : "mov %B0,%B1", xop, plen, 1);
      if (n_src == 1 || !AVR_HAVE_MOVW)
        avr_asm_len ("mov %A0,%A1", xop, plen, 1);
    }

  // Set Carry to the sign bit MSB.7...

  if (REGNO (xop[0]) == REGNO (xop[1])
      || !reg_unused_after (insn, r_msb))
    {
      // The LSL below clobbers its operand, so shift a scratch copy when
      // the MSB register is part of the destination or still live after INSN.
      avr_asm_len ("mov __tmp_reg__,%0", &r_msb, plen, 1);
      r_msb = tmp_reg_rtx;
    }

  avr_asm_len ("lsl %0", &r_msb, plen, 1);

  // ...and propagate it to all the new sign bits

  for (unsigned n = n_src; n < n_dest; n++)
    avr_asm_len ("sbc %0,%0", &all_regs_rtx[REGNO (xop[0]) + n], plen, 1);

  return "";
}
8288 
8289 
8290 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
8291    PLEN != NULL: Set *PLEN to the length of that sequence.
8292    Return "".  */
8293 
8294 const char*
8295 avr_out_addto_sp (rtx *op, int *plen)
8296 {
8297   int pc_len = AVR_2_BYTE_PC ? 2 : 3;
8298   int addend = INTVAL (op[0]);
8299 
8300   if (plen)
8301     *plen = 0;
8302 
8303   if (addend < 0)
8304     {
8305       if (flag_verbose_asm || flag_print_asm_name)
8306         avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
8307 
8308       while (addend <= -pc_len)
8309         {
8310           addend += pc_len;
8311           avr_asm_len ("rcall .", op, plen, 1);
8312         }
8313 
8314       while (addend++ < 0)
8315         avr_asm_len ("push __zero_reg__", op, plen, 1);
8316     }
8317   else if (addend > 0)
8318     {
8319       if (flag_verbose_asm || flag_print_asm_name)
8320         avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
8321 
8322       while (addend-- > 0)
8323         avr_asm_len ("pop __tmp_reg__", op, plen, 1);
8324     }
8325 
8326   return "";
8327 }
8328 
8329 
8330 /* Output instructions to insert an inverted bit into OPERANDS[0]:
8331    $0.$1 = ~$2.$3      if XBITNO = NULL
8332    $0.$1 = ~$2.XBITNO  if XBITNO != NULL.
8333    If PLEN = NULL then output the respective instruction sequence which
8334    is a combination of BST / BLD and some instruction(s) to invert the bit.
8335    If PLEN != NULL then store the length of the sequence (in words) in *PLEN.
8336    Return "".  */
8337 
const char*
avr_out_insert_notbit (rtx_insn *insn, rtx operands[], rtx xbitno, int *plen)
{
  // op[3] is the source bit position: operand 3 unless overridden by XBITNO.
  rtx op[4] = { operands[0], operands[1], operands[2],
                xbitno == NULL_RTX ? operands [3] : xbitno };

  if (INTVAL (op[1]) == 7
      && test_hard_reg_class (LD_REGS, op[0]))
    {
      /* If the inserted bit number is 7 and we have a d-reg, then invert
         the bit after the insertion by means of SUBI *,0x80.  */

      if (INTVAL (op[3]) == 7
          && REGNO (op[0]) == REGNO (op[2]))
        {
          /* Source and destination bit coincide:  Just flip bit 7
             in place.  */
          avr_asm_len ("subi %0,0x80", op, plen, -1);
        }
      else
        {
          avr_asm_len ("bst %2,%3" CR_TAB
                       "bld %0,%1" CR_TAB
                       "subi %0,0x80", op, plen, -3);
        }
    }
  else if (test_hard_reg_class (LD_REGS, op[0])
           && (INTVAL (op[1]) != INTVAL (op[3])
               || !reg_overlap_mentioned_p (op[0], op[2])))
    {
      /* If the destination bit is in a d-reg we can jump depending
         on the source bit and use ANDI / ORI.  This just applies if we
         have not an early-clobber situation with the bit.  */

      avr_asm_len ("andi %0,~(1<<%1)" CR_TAB
                   "sbrs %2,%3"       CR_TAB
                   "ori %0,1<<%1", op, plen, -3);
    }
  else
    {
      /* Otherwise, invert the bit by means of COM before we store it with
         BST and then undo the COM if needed.  */

      avr_asm_len ("com %2" CR_TAB
                   "bst %2,%3", op, plen, -2);

      if (!reg_unused_after (insn, op[2])
          // A simple 'reg_unused_after' is not enough because that function
          // assumes that the destination register is overwritten completely
          // and hence is in order for our purpose.  This is not the case
          // with BLD which just changes one bit of the destination.
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          /* Undo the COM from above.  */
          avr_asm_len ("com %2", op, plen, 1);
        }

      avr_asm_len ("bld %0,%1", op, plen, 1);
    }

  return "";
}
8398 
8399 
8400 /* Outputs instructions needed for fixed point type conversion.
8401    This includes converting between any fixed point type, as well
8402    as converting to any integer type.  Conversion between integer
8403    types is not supported.
8404 
8405    Converting signed fractional types requires a bit shift if converting
8406    to or from any unsigned fractional type because the decimal place is
8407    shifted by 1 bit.  When the destination is a signed fractional, the sign
8408    is stored in either the carry or T bit.  */
8409 
8410 const char*
8411 avr_out_fract (rtx_insn *insn, rtx operands[], bool intsigned, int *plen)
8412 {
8413   rtx xop[6];
8414   RTX_CODE shift = UNKNOWN;
8415   bool sign_in_carry = false;
8416   bool msb_in_carry = false;
8417   bool lsb_in_tmp_reg = false;
8418   bool lsb_in_carry = false;
8419   bool frac_rounded = false;
8420   const char *code_ashift = "lsl %0";
8421 
8422 
8423 #define MAY_CLOBBER(RR)                                                 \
8424   /* Shorthand used below.  */                                          \
8425   ((sign_bytes                                                          \
8426     && IN_RANGE (RR, dest.regno_msb - sign_bytes + 1, dest.regno_msb))  \
8427    || (offset && IN_RANGE (RR, dest.regno, dest.regno_msb))		\
8428    || (reg_unused_after (insn, all_regs_rtx[RR])                        \
8429        && !IN_RANGE (RR, dest.regno, dest.regno_msb)))
8430 
8431   struct
8432   {
8433     /* bytes       : Length of operand in bytes.
8434        ibyte       : Length of integral part in bytes.
8435        fbyte, fbit : Length of fractional part in bytes, bits.  */
8436 
8437     bool sbit;
8438     unsigned fbit, bytes, ibyte, fbyte;
8439     unsigned regno, regno_msb;
8440   } dest, src, *val[2] = { &dest, &src };
8441 
8442   if (plen)
8443     *plen = 0;
8444 
8445   /* Step 0:  Determine information on source and destination operand we
8446      ======   will need in the remainder.  */
8447 
8448   for (size_t i = 0; i < ARRAY_SIZE (val); i++)
8449     {
8450       machine_mode mode;
8451 
8452       xop[i] = operands[i];
8453 
8454       mode = GET_MODE (xop[i]);
8455 
8456       val[i]->bytes = GET_MODE_SIZE (mode);
8457       val[i]->regno = REGNO (xop[i]);
8458       val[i]->regno_msb = REGNO (xop[i]) + val[i]->bytes - 1;
8459 
8460       if (SCALAR_INT_MODE_P (mode))
8461         {
8462           val[i]->sbit = intsigned;
8463           val[i]->fbit = 0;
8464         }
8465       else if (ALL_SCALAR_FIXED_POINT_MODE_P (mode))
8466         {
8467           val[i]->sbit = SIGNED_SCALAR_FIXED_POINT_MODE_P (mode);
8468           val[i]->fbit = GET_MODE_FBIT (mode);
8469         }
8470       else
8471         fatal_insn ("unsupported fixed-point conversion", insn);
8472 
8473       val[i]->fbyte = (1 + val[i]->fbit) / BITS_PER_UNIT;
8474       val[i]->ibyte = val[i]->bytes - val[i]->fbyte;
8475     }
8476 
8477   // Byte offset of the decimal point taking into account different place
8478   // of the decimal point in input and output and different register numbers
8479   // of input and output.
8480   int offset = dest.regno - src.regno + dest.fbyte - src.fbyte;
8481 
8482   // Number of destination bytes that will come from sign / zero extension.
8483   int sign_bytes = (dest.ibyte - src.ibyte) * (dest.ibyte > src.ibyte);
8484 
8485   // Number of bytes at the low end to be filled with zeros.
8486   int zero_bytes = (dest.fbyte - src.fbyte) * (dest.fbyte > src.fbyte);
8487 
8488   // Do we have a 16-Bit register that is cleared?
8489   rtx clrw = NULL_RTX;
8490 
8491   bool sign_extend = src.sbit && sign_bytes;
8492 
8493   if (0 == dest.fbit % 8 && 7 == src.fbit % 8)
8494     shift = ASHIFT;
8495   else if (7 == dest.fbit % 8 && 0 == src.fbit % 8)
8496     shift = ASHIFTRT;
8497   else if (dest.fbit % 8 == src.fbit % 8)
8498     shift = UNKNOWN;
8499   else
8500     gcc_unreachable();
8501 
8502   /* If we need to round the fraction part, we might need to save/round it
8503      before clobbering any of it in Step 1.  Also, we might want to do
8504      the rounding now to make use of LD_REGS.  */
8505   if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8506       && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8507       && !TARGET_FRACT_CONV_TRUNC)
8508     {
8509       bool overlap
8510         = (src.regno <=
8511            (offset ? dest.regno_msb - sign_bytes : dest.regno + zero_bytes - 1)
8512            && dest.regno - offset -1 >= dest.regno);
8513       unsigned s0 = dest.regno - offset -1;
8514       bool use_src = true;
8515       unsigned sn;
8516       unsigned copied_msb = src.regno_msb;
8517       bool have_carry = false;
8518 
8519       if (src.ibyte > dest.ibyte)
8520         copied_msb -= src.ibyte - dest.ibyte;
8521 
8522       for (sn = s0; sn <= copied_msb; sn++)
8523         if (!IN_RANGE (sn, dest.regno, dest.regno_msb)
8524             && !reg_unused_after (insn, all_regs_rtx[sn]))
8525           use_src = false;
8526       if (use_src && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0))
8527         {
8528           avr_asm_len ("tst %0" CR_TAB "brpl 0f",
8529                        &all_regs_rtx[src.regno_msb], plen, 2);
8530           sn = src.regno;
8531           if (sn < s0)
8532             {
8533               if (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], sn))
8534                 avr_asm_len ("cpi %0,1", &all_regs_rtx[sn], plen, 1);
8535               else
8536                 avr_asm_len ("sec" CR_TAB
8537                              "cpc %0,__zero_reg__",
8538                              &all_regs_rtx[sn], plen, 2);
8539               have_carry = true;
8540             }
8541           while (++sn < s0)
8542             avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8543 
8544           avr_asm_len (have_carry ? "sbci %0,128" : "subi %0,129",
8545                        &all_regs_rtx[s0], plen, 1);
8546           for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8547             avr_asm_len ("sbci %0,255", &all_regs_rtx[sn], plen, 1);
8548           avr_asm_len ("\n0:", NULL, plen, 0);
8549           frac_rounded = true;
8550         }
8551       else if (use_src && overlap)
8552         {
8553           avr_asm_len ("clr __tmp_reg__" CR_TAB
8554                        "sbrc %1,0"       CR_TAB
8555                        "dec __tmp_reg__", xop, plen, 1);
8556           sn = src.regno;
8557           if (sn < s0)
8558             {
8559               avr_asm_len ("add %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8560               have_carry = true;
8561             }
8562 
8563           while (++sn < s0)
8564             avr_asm_len ("adc %0,__tmp_reg__", &all_regs_rtx[sn], plen, 1);
8565 
8566           if (have_carry)
8567             avr_asm_len ("clt"                CR_TAB
8568                          "bld __tmp_reg__,7"  CR_TAB
8569                          "adc %0,__tmp_reg__",
8570                          &all_regs_rtx[s0], plen, 1);
8571           else
8572             avr_asm_len ("lsr __tmp_reg" CR_TAB
8573                          "add %0,__tmp_reg__",
8574                          &all_regs_rtx[s0], plen, 2);
8575           for (sn = src.regno + src.fbyte; sn <= copied_msb; sn++)
8576             avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8577           frac_rounded = true;
8578         }
8579       else if (overlap)
8580         {
8581           bool use_src
8582             = (TEST_HARD_REG_BIT (reg_class_contents[LD_REGS], s0)
8583                && (IN_RANGE (s0, dest.regno, dest.regno_msb)
8584                    || reg_unused_after (insn, all_regs_rtx[s0])));
8585           xop[2] = all_regs_rtx[s0];
8586           unsigned sn = src.regno;
8587           if (!use_src || sn == s0)
8588             avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
8589           /* We need to consider to-be-discarded bits
8590              if the value is negative.  */
8591           if (sn < s0)
8592             {
8593               avr_asm_len ("tst %0" CR_TAB
8594                            "brpl 0f",
8595                            &all_regs_rtx[src.regno_msb], plen, 2);
8596               /* Test to-be-discarded bytes for any nozero bits.
8597                  ??? Could use OR or SBIW to test two registers at once.  */
8598               if (sn < s0)
8599                 avr_asm_len ("cp %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8600 
8601               while (++sn < s0)
8602                 avr_asm_len ("cpc %0,__zero_reg__", &all_regs_rtx[sn], plen, 1);
8603               /* Set bit 0 in __tmp_reg__ if any of the lower bits was set.  */
8604               if (use_src)
8605                 avr_asm_len ("breq 0f" CR_TAB
8606                              "ori %2,1"
8607                              "\n0:\t" "mov __tmp_reg__,%2",
8608                              xop, plen, 3);
8609               else
8610                 avr_asm_len ("breq 0f" CR_TAB
8611                              "set"     CR_TAB
8612                              "bld __tmp_reg__,0\n0:",
8613                              xop, plen, 3);
8614             }
8615           lsb_in_tmp_reg = true;
8616         }
8617     }
8618 
8619   /* Step 1:  Clear bytes at the low end and copy payload bits from source
8620      ======   to destination.  */
8621 
8622   int step = offset < 0 ? 1 : -1;
8623   unsigned d0 = offset < 0 ? dest.regno : dest.regno_msb;
8624 
8625   // We cleared at least that number of registers.
8626   int clr_n = 0;
8627 
8628   for (; d0 >= dest.regno && d0 <= dest.regno_msb; d0 += step)
8629     {
8630       // Next regno of destination is needed for MOVW
8631       unsigned d1 = d0 + step;
8632 
8633       // Current and next regno of source
8634       signed s0 = d0 - offset;
8635       signed s1 = s0 + step;
8636 
8637       // Must current resp. next regno be CLRed?  This applies to the low
8638       // bytes of the destination that have no associated source bytes.
8639       bool clr0 = s0 < (signed) src.regno;
8640       bool clr1 = s1 < (signed) src.regno && d1 >= dest.regno;
8641 
8642       // First gather what code to emit (if any) and additional step to
8643       // apply if a MOVW is in use.  xop[2] is destination rtx and xop[3]
8644       // is the source rtx for the current loop iteration.
8645       const char *code = NULL;
8646       int stepw = 0;
8647 
8648       if (clr0)
8649         {
8650           if (AVR_HAVE_MOVW && clr1 && clrw)
8651             {
8652               xop[2] = all_regs_rtx[d0 & ~1];
8653               xop[3] = clrw;
8654               code = "movw %2,%3";
8655               stepw = step;
8656             }
8657           else
8658             {
8659               xop[2] = all_regs_rtx[d0];
8660               code = "clr %2";
8661 
8662               if (++clr_n >= 2
8663                   && !clrw
8664                   && d0 % 2 == (step > 0))
8665                 {
8666                   clrw = all_regs_rtx[d0 & ~1];
8667                 }
8668             }
8669         }
8670       else if (offset && s0 <= (signed) src.regno_msb)
8671         {
8672           int movw = AVR_HAVE_MOVW && offset % 2 == 0
8673             && d0 % 2 == (offset > 0)
8674             && d1 <= dest.regno_msb && d1 >= dest.regno
8675             && s1 <= (signed) src.regno_msb  && s1 >= (signed) src.regno;
8676 
8677           xop[2] = all_regs_rtx[d0 & ~movw];
8678           xop[3] = all_regs_rtx[s0 & ~movw];
8679           code = movw ? "movw %2,%3" : "mov %2,%3";
8680           stepw = step * movw;
8681         }
8682 
8683       if (code)
8684         {
8685           if (sign_extend && shift != ASHIFT && !sign_in_carry
8686               && (d0 == src.regno_msb || d0 + stepw == src.regno_msb))
8687             {
8688               /* We are going to override the sign bit.  If we sign-extend,
8689                  store the sign in the Carry flag.  This is not needed if
8690                  the destination will be ASHIFT in the remainder because
8691                  the ASHIFT will set Carry without extra instruction.  */
8692 
8693               avr_asm_len ("lsl %0", &all_regs_rtx[src.regno_msb], plen, 1);
8694               sign_in_carry = true;
8695             }
8696 
8697           unsigned src_msb = dest.regno_msb - sign_bytes - offset + 1;
8698 
8699           if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
8700               && src.ibyte > dest.ibyte
8701               && (d0 == src_msb || d0 + stepw == src_msb))
8702             {
8703               /* We are going to override the MSB.  If we shift right,
8704                  store the MSB in the Carry flag.  This is only needed if
8705                  we don't sign-extend becaue with sign-extension the MSB
8706                  (the sign) will be produced by the sign extension.  */
8707 
8708               avr_asm_len ("lsr %0", &all_regs_rtx[src_msb], plen, 1);
8709               msb_in_carry = true;
8710             }
8711 
8712           unsigned src_lsb = dest.regno - offset -1;
8713 
8714           if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry
8715 	      && !lsb_in_tmp_reg
8716               && (d0 == src_lsb || d0 + stepw == src_lsb))
8717             {
8718               /* We are going to override the new LSB; store it into carry.  */
8719 
8720               avr_asm_len ("lsl %0", &all_regs_rtx[src_lsb], plen, 1);
8721               code_ashift = "rol %0";
8722               lsb_in_carry = true;
8723             }
8724 
8725           avr_asm_len (code, xop, plen, 1);
8726           d0 += stepw;
8727         }
8728     }
8729 
8730   /* Step 2:  Shift destination left by 1 bit position.  This might be needed
8731      ======   for signed input and unsigned output.  */
8732 
8733   if (shift == ASHIFT && src.fbyte > dest.fbyte && !lsb_in_carry)
8734     {
8735       unsigned s0 = dest.regno - offset -1;
8736 
8737       /* n1169 4.1.4 says:
8738 	 "Conversions from a fixed-point to an integer type round toward zero."
8739 	 Hence, converting a fract type to integer only gives a non-zero result
8740 	 for -1.  */
8741       if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8742 	  && SCALAR_FRACT_MODE_P (GET_MODE (xop[1]))
8743 	  && !TARGET_FRACT_CONV_TRUNC)
8744 	{
8745 	  gcc_assert (s0 == src.regno_msb);
8746 	  /* Check if the input is -1.  We do that by checking if negating
8747 	     the input causes an integer overflow.  */
8748 	  unsigned sn = src.regno;
8749 	  avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8750 	  while (sn <= s0)
8751 	    avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn++], plen, 1);
8752 
8753 	  /* Overflow goes with set carry.  Clear carry otherwise.  */
8754 	  avr_asm_len ("brvs 0f" CR_TAB
8755                        "clc\n0:", NULL, plen, 2);
8756 	}
8757       /* Likewise, when converting from accumulator types to integer, we
8758 	 need to round up negative values.  */
8759       else if (SCALAR_INT_MODE_P (GET_MODE (xop[0]))
8760 	       && SCALAR_ACCUM_MODE_P (GET_MODE (xop[1]))
8761 	       && !TARGET_FRACT_CONV_TRUNC
8762 	       && !frac_rounded)
8763 	{
8764 	  bool have_carry = false;
8765 
8766 	  xop[2] = all_regs_rtx[s0];
8767 	  if (!lsb_in_tmp_reg && !MAY_CLOBBER (s0))
8768 	    avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
8769 	  avr_asm_len ("tst %0" CR_TAB "brpl 0f",
8770 		       &all_regs_rtx[src.regno_msb], plen, 2);
8771 	  if (!lsb_in_tmp_reg)
8772 	    {
8773 	      unsigned sn = src.regno;
8774 	      if (sn < s0)
8775 		{
8776 		  avr_asm_len ("cp __zero_reg__,%0", &all_regs_rtx[sn],
8777 			       plen, 1);
8778 		  have_carry = true;
8779 		}
8780 	      while (++sn < s0)
8781 		avr_asm_len ("cpc __zero_reg__,%0", &all_regs_rtx[sn], plen, 1);
8782 	      lsb_in_tmp_reg = !MAY_CLOBBER (s0);
8783 	    }
8784 	  /* Add in C and the rounding value 127.  */
8785 	  /* If the destination msb is a sign byte, and in LD_REGS,
8786 	     grab it as a temporary.  */
8787 	  if (sign_bytes
8788 	      && TEST_HARD_REG_BIT (reg_class_contents[LD_REGS],
8789 				    dest.regno_msb))
8790 	    {
8791 	      xop[3] = all_regs_rtx[dest.regno_msb];
8792 	      avr_asm_len ("ldi %3,127", xop, plen, 1);
8793 	      avr_asm_len ((have_carry && lsb_in_tmp_reg ? "adc __tmp_reg__,%3"
8794 			    : have_carry ? "adc %2,%3"
8795 			    : lsb_in_tmp_reg ? "add __tmp_reg__,%3"
8796 			    : "add %2,%3"),
8797 			   xop, plen, 1);
8798 	    }
8799 	  else
8800 	    {
8801 	      /* Fall back to use __zero_reg__ as a temporary.  */
8802 	      avr_asm_len ("dec __zero_reg__", NULL, plen, 1);
8803 	      if (have_carry)
8804 		avr_asm_len ("clt" CR_TAB
8805                              "bld __zero_reg__,7", NULL, plen, 2);
8806 	      else
8807 		avr_asm_len ("lsr __zero_reg__", NULL, plen, 1);
8808 	      avr_asm_len (have_carry && lsb_in_tmp_reg
8809                            ? "adc __tmp_reg__,__zero_reg__"
8810                            : have_carry ? "adc %2,__zero_reg__"
8811                            : lsb_in_tmp_reg ? "add __tmp_reg__,__zero_reg__"
8812                            : "add %2,__zero_reg__",
8813 			   xop, plen, 1);
8814 	      avr_asm_len ("eor __zero_reg__,__zero_reg__", NULL, plen, 1);
8815 	    }
8816 
8817           for (d0 = dest.regno + zero_bytes;
8818 	       d0 <= dest.regno_msb - sign_bytes; d0++)
8819 	    avr_asm_len ("adc %0,__zero_reg__", &all_regs_rtx[d0], plen, 1);
8820 
8821           avr_asm_len (lsb_in_tmp_reg
8822 		       ? "\n0:\t" "lsl __tmp_reg__"
8823                        : "\n0:\t" "lsl %2",
8824 		       xop, plen, 1);
8825 	}
8826       else if (MAY_CLOBBER (s0))
8827         avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
8828       else
8829         avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8830                      "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8831 
8832       code_ashift = "rol %0";
8833       lsb_in_carry = true;
8834     }
8835 
8836   if (shift == ASHIFT)
8837     {
8838       for (d0 = dest.regno + zero_bytes;
8839            d0 <= dest.regno_msb - sign_bytes; d0++)
8840         {
8841           avr_asm_len (code_ashift, &all_regs_rtx[d0], plen, 1);
8842           code_ashift = "rol %0";
8843         }
8844 
8845       lsb_in_carry = false;
8846       sign_in_carry = true;
8847     }
8848 
8849   /* Step 4a:  Store MSB in carry if we don't already have it or will produce
8850      =======   it in sign-extension below.  */
8851 
8852   if (!sign_extend && shift == ASHIFTRT && !msb_in_carry
8853       && src.ibyte > dest.ibyte)
8854     {
8855       unsigned s0 = dest.regno_msb - sign_bytes - offset + 1;
8856 
8857       if (MAY_CLOBBER (s0))
8858         avr_asm_len ("lsr %0", &all_regs_rtx[s0], plen, 1);
8859       else
8860         avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8861                      "lsr __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8862 
8863       msb_in_carry = true;
8864     }
8865 
8866   /* Step 3:  Sign-extend or zero-extend the destination as needed.
8867      ======   */
8868 
8869   if (sign_extend && !sign_in_carry)
8870     {
8871       unsigned s0 = src.regno_msb;
8872 
8873       if (MAY_CLOBBER (s0))
8874         avr_asm_len ("lsl %0", &all_regs_rtx[s0], plen, 1);
8875       else
8876         avr_asm_len ("mov __tmp_reg__,%0" CR_TAB
8877                      "lsl __tmp_reg__", &all_regs_rtx[s0], plen, 2);
8878 
8879       sign_in_carry = true;
8880     }
8881 
8882   gcc_assert (sign_in_carry + msb_in_carry + lsb_in_carry <= 1);
8883 
8884   unsigned copies = 0;
8885   rtx movw = sign_extend ? NULL_RTX : clrw;
8886 
8887   for (d0 = dest.regno_msb - sign_bytes + 1; d0 <= dest.regno_msb; d0++)
8888     {
8889       if (AVR_HAVE_MOVW && movw
8890           && d0 % 2 == 0 && d0 + 1 <= dest.regno_msb)
8891         {
8892           xop[2] = all_regs_rtx[d0];
8893           xop[3] = movw;
8894           avr_asm_len ("movw %2,%3", xop, plen, 1);
8895           d0++;
8896         }
8897       else
8898         {
8899           avr_asm_len (sign_extend ? "sbc %0,%0" : "clr %0",
8900                        &all_regs_rtx[d0], plen, 1);
8901 
8902           if (++copies >= 2 && !movw && d0 % 2 == 1)
8903             movw = all_regs_rtx[d0-1];
8904         }
8905     } /* for */
8906 
8907 
8908   /* Step 4:  Right shift the destination.  This might be needed for
8909      ======   conversions from unsigned to signed.  */
8910 
8911   if (shift == ASHIFTRT)
8912     {
8913       const char *code_ashiftrt = "lsr %0";
8914 
8915       if (sign_extend || msb_in_carry)
8916         code_ashiftrt = "ror %0";
8917 
8918       if (src.sbit && src.ibyte == dest.ibyte)
8919         code_ashiftrt = "asr %0";
8920 
8921       for (d0 = dest.regno_msb - sign_bytes;
8922            d0 >= dest.regno + zero_bytes - 1 && d0 >= dest.regno; d0--)
8923         {
8924           avr_asm_len (code_ashiftrt, &all_regs_rtx[d0], plen, 1);
8925           code_ashiftrt = "ror %0";
8926         }
8927     }
8928 
8929 #undef MAY_CLOBBER
8930 
8931   return "";
8932 }
8933 
8934 
8935 /* Output fixed-point rounding.  XOP[0] = XOP[1] is the operand to round.
8936    XOP[2] is the rounding point, a CONST_INT.  The function prints the
8937    instruction sequence if PLEN = NULL and computes the length in words
8938    of the sequence if PLEN != NULL.  Most of this function deals with
8939    preparing operands for calls to `avr_out_plus' and `avr_out_bitop'.  */
8940 
const char*
avr_out_round (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
{
  machine_mode mode = GET_MODE (xop[0]);
  machine_mode imode = int_mode_for_mode (mode);
  // The smallest fractional bit not cleared by the rounding is 2^(-RP).
  int fbit = (int) GET_MODE_FBIT (mode);
  // Rounding addend 2^(-RP-1) as a fixed-point constant of MODE ...
  double_int i_add = double_int_zero.set_bit (fbit-1 - INTVAL (xop[2]));
  // ... and the same bit as a wide_int in IMODE's precision, used below
  // to build the AND mask via  -wi_add - wi_add.
  wide_int wi_add = wi::set_bit_in_zero (fbit-1 - INTVAL (xop[2]),
					 GET_MODE_PRECISION (imode));
  // Lengths of PLUS and AND parts.
  int len_add = 0, *plen_add = plen ? &len_add : NULL;
  int len_and = 0, *plen_and = plen ? &len_and : NULL;

  // Add-Saturate  1/2 * 2^(-RP).  Don't print the label "0:" when printing
  // the saturated addition so that we can emit the "rjmp 1f" before the
  // "0:" below.

  rtx xadd = const_fixed_from_double_int (i_add, mode);
  rtx xpattern, xsrc, op[4];

  // The flavor of saturating PLUS depends on the signedness of MODE.
  xsrc = SIGNED_FIXED_POINT_MODE_P (mode)
    ? gen_rtx_SS_PLUS (mode, xop[1], xadd)
    : gen_rtx_US_PLUS (mode, xop[1], xadd);
  xpattern = gen_rtx_SET (xop[0], xsrc);

  op[0] = xop[0];
  op[1] = xop[1];
  op[2] = xadd;
  avr_out_plus (xpattern, op, plen_add, NULL, false /* Don't print "0:" */);

  // If the addition saturated (jumped to "0:"), skip the masking below:
  // a saturated value is already correctly rounded.
  avr_asm_len ("rjmp 1f" CR_TAB
               "0:", NULL, plen_add, 1);

  // Keep  all bits from RP and higher:   ... 2^(-RP)
  // Clear all bits from RP+1 and lower:              2^(-RP-1) ...
  // Rounding point                           ^^^^^^^
  // Added above                                      ^^^^^^^^^
  rtx xreg = simplify_gen_subreg (imode, xop[0], mode, 0);
  // -wi_add - wi_add == -2 * 2^(-RP-1):  mask with ones from bit -RP up.
  rtx xmask = immed_wide_int_const (-wi_add - wi_add, imode);

  xpattern = gen_rtx_SET (xreg, gen_rtx_AND (imode, xreg, xmask));

  op[0] = xreg;
  op[1] = xreg;
  op[2] = xmask;
  op[3] = gen_rtx_SCRATCH (QImode);
  avr_out_bitop (xpattern, op, plen_and);
  avr_asm_len ("1:", NULL, plen, 0);

  // When only computing the length, report the sum of both parts.
  if (plen)
    *plen = len_add + len_and;

  return "";
}
8996 
8997 
8998 /* Create RTL split patterns for byte sized rotate expressions.  This
8999    produces a series of move instructions and considers overlap situations.
9000    Overlapping non-HImode operands need a scratch register.  */
9001 
bool
avr_rotate_bytes (rtx operands[])
{
  machine_mode mode = GET_MODE (operands[0]);
  bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
  bool same_reg = rtx_equal_p (operands[0], operands[1]);
  int num = INTVAL (operands[2]);
  rtx scratch = operands[3];
  /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
     Word move if no scratch is needed, otherwise use size of scratch.  */
  machine_mode move_mode = QImode;
  int move_size, offset, size;

  if (num & 0xf)
    move_mode = QImode;
  else if ((mode == SImode && !same_reg) || !overlapped)
    move_mode = HImode;
  else
    move_mode = GET_MODE (scratch);

  /* Force DI rotate to use QI moves since other DI moves are currently split
     into QI moves so forward propagation works better.  */
  if (mode == DImode)
    move_mode = QImode;
  /* Make scratch smaller if needed.  */
  if (SCRATCH != GET_CODE (scratch)
      && HImode == GET_MODE (scratch)
      && QImode == move_mode)
    scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

  move_size = GET_MODE_SIZE (move_mode);
  /* Number of bytes/words to rotate.  */
  offset = (num  >> 3) / move_size;
  /* Number of moves needed.  */
  size = GET_MODE_SIZE (mode) / move_size;
  /* HImode byte swap is special case to avoid a scratch register.  */
  if (mode == HImode && same_reg)
    {
      /* HImode byte swap, using xor.  This is as quick as using scratch.
         Classic three-XOR in-place swap of the two bytes.  */
      rtx src, dst;
      src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
      dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
      if (!rtx_equal_p (dst, src))
        {
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
          emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
        }
    }
  else
    {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode)  */
      /* Create linked list of moves to determine move order.  */
      struct {
        rtx src, dst;
        int links;
      } move[MAX_SIZE + 8];  /* Spare slots for scratch moves added below.  */
      int blocked, moves;

      gcc_assert (size <= MAX_SIZE);
      /* Generate list of subreg moves.  */
      for (int i = 0; i < size; i++)
        {
          int from = i;
          int to = (from + offset) % size;
          move[i].src = simplify_gen_subreg (move_mode, operands[1],
                                             mode, from * move_size);
          move[i].dst = simplify_gen_subreg (move_mode, operands[0],
                                             mode, to * move_size);
          move[i].links = -1;
        }
      /* Mark dependence where a dst of one move is the src of another move.
         The first move is a conflict as it must wait until second is
         performed.  We ignore moves to self - we catch this later.  */
      if (overlapped)
        for (int i = 0; i < size; i++)
          if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
            for (int j = 0; j < size; j++)
              if (j != i && rtx_equal_p (move[j].src, move[i].dst))
                {
                  /* The dst of move i is the src of move j.  */
                  move[i].links = j;
                  break;
                }

      blocked = -1;
      moves = 0;
      /* Go through move list and perform non-conflicting moves.  As each
         non-overlapping move is made, it may remove other conflicts
         so the process is repeated until no conflicts remain.  */
      do
        {
          blocked = -1;
          moves = 0;
          /* Emit move where dst is not also a src or we have used that
             src already.  */
          for (int i = 0; i < size; i++)
            if (move[i].src != NULL_RTX)
              {
                if (move[i].links == -1
                    || move[move[i].links].src == NULL_RTX)
                  {
                    moves++;
                    /* Ignore NOP moves to self.  */
                    if (!rtx_equal_p (move[i].dst, move[i].src))
                      emit_move_insn (move[i].dst, move[i].src);

                    /* Remove  conflict from list.  */
                    move[i].src = NULL_RTX;
                  }
                else
                  blocked = i;
              }

          /* Check for deadlock. This is when no moves occurred and we have
             at least one blocked move.  */
          if (moves == 0 && blocked != -1)
            {
              /* Need to use scratch register to break deadlock.
                 Add move to put dst of blocked move into scratch.
                 When this move occurs, it will break chain deadlock.
                 The scratch register is substituted for real move.  */

              gcc_assert (SCRATCH != GET_CODE (scratch));

              move[size].src = move[blocked].dst;
              move[size].dst =  scratch;
              /* Scratch move is never blocked.  */
              move[size].links = -1;
              /* Make sure we have valid link.  */
              gcc_assert (move[blocked].links != -1);
              /* Replace src of  blocking move with scratch reg.  */
              move[move[blocked].links].src = scratch;
              /* Make dependent on scratch move occurring.  */
              move[blocked].links = size;
              size=size+1;
            }
        }
      while (blocked != -1);
    }
  return true;
}
9144 
9145 
9146 /* Worker function for `ADJUST_INSN_LENGTH'.  */
9147 /* Modifies the length assigned to instruction INSN
9148    LEN is the initially computed length of the insn.  */
9149 
int
avr_adjust_insn_length (rtx_insn *insn, int len)
{
  rtx *op = recog_data.operand;
  enum attr_adjust_len adjust_len;

  /* Some complex insns don't need length adjustment and therefore
     the length need not/must not be adjusted for these insns.
     It is easier to state this in an insn attribute "adjust_len" than
     to clutter up code here...  */

  if (!NONDEBUG_INSN_P (insn)
      || -1 == recog_memoized (insn))
    {
      /* Debug insns and unrecognizable insns: keep the initial length.  */
      return len;
    }

  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */

  adjust_len = get_attr_adjust_len (insn);

  if (adjust_len == ADJUST_LEN_NO)
    {
      /* Nothing to adjust: The length from attribute "length" is fine.
         This is the default.  */

      return len;
    }

  /* Extract insn's operands.  */

  extract_constrain_insn_cached (insn);

  /* Dispatch to right function.  Each worker overwrites LEN with the
     exact length (in words) of the sequence it would print.  */

  switch (adjust_len)
    {
    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;

    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;

    case ADJUST_LEN_PLUS: avr_out_plus (insn, op, &len); break;
    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;

    case ADJUST_LEN_MOV8:  output_movqi (insn, op, &len); break;
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
    case ADJUST_LEN_SEXT: avr_out_sign_extend (insn, op, &len); break;

    case ADJUST_LEN_SFRACT: avr_out_fract (insn, op, true, &len); break;
    case ADJUST_LEN_UFRACT: avr_out_fract (insn, op, false, &len); break;
    case ADJUST_LEN_ROUND: avr_out_round (insn, op, &len); break;

    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;

    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;

    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;

    /* Calls are 2 words with JMP/CALL, 1 word with RJMP/RCALL only.  */
    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;

    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;

    case ADJUST_LEN_INSV_NOTBIT:
      avr_out_insert_notbit (insn, op, NULL_RTX, &len);
      break;
    case ADJUST_LEN_INSV_NOTBIT_0:
      avr_out_insert_notbit (insn, op, const0_rtx, &len);
      break;
    case ADJUST_LEN_INSV_NOTBIT_7:
      avr_out_insert_notbit (insn, op, GEN_INT (7), &len);
      break;

    default:
      /* All adjust_len values must be handled above.  */
      gcc_unreachable();
    }

  return len;
}
9250 
9251 /* Return nonzero if register REG dead after INSN.  */
9252 
9253 int
9254 reg_unused_after (rtx_insn *insn, rtx reg)
9255 {
9256   return (dead_or_set_p (insn, reg)
9257 	  || (REG_P (reg) && _reg_unused_after (insn, reg)));
9258 }
9259 
9260 /* Return nonzero if REG is not used after INSN.
9261    We assume REG is a reload reg, and therefore does
9262    not live past labels.  It may live past calls or jumps though.  */
9263 
int
_reg_unused_after (rtx_insn *insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && !MEM_P (SET_DEST (set))
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  /* Scan forward until REG is used, set, or we run off the insn chain.  */
  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;
      code = GET_CODE (insn);

#if 0
      /* If this is a label that existed before reload, then the register
	 if dead here.  However, if this is a label added by reorg, then
	 the register may still be live here.  We can't tell the difference,
	 so we just ignore labels completely.  */
      if (code == CODE_LABEL)
	return 1;
      /* else */
#endif

      if (!INSN_P (insn))
	continue;

      /* A jump may take us somewhere REG is live; be conservative.  */
      if (code == JUMP_INSN)
	return 0;

      /* If this is a sequence, we must handle them all at once.
	 We could have for instance a call that sets the target register,
	 and an insn in a delay slot that uses the register.  In this case,
	 we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
	  int retval = 0;

	  for (int i = 0; i < seq->len (); i++)
	    {
	      rtx_insn *this_insn = seq->insn (i);
	      rtx set = single_set (this_insn);

	      /* Track the effective code of the whole sequence.  */
	      if (CALL_P (this_insn))
		code = CALL_INSN;
	      else if (JUMP_P (this_insn))
		{
		  if (INSN_ANNULLED_BRANCH_P (this_insn))
		    return 0;
		  code = JUMP_INSN;
		}

	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
		return 0;
	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
		{
		  if (!MEM_P (SET_DEST (set)))
		    retval = 1;
		  else
		    return 0;
		}
	      if (set == 0
		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
		return 0;
	    }
	  if (retval == 1)
	    return 1;
	  else if (code == JUMP_INSN)
	    return 0;
	}

      if (code == CALL_INSN)
	{
	  /* REG passed to the callee (a USE in the call's function usage)
	     is still live; a call-used REG is dead across the call.  */
	  rtx tem;
	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	    if (GET_CODE (XEXP (tem, 0)) == USE
		&& REG_P (XEXP (XEXP (tem, 0), 0))
		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
	      return 0;
	  if (call_used_regs[REGNO (reg)])
	    return 1;
	}

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
	return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	return !MEM_P (SET_DEST (set));
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	return 0;
    }
  /* End of chain reached without a use: REG is unused.  */
  return 1;
}
9364 
9365 
9366 /* Implement `TARGET_ASM_INTEGER'.  */
9367 /* Target hook for assembling integer objects.  The AVR version needs
9368    special handling for references to certain labels.  */
9369 
static bool
avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
      && text_segment_operand (x, VOIDmode))
    {
      /* Pointer-sized reference into the text section:  wrap it in gs()
         so the assembler/linker treat it as a code (word) address.
         NOTE(review): presumably gs() also lets the linker insert jump
         stubs on large-flash devices — confirm against binutils docs.  */
      fputs ("\t.word\tgs(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);

      return true;
    }
  else if (GET_MODE (x) == PSImode)
    {
      /* This needs binutils 2.23+, see PR binutils/13503  */

      /* Emit the 24-bit value byte-wise with lo8/hi8/hh8 operators.  */
      fputs ("\t.byte\tlo8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thi8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      fputs ("\t.byte\thh8(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")" ASM_COMMENT_START "need binutils PR13503\n", asm_out_file);

      return true;
    }
  else if (CONST_FIXED_P (x))
    {
      /* varasm fails to handle big fixed modes that don't fit in hwi.  */

      /* Emit the constant one byte at a time via QImode subregs.  */
      for (unsigned n = 0; n < size; n++)
        {
          rtx xn = simplify_gen_subreg (QImode, x, GET_MODE (x), n);
          default_assemble_integer (xn, 1, aligned_p);
        }

      return true;
    }

  if (AVR_TINY
      && avr_address_tiny_pm_p (x))
    {
      /* Addresses in Tiny program memory get the flash-mapping offset.  */
      x = plus_constant (Pmode, x, AVR_TINY_PM_OFFSET);
    }

  return default_assemble_integer (x, size, aligned_p);
}
9421 
9422 
9423 /* Implement `TARGET_CLASS_LIKELY_SPILLED_P'.  */
9424 /* Return value is nonzero if pseudos that have been
9425    assigned to registers of class CLASS would likely be spilled
9426    because registers of CLASS are needed for spill registers.  */
9427 
9428 static bool
9429 avr_class_likely_spilled_p (reg_class_t c)
9430 {
9431   return (c != ALL_REGS &&
9432            (AVR_TINY ? 1 : c != ADDW_REGS));
9433 }
9434 
9435 
9436 /* Valid attributes:
9437    progmem   -  Put data to program memory.
9438    signal    -  Make a function to be hardware interrupt.
9439                 After function prologue interrupts remain disabled.
9440    interrupt -  Make a function to be hardware interrupt. Before function
9441                 prologue interrupts are enabled by means of SEI.
9442    naked     -  Don't generate function prologue/epilogue and RET
9443                 instruction.  */
9444 
9445 /* Handle a "progmem" attribute; arguments as in
9446    struct attribute_spec.handler.  */
9447 
9448 static tree
9449 avr_handle_progmem_attribute (tree *node, tree name,
9450 			      tree args ATTRIBUTE_UNUSED,
9451 			      int flags ATTRIBUTE_UNUSED,
9452 			      bool *no_add_attrs)
9453 {
9454   if (DECL_P (*node))
9455     {
9456       if (TREE_CODE (*node) == TYPE_DECL)
9457 	{
9458 	  /* This is really a decl attribute, not a type attribute,
9459 	     but try to handle it for GCC 3.0 backwards compatibility.  */
9460 
9461 	  tree type = TREE_TYPE (*node);
9462 	  tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
9463 	  tree newtype = build_type_attribute_variant (type, attr);
9464 
9465 	  TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
9466 	  TREE_TYPE (*node) = newtype;
9467 	  *no_add_attrs = true;
9468 	}
9469       else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
9470 	{
9471           *no_add_attrs = false;
9472 	}
9473       else
9474 	{
9475 	  warning (OPT_Wattributes, "%qE attribute ignored",
9476 		   name);
9477 	  *no_add_attrs = true;
9478 	}
9479     }
9480 
9481   return NULL_TREE;
9482 }
9483 
9484 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
9485    struct attribute_spec.handler.  */
9486 
9487 static tree
9488 avr_handle_fndecl_attribute (tree *node, tree name,
9489 			     tree args ATTRIBUTE_UNUSED,
9490 			     int flags ATTRIBUTE_UNUSED,
9491 			     bool *no_add_attrs)
9492 {
9493   if (TREE_CODE (*node) != FUNCTION_DECL)
9494     {
9495       warning (OPT_Wattributes, "%qE attribute only applies to functions",
9496 	       name);
9497       *no_add_attrs = true;
9498     }
9499 
9500   return NULL_TREE;
9501 }
9502 
9503 static tree
9504 avr_handle_fntype_attribute (tree *node, tree name,
9505                              tree args ATTRIBUTE_UNUSED,
9506                              int flags ATTRIBUTE_UNUSED,
9507                              bool *no_add_attrs)
9508 {
9509   if (TREE_CODE (*node) != FUNCTION_TYPE)
9510     {
9511       warning (OPT_Wattributes, "%qE attribute only applies to functions",
9512 	       name);
9513       *no_add_attrs = true;
9514     }
9515 
9516   return NULL_TREE;
9517 }
9518 
9519 static tree
9520 avr_handle_absdata_attribute (tree *node, tree name, tree /* args */,
9521                               int /* flags */, bool *no_add)
9522 {
9523   location_t loc = DECL_SOURCE_LOCATION (*node);
9524 
9525   if (AVR_TINY)
9526     {
9527       if (TREE_CODE (*node) != VAR_DECL
9528           || (!TREE_STATIC (*node) && !DECL_EXTERNAL (*node)))
9529         {
9530           warning_at (loc, OPT_Wattributes, "%qE attribute only applies to"
9531                       " variables in static storage", name);
9532           *no_add = true;
9533         }
9534     }
9535   else
9536     {
9537       warning_at (loc, OPT_Wattributes, "%qE attribute only supported"
9538                   " for reduced Tiny cores", name);
9539       *no_add = true;
9540     }
9541 
9542   return NULL_TREE;
9543 }
9544 
static tree
avr_handle_addr_attribute (tree *node, tree name, tree args,
			   int flags ATTRIBUTE_UNUSED, bool *no_add)
{
  /* True for attributes "io" and "io_low", false for "address".  */
  bool io_p = (strncmp (IDENTIFIER_POINTER (name), "io", 2) == 0);
  location_t loc = DECL_SOURCE_LOCATION (*node);

  if (!VAR_P (*node))
    {
      warning_at (loc, OPT_Wattributes, "%qE attribute only applies to "
		  "variables", name);
      *no_add = true;
      return NULL_TREE;
    }

  if (args != NULL_TREE)
    {
      /* Strip a NON_LVALUE_EXPR wrapper so the checks below see the
	 bare argument.  */
      if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
	TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
      tree arg = TREE_VALUE (args);
      if (TREE_CODE (arg) != INTEGER_CST)
	{
	  warning_at (loc, OPT_Wattributes, "%qE attribute allows only an "
		      "integer constant argument", name);
	  *no_add = true;
	}
      /* For io / io_low, the address must satisfy the respective
	 operand predicate, i.e. lie in the (low) I/O address range.  */
      else if (io_p
	       && (!tree_fits_shwi_p (arg)
		   || !(strcmp (IDENTIFIER_POINTER (name), "io_low") == 0
			? low_io_address_operand : io_address_operand)
			 (GEN_INT (TREE_INT_CST_LOW (arg)), QImode)))
	{
	  warning_at (loc, OPT_Wattributes, "%qE attribute address "
		      "out of range", name);
	  *no_add = true;
	}
      else
	{
	  /* Reject a second address-providing attribute on the same decl
	     (e.g. both "io" and "address" with an argument).  */
	  tree attribs = DECL_ATTRIBUTES (*node);
	  const char *names[] = { "io", "io_low", "address", NULL };
	  for (const char **p = names; *p; p++)
	    {
	      tree other = lookup_attribute (*p, attribs);
	      if (other && TREE_VALUE (other))
		{
		  warning_at (loc, OPT_Wattributes,
			      "both %s and %qE attribute provide address",
			      *p, name);
		  *no_add = true;
		  break;
		}
	    }
	}
    }

  /* I/O registers are expected to be declared volatile.  */
  if (*no_add == false && io_p && !TREE_THIS_VOLATILE (*node))
    warning_at (loc, OPT_Wattributes, "%qE attribute on non-volatile variable",
		name);

  return NULL_TREE;
}
9606 
9607 rtx
9608 avr_eval_addr_attrib (rtx x)
9609 {
9610   if (SYMBOL_REF_P (x)
9611       && (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_ADDRESS))
9612     {
9613       tree decl = SYMBOL_REF_DECL (x);
9614       tree attr = NULL_TREE;
9615 
9616       if (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_IO)
9617 	{
9618 	  attr = lookup_attribute ("io", DECL_ATTRIBUTES (decl));
9619           if (!attr || !TREE_VALUE (attr))
9620             attr = lookup_attribute ("io_low", DECL_ATTRIBUTES (decl));
9621 	  gcc_assert (attr);
9622 	}
9623       if (!attr || !TREE_VALUE (attr))
9624 	attr = lookup_attribute ("address", DECL_ATTRIBUTES (decl));
9625       gcc_assert (attr && TREE_VALUE (attr) && TREE_VALUE (TREE_VALUE (attr)));
9626       return GEN_INT (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr))));
9627     }
9628   return x;
9629 }
9630 
9631 
9632 /* AVR attributes.  */
static const struct attribute_spec
avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
    false },
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  /* io / io_low take an optional address argument, `address' requires
     exactly one; all three share avr_handle_addr_attribute.  */
  { "io",        0, 1, true, false, false,  avr_handle_addr_attribute,
    false },
  { "io_low",    0, 1, true, false, false,  avr_handle_addr_attribute,
    false },
  { "address",   1, 1, true, false, false,  avr_handle_addr_attribute,
    false },
  /* Reduced-Tiny only; rejected elsewhere by its handler.  */
  { "absdata",   0, 0, true, false, false,  avr_handle_absdata_attribute,
    false },
  { NULL,        0, 0, false, false, false, NULL, false }
};
9660 
9661 
9662 /* Return true if we support address space AS for the architecture in effect
9663    and false, otherwise.  If LOC is not UNKNOWN_LOCATION then also issue
9664    a respective error.  */
9665 
9666 bool
9667 avr_addr_space_supported_p (addr_space_t as, location_t loc)
9668 {
9669   if (AVR_TINY)
9670     {
9671       if (loc != UNKNOWN_LOCATION)
9672         error_at (loc, "address spaces are not supported for reduced "
9673                   "Tiny devices");
9674       return false;
9675     }
9676   else if (avr_addrspace[as].segment >= avr_n_flash)
9677     {
9678       if (loc != UNKNOWN_LOCATION)
9679         error_at (loc, "address space %qs not supported for devices with "
9680                   "flash size up to %d KiB", avr_addrspace[as].name,
9681                   64 * avr_n_flash);
9682       return false;
9683     }
9684 
9685   return true;
9686 }
9687 
9688 
9689 /* Implement `TARGET_ADDR_SPACE_DIAGNOSE_USAGE'.  */
9690 
9691 static void
9692 avr_addr_space_diagnose_usage (addr_space_t as, location_t loc)
9693 {
9694   (void) avr_addr_space_supported_p (as, loc);
9695 }
9696 
9697 
9698 /* Look if DECL shall be placed in program memory space by
9699    means of attribute `progmem' or some address-space qualifier.
9700    Return non-zero if DECL is data that must end up in Flash and
9701    zero if the data lives in RAM (.bss, .data, .rodata, ...).
9702 
9703    Return 2   if DECL is located in 24-bit flash address-space
9704    Return 1   if DECL is located in 16-bit flash address-space
9705    Return -1  if attribute `progmem' occurs in DECL or ATTRIBUTES
9706    Return 0   otherwise  */
9707 
9708 int
9709 avr_progmem_p (tree decl, tree attributes)
9710 {
9711   tree a;
9712 
9713   if (TREE_CODE (decl) != VAR_DECL)
9714     return 0;
9715 
9716   if (avr_decl_memx_p (decl))
9717     return 2;
9718 
9719   if (avr_decl_flash_p (decl))
9720     return 1;
9721 
9722   if (NULL_TREE
9723       != lookup_attribute ("progmem", attributes))
9724     return -1;
9725 
9726   a = decl;
9727 
9728   do
9729     a = TREE_TYPE(a);
9730   while (TREE_CODE (a) == ARRAY_TYPE);
9731 
9732   if (a == error_mark_node)
9733     return 0;
9734 
9735   if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
9736     return -1;
9737 
9738   return 0;
9739 }
9740 
9741 
9742 /* Return true if DECL has attribute `absdata' set.  This function should
9743    only be used for AVR_TINY.  */
9744 
9745 static bool
9746 avr_decl_absdata_p (tree decl, tree attributes)
9747 {
9748   return (TREE_CODE (decl) == VAR_DECL
9749           && NULL_TREE != lookup_attribute ("absdata", attributes));
9750 }
9751 
9752 
9753 /* Scan type TYP for pointer references to address space ASn.
9754    Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
9755    the AS are also declared to be CONST.
9756    Otherwise, return the respective address space, i.e. a value != 0.  */
9757 
9758 static addr_space_t
9759 avr_nonconst_pointer_addrspace (tree typ)
9760 {
9761   while (ARRAY_TYPE == TREE_CODE (typ))
9762     typ = TREE_TYPE (typ);
9763 
9764   if (POINTER_TYPE_P (typ))
9765     {
9766       addr_space_t as;
9767       tree target = TREE_TYPE (typ);
9768 
9769       /* Pointer to function: Test the function's return type.  */
9770 
9771       if (FUNCTION_TYPE == TREE_CODE (target))
9772         return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
9773 
9774       /* "Ordinary" pointers... */
9775 
9776       while (TREE_CODE (target) == ARRAY_TYPE)
9777         target = TREE_TYPE (target);
9778 
9779       /* Pointers to non-generic address space must be const.  */
9780 
9781       as = TYPE_ADDR_SPACE (target);
9782 
9783       if (!ADDR_SPACE_GENERIC_P (as)
9784           && !TYPE_READONLY (target)
9785           && avr_addr_space_supported_p (as))
9786         {
9787           return as;
9788         }
9789 
9790       /* Scan pointer's target type.  */
9791 
9792       return avr_nonconst_pointer_addrspace (target);
9793     }
9794 
9795   return ADDR_SPACE_GENERIC;
9796 }
9797 
9798 
9799 /* Sanity check NODE so that all pointers targeting non-generic address spaces
9800    go along with CONST qualifier.  Writing to these address spaces should
9801    be detected and complained about as early as possible.  */
9802 
9803 static bool
9804 avr_pgm_check_var_decl (tree node)
9805 {
9806   const char *reason = NULL;
9807 
9808   addr_space_t as = ADDR_SPACE_GENERIC;
9809 
9810   gcc_assert (as == 0);
9811 
9812   if (avr_log.progmem)
9813     avr_edump ("%?: %t\n", node);
9814 
9815   switch (TREE_CODE (node))
9816     {
9817     default:
9818       break;
9819 
9820     case VAR_DECL:
9821       if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
9822         reason = _("variable");
9823       break;
9824 
9825     case PARM_DECL:
9826       if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
9827         reason = _("function parameter");
9828       break;
9829 
9830     case FIELD_DECL:
9831       if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
9832         reason = _("structure field");
9833       break;
9834 
9835     case FUNCTION_DECL:
9836       if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
9837           as)
9838         reason = _("return type of function");
9839       break;
9840 
9841     case POINTER_TYPE:
9842       if (as = avr_nonconst_pointer_addrspace (node), as)
9843         reason = _("pointer");
9844       break;
9845     }
9846 
9847   if (reason)
9848     {
9849       if (TYPE_P (node))
9850         error ("pointer targeting address space %qs must be const in %qT",
9851                avr_addrspace[as].name, node);
9852       else
9853         error ("pointer targeting address space %qs must be const"
9854                " in %s %q+D",
9855                avr_addrspace[as].name, reason, node);
9856     }
9857 
9858   return reason == NULL;
9859 }
9860 
9861 
9862 /* Add the section attribute if the variable is in progmem.  */
9863 
static void
avr_insert_attributes (tree node, tree *attributes)
{
  /* Diagnose non-const pointers to non-generic address spaces.  */
  avr_pgm_check_var_decl (node);

  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
    {
      addr_space_t as;
      tree node0 = node;

      /* For C++, we have to peel arrays in order to get correct
         determination of readonlyness.  */

      do
        node0 = TREE_TYPE (node0);
      while (TREE_CODE (node0) == ARRAY_TYPE);

      if (error_mark_node == node0)
        return;

      as = TYPE_ADDR_SPACE (TREE_TYPE (node));

      /* Data placed in flash must be const; name the culprit in the
         error:  the address-space qualifier if there is one, else the
         progmem attribute.  */

      if (!TYPE_READONLY (node0)
          && !TREE_READONLY (node))
        {
          const char *reason = "__attribute__((progmem))";

          if (!ADDR_SPACE_GENERIC_P (as))
            reason = avr_addrspace[as].name;

          if (avr_log.progmem)
            avr_edump ("\n%?: %t\n%t\n", node, node0);

          error ("variable %q+D must be const in order to be put into"
                 " read-only section by means of %qs", node, reason);
        }
    }
}
9904 
9905 
9906 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'.  */
9907 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'.  */
9908 /* Track need of __do_clear_bss.  */
9909 
void
avr_asm_output_aligned_decl_common (FILE * stream,
                                    tree decl,
                                    const char *name,
                                    unsigned HOST_WIDE_INT size,
                                    unsigned int align, bool local_p)
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  /* Symbols carrying an io / io_low / address attribute do not get
     storage in .bss or common:  emit an absolute symbol definition
     (NAME = <address>) instead, plus .globl for non-static ones.  */

  if (mem != NULL_RTX && MEM_P (mem)
      && SYMBOL_REF_P ((symbol = XEXP (mem, 0)))
      && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
    {
      if (!local_p)
	{
	  fprintf (stream, "\t.globl\t");
	  assemble_name (stream, name);
	  fprintf (stream, "\n");
	}
      if (SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS)
	{
	  assemble_name (stream, name);
	  fprintf (stream, " = %ld\n",
		   (long) INTVAL (avr_eval_addr_attrib (symbol)));
	}
      else if (local_p)
	/* A static io symbol without an address has no external
	   definition that could supply one.  */
	error_at (DECL_SOURCE_LOCATION (decl),
		  "static IO declaration for %q+D needs an address", decl);
      return;
    }

  /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
     There is no need to trigger __do_clear_bss code for them.  */

  if (!STR_PREFIX_P (name, "__gnu_lto"))
    avr_need_clear_bss_p = true;

  if (local_p)
    ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
  else
    ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
}
9953 
/* Emit an aligned BSS object via DEFAULT_FUNC, unless DECL carries an
   io / io_low / address attribute, in which case the symbol is output
   as an absolute address by avr_asm_output_aligned_decl_common.  */

void
avr_asm_asm_output_aligned_bss (FILE *file, tree decl, const char *name,
				unsigned HOST_WIDE_INT size, int align,
				void (*default_func)
				  (FILE *, tree, const char *,
				   unsigned HOST_WIDE_INT, int))
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  if (mem != NULL_RTX && MEM_P (mem)
      && SYMBOL_REF_P ((symbol = XEXP (mem, 0)))
      && (SYMBOL_REF_FLAGS (symbol) & (SYMBOL_FLAG_IO | SYMBOL_FLAG_ADDRESS)))
    {
      /* A definition (as opposed to a mere declaration) of an io
	 symbol must pin down its address.  */
      if (!(SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_ADDRESS))
	error_at (DECL_SOURCE_LOCATION (decl),
		  "IO definition for %q+D needs an address", decl);
      avr_asm_output_aligned_decl_common (file, decl, name, size, align, false);
    }
  else
    default_func (file, decl, name, size, align);
}
9976 
9977 
9978 /* Unnamed section callback for data_section
9979    to track need of __do_copy_data.  */
9980 
9981 static void
9982 avr_output_data_section_asm_op (const void *data)
9983 {
9984   avr_need_copy_data_p = true;
9985 
9986   /* Dispatch to default.  */
9987   output_section_asm_op (data);
9988 }
9989 
9990 
9991 /* Unnamed section callback for bss_section
9992    to track need of __do_clear_bss.  */
9993 
9994 static void
9995 avr_output_bss_section_asm_op (const void *data)
9996 {
9997   avr_need_clear_bss_p = true;
9998 
9999   /* Dispatch to default.  */
10000   output_section_asm_op (data);
10001 }
10002 
10003 
10004 /* Unnamed section callback for progmem*.data sections.  */
10005 
10006 static void
10007 avr_output_progmem_section_asm_op (const void *data)
10008 {
10009   fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
10010            (const char*) data);
10011 }
10012 
10013 
10014 /* Implement `TARGET_ASM_INIT_SECTIONS'.  */
10015 
10016 static void
10017 avr_asm_init_sections (void)
10018 {
10019   /* Override section callbacks to keep track of `avr_need_clear_bss_p'
10020      resp. `avr_need_copy_data_p'.  */
10021 
10022   readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
10023   data_section->unnamed.callback = avr_output_data_section_asm_op;
10024   bss_section->unnamed.callback = avr_output_bss_section_asm_op;
10025 }
10026 
10027 
10028 /* Implement `TARGET_ASM_NAMED_SECTION'.  */
10029 /* Track need of __do_clear_bss, __do_copy_data for named sections.  */
10030 
10031 static void
10032 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
10033 {
10034   if (flags & AVR_SECTION_PROGMEM)
10035     {
10036       addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
10037       const char *old_prefix = ".rodata";
10038       const char *new_prefix = avr_addrspace[as].section_name;
10039 
10040       if (STR_PREFIX_P (name, old_prefix))
10041         {
10042           const char *sname = ACONCAT ((new_prefix,
10043                                         name + strlen (old_prefix), NULL));
10044           default_elf_asm_named_section (sname, flags, decl);
10045           return;
10046         }
10047 
10048       default_elf_asm_named_section (new_prefix, flags, decl);
10049       return;
10050     }
10051 
10052   if (!avr_need_copy_data_p)
10053     avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
10054                             || STR_PREFIX_P (name, ".rodata")
10055                             || STR_PREFIX_P (name, ".gnu.linkonce.d"));
10056 
10057   if (!avr_need_clear_bss_p)
10058     avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
10059 
10060   default_elf_asm_named_section (name, flags, decl);
10061 }
10062 
10063 
10064 /* Implement `TARGET_SECTION_TYPE_FLAGS'.  */
10065 
10066 static unsigned int
10067 avr_section_type_flags (tree decl, const char *name, int reloc)
10068 {
10069   unsigned int flags = default_section_type_flags (decl, name, reloc);
10070 
10071   if (STR_PREFIX_P (name, ".noinit"))
10072     {
10073       if (decl && TREE_CODE (decl) == VAR_DECL
10074 	  && DECL_INITIAL (decl) == NULL_TREE)
10075 	flags |= SECTION_BSS;  /* @nobits */
10076       else
10077 	warning (0, "only uninitialized variables can be placed in the "
10078 		 ".noinit section");
10079     }
10080 
10081   if (decl && DECL_P (decl)
10082       && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
10083     {
10084       addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
10085 
10086       /* Attribute progmem puts data in generic address space.
10087          Set section flags as if it was in __flash to get the right
10088          section prefix in the remainder.  */
10089 
10090       if (ADDR_SPACE_GENERIC_P (as))
10091         as = ADDR_SPACE_FLASH;
10092 
10093       flags |= as * SECTION_MACH_DEP;
10094       flags &= ~SECTION_WRITE;
10095       flags &= ~SECTION_BSS;
10096     }
10097 
10098   return flags;
10099 }
10100 
10101 
10102 /* A helper for the next function.  NODE is a decl that is associated with
10103    a symbol.  Return TRUE if the respective object may be accessed by LDS.
10104    There might still be other reasons for why LDS is not appropriate.
10105    This function is only appropriate for AVR_TINY.  */
10106 
10107 static bool
10108 avr_decl_maybe_lds_p (tree node)
10109 {
10110   if (!node
10111       || TREE_CODE (node) != VAR_DECL
10112       || DECL_SECTION_NAME (node) != NULL)
10113     return false;
10114 
10115   /* Don't use LDS for objects that go to .rodata.  The current default
10116      linker description file still locates .rodata in RAM, but this is not
10117      a must.  A better linker script would just keep .rodata in flash and
10118      add an offset of 0x4000 to the VMA.  Hence avoid LDS for such data.  */
10119 
10120   if (TREE_READONLY (node))
10121     return false;
10122 
10123   // C++ requires peeling arrays.
10124 
10125   do
10126     node = TREE_TYPE (node);
10127   while (ARRAY_TYPE == TREE_CODE (node));
10128 
10129   return (node != error_mark_node
10130           && !TYPE_READONLY (node));
10131 }
10132 
10133 
10134 /* Implement `TARGET_ENCODE_SECTION_INFO'.  */
10135 
static void
avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
{
  /* Set below when DECL carries an io / io_low / address attribute
     with an argument; also read by the AVR_TINY code at the end.  */
  tree addr_attr = NULL_TREE;

  /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
     readily available, see PR34734.  So we postpone the warning
     about uninitialized data in program memory section until here.  */

  if (new_decl_p
      && decl && DECL_P (decl)
      && !DECL_EXTERNAL (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      if (!TREE_READONLY (decl))
        {
          // This might happen with C++ if stuff needs constructing.
          error ("variable %q+D with dynamic initialization put "
                 "into program memory area", decl);
        }
      else if (NULL_TREE == DECL_INITIAL (decl))
        {
          // Don't warn for (implicit) aliases like in PR80462.
          tree asmname = DECL_ASSEMBLER_NAME (decl);
          varpool_node *node = varpool_node::get_for_asmname (asmname);
          bool alias_p = node && node->alias;

          if (!alias_p)
            warning (OPT_Wuninitialized, "uninitialized variable %q+D put "
                     "into program memory area", decl);
        }
    }

  default_encode_section_info (decl, rtl, new_decl_p);

  /* Tag the decl's SYMBOL_REF with its address space and the
     io / io_low / address flags.  */

  if (decl && DECL_P (decl)
      && TREE_CODE (decl) != FUNCTION_DECL
      && MEM_P (rtl)
      && SYMBOL_REF_P (XEXP (rtl, 0)))
    {
      rtx sym = XEXP (rtl, 0);
      tree type = TREE_TYPE (decl);
      tree attr = DECL_ATTRIBUTES (decl);
      if (type == error_mark_node)
	return;

      addr_space_t as = TYPE_ADDR_SPACE (type);

      /* PSTR strings are in generic space but located in flash:
         patch address space.  */

      if (!AVR_TINY
          && -1 == avr_progmem_p (decl, attr))
        as = ADDR_SPACE_FLASH;

      AVR_SYMBOL_SET_ADDR_SPACE (sym, as);

      tree io_low_attr = lookup_attribute ("io_low", attr);
      tree io_attr = lookup_attribute ("io", attr);

      /* Pick the attribute that supplies the address.
	 NOTE(review): when io_low provides the value, addr_attr is
	 taken from io_attr, not io_low_attr — looks suspicious;
	 confirm the intent against avr_eval_addr_attrib.  */
      if (io_low_attr
	  && TREE_VALUE (io_low_attr) && TREE_VALUE (TREE_VALUE (io_low_attr)))
	addr_attr = io_attr;
      else if (io_attr
	       && TREE_VALUE (io_attr) && TREE_VALUE (TREE_VALUE (io_attr)))
	addr_attr = io_attr;
      else
	addr_attr = lookup_attribute ("address", attr);
      /* Flag symbols whose address lies in the low I/O range.  */
      if (io_low_attr
	  || (io_attr && addr_attr
              && low_io_address_operand
                  (GEN_INT (TREE_INT_CST_LOW
                            (TREE_VALUE (TREE_VALUE (addr_attr)))), QImode)))
	SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO_LOW;
      if (io_attr || io_low_attr)
	SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_IO;
      /* If we have an (io) address attribute specification, but the variable
	 is external, treat the address as only a tentative definition
	 to be used to determine if an io port is in the lower range, but
	 don't use the exact value for constant propagation.  */
      if (addr_attr && !DECL_EXTERNAL (decl))
	SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_ADDRESS;
    }

  /* AVR_TINY-specific symbol tagging.  */

  if (AVR_TINY
      && decl
      && VAR_DECL == TREE_CODE (decl)
      && MEM_P (rtl)
      && SYMBOL_REF_P (XEXP (rtl, 0)))
    {
      rtx sym = XEXP (rtl, 0);
      bool progmem_p = -1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl));

      if (progmem_p)
        {
          // Tag symbols for later addition of 0x4000 (AVR_TINY_PM_OFFSET).
          SYMBOL_REF_FLAGS (sym) |= AVR_SYMBOL_FLAG_TINY_PM;
        }

      if (avr_decl_absdata_p (decl, DECL_ATTRIBUTES (decl))
          || (TARGET_ABSDATA
              && !progmem_p
              && !addr_attr
              && avr_decl_maybe_lds_p (decl))
          || (addr_attr
              // If addr_attr is non-null, it has an argument.  Peek into it.
              && TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (addr_attr))) < 0xc0))
        {
          // May be accessed by LDS / STS.
          SYMBOL_REF_FLAGS (sym) |= AVR_SYMBOL_FLAG_TINY_ABSDATA;
        }

      if (progmem_p
          && avr_decl_absdata_p (decl, DECL_ATTRIBUTES (decl)))
        {
          error ("%q+D has incompatible attributes %qs and %qs",
                 decl, "progmem", "absdata");
        }
    }
}
10256 
10257 
10258 /* Implement `TARGET_ASM_SELECT_SECTION' */
10259 
static section *
avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
{
  section * sect = default_elf_select_section (decl, reloc, align);

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* __progmem__ goes in generic space but shall be allocated to
         .progmem.data  */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      if (sect->common.flags & SECTION_NAMED)
        {
          const char * name = sect->named.name;
          const char * old_prefix = ".rodata";
          const char * new_prefix = avr_addrspace[as].section_name;

          /* Rename a .rodata* section to the progmem prefix of AS,
             keeping any suffix of the original name.  */

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *sname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              return get_section (sname,
                                  sect->common.flags & ~SECTION_DECLARED,
                                  sect->named.decl);
            }
        }

      /* Lazily create the unnamed progmem section for AS.  */

      if (!progmem_section[as])
        {
          progmem_section[as]
            = get_unnamed_section (0, avr_output_progmem_section_asm_op,
                                   avr_addrspace[as].section_name);
        }

      return progmem_section[as];
    }

  return sect;
}
10304 
10305 /* Implement `TARGET_ASM_FILE_START'.  */
10306 /* Outputs some text at the start of each assembler file.  */
10307 
static void
avr_file_start (void)
{
  /* Offset subtracted to map an SFR's memory address to its I/O
     address as used with IN / OUT.  */
  int sfr_offset = avr_arch->sfr_offset;

  if (avr_arch->asm_only)
    error ("architecture %qs supported for assembler only", avr_mmcu);

  default_file_start ();

  /* Print I/O addresses of some SFRs used with IN and OUT.  */

  if (AVR_HAVE_SPH)
    fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);

  fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
  fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
  if (AVR_HAVE_RAMPZ)
    fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
  if (AVR_HAVE_RAMPY)
    fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
  if (AVR_HAVE_RAMPX)
    fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
  if (AVR_HAVE_RAMPD)
    fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
  if (AVR_XMEGA || AVR_TINY)
    fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
  /* Register numbers of the fixed temporary and zero registers.  */
  fprintf (asm_out_file, "__tmp_reg__ = %d\n", AVR_TMP_REGNO);
  fprintf (asm_out_file, "__zero_reg__ = %d\n", AVR_ZERO_REGNO);
}
10338 
10339 
10340 /* Implement `TARGET_ASM_FILE_END'.  */
10341 /* Outputs to the stdio stream FILE some
10342    appropriate text to go at the end of an assembler file.  */
10343 
10344 static void
10345 avr_file_end (void)
10346 {
10347   /* Output these only if there is anything in the
10348      .data* / .rodata* / .gnu.linkonce.* resp. .bss* or COMMON
10349      input section(s) - some code size can be saved by not
10350      linking in the initialization code from libgcc if resp.
10351      sections are empty, see PR18145.  */
10352 
10353   if (avr_need_copy_data_p)
10354     fputs (".global __do_copy_data\n", asm_out_file);
10355 
10356   if (avr_need_clear_bss_p)
10357     fputs (".global __do_clear_bss\n", asm_out_file);
10358 }
10359 
10360 
10361 /* Worker function for `ADJUST_REG_ALLOC_ORDER'.  */
10362 /* Choose the order in which to allocate hard registers for
10363    pseudo-registers local to a basic block.
10364 
10365    Store the desired register order in the array `reg_alloc_order'.
10366    Element 0 should be the register to allocate first; element 1, the
10367    next register; and so on.  */
10368 
void
avr_adjust_reg_alloc_order (void)
{
  /* Default order for ordinary cores.  Entries 32..35 are the frame
     pointer / argument pointer pseudos at the end of all tables.  */
  static const int order_0[] =
    {
      24, 25,
      18, 19, 20, 21, 22, 23,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
    };
  /* Default order for reduced Tiny cores (registers 0..15 do not
     physically exist there and come last).  */
  static const int tiny_order_0[] = {
    20, 21,
    22, 23,
    24, 25,
    30, 31,
    26, 27,
    28, 29,
    19, 18,
    16, 17,
    32, 33, 34, 35,
    15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
  };
  /* Orders selected by -morder1.  */
  static const int order_1[] =
    {
      18, 19, 20, 21, 22, 23, 24, 25,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      0, 1,
      32, 33, 34, 35
    };
  static const int tiny_order_1[] = {
    22, 23,
    24, 25,
    30, 31,
    26, 27,
    28, 29,
    21, 20, 19, 18,
    16, 17,
    32, 33, 34, 35,
    15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
  };
  /* Order selected by -morder2 (no Tiny variant; Tiny falls back to
     tiny_order_0 below).  */
  static const int order_2[] =
    {
      25, 24, 23, 22, 21, 20, 19, 18,
      30, 31,
      26, 27, 28, 29,
      17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
      1, 0,
      32, 33, 34, 35
    };

  /* Select specific register allocation order.
     Tiny Core (ATtiny4/5/9/10/20/40) devices have only 16 registers,
     so different allocation order should be used.  */

  const int *order = (TARGET_ORDER_1 ? (AVR_TINY ? tiny_order_1 : order_1)
                      : TARGET_ORDER_2 ? (AVR_TINY ? tiny_order_0 : order_2)
                      : (AVR_TINY ? tiny_order_0 : order_0));

  /* All tables have the same number of entries as order_0.  */
  for (size_t i = 0; i < ARRAY_SIZE (order_0); ++i)
    reg_alloc_order[i] = order[i];
}
10434 }
10435 
10436 
10437 /* Implement `TARGET_REGISTER_MOVE_COST' */
10438 
10439 static int
10440 avr_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
10441                         reg_class_t from, reg_class_t to)
10442 {
10443   return (from == STACK_REG ? 6
10444           : to == STACK_REG ? 12
10445           : 2);
10446 }
10447 
10448 
10449 /* Implement `TARGET_MEMORY_MOVE_COST' */
10450 
10451 static int
10452 avr_memory_move_cost (machine_mode mode,
10453                       reg_class_t rclass ATTRIBUTE_UNUSED,
10454                       bool in ATTRIBUTE_UNUSED)
10455 {
10456   return (mode == QImode ? 2
10457           : mode == HImode ? 4
10458           : mode == SImode ? 8
10459           : mode == SFmode ? 8
10460           : 16);
10461 }
10462 
10463 
10464 /* Cost for mul highpart.  X is a LSHIFTRT, i.e. the outer TRUNCATE is
10465    already stripped off.  */
10466 
10467 static int
10468 avr_mul_highpart_cost (rtx x, int)
10469 {
10470   if (AVR_HAVE_MUL
10471       && LSHIFTRT == GET_CODE (x)
10472       && MULT == GET_CODE (XEXP (x, 0))
10473       && CONST_INT_P (XEXP (x, 1)))
10474     {
10475       // This is the wider mode.
10476       machine_mode mode = GET_MODE (x);
10477 
10478       // The middle-end might still have PR81444, i.e. it is calling the cost
10479       // functions with strange modes.  Fix this now by also considering
10480       // PSImode (should actually be SImode instead).
10481       if (HImode == mode || PSImode == mode || SImode == mode)
10482         {
10483           return COSTS_N_INSNS (2);
10484         }
10485     }
10486 
10487   return 10000;
10488 }
10489 
10490 
10491 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
10492    cost of an RTX operand given its context.  X is the rtx of the
10493    operand, MODE is its mode, and OUTER is the rtx_code of this
10494    operand's parent operator.  */
10495 
static int
avr_operand_rtx_cost (rtx x, machine_mode mode, enum rtx_code outer,
		      int opno, bool speed)
{
  enum rtx_code code = GET_CODE (x);
  int total;

  switch (code)
    {
    /* Registers (and subregs thereof) are free as operands.  */
    case REG:
    case SUBREG:
      return 0;

    /* Constant operands cost one insn per byte of MODE.  */
    case CONST_INT:
    case CONST_FIXED:
    case CONST_DOUBLE:
      return COSTS_N_INSNS (GET_MODE_SIZE (mode));

    default:
      break;
    }

  /* Anything else:  defer to the full cost computation.  */
  total = 0;
  avr_rtx_costs (x, mode, outer, opno, &total, speed);
  return total;
}
10522 
10523 /* Worker function for AVR backend's rtx_cost function.
10524    X is rtx expression whose cost is to be calculated.
10525    Return true if the complete cost has been computed.
10526    Return false if subexpressions should be scanned.
10527    In either case, *TOTAL contains the cost result.  */
10528 
static bool
avr_rtx_costs_1 (rtx x, machine_mode mode, int outer_code,
                 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
{
  enum rtx_code code = GET_CODE (x);
  HOST_WIDE_INT val;

  switch (code)
    {
    case CONST_INT:
    case CONST_FIXED:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
      /* Immediate constants are as cheap as registers.  */
      *total = 0;
      return true;

    case MEM:
      /* Memory access: roughly one instruction per byte moved.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      return true;

    case NEG:
      switch (mode)
	{
	case QImode:
	case SFmode:
	  *total = COSTS_N_INSNS (1);
	  break;

        case HImode:
        case PSImode:
        case SImode:
          /* Multi-byte negate: complement plus carry adjust per byte.  */
          *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
          break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ABS:
      switch (mode)
	{
	case QImode:
	case SFmode:
	  *total = COSTS_N_INSNS (1);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case NOT:
      /* One COM per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ZERO_EXTEND:
      /* Cost of clearing the added high bytes.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
				      code, 0, speed);
      return true;

    case SIGN_EXTEND:
      /* Like ZERO_EXTEND plus 2 insns to replicate the sign bit.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
				      code, 0, speed);
      return true;

    case PLUS:
      switch (mode)
	{
	case QImode:
          if (AVR_HAVE_MUL
              && MULT == GET_CODE (XEXP (x, 0))
              && register_operand (XEXP (x, 1), QImode))
            {
              /* multiply-add */
              *total = COSTS_N_INSNS (speed ? 4 : 3);
              /* multiply-add with constant: will be split and load constant. */
              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
                *total = COSTS_N_INSNS (1) + *total;
              return true;
            }
	  *total = COSTS_N_INSNS (1);
	  if (!CONST_INT_P (XEXP (x, 1)))
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
	  break;

	case HImode:
          if (AVR_HAVE_MUL
              && (MULT == GET_CODE (XEXP (x, 0))
                  || ASHIFT == GET_CODE (XEXP (x, 0)))
              && register_operand (XEXP (x, 1), HImode)
              && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
                  || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
            {
              /* multiply-add */
              *total = COSTS_N_INSNS (speed ? 5 : 4);
              /* multiply-add with constant: will be split and load constant. */
              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
                *total = COSTS_N_INSNS (1) + *total;
              return true;
            }
	  if (!CONST_INT_P (XEXP (x, 1)))
	    {
	      *total = COSTS_N_INSNS (2);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  /* Small constants fit ADIW/SBIW.  */
	  else if (IN_RANGE (INTVAL (XEXP (x, 1)), -63, 63))
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (2);
	  break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (3);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
                                              speed);
            }
          else if (IN_RANGE (INTVAL (XEXP (x, 1)), -63, 63))
            *total = COSTS_N_INSNS (2);
          else
            *total = COSTS_N_INSNS (3);
          break;

	case SImode:
	  if (!CONST_INT_P (XEXP (x, 1)))
	    {
	      *total = COSTS_N_INSNS (4);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else if (IN_RANGE (INTVAL (XEXP (x, 1)), -63, 63))
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (4);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case MINUS:
      if (AVR_HAVE_MUL
          && QImode == mode
          && register_operand (XEXP (x, 0), QImode)
          && MULT == GET_CODE (XEXP (x, 1)))
        {
          /* multiply-sub */
          *total = COSTS_N_INSNS (speed ? 4 : 3);
          /* multiply-sub with constant: will be split and load constant. */
          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
            *total = COSTS_N_INSNS (1) + *total;
          return true;
        }
      if (AVR_HAVE_MUL
          && HImode == mode
          && register_operand (XEXP (x, 0), HImode)
          && (MULT == GET_CODE (XEXP (x, 1))
              || ASHIFT == GET_CODE (XEXP (x, 1)))
          && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
              || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
        {
          /* multiply-sub */
          *total = COSTS_N_INSNS (speed ? 5 : 4);
          /* multiply-sub with constant: will be split and load constant. */
          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
            *total = COSTS_N_INSNS (1) + *total;
          return true;
        }
      /* FALLTHRU */
    case AND:
    case IOR:
      if (IOR == code
          && HImode == mode
          && ASHIFT == GET_CODE (XEXP (x, 0)))
        {
          *total = COSTS_N_INSNS (2);
          // Just a rough estimate.  If we see no sign- or zero-extend,
          // then increase the cost a little bit.
          if (REG_P (XEXP (XEXP (x, 0), 0)))
            *total += COSTS_N_INSNS (1);
          if (REG_P (XEXP (x, 1)))
            *total += COSTS_N_INSNS (1);
          return true;
        }
      /* Plain bitwise op / subtract: one insn per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      if (!CONST_INT_P (XEXP (x, 1)))
	*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case XOR:
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case MULT:
      switch (mode)
	{
	case QImode:
	  if (AVR_HAVE_MUL)
	    *total = COSTS_N_INSNS (!speed ? 3 : 4);
	  else if (!speed)
	    /* Library call: cost of the CALL/RCALL itself.  */
	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
	  else
	    return false;
	  break;

	case HImode:
	  if (AVR_HAVE_MUL)
            {
              rtx op0 = XEXP (x, 0);
              rtx op1 = XEXP (x, 1);
              enum rtx_code code0 = GET_CODE (op0);
              enum rtx_code code1 = GET_CODE (op1);
              bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
              bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;

              /* Cheaper widening-multiply forms first, then the
                 general HImode multiply as fallback.  */
              if (ex0
                  && (u8_operand (op1, HImode)
                      || s8_operand (op1, HImode)))
                {
                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
                  return true;
                }
              if (ex0
                  && register_operand (op1, HImode))
                {
                  *total = COSTS_N_INSNS (!speed ? 5 : 8);
                  return true;
                }
              else if (ex0 || ex1)
                {
                  *total = COSTS_N_INSNS (!speed ? 3 : 5);
                  return true;
                }
              else if (register_operand (op0, HImode)
                       && (u8_operand (op1, HImode)
                           || s8_operand (op1, HImode)))
                {
                  *total = COSTS_N_INSNS (!speed ? 6 : 9);
                  return true;
                }
              else
                *total = COSTS_N_INSNS (!speed ? 7 : 10);
            }
	  else if (!speed)
	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
	  else
	    return false;
	  break;

        case PSImode:
          if (!speed)
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            *total = 10;
          break;

	case SImode:
	case DImode:
	  if (AVR_HAVE_MUL)
            {
              if (!speed)
                {
                  /* Add some additional costs besides CALL like moves etc.  */

                  *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
                }
              else
                {
                  /* Just a rough estimate.  Even with -O2 we don't want bulky
                     code expanded inline.  */

                  *total = COSTS_N_INSNS (25);
                }
            }
          else
            {
              if (speed)
                *total = COSTS_N_INSNS (300);
              else
                /* Add some additional costs besides CALL like moves etc.  */
                *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
            }

	  if (mode == DImode)
	    *total *= 2;

	  return true;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      return true;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      /* Division is always a library call on AVR.  */
      if (!speed)
        *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      else
        *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      /* For div/mod with const-int divisor we have at least the cost of
         loading the divisor. */
      if (CONST_INT_P (XEXP (x, 1)))
        *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
      /* Add some overall penalty for clobbering and moving around registers */
      *total += COSTS_N_INSNS (2);
      return true;

    case ROTATE:
      /* Only specific constant rotate amounts get a cost here.
         NOTE(review): for any other amount *total is not written and keeps
         whatever value the caller preset -- confirm this is intended.  */
      switch (mode)
	{
	case QImode:
	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
	    *total = COSTS_N_INSNS (1);

	  break;

	case HImode:
	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
	    *total = COSTS_N_INSNS (3);

	  break;

	case SImode:
	  if (CONST_INT_P (XEXP (x, 1)))
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 8:
	      case 24:
		*total = COSTS_N_INSNS (5);
		break;
	      case 16:
		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
		break;
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ASHIFT:
      switch (mode)
	{
	case QImode:
	  if (!CONST_INT_P (XEXP (x, 1)))
	    {
	      /* Variable shift count: loop at runtime.  */
	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    {
	      val = INTVAL (XEXP (x, 1));
	      if (val == 7)
		*total = COSTS_N_INSNS (3);
	      else if (val >= 0 && val <= 7)
		*total = COSTS_N_INSNS (val);
	      else
		*total = COSTS_N_INSNS (1);
	    }
	  break;

	case HImode:
          if (AVR_HAVE_MUL)
            {
              if (const_2_to_7_operand (XEXP (x, 1), HImode)
                  && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
                      || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
                {
                  /* Shift of an extended value: done via MUL.  */
                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
                  return true;
                }
            }

          if (const1_rtx == (XEXP (x, 1))
              && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
            {
              *total = COSTS_N_INSNS (2);
              return true;
            }

	  if (!CONST_INT_P (XEXP (x, 1)))
	    {
	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
	      case 8:
		*total = COSTS_N_INSNS (2);
		break;
	      case 9:
		*total = COSTS_N_INSNS (3);
		break;
	      case 2:
	      case 3:
	      case 10:
	      case 15:
		*total = COSTS_N_INSNS (4);
		break;
	      case 7:
	      case 11:
	      case 12:
		*total = COSTS_N_INSNS (5);
		break;
	      case 4:
		*total = COSTS_N_INSNS (!speed ? 5 : 8);
		break;
	      case 6:
		*total = COSTS_N_INSNS (!speed ? 5 : 9);
		break;
	      case 5:
		*total = COSTS_N_INSNS (!speed ? 5 : 10);
		break;
	      default:
	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
						speed);
	      }
	  break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (3);
                break;
              case 23:
                *total = COSTS_N_INSNS (5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

	case SImode:
	  if (!CONST_INT_P (XEXP (x, 1)))
	    {
	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 24:
		*total = COSTS_N_INSNS (3);
		break;
	      case 1:
	      case 8:
	      case 16:
		*total = COSTS_N_INSNS (4);
		break;
	      case 31:
		*total = COSTS_N_INSNS (6);
		break;
	      case 2:
		*total = COSTS_N_INSNS (!speed ? 7 : 8);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 7 : 113);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
						speed);
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case ASHIFTRT:
      switch (mode)
	{
	case QImode:
	  if (!CONST_INT_P (XEXP (x, 1)))
	    {
	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    {
	      val = INTVAL (XEXP (x, 1));
	      if (val == 6)
		*total = COSTS_N_INSNS (4);
	      else if (val == 7)
		*total = COSTS_N_INSNS (2);
	      else if (val >= 0 && val <= 7)
		*total = COSTS_N_INSNS (val);
	      else
		*total = COSTS_N_INSNS (1);
	    }
	  break;

	case HImode:
	  if (!CONST_INT_P (XEXP (x, 1)))
	    {
	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (2);
		break;
	      case 15:
		*total = COSTS_N_INSNS (3);
		break;
	      case 2:
	      case 7:
              case 8:
              case 9:
		*total = COSTS_N_INSNS (4);
		break;
              case 10:
	      case 14:
		*total = COSTS_N_INSNS (5);
		break;
              case 11:
                *total = COSTS_N_INSNS (!speed ? 5 : 6);
		break;
              case 12:
                *total = COSTS_N_INSNS (!speed ? 5 : 7);
		break;
              case 6:
	      case 13:
                *total = COSTS_N_INSNS (!speed ? 5 : 8);
		break;
	      default:
	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
						speed);
	      }
	  break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (3);
                break;
              case 16:
              case 8:
                *total = COSTS_N_INSNS (5);
                break;
              case 23:
                *total = COSTS_N_INSNS (4);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

	case SImode:
	  if (!CONST_INT_P (XEXP (x, 1)))
	    {
	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (4);
		break;
	      case 8:
	      case 16:
	      case 24:
		*total = COSTS_N_INSNS (6);
		break;
	      case 2:
		*total = COSTS_N_INSNS (!speed ? 7 : 8);
		break;
	      case 31:
		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 7 : 113);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
						speed);
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case LSHIFTRT:
      /* LSHIFTRT under a TRUNCATE is the highpart-multiply pattern.  */
      if (outer_code == TRUNCATE)
        {
          *total = avr_mul_highpart_cost (x, speed);
          return true;
        }

      switch (mode)
	{
	case QImode:
	  if (!CONST_INT_P (XEXP (x, 1)))
	    {
	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    {
	      val = INTVAL (XEXP (x, 1));
	      if (val == 7)
		*total = COSTS_N_INSNS (3);
	      else if (val >= 0 && val <= 7)
		*total = COSTS_N_INSNS (val);
	      else
		*total = COSTS_N_INSNS (1);
	    }
	  break;

	case HImode:
	  if (!CONST_INT_P (XEXP (x, 1)))
	    {
	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
	      case 8:
		*total = COSTS_N_INSNS (2);
		break;
	      case 9:
		*total = COSTS_N_INSNS (3);
		break;
	      case 2:
	      case 10:
	      case 15:
		*total = COSTS_N_INSNS (4);
		break;
	      case 7:
              case 11:
		*total = COSTS_N_INSNS (5);
		break;
	      case 3:
	      case 12:
	      case 13:
	      case 14:
		*total = COSTS_N_INSNS (!speed ? 5 : 6);
		break;
	      case 4:
		*total = COSTS_N_INSNS (!speed ? 5 : 7);
		break;
	      case 5:
	      case 6:
		*total = COSTS_N_INSNS (!speed ? 5 : 9);
		break;
	      default:
	        *total = COSTS_N_INSNS (!speed ? 5 : 41);
	        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
						speed);
	      }
	  break;

        case PSImode:
          if (!CONST_INT_P (XEXP (x, 1)))
            {
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (3);
                break;
              case 23:
                *total = COSTS_N_INSNS (5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
                break;
              }
          break;

	case SImode:
	  if (!CONST_INT_P (XEXP (x, 1)))
	    {
	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
					      speed);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (4);
		break;
	      case 2:
		*total = COSTS_N_INSNS (!speed ? 7 : 8);
		break;
	      case 8:
	      case 16:
	      case 24:
		*total = COSTS_N_INSNS (4);
		break;
	      case 31:
		*total = COSTS_N_INSNS (6);
		break;
	      default:
		*total = COSTS_N_INSNS (!speed ? 7 : 113);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
						speed);
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      return true;

    case COMPARE:
      /* Mode of the compare comes from the operands, not from X itself.  */
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  *total = COSTS_N_INSNS (1);
	  if (!CONST_INT_P (XEXP (x, 1)))
	    *total += avr_operand_rtx_cost (XEXP (x, 1), QImode, code,
					    1, speed);
	  break;

        case HImode:
	  *total = COSTS_N_INSNS (2);
	  if (!CONST_INT_P (XEXP (x, 1)))
            *total += avr_operand_rtx_cost (XEXP (x, 1), HImode, code,
					    1, speed);
	  else if (INTVAL (XEXP (x, 1)) != 0)
	    *total += COSTS_N_INSNS (1);
          break;

        case PSImode:
          *total = COSTS_N_INSNS (3);
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (2);
          break;

        case SImode:
          *total = COSTS_N_INSNS (4);
          if (!CONST_INT_P (XEXP (x, 1)))
            *total += avr_operand_rtx_cost (XEXP (x, 1), SImode, code,
					    1, speed);
	  else if (INTVAL (XEXP (x, 1)) != 0)
	    *total += COSTS_N_INSNS (3);
          break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
				      code, 0, speed);
      return true;

    case TRUNCATE:
      /* TRUNCATE of a LSHIFTRT: highpart multiply (see above).  */
      if (LSHIFTRT == GET_CODE (XEXP (x, 0)))
        {
          *total = avr_mul_highpart_cost (XEXP (x, 0), speed);
          return true;
        }
      break;

    default:
      break;
    }
  return false;
}
11379 
11380 
11381 /* Implement `TARGET_RTX_COSTS'.  */
11382 
11383 static bool
11384 avr_rtx_costs (rtx x, machine_mode mode, int outer_code,
11385 	       int opno, int *total, bool speed)
11386 {
11387   bool done = avr_rtx_costs_1 (x, mode, outer_code, opno, total, speed);
11388 
11389   if (avr_log.rtx_costs)
11390     {
11391       avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
11392                  done, speed ? "speed" : "size", *total, outer_code, x);
11393     }
11394 
11395   return done;
11396 }
11397 
11398 
11399 /* Implement `TARGET_ADDRESS_COST'.  */
11400 
11401 static int
11402 avr_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
11403                   addr_space_t as ATTRIBUTE_UNUSED,
11404                   bool speed ATTRIBUTE_UNUSED)
11405 {
11406   int cost = 4;
11407 
11408   if (GET_CODE (x) == PLUS
11409       && CONST_INT_P (XEXP (x, 1))
11410       && (REG_P (XEXP (x, 0))
11411           || SUBREG_P (XEXP (x, 0))))
11412     {
11413       if (INTVAL (XEXP (x, 1)) > MAX_LD_OFFSET(mode))
11414         cost = 18;
11415     }
11416   else if (CONSTANT_ADDRESS_P (x))
11417     {
11418       if (io_address_operand (x, QImode))
11419         cost = 2;
11420 
11421       if (AVR_TINY
11422           && avr_address_tiny_absdata_p (x, QImode))
11423         cost = 2;
11424     }
11425 
11426   if (avr_log.address_cost)
11427     avr_edump ("\n%?: %d = %r\n", cost, x);
11428 
11429   return cost;
11430 }
11431 
11432 /* Test for extra memory constraint 'Q'.
11433    It's a memory address based on Y or Z pointer with valid displacement.  */
11434 
11435 int
11436 extra_constraint_Q (rtx x)
11437 {
11438   int ok = 0;
11439   rtx plus = XEXP (x, 0);
11440 
11441   if (GET_CODE (plus) == PLUS
11442       && REG_P (XEXP (plus, 0))
11443       && CONST_INT_P (XEXP (plus, 1))
11444       && (INTVAL (XEXP (plus, 1))
11445 	  <= MAX_LD_OFFSET (GET_MODE (x))))
11446     {
11447       rtx xx = XEXP (plus, 0);
11448       int regno = REGNO (xx);
11449 
11450       ok = (/* allocate pseudos */
11451             regno >= FIRST_PSEUDO_REGISTER
11452             /* strictly check */
11453             || regno == REG_Z || regno == REG_Y
11454             /* XXX frame & arg pointer checks */
11455             || xx == frame_pointer_rtx
11456             || xx == arg_pointer_rtx);
11457 
11458       if (avr_log.constraints)
11459         avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
11460                    ok, reload_completed, reload_in_progress, x);
11461     }
11462 
11463   return ok;
11464 }
11465 
11466 /* Convert condition code CONDITION to the valid AVR condition code.  */
11467 
11468 RTX_CODE
11469 avr_normalize_condition (RTX_CODE condition)
11470 {
11471   switch (condition)
11472     {
11473     case GT:
11474       return GE;
11475     case GTU:
11476       return GEU;
11477     case LE:
11478       return LT;
11479     case LEU:
11480       return LTU;
11481     default:
11482       gcc_unreachable ();
11483     }
11484 }
11485 
11486 /* Helper function for `avr_reorg'.  */
11487 
11488 static rtx
11489 avr_compare_pattern (rtx_insn *insn)
11490 {
11491   rtx pattern = single_set (insn);
11492 
11493   if (pattern
11494       && NONJUMP_INSN_P (insn)
11495       && SET_DEST (pattern) == cc0_rtx
11496       && GET_CODE (SET_SRC (pattern)) == COMPARE)
11497     {
11498       machine_mode mode0 = GET_MODE (XEXP (SET_SRC (pattern), 0));
11499       machine_mode mode1 = GET_MODE (XEXP (SET_SRC (pattern), 1));
11500 
11501       /* The 64-bit comparisons have fixed operands ACC_A and ACC_B.
11502          They must not be swapped, thus skip them.  */
11503 
11504       if ((mode0 == VOIDmode || GET_MODE_SIZE (mode0) <= 4)
11505           && (mode1 == VOIDmode || GET_MODE_SIZE (mode1) <= 4))
11506         return pattern;
11507     }
11508 
11509   return NULL_RTX;
11510 }
11511 
11512 /* Helper function for `avr_reorg'.  */
11513 
11514 /* Expansion of switch/case decision trees leads to code like
11515 
11516        cc0 = compare (Reg, Num)
11517        if (cc0 == 0)
11518          goto L1
11519 
11520        cc0 = compare (Reg, Num)
11521        if (cc0 > 0)
11522          goto L2
11523 
11524    The second comparison is superfluous and can be deleted.
11525    The second jump condition can be transformed from a
11526    "difficult" one to a "simple" one because "cc0 > 0" and
11527    "cc0 >= 0" will have the same effect here.
11528 
   This function relies on the way switch/case is being expanded
11530    as binary decision tree.  For example code see PR 49903.
11531 
11532    Return TRUE if optimization performed.
11533    Return FALSE if nothing changed.
11534 
11535    INSN1 is a comparison, i.e. avr_compare_pattern != 0.
11536 
11537    We don't want to do this in text peephole because it is
11538    tedious to work out jump offsets there and the second comparison
   might have been transformed by `avr_reorg'.
11540 
11541    RTL peephole won't do because peephole2 does not scan across
11542    basic blocks.  */
11543 
static bool
avr_reorg_remove_redundant_compare (rtx_insn *insn1)
{
  rtx comp1, ifelse1, xcond1;
  rtx_insn *branch1;
  rtx comp2, ifelse2, xcond2;
  rtx_insn *branch2, *insn2;
  enum rtx_code code;
  rtx_insn *jump;
  rtx target, cond;

  /* Look out for:  compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  /* Both compares must be identical and both branches must be
     single-set conditional jumps:  (set (pc) (if_then_else ...)).  */

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  /* Filter further: the first branch must test cc0 for EQ against 0,
     the comparison must be register against constant, and both
     branches must be of the "if (cc0 <op> 0) goto label" shape with
     the fall-through in the else arm.  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL)   goto L1;
        if (x > VAL)    goto L2;

     with easy

         if (x == VAL)   goto L1;
         if (x >= VAL)   goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT:  case LTU:
    case GE:  case GEU:
      break;

    case LE:  case LEU:
    case GT:  case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  /* Re-emit the first branch as branch_unspec right after INSN1,
     preserving its JUMP_LABEL for the CFG bookkeeping.  */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  /* Second branch: use the (possibly normalized) condition CODE.  */

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}
11683 
11684 
11685 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'.  */
11686 /* Optimize conditional jumps.  */
11687 
static void
avr_reorg (void)
{
  /* Walk all real insns and rewrite cc0 comparisons so that
     "difficult" branches (not directly encodable as one AVR branch)
     become simple ones, either by removing a redundant compare or by
     swapping / adjusting the compare operands.  */

  rtx_insn *insn = get_insns();

  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
    {
      rtx pattern = avr_compare_pattern (insn);

      if (!pattern)
        continue;

      if (optimize
          && avr_reorg_remove_redundant_compare (insn))
        {
          continue;
        }

      if (compare_diff_p (insn))
	{
          /* Now we work under compare insn with difficult branch.  */

          /* NOTE(review): NEXT is dereferenced without a NULL check;
             presumably compare_diff_p guarantees that a difficult
             branch follows the compare — confirm before relying
             on it.  */

	  rtx_insn *next = next_real_insn (insn);
          rtx pat = PATTERN (next);

          pattern = SET_SRC (pattern);

          if (true_regnum (XEXP (pattern, 0)) >= 0
              && true_regnum (XEXP (pattern, 1)) >= 0)
            {
              /* Register vs. register: swap the compare operands and
                 reverse the branch condition accordingly.  Resetting
                 INSN_CODE forces re-recognition of the branch.  */
              rtx x = XEXP (pattern, 0);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src, 0);
              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 0) = XEXP (pattern, 1);
              XEXP (pattern, 1) = x;
              INSN_CODE (next) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && XEXP (pattern, 1) == const0_rtx)
            {
              /* This is a tst insn, we can reverse it.  */
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src, 0);

              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 1) = XEXP (pattern, 0);
              XEXP (pattern, 0) = const0_rtx;
              INSN_CODE (next) = -1;
              INSN_CODE (insn) = -1;
            }
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && CONST_INT_P (XEXP (pattern, 1)))
            {
              /* Register vs. constant: if possible, replace VAL by
                 VAL + 1 together with the normalized condition
                 (e.g. "x > VAL" becomes "x >= VAL + 1"), which yields
                 a simple branch.  */
              rtx x = XEXP (pattern, 1);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src, 0);
              machine_mode mode = GET_MODE (XEXP (pattern, 0));

              if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
                {
                  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
                  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
                  INSN_CODE (next) = -1;
                  INSN_CODE (insn) = -1;
                }
            }
        }
    }
}
11758 
11759 /* Returns register number for function return value.*/
11760 
static inline unsigned int
avr_ret_register (void)
{
  /* Function and libcall return values end at R24 + 1; see
     `avr_libcall_value' for how the start register is derived.  */
  const unsigned int ret_regno = 24;

  return ret_regno;
}
11766 
11767 
11768 /* Implement `TARGET_FUNCTION_VALUE_REGNO_P'.  */
11769 
11770 static bool
11771 avr_function_value_regno_p (const unsigned int regno)
11772 {
11773   return (regno == avr_ret_register ());
11774 }
11775 
11776 
11777 /* Implement `TARGET_LIBCALL_VALUE'.  */
11778 /* Create an RTX representing the place where a
11779    library function returns a value of mode MODE.  */
11780 
11781 static rtx
11782 avr_libcall_value (machine_mode mode,
11783 		   const_rtx func ATTRIBUTE_UNUSED)
11784 {
11785   int offs = GET_MODE_SIZE (mode);
11786 
11787   if (offs <= 4)
11788     offs = (offs + 1) & ~1;
11789 
11790   return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
11791 }
11792 
11793 
11794 /* Implement `TARGET_FUNCTION_VALUE'.  */
11795 /* Create an RTX representing the place where a
11796    function returns a value of data type VALTYPE.  */
11797 
11798 static rtx
11799 avr_function_value (const_tree type,
11800                     const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
11801                     bool outgoing ATTRIBUTE_UNUSED)
11802 {
11803   unsigned int offs;
11804 
11805   if (TYPE_MODE (type) != BLKmode)
11806     return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
11807 
11808   offs = int_size_in_bytes (type);
11809   if (offs < 2)
11810     offs = 2;
11811   if (offs > 2 && offs < GET_MODE_SIZE (SImode))
11812     offs = GET_MODE_SIZE (SImode);
11813   else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
11814     offs = GET_MODE_SIZE (DImode);
11815 
11816   return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
11817 }
11818 
11819 int
11820 test_hard_reg_class (enum reg_class rclass, rtx x)
11821 {
11822   int regno = true_regnum (x);
11823   if (regno < 0)
11824     return 0;
11825 
11826   if (TEST_HARD_REG_CLASS (rclass, regno))
11827     return 1;
11828 
11829   return 0;
11830 }
11831 
11832 
11833 /* Helper for jump_over_one_insn_p:  Test if INSN is a 2-word instruction
11834    and thus is suitable to be skipped by CPSE, SBRC, etc.  */
11835 
11836 static bool
11837 avr_2word_insn_p (rtx_insn *insn)
11838 {
11839   if (TARGET_SKIP_BUG
11840       || !insn
11841       || 2 != get_attr_length (insn))
11842     {
11843       return false;
11844     }
11845 
11846   switch (INSN_CODE (insn))
11847     {
11848     default:
11849       return false;
11850 
11851     case CODE_FOR_movqi_insn:
11852     case CODE_FOR_movuqq_insn:
11853     case CODE_FOR_movqq_insn:
11854       {
11855         rtx set  = single_set (insn);
11856         rtx src  = SET_SRC (set);
11857         rtx dest = SET_DEST (set);
11858 
11859         /* Factor out LDS and STS from movqi_insn.  */
11860 
11861         if (MEM_P (dest)
11862             && (REG_P (src) || src == CONST0_RTX (GET_MODE (dest))))
11863           {
11864             return CONSTANT_ADDRESS_P (XEXP (dest, 0));
11865           }
11866         else if (REG_P (dest)
11867                  && MEM_P (src))
11868           {
11869             return CONSTANT_ADDRESS_P (XEXP (src, 0));
11870           }
11871 
11872         return false;
11873       }
11874 
11875     case CODE_FOR_call_insn:
11876     case CODE_FOR_call_value_insn:
11877       return true;
11878     }
11879 }
11880 
11881 
11882 int
11883 jump_over_one_insn_p (rtx_insn *insn, rtx dest)
11884 {
11885   int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
11886 		      ? XEXP (dest, 0)
11887 		      : dest);
11888   int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
11889   int dest_addr = INSN_ADDRESSES (uid);
11890   int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
11891 
11892   return (jump_offset == 1
11893           || (jump_offset == 2
11894               && avr_2word_insn_p (next_active_insn (insn))));
11895 }
11896 
11897 
11898 /* Worker function for `HARD_REGNO_MODE_OK'.  */
11899 /* Returns 1 if a value of mode MODE can be stored starting with hard
11900    register number REGNO.  On the enhanced core, anything larger than
11901    1 byte must start in even numbered register for "movw" to work
11902    (this way we don't have to check for odd registers everywhere).  */
11903 
11904 int
11905 avr_hard_regno_mode_ok (int regno, machine_mode mode)
11906 {
11907   /* NOTE: 8-bit values must not be disallowed for R28 or R29.
11908         Disallowing QI et al. in these regs might lead to code like
11909             (set (subreg:QI (reg:HI 28) n) ...)
11910         which will result in wrong code because reload does not
11911         handle SUBREGs of hard regsisters like this.
11912         This could be fixed in reload.  However, it appears
11913         that fixing reload is not wanted by reload people.  */
11914 
11915   /* Any GENERAL_REGS register can hold 8-bit values.  */
11916 
11917   if (GET_MODE_SIZE (mode) == 1)
11918     return 1;
11919 
11920   /* FIXME: Ideally, the following test is not needed.
11921         However, it turned out that it can reduce the number
11922         of spill fails.  AVR and it's poor endowment with
11923         address registers is extreme stress test for reload.  */
11924 
11925   if (GET_MODE_SIZE (mode) >= 4
11926       && regno >= REG_X)
11927     return 0;
11928 
11929   /* All modes larger than 8 bits should start in an even register.  */
11930 
11931   return !(regno & 1);
11932 }
11933 
11934 
11935 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'.  */
11936 
11937 int
11938 avr_hard_regno_call_part_clobbered (unsigned regno, machine_mode mode)
11939 {
11940   /* FIXME: This hook gets called with MODE:REGNO combinations that don't
11941         represent valid hard registers like, e.g. HI:29.  Returning TRUE
11942         for such registers can lead to performance degradation as mentioned
11943         in PR53595.  Thus, report invalid hard registers as FALSE.  */
11944 
11945   if (!avr_hard_regno_mode_ok (regno, mode))
11946     return 0;
11947 
11948   /* Return true if any of the following boundaries is crossed:
11949      17/18 or 19/20 (if AVR_TINY), 27/28 and 29/30.  */
11950 
11951   return ((regno <= LAST_CALLEE_SAVED_REG
11952            && regno + GET_MODE_SIZE (mode) > 1 + LAST_CALLEE_SAVED_REG)
11953           || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
11954           || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
11955 }
11956 
11957 
11958 /* Implement `MODE_CODE_BASE_REG_CLASS'.  */
11959 
11960 enum reg_class
11961 avr_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
11962                               addr_space_t as, RTX_CODE outer_code,
11963                               RTX_CODE index_code ATTRIBUTE_UNUSED)
11964 {
11965   if (!ADDR_SPACE_GENERIC_P (as))
11966     {
11967       return POINTER_Z_REGS;
11968     }
11969 
11970   if (!avr_strict_X)
11971     return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
11972 
11973   return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
11974 }
11975 
11976 
11977 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'.  */
11978 
11979 bool
11980 avr_regno_mode_code_ok_for_base_p (int regno,
11981                                    machine_mode mode ATTRIBUTE_UNUSED,
11982                                    addr_space_t as ATTRIBUTE_UNUSED,
11983                                    RTX_CODE outer_code,
11984                                    RTX_CODE index_code ATTRIBUTE_UNUSED)
11985 {
11986   bool ok = false;
11987 
11988   if (!ADDR_SPACE_GENERIC_P (as))
11989     {
11990       if (regno < FIRST_PSEUDO_REGISTER
11991           && regno == REG_Z)
11992         {
11993           return true;
11994         }
11995 
11996       if (reg_renumber)
11997         {
11998           regno = reg_renumber[regno];
11999 
12000           if (regno == REG_Z)
12001             {
12002               return true;
12003             }
12004         }
12005 
12006       return false;
12007     }
12008 
12009   if (regno < FIRST_PSEUDO_REGISTER
12010       && (regno == REG_X
12011           || regno == REG_Y
12012           || regno == REG_Z
12013           || regno == ARG_POINTER_REGNUM))
12014     {
12015       ok = true;
12016     }
12017   else if (reg_renumber)
12018     {
12019       regno = reg_renumber[regno];
12020 
12021       if (regno == REG_X
12022           || regno == REG_Y
12023           || regno == REG_Z
12024           || regno == ARG_POINTER_REGNUM)
12025         {
12026           ok = true;
12027         }
12028     }
12029 
12030   if (avr_strict_X
12031       && PLUS == outer_code
12032       && regno == REG_X)
12033     {
12034       ok = false;
12035     }
12036 
12037   return ok;
12038 }
12039 
12040 
12041 /* A helper for `output_reload_insisf' and `output_reload_inhi'.  */
12042 /* Set 32-bit register OP[0] to compile-time constant OP[1].
12043    CLOBBER_REG is a QI clobber register or NULL_RTX.
12044    LEN == NULL: output instructions.
12045    LEN != NULL: set *LEN to the length of the instruction sequence
12046                 (in words) printed with LEN = NULL.
   If CLEAR_P is true, OP[0] had been cleared to Zero already.
12048    If CLEAR_P is false, nothing is known about OP[0].
12049 
12050    The effect on cc0 is as follows:
12051 
12052    Load 0 to any register except ZERO_REG : NONE
12053    Load ld register with any value        : NONE
12054    Anything else:                         : CLOBBER  */
12055 
static void
output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
{
  rtx src = op[1];
  rtx dest = op[0];
  rtx xval, xdest[4];
  int ival[4];
  /* Byte value currently held in CLOBBER_REG; start with a value that
     matches no byte so the first use always loads it.  */
  int clobber_val = 1234;
  bool cooked_clobber_p = false;
  /* Whether the T flag has already been SET for bit loads.  */
  bool set_p = false;
  machine_mode mode = GET_MODE (dest);
  int n_bytes = GET_MODE_SIZE (mode);

  gcc_assert (REG_P (dest)
              && CONSTANT_P (src));

  if (len)
    *len = 0;

  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */

  if (REGNO (dest) < 16
      && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
    {
      clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
    }

  /* We might need a clobber reg but don't have one.  Look at the value to
     be loaded more closely.  A clobber is only needed if it is a symbol
     or contains a byte that is neither 0, -1 or a power of 2.  */

  if (NULL_RTX == clobber_reg
      && !test_hard_reg_class (LD_REGS, dest)
      && (! (CONST_INT_P (src) || CONST_FIXED_P (src) || CONST_DOUBLE_P (src))
          || !avr_popcount_each_byte (src, n_bytes,
                                      (1 << 0) | (1 << 1) | (1 << 8))))
    {
      /* We have no clobber register but need one.  Cook one up.
         That's cheaper than loading from constant pool.  */

      cooked_clobber_p = true;
      clobber_reg = all_regs_rtx[REG_Z + 1];
      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
    }

  /* Now start filling DEST from LSB to MSB.  */

  for (int n = 0; n < n_bytes; n++)
    {
      int ldreg_p;
      bool done_byte = false;
      rtx xop[3];

      /* Crop the n-th destination byte.  */

      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
      ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);

      /* Non-numeric constant (e.g. a symbol): emit LDI with the
         lo8/hi8/hlo8/hhi8 relocation for byte N, going through the
         clobber register if the destination byte is no LD_REG.  */

      if (!CONST_INT_P (src)
          && !CONST_FIXED_P (src)
          && !CONST_DOUBLE_P (src))
        {
          static const char* const asm_code[][2] =
            {
              { "ldi %2,lo8(%1)"  CR_TAB "mov %0,%2",    "ldi %0,lo8(%1)"  },
              { "ldi %2,hi8(%1)"  CR_TAB "mov %0,%2",    "ldi %0,hi8(%1)"  },
              { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2",    "ldi %0,hlo8(%1)" },
              { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2",    "ldi %0,hhi8(%1)" }
            };

          xop[0] = xdest[n];
          xop[1] = src;
          xop[2] = clobber_reg;

          avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);

          continue;
        }

      /* Crop the n-th source byte.  */

      xval = simplify_gen_subreg (QImode, src, mode, n);
      ival[n] = INTVAL (xval);

      /* Look if we can reuse the low word by means of MOVW.  */

      if (n == 2
          && n_bytes >= 4
          && AVR_HAVE_MOVW)
        {
          rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
          rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);

          if (INTVAL (lo16) == INTVAL (hi16))
            {
              /* The MOVW may be omitted when the high word is zero
                 and the register is known to be cleared already.  */

              if (0 != INTVAL (lo16)
                  || !clear_p)
                {
                  avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
                }

              break;
            }
        }

      /* Don't use CLR so that cc0 is set as expected.  */

      if (ival[n] == 0)
        {
          if (!clear_p)
            avr_asm_len (ldreg_p ? "ldi %0,0"
                         : AVR_ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
                         : "mov %0,__zero_reg__",
                         &xdest[n], len, 1);
          continue;
        }

      /* The clobber register already holds this very byte value
         in the destination byte itself: nothing to do.  */

      if (clobber_val == ival[n]
          && REGNO (clobber_reg) == REGNO (xdest[n]))
        {
          continue;
        }

      /* LD_REGS can use LDI to move a constant value */

      if (ldreg_p)
        {
          xop[0] = xdest[n];
          xop[1] = xval;
          avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
          continue;
        }

      /* Try to reuse value already loaded in some lower byte. */

      for (int j = 0; j < n; j++)
        if (ival[j] == ival[n])
          {
            xop[0] = xdest[n];
            xop[1] = xdest[j];

            avr_asm_len ("mov %0,%1", xop, len, 1);
            done_byte = true;
            break;
          }

      if (done_byte)
        continue;

      /* Need no clobber reg for -1: Use CLR/DEC */

      if (-1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("dec %0", &xdest[n], len, 1);
          continue;
        }
      else if (1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("inc %0", &xdest[n], len, 1);
          continue;
        }

      /* Use T flag or INC to manage powers of 2 if we have
         no clobber reg.  */

      if (NULL_RTX == clobber_reg
          && single_one_operand (xval, QImode))
        {
          xop[0] = xdest[n];
          xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));

          gcc_assert (constm1_rtx != xop[1]);

          /* SET the T flag once; further bytes only need BLD.  */

          if (!set_p)
            {
              set_p = true;
              avr_asm_len ("set", xop, len, 1);
            }

          if (!clear_p)
            avr_asm_len ("clr %0", xop, len, 1);

          avr_asm_len ("bld %0,%1", xop, len, 1);
          continue;
        }

      /* We actually need the LD_REGS clobber reg.  */

      gcc_assert (NULL_RTX != clobber_reg);

      xop[0] = xdest[n];
      xop[1] = xval;
      xop[2] = clobber_reg;
      clobber_val = ival[n];

      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
                   "mov %0,%2", xop, len, 2);
    }

  /* If we cooked up a clobber reg above, restore it.  */

  if (cooked_clobber_p)
    {
      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
    }
}
12269 
12270 
12271 /* Reload the constant OP[1] into the HI register OP[0].
12272    CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
12273    into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
12274    need a clobber reg or have to cook one up.
12275 
12276    PLEN == NULL: Output instructions.
12277    PLEN != NULL: Output nothing.  Set *PLEN to number of words occupied
12278                  by the insns printed.
12279 
12280    Return "".  */
12281 
12282 const char*
12283 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
12284 {
12285   output_reload_in_const (op, clobber_reg, plen, false);
12286   return "";
12287 }
12288 
12289 
12290 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
12291    CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
12292    into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
12293    need a clobber reg or have to cook one up.
12294 
12295    LEN == NULL: Output instructions.
12296 
12297    LEN != NULL: Output nothing.  Set *LEN to number of words occupied
12298                 by the insns printed.
12299 
12300    Return "".  */
12301 
12302 const char *
12303 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
12304 {
12305   if (AVR_HAVE_MOVW
12306       && !test_hard_reg_class (LD_REGS, op[0])
12307       && (CONST_INT_P (op[1])
12308           || CONST_FIXED_P (op[1])
12309           || CONST_DOUBLE_P (op[1])))
12310     {
12311       int len_clr, len_noclr;
12312 
12313       /* In some cases it is better to clear the destination beforehand, e.g.
12314 
12315              CLR R2   CLR R3   MOVW R4,R2   INC R2
12316 
12317          is shorther than
12318 
12319              CLR R2   INC R2   CLR  R3      CLR R4   CLR R5
12320 
12321          We find it too tedious to work that out in the print function.
12322          Instead, we call the print function twice to get the lengths of
12323          both methods and use the shortest one.  */
12324 
12325       output_reload_in_const (op, clobber_reg, &len_clr, true);
12326       output_reload_in_const (op, clobber_reg, &len_noclr, false);
12327 
12328       if (len_noclr - len_clr == 4)
12329         {
12330           /* Default needs 4 CLR instructions: clear register beforehand.  */
12331 
12332           avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
12333                        "mov %B0,__zero_reg__" CR_TAB
12334                        "movw %C0,%A0", &op[0], len, 3);
12335 
12336           output_reload_in_const (op, clobber_reg, len, true);
12337 
12338           if (len)
12339             *len += 3;
12340 
12341           return "";
12342         }
12343     }
12344 
12345   /* Default: destination not pre-cleared.  */
12346 
12347   output_reload_in_const (op, clobber_reg, len, false);
12348   return "";
12349 }
12350 
12351 const char*
12352 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
12353 {
12354   output_reload_in_const (op, clobber_reg, len, false);
12355   return "";
12356 }
12357 
12358 
12359 /* Worker function for `ASM_OUTPUT_ADDR_VEC_ELT'.  */
12360 
12361 void
12362 avr_output_addr_vec_elt (FILE *stream, int value)
12363 {
12364   if (AVR_HAVE_JMP_CALL)
12365     fprintf (stream, "\t.word gs(.L%d)\n", value);
12366   else
12367     fprintf (stream, "\trjmp .L%d\n", value);
12368 }
12369 
12370 static void
12371 avr_conditional_register_usage (void)
12372 {
12373   if (AVR_TINY)
12374     {
12375       const int tiny_reg_alloc_order[] = {
12376         24, 25,
12377         22, 23,
12378         30, 31,
12379         26, 27,
12380         28, 29,
12381         21, 20, 19, 18,
12382         16, 17,
12383         32, 33, 34, 35,
12384         15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0
12385       };
12386 
12387       /* Set R0-R17 as fixed registers. Reset R0-R17 in call used register list
12388          - R0-R15 are not available in Tiny Core devices
12389          - R16 and R17 are fixed registers.  */
12390 
12391       for (size_t i = 0; i <= 17;  i++)
12392         {
12393           fixed_regs[i] = 1;
12394           call_used_regs[i] = 1;
12395         }
12396 
12397       /* Set R18 to R21 as callee saved registers
12398          - R18, R19, R20 and R21 are the callee saved registers in
12399            Tiny Core devices  */
12400 
12401       for (size_t i = 18; i <= LAST_CALLEE_SAVED_REG; i++)
12402         {
12403           call_used_regs[i] = 0;
12404         }
12405 
12406       /* Update register allocation order for Tiny Core devices */
12407 
12408       for (size_t i = 0; i < ARRAY_SIZE (tiny_reg_alloc_order); i++)
12409         {
12410           reg_alloc_order[i] = tiny_reg_alloc_order[i];
12411         }
12412 
12413       CLEAR_HARD_REG_SET (reg_class_contents[(int) ADDW_REGS]);
12414       CLEAR_HARD_REG_SET (reg_class_contents[(int) NO_LD_REGS]);
12415     }
12416 }
12417 
12418 /* Implement `TARGET_HARD_REGNO_SCRATCH_OK'.  */
/* Returns true if register REGNO is safe to be allocated as a scratch
   register (for a define_peephole2) in the current function.  */
12421 
12422 static bool
12423 avr_hard_regno_scratch_ok (unsigned int regno)
12424 {
12425   /* Interrupt functions can only use registers that have already been saved
12426      by the prologue, even if they would normally be call-clobbered.  */
12427 
12428   if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
12429       && !df_regs_ever_live_p (regno))
12430     return false;
12431 
12432   /* Don't allow hard registers that might be part of the frame pointer.
12433      Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
12434      and don't care for a frame pointer that spans more than one register.  */
12435 
12436   if ((!reload_completed || frame_pointer_needed)
12437       && (regno == REG_Y || regno == REG_Y + 1))
12438     {
12439       return false;
12440     }
12441 
12442   return true;
12443 }
12444 
12445 
12446 /* Worker function for `HARD_REGNO_RENAME_OK'.  */
12447 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */
12448 
12449 int
12450 avr_hard_regno_rename_ok (unsigned int old_reg,
12451 			  unsigned int new_reg)
12452 {
12453   /* Interrupt functions can only use registers that have already been
12454      saved by the prologue, even if they would normally be
12455      call-clobbered.  */
12456 
12457   if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
12458       && !df_regs_ever_live_p (new_reg))
12459     return 0;
12460 
12461   /* Don't allow hard registers that might be part of the frame pointer.
12462      Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
12463      and don't care for a frame pointer that spans more than one register.  */
12464 
12465   if ((!reload_completed || frame_pointer_needed)
12466       && (old_reg == REG_Y || old_reg == REG_Y + 1
12467           || new_reg == REG_Y || new_reg == REG_Y + 1))
12468     {
12469       return 0;
12470     }
12471 
12472   return 1;
12473 }
12474 
12475 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
12476    or memory location in the I/O space (QImode only).
12477 
12478    Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
12479    Operand 1: register operand to test, or CONST_INT memory address.
12480    Operand 2: bit number.
12481    Operand 3: label to jump to if the test is true.  */
12482 
const char*
avr_out_sbxx_branch (rtx_insn *insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  bool long_jump = get_attr_length (insn) >= 4;
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* GE/LT against zero are tests of the MSB; reduce them to EQ/NE of
     the tested bit.  */

  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  /* Long jumps and single-insn jumps are emitted with the skip sense
     inverted (skip over the jump when the condition fails).  */

  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:
    case CONST:
    case SYMBOL_REF:

      /* Bit in I/O space.  The low I/O range can use SBIS/SBIC
         directly; other I/O addresses go through __tmp_reg__ and
         SBRS/SBRC.  */

      if (low_io_address_operand (operands[1], QImode))
        {
          if (comp == EQ)
            output_asm_insn ("sbis %i1,%2", operands);
          else
            output_asm_insn ("sbic %i1,%2", operands);
        }
      else
        {
          gcc_assert (io_address_operand (operands[1], QImode));
          output_asm_insn ("in __tmp_reg__,%i1", operands);
          if (comp == EQ)
            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
          else
            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
        }

      break; /* CONST_INT */

    case REG:

      /* Bit in a register: SBRS/SBRC skip the following insn.  */

      if (comp == EQ)
        output_asm_insn ("sbrs %T1%T2", operands);
      else
        output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    }        /* switch */

  /* Long form: skip over an RJMP that hops across the 2-word JMP.  */

  if (long_jump)
    return ("rjmp .+4" CR_TAB
            "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  return "";
}
12545 
12546 /* Worker function for `TARGET_ASM_CONSTRUCTOR'.  */
12547 
12548 static void
12549 avr_asm_out_ctor (rtx symbol, int priority)
12550 {
12551   fputs ("\t.global __do_global_ctors\n", asm_out_file);
12552   default_ctor_section_asm_out_constructor (symbol, priority);
12553 }
12554 
12555 
12556 /* Worker function for `TARGET_ASM_DESTRUCTOR'.  */
12557 
12558 static void
12559 avr_asm_out_dtor (rtx symbol, int priority)
12560 {
12561   fputs ("\t.global __do_global_dtors\n", asm_out_file);
12562   default_dtor_section_asm_out_destructor (symbol, priority);
12563 }
12564 
12565 
12566 /* Worker function for `TARGET_RETURN_IN_MEMORY'.  */
12567 
12568 static bool
12569 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
12570 {
12571   HOST_WIDE_INT size = int_size_in_bytes (type);
12572   HOST_WIDE_INT ret_size_limit = AVR_TINY ? 4 : 8;
12573 
12574   /* In avr, there are 8 return registers. But, for Tiny Core
12575      (ATtiny4/5/9/10/20/40) devices, only 4 registers are available.
12576      Return true if size is unknown or greater than the limit.  */
12577 
12578   if (size == -1 || size > ret_size_limit)
12579     {
12580       return true;
12581     }
12582   else
12583     {
12584       return false;
12585     }
12586 }
12587 
12588 
/* Implement `CASE_VALUES_THRESHOLD'.  */
/* Supply the default for --param case-values-threshold=0  */

static unsigned int
avr_case_values_threshold (void)
{
  /* The exact break-even point between a jump table and an if-else tree
     depends on several factors not available here like, e.g. if 8-bit
     comparisons can be used in the if-else tree or not, on the
     range of the case values, if the case value can be reused, on the
     register allocation, etc.  '7' appears to be a good choice.  */

  const unsigned int threshold = 7;

  return threshold;
}
12603 
12604 
12605 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'.  */
12606 
12607 static machine_mode
12608 avr_addr_space_address_mode (addr_space_t as)
12609 {
12610   return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
12611 }
12612 
12613 
12614 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'.  */
12615 
12616 static machine_mode
12617 avr_addr_space_pointer_mode (addr_space_t as)
12618 {
12619   return avr_addr_space_address_mode (as);
12620 }
12621 
12622 
12623 /* Helper for following function.  */
12624 
12625 static bool
12626 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
12627 {
12628   gcc_assert (REG_P (reg));
12629 
12630   if (strict)
12631     {
12632       return REGNO (reg) == REG_Z;
12633     }
12634 
12635   /* Avoid combine to propagate hard regs.  */
12636 
12637   if (can_create_pseudo_p()
12638       && REGNO (reg) < REG_Z)
12639     {
12640       return false;
12641     }
12642 
12643   return true;
12644 }
12645 
12646 
12647 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.  */
12648 
12649 static bool
12650 avr_addr_space_legitimate_address_p (machine_mode mode, rtx x,
12651                                      bool strict, addr_space_t as)
12652 {
12653   bool ok = false;
12654 
12655   switch (as)
12656     {
12657     default:
12658       gcc_unreachable();
12659 
12660     case ADDR_SPACE_GENERIC:
12661       return avr_legitimate_address_p (mode, x, strict);
12662 
12663     case ADDR_SPACE_FLASH:
12664     case ADDR_SPACE_FLASH1:
12665     case ADDR_SPACE_FLASH2:
12666     case ADDR_SPACE_FLASH3:
12667     case ADDR_SPACE_FLASH4:
12668     case ADDR_SPACE_FLASH5:
12669 
12670       switch (GET_CODE (x))
12671         {
12672         case REG:
12673           ok = avr_reg_ok_for_pgm_addr (x, strict);
12674           break;
12675 
12676         case POST_INC:
12677           ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
12678           break;
12679 
12680         default:
12681           break;
12682         }
12683 
12684       break; /* FLASH */
12685 
12686     case ADDR_SPACE_MEMX:
12687       if (REG_P (x))
12688         ok = (!strict
12689               && can_create_pseudo_p());
12690 
12691       if (LO_SUM == GET_CODE (x))
12692         {
12693           rtx hi = XEXP (x, 0);
12694           rtx lo = XEXP (x, 1);
12695 
12696           ok = (REG_P (hi)
12697                 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
12698                 && REG_P (lo)
12699                 && REGNO (lo) == REG_Z);
12700         }
12701 
12702       break; /* MEMX */
12703     }
12704 
12705   if (avr_log.legitimate_address_p)
12706     {
12707       avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
12708                  "reload_completed=%d reload_in_progress=%d %s:",
12709                  ok, mode, strict, reload_completed, reload_in_progress,
12710                  reg_renumber ? "(reg_renumber)" : "");
12711 
12712       if (GET_CODE (x) == PLUS
12713           && REG_P (XEXP (x, 0))
12714           && CONST_INT_P (XEXP (x, 1))
12715           && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
12716           && reg_renumber)
12717         {
12718           avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
12719                      true_regnum (XEXP (x, 0)));
12720         }
12721 
12722       avr_edump ("\n%r\n", x);
12723     }
12724 
12725   return ok;
12726 }
12727 
12728 
12729 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'.  */
12730 
12731 static rtx
12732 avr_addr_space_legitimize_address (rtx x, rtx old_x,
12733                                    machine_mode mode, addr_space_t as)
12734 {
12735   if (ADDR_SPACE_GENERIC_P (as))
12736     return avr_legitimize_address (x, old_x, mode);
12737 
12738   if (avr_log.legitimize_address)
12739     {
12740       avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
12741     }
12742 
12743   return old_x;
12744 }
12745 
12746 
/* Implement `TARGET_ADDR_SPACE_CONVERT'.  */

/* Convert pointer rvalue SRC from the address space of pointer type
   TYPE_FROM to that of pointer type TYPE_TO, emitting insns as needed,
   and return the converted rvalue.  */

static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      /* Strip CONST / PLUS wrappers to get at the base symbol.  */

      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g. PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF_P (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory: RAM has bit 23 set.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      /* MSB == 0: plain zero-extension is enough; otherwise extend with
         MSB as the hi8 byte.  */

      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  /* Conversions between the 16-bit address spaces are a no-op.  */

  return src;
}
12814 
12815 
12816 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'.  */
12817 
12818 static bool
12819 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
12820                          addr_space_t superset ATTRIBUTE_UNUSED)
12821 {
12822   /* Allow any kind of pointer mess.  */
12823 
12824   return true;
12825 }
12826 
12827 
/* Implement `TARGET_CONVERT_TO_TYPE'.  */

/* Convert EXPR to TYPE.  Return the converted expression, or NULL_TREE
   to let the front-end perform the conversion in the default way.  */

static tree
avr_convert_to_type (tree type, tree expr)
{
  /* Print a diagnose for pointer conversion that changes the address
     space of the pointer target to a non-enclosing address space,
     provided -Waddr-space-convert is on.

     FIXME: Filter out cases where the target object is known to
            be located in the right memory, like in

                (const __flash*) PSTR ("text")

            Also try to distinguish between explicit casts requested by
            the user and implicit casts like

                void f (const __flash char*);

                void g (const char *p)
                {
                    f ((const __flash*) p);
                }

            under the assumption that an explicit casts means that the user
            knows what he is doing, e.g. interface with PSTR or old style
            code with progmem and pgm_read_xxx.
  */

  if (avr_warn_addr_space_convert
      && expr != error_mark_node
      && POINTER_TYPE_P (type)
      && POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      addr_space_t as_old = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
      addr_space_t as_new = TYPE_ADDR_SPACE (TREE_TYPE (type));

      if (avr_log.progmem)
        avr_edump ("%?: type = %t\nexpr = %t\n\n", type, expr);

      /* No warning when converting to __memx, which encloses all other
         address spaces, or when the address space is unchanged.  */

      if (as_new != ADDR_SPACE_MEMX
          && as_new != as_old)
        {
          location_t loc = EXPR_LOCATION (expr);
          const char *name_old = avr_addrspace[as_old].name;
          const char *name_new = avr_addrspace[as_new].name;

          warning (OPT_Waddr_space_convert,
                   "conversion from address space %qs to address space %qs",
                   ADDR_SPACE_GENERIC_P (as_old) ? "generic" : name_old,
                   ADDR_SPACE_GENERIC_P (as_new) ? "generic" : name_new);

          return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, expr);
        }
    }

  /* NULL_TREE: perform the conversion the default way.  */

  return NULL_TREE;
}
12886 
12887 
12888 /* Implement `TARGET_LEGITIMATE_COMBINED_INSN'.  */
12889 
12890 /* PR78883: Filter out paradoxical SUBREGs of MEM which are not handled
12891    properly by following passes.  As INSN_SCHEDULING is off and hence
12892    general_operand accepts such expressions, ditch them now.  */
12893 
12894 static bool
12895 avr_legitimate_combined_insn (rtx_insn *insn)
12896 {
12897   subrtx_iterator::array_type array;
12898 
12899   FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
12900     {
12901       const_rtx op = *iter;
12902 
12903       if (SUBREG_P (op)
12904           && MEM_P (SUBREG_REG (op))
12905           && (GET_MODE_SIZE (GET_MODE (op))
12906               > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op)))))
12907         {
12908           return false;
12909         }
12910     }
12911 
12912   return true;
12913 }
12914 
12915 
/* PR63633: The middle-end might come up with hard regs as input operands.

   RMASK is a bit mask representing a subset of hard registers R0...R31:
   Rn is an element of that set iff bit n of RMASK is set.
   OPMASK describes a subset of OP[]:  If bit n of OPMASK is 1 then
   OP[n] has to be fixed; otherwise OP[n] is left alone.

   For each element of OPMASK which is a hard register overlapping RMASK,
   replace OP[n] with a newly created pseudo register

   HREG == 0:  Also emit a move insn that copies the contents of that
               hard register into the new pseudo.

   HREG != 0:  Also set HREG[n] to the hard register.  */

static void
avr_fix_operands (rtx *op, rtx *hreg, unsigned opmask, unsigned rmask)
{
  /* Walk OP[] (and HREG[] in lockstep, if supplied) while shifting
     OPMASK so that bit 0 always refers to the current operand.  */

  for (; opmask; opmask >>= 1, op++)
    {
      rtx reg = *op;

      if (hreg)
        *hreg = NULL_RTX;

      if ((opmask & 1)
          && REG_P (reg)
          && REGNO (reg) < FIRST_PSEUDO_REGISTER
          // This hard-reg overlaps other prohibited hard regs?
          && (rmask & regmask (GET_MODE (reg), REGNO (reg))))
        {
          /* Substitute a fresh pseudo of the same mode.  */
          *op = gen_reg_rtx (GET_MODE (reg));
          if (hreg == NULL)
            emit_move_insn (*op, reg);
          else
            *hreg = reg;
        }

      if (hreg)
        hreg++;
    }
}
12958 
12959 
/* PR63633:  Fix hard regs showing up in input operands OP[]:  For each
   OP[n] selected by OPMASK that is a hard register overlapping RMASK,
   substitute a fresh pseudo and emit a move that loads the pseudo from
   the hard register.  See avr_fix_operands for the mask encodings.  */

void
avr_fix_inputs (rtx *op, unsigned opmask, unsigned rmask)
{
  avr_fix_operands (op, NULL, opmask, rmask);
}
12965 
12966 
12967 /* Helper for the function below:  If bit n of MASK is set and
12968    HREG[n] != NULL, then emit a move insn to copy OP[n] to HREG[n].
12969    Otherwise do nothing for that n.  Return TRUE.  */
12970 
12971 static bool
12972 avr_move_fixed_operands (rtx *op, rtx *hreg, unsigned mask)
12973 {
12974   for (; mask; mask >>= 1, op++, hreg++)
12975     if ((mask & 1)
12976         && *hreg)
12977       emit_move_insn (*hreg, *op);
12978 
12979   return true;
12980 }
12981 
12982 
/* PR63633: The middle-end might come up with hard regs as output operands.

   GEN is a sequence generating function like gen_mulsi3 with 3 operands OP[].
   RMASK is a bit mask representing a subset of hard registers R0...R31:
   Rn is an element of that set iff bit n of RMASK is set.
   OPMASK describes a subset of OP[]:  If bit n of OPMASK is 1 then
   OP[n] has to be fixed; otherwise OP[n] is left alone.

   Emit the insn sequence as generated by GEN() with all elements of OPMASK
   which are hard registers overlapping RMASK replaced by newly created
   pseudo registers.  After the sequence has been emitted, emit insns that
   move the contents of respective pseudos to their hard regs.  */

bool
avr_emit3_fix_outputs (rtx (*gen)(rtx,rtx,rtx), rtx *op,
                       unsigned opmask, unsigned rmask)
{
  const int n = 3;
  rtx hreg[n];

  /* It is legitimate for GEN to call this function, and in order not to
     get self-recursive we use the following static kludge.  This is the
     only way not to duplicate all expanders and to avoid ugly and
     hard-to-maintain C-code instead of the much more appreciated RTL
     representation as supplied by define_expand.  */
  static bool lock = false;

  gcc_assert (opmask < (1u << n));

  if (lock)
    return false;

  /* Replace prohibited hard regs in OP[] by fresh pseudos and record
     the replaced hard regs in HREG[].  */
  avr_fix_operands (op, hreg, opmask, rmask);

  lock = true;
  emit_insn (gen (op[0], op[1], op[2]));
  lock = false;

  /* Copy the results from the pseudos back into the hard regs.  */
  return avr_move_fixed_operands (op, hreg, opmask);
}
13023 
13024 
/* Worker function for movmemhi expander.
   XOP[0]  Destination as MEM:BLK
   XOP[1]  Source      "     "
   XOP[2]  # Bytes to copy

   Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand combination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  /* Copying to flash is not supported.  */

  if (avr_mem_flash_p (xop[0]))
    return false;

  /* Only fixed-size copies with a positive byte count are handled.  */

  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src  = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      /* 24-bit source address: only used for __memx.  */

      gcc_assert (as == ADDR_SPACE_MEMX);

      /* Loop counter is hard register 24 as expected by the movmemx
         insns emitted below.  */

      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      /* Split the 24-bit address into 16-bit low part and hi8 byte.  */

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      if (segment
          && avr_n_flash > 1)
        {
          /* Flash segment != 0 on a multi-segment device:
             load RAMPZ with the segment number.  */

          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          /* Single flash segment:  Plain LPM addressing suffices.  */

          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
        on its own.  Thus, we allocate the pointer registers by hand:
        Z = source address
        X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
        register(s) inside the loop leading to additional move instruction
        to/from stack which could clobber tmp_reg.  Thus, do *not* emit
        load and store as separate insns.  Instead, we perform the copy
        by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      /* The hi8 part of the source address goes to hard register 23.  */

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
13130 
13131 
/* Print assembler for movmem_qi, movmem_hi insns...
       $0     : Address Space
       $1, $2 : Loop register
       Z      : Source address
       X      : Destination address

   PLEN == 0: Output the instructions.
   PLEN != 0: Only compute the code length in words into *PLEN.  */

const char*
avr_out_movmem (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  addr_space_t as = (addr_space_t) INTVAL (op[0]);
  machine_mode loop_mode = GET_MODE (op[1]);
  /* SBIW is only available for the upper register pairs (ADDW_REGS).  */
  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
  rtx xop[3];

  if (plen)
    *plen = 0;

  xop[0] = op[0];
  xop[1] = op[1];
  xop[2] = tmp_reg_rtx;

  /* Loop label */

  avr_asm_len ("0:", xop, plen, 0);

  /* Load with post-increment */

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:

      avr_asm_len ("ld %2,Z+", xop, plen, 1);
      break;

    case ADDR_SPACE_FLASH:

      if (AVR_HAVE_LPMX)
        avr_asm_len ("lpm %2,Z+", xop, plen, 1);
      else
        /* No LPMX:  Plain LPM has no post-increment, bump Z by hand.  */
        avr_asm_len ("lpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;

    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      if (AVR_HAVE_ELPMX)
        avr_asm_len ("elpm %2,Z+", xop, plen, 1);
      else
        /* No ELPMX:  Plain ELPM has no post-increment, bump Z by hand.  */
        avr_asm_len ("elpm" CR_TAB
                     "adiw r30,1", xop, plen, 2);
      break;
    }

  /* Store with post-increment */

  avr_asm_len ("st X+,%2", xop, plen, 1);

  /* Decrement loop-counter and set Z-flag */

  if (QImode == loop_mode)
    {
      avr_asm_len ("dec %1", xop, plen, 1);
    }
  else if (sbiw_p)
    {
      avr_asm_len ("sbiw %1,1", xop, plen, 1);
    }
  else
    {
      /* Counter pair not SBIW-capable:  decrement byte-wise.  */
      avr_asm_len ("subi %A1,1" CR_TAB
                   "sbci %B1,0", xop, plen, 2);
    }

  /* Loop until zero */

  return avr_asm_len ("brne 0b", xop, plen, 1);
}
13217 
13218 
13219 
13220 /* Helper for __builtin_avr_delay_cycles */
13221 
13222 static rtx
13223 avr_mem_clobber (void)
13224 {
13225   rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
13226   MEM_VOLATILE_P (mem) = 1;
13227   return mem;
13228 }
13229 
/* Expand __builtin_avr_delay_cycles:  Emit insns consuming the number
   of cycles given by OPERANDS0, truncated to 32 bits.  Larger amounts
   are handled by delay loops with decreasing counter width, the
   remainder by single NOP insns.  */

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      /* 32-bit counter loop:  consumes (loop_count - 1) * 6 + 9 cycles.  */
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  if (IN_RANGE (cycles, 262145, 83886081))
    {
      /* 24-bit counter loop:  consumes (loop_count - 1) * 5 + 7 cycles.  */
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  if (IN_RANGE (cycles, 768, 262144))
    {
      /* 16-bit counter loop:  consumes (loop_count - 1) * 4 + 5 cycles.  */
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  if (IN_RANGE (cycles, 6, 767))
    {
      /* 8-bit counter loop:  3 cycles per iteration, up to 255 rounds.  */
      loop_count = cycles / 3;
      if (loop_count > 255)
        loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
                                     avr_mem_clobber()));
      cycles -= cycles_used;
    }

  /* Burn the remaining cycles with 2-cycle resp. 1-cycle insns.  */

  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT (2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT (1)));
      cycles--;
    }
}
13291 
13292 
13293 static void
13294 avr_expand_nops (rtx operands0)
13295 {
13296   unsigned HOST_WIDE_INT n_nops = UINTVAL (operands0) & GET_MODE_MASK (HImode);
13297 
13298   while (n_nops--)
13299     {
13300       emit_insn (gen_nopv (const1_rtx));
13301     }
13302 }
13303 
13304 
/* Compute the image of x under f, i.e. perform   x --> f(x)    */

static int
avr_map (unsigned int f, int x)
{
  if (x >= 8)
    return 0;

  /* Nibble no. X of F holds the image f(x).  */

  return (f >> (4 * x)) & 0xf;
}
13312 
13313 
/* Return some metrics of map A.  */

/* Metric selectors, used as the MODE argument of avr_map_metric below.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };
13333 
13334 static unsigned
13335 avr_map_metric (unsigned int a, int mode)
13336 {
13337   unsigned metric = 0;
13338 
13339   for (unsigned i = 0; i < 8; i++)
13340     {
13341       unsigned ai = avr_map (a, i);
13342 
13343       if (mode == MAP_FIXED_0_7)
13344         metric += ai == i;
13345       else if (mode == MAP_NONFIXED_0_7)
13346         metric += ai < 8 && ai != i;
13347       else if (mode == MAP_MASK_FIXED_0_7)
13348         metric |= ((unsigned) (ai == i)) << i;
13349       else if (mode == MAP_PREIMAGE_0_7)
13350         metric += ai < 8;
13351       else if (mode == MAP_MASK_PREIMAGE_F)
13352         metric |= ((unsigned) (ai == 0xf)) << i;
13353       else
13354         gcc_unreachable();
13355     }
13356 
13357   return metric;
13358 }
13359 
13360 
13361 /* Return true if IVAL has a 0xf in its hexadecimal representation
13362    and false, otherwise.  Only nibbles 0..7 are taken into account.
13363    Used as constraint helper for C0f and Cxf.  */
13364 
13365 bool
13366 avr_has_nibble_0xf (rtx ival)
13367 {
13368   unsigned int map = UINTVAL (ival) & GET_MODE_MASK (SImode);
13369   return 0 != avr_map_metric (map, MAP_MASK_PREIMAGE_F);
13370 }
13371 
13372 
/* We have a set of bits that are mapped by a function F.
   Try to decompose F by means of a second function G so that

      F = F o G^-1 o G

   and

      cost (F o G^-1) + cost (G)  <  cost (F)

   Example:  Suppose builtin insert_bits supplies us with the map
   F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
   nibble of the result, we can just as well rotate the bits before inserting
   them and use the map 0x7654ffff which is cheaper than the original map.
   For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */

typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg).  In avr_map_decompose results a
     negative cost means: no decomposition exists.  */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  unsigned int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;
13408 
/* Catalog of candidate functions G:  rotations and shifts by a constant
   amount, together with their inverses and costs.  The MAP field is a
   placeholder here; avr_map_decompose fills it in per decomposition.  */

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, 0, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, 0, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, 0, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, 0, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, 0, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, 0, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, 0, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, 0, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, 0, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, 0, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, 0, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, 0, "<<" }
  };
13427 
13428 
/* Try to decompose F as F = (F o G^-1) o G as described above.
   The result is a struct representing F o G^-1 and G.
   If result.cost < 0 then such a decomposition does not exist.

   F is the map to decompose, G is one candidate from avr_map_op[], and
   VAL_CONST_P tells whether the value to be inserted is a compile-time
   constant.  */

static avr_map_op_t
avr_map_decompose (unsigned int f, const avr_map_op_t *g, bool val_const_p)
{
  /* Does F use any bit of operand 3 (nibble value 0xf)?  */
  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
  avr_map_op_t f_ginv = *g;
  unsigned int ginv = g->ginv;

  f_ginv.cost = -1;

  /* Step 1:  Computing F o G^-1  */

  for (int i = 7; i >= 0; i--)
    {
      int x = avr_map (f, i);

      if (x <= 7)
        {
          x = avr_map (ginv, x);

          /* The bit is no element of the image of G: no avail (cost = -1)  */

          if (x > 7)
            return f_ginv;
        }

      f_ginv.map = (f_ginv.map << 4) + x;
    }

  /* Step 2:  Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
      the cost of the insertion plus the cost of the operation.  */

  /* Step 2a:  Compute cost of F o G^-1  */

  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
    {
      /* The mapping consists only of fixed points and can be folded
         to AND/OR logic in the remainder.  Reasonable cost is 3. */

      f_ginv.cost = 2 + (val_used_p && !val_const_p);
    }
  else
    {
      rtx xop[4];

      /* Get the cost of the insn by calling the output worker with some
         fake values.  Mimic effect of reloading xop[3]: Unused operands
         are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (f_ginv.map, SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      avr_out_insert_bits (xop, &f_ginv.cost);

      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
    }

  /* Step 2b:  Add cost of G  */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);

  return f_ginv;
}
13501 
13502 
13503 /* Insert bits from XOP[1] into XOP[0] according to MAP.
13504    XOP[0] and XOP[1] don't overlap.
13505    If FIXP_P = true:  Move all bits according to MAP using BLD/BST sequences.
13506    If FIXP_P = false: Just move the bit if its position in the destination
13507    is different to its source position.  */
13508 
13509 static void
13510 avr_move_bits (rtx *xop, unsigned int map, bool fixp_p, int *plen)
13511 {
13512   /* T-flag contains this bit of the source, i.e. of XOP[1]  */
13513   int t_bit_src = -1;
13514 
13515   /* We order the operations according to the requested source bit b.  */
13516 
13517   for (int b = 0; b < 8; b++)
13518     for (int bit_dest = 0; bit_dest < 8; bit_dest++)
13519       {
13520         int bit_src = avr_map (map, bit_dest);
13521 
13522         if (b != bit_src
13523             || bit_src >= 8
13524             /* Same position: No need to copy as requested by FIXP_P.  */
13525             || (bit_dest == bit_src && !fixp_p))
13526           continue;
13527 
13528         if (t_bit_src != bit_src)
13529           {
13530             /* Source bit is not yet in T: Store it to T.  */
13531 
13532             t_bit_src = bit_src;
13533 
13534             xop[3] = GEN_INT (bit_src);
13535             avr_asm_len ("bst %T1%T3", xop, plen, 1);
13536           }
13537 
13538         /* Load destination bit with T.  */
13539 
13540         xop[3] = GEN_INT (bit_dest);
13541         avr_asm_len ("bld %T0%T3", xop, plen, 1);
13542       }
13543 }
13544 
13545 
/* PLEN == 0: Print assembler code for `insert_bits'.
   PLEN != 0: Compute code length in bytes.

   OP[0]:  Result
   OP[1]:  The mapping composed of nibbles. If nibble no. N is
           0:   Bit N of result is copied from bit OP[2].0
           ...  ...
           7:   Bit N of result is copied from bit OP[2].7
           0xf: Bit N of result is copied from bit OP[3].N
   OP[2]:  Bits to be inserted
   OP[3]:  Target value  */

const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  /* Local operand order differs from OP[]:  XOP[1] holds the bits to be
     inserted and XOP[2] the target value; XOP[3] is scratch.  */

  xop[0] = op[0];
  xop[1] = op[2];
  xop[2] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    fprintf (asm_out_file, ASM_COMMENT_START "map = 0x%08x\n", map);

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
      fixp_p = false;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      /* Only worth the 3-insn masked merge when it saves more than
         3 instructions of individual bit moves.  */

      if (fixp_p && n_fix - n_nofix > 3)
        {
          xop[3] = gen_int_mode (~mask_fixed, QImode);

          /* Computes %0 = ((%0 ^ %1) & ~mask_fixed) ^ %1, i.e. the
             fixed-point positions are taken from %1 and the remaining
             bits of %0 are left untouched (to be set below).  */

          avr_asm_len ("eor %0,%1"   CR_TAB
                       "andi %0,%3"  CR_TAB
                       "eor %0,%1", xop, plen, 3);
          fixp_p = false;
        }
    }
  else
    {
      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
        {
          avr_asm_len ("mov %0,%1", xop, plen, 1);
          fixp_p = false;
        }
    }

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}
13632 
13633 
/* IDs for all the AVR builtins.  */

enum avr_builtin_id
  {
    /* Expand builtins.def once to enumerate the IDs.  The order matches
       the initialization of avr_bdesc[] below, so an ID can be used to
       index its descriptor directly.  */
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)  \
    AVR_BUILTIN_ ## NAME,
#include "builtins.def"
#undef DEF_BUILTIN

    /* Total number of built-ins; used for array sizing and range checks.  */
    AVR_BUILTIN_COUNT
  };
13645 
/* Descriptor for one AVR built-in function.  */

struct GTY(()) avr_builtin_description
{
  enum insn_code icode;   /* Insn to expand to, or CODE_FOR_nothing.  */
  int n_args;             /* Number of arguments the built-in takes.  */
  tree fndecl;            /* FUNCTION_DECL; set up in avr_init_builtins.  */
};
13652 
13653 
/* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
   that a built-in's ID can be used to access the built-in by means of
   avr_bdesc[ID]  */

static GTY(()) struct avr_builtin_description
avr_bdesc[AVR_BUILTIN_COUNT] =
  {
    /* fndecl starts out NULL_TREE and is filled in by avr_init_builtins.  */
#define DEF_BUILTIN(NAME, N_ARGS, TYPE, ICODE, LIBNAME)         \
    { (enum insn_code) CODE_FOR_ ## ICODE, N_ARGS, NULL_TREE },
#include "builtins.def"
#undef DEF_BUILTIN
  };
13666 
13667 
13668 /* Implement `TARGET_BUILTIN_DECL'.  */
13669 
13670 static tree
13671 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
13672 {
13673   if (id < AVR_BUILTIN_COUNT)
13674     return avr_bdesc[id].fndecl;
13675 
13676   return error_mark_node;
13677 }
13678 
13679 
13680 static void
13681 avr_init_builtin_int24 (void)
13682 {
13683   tree int24_type  = make_signed_type (GET_MODE_BITSIZE (PSImode));
13684   tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
13685 
13686   lang_hooks.types.register_builtin_type (int24_type, "__int24");
13687   lang_hooks.types.register_builtin_type (uint24_type, "__uint24");
13688 }
13689 
13690 
/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target.  */

static void
avr_init_builtins (void)
{
  /* Function types for the simple (non fixed-point) built-ins.  */

  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                char_type_node,
                                NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
                                NULL_TREE);

  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
                                long_unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);

  /* Pointer type for the __memx (24-bit) address space, used by
     __builtin_avr_flash_segment.  */

  tree const_memx_void_node
    = build_qualified_type (void_type_node,
                            TYPE_QUAL_CONST
                            | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));

  tree const_memx_ptr_type_node
    = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);

  tree char_ftype_const_memx_ptr
    = build_function_type_list (char_type_node,
                                const_memx_ptr_type_node,
                                NULL);

  /* Helper macros to build the function types of the fixed-point
     built-ins (TR 18037).  ITYP maps a fixed-point type to the integer
     type of the same precision and signedness.  */

#define ITYP(T)                                                         \
  lang_hooks.types.type_for_size (TYPE_PRECISION (T), TYPE_UNSIGNED (T))

#define FX_FTYPE_FX(fx)                                                 \
  tree fx##r_ftype_##fx##r                                              \
    = build_function_type_list (node_##fx##r, node_##fx##r, NULL);      \
  tree fx##k_ftype_##fx##k                                              \
    = build_function_type_list (node_##fx##k, node_##fx##k, NULL)

#define FX_FTYPE_FX_INT(fx)                                             \
  tree fx##r_ftype_##fx##r_int                                          \
    = build_function_type_list (node_##fx##r, node_##fx##r,             \
                                integer_type_node, NULL);               \
  tree fx##k_ftype_##fx##k_int                                          \
    = build_function_type_list (node_##fx##k, node_##fx##k,             \
                                integer_type_node, NULL)

#define INT_FTYPE_FX(fx)                                                \
  tree int_ftype_##fx##r                                                \
    = build_function_type_list (integer_type_node, node_##fx##r, NULL); \
  tree int_ftype_##fx##k                                                \
    = build_function_type_list (integer_type_node, node_##fx##k, NULL)

#define INTX_FTYPE_FX(fx)                                               \
  tree int##fx##r_ftype_##fx##r                                         \
    = build_function_type_list (ITYP (node_##fx##r), node_##fx##r, NULL); \
  tree int##fx##k_ftype_##fx##k                                         \
    = build_function_type_list (ITYP (node_##fx##k), node_##fx##k, NULL)

#define FX_FTYPE_INTX(fx)                                               \
  tree fx##r_ftype_int##fx##r                                           \
    = build_function_type_list (node_##fx##r, ITYP (node_##fx##r), NULL); \
  tree fx##k_ftype_int##fx##k                                           \
    = build_function_type_list (node_##fx##k, ITYP (node_##fx##k), NULL)

  /* Short names for the fixed-point type nodes: h/n/l/ll widths,
     r = _Fract, k = _Accum, u prefix = unsigned.  */

  tree node_hr = short_fract_type_node;
  tree node_nr = fract_type_node;
  tree node_lr = long_fract_type_node;
  tree node_llr = long_long_fract_type_node;

  tree node_uhr = unsigned_short_fract_type_node;
  tree node_unr = unsigned_fract_type_node;
  tree node_ulr = unsigned_long_fract_type_node;
  tree node_ullr = unsigned_long_long_fract_type_node;

  tree node_hk = short_accum_type_node;
  tree node_nk = accum_type_node;
  tree node_lk = long_accum_type_node;
  tree node_llk = long_long_accum_type_node;

  tree node_uhk = unsigned_short_accum_type_node;
  tree node_unk = unsigned_accum_type_node;
  tree node_ulk = unsigned_long_accum_type_node;
  tree node_ullk = unsigned_long_long_accum_type_node;


  /* For absfx builtins.  */

  FX_FTYPE_FX (h);
  FX_FTYPE_FX (n);
  FX_FTYPE_FX (l);
  FX_FTYPE_FX (ll);

  /* For roundfx builtins.  */

  FX_FTYPE_FX_INT (h);
  FX_FTYPE_FX_INT (n);
  FX_FTYPE_FX_INT (l);
  FX_FTYPE_FX_INT (ll);

  FX_FTYPE_FX_INT (uh);
  FX_FTYPE_FX_INT (un);
  FX_FTYPE_FX_INT (ul);
  FX_FTYPE_FX_INT (ull);

  /* For countlsfx builtins.  */

  INT_FTYPE_FX (h);
  INT_FTYPE_FX (n);
  INT_FTYPE_FX (l);
  INT_FTYPE_FX (ll);

  INT_FTYPE_FX (uh);
  INT_FTYPE_FX (un);
  INT_FTYPE_FX (ul);
  INT_FTYPE_FX (ull);

  /* For bitsfx builtins.  */

  INTX_FTYPE_FX (h);
  INTX_FTYPE_FX (n);
  INTX_FTYPE_FX (l);
  INTX_FTYPE_FX (ll);

  INTX_FTYPE_FX (uh);
  INTX_FTYPE_FX (un);
  INTX_FTYPE_FX (ul);
  INTX_FTYPE_FX (ull);

  /* For fxbits builtins.  */

  FX_FTYPE_INTX (h);
  FX_FTYPE_INTX (n);
  FX_FTYPE_INTX (l);
  FX_FTYPE_INTX (ll);

  FX_FTYPE_INTX (uh);
  FX_FTYPE_INTX (un);
  FX_FTYPE_INTX (ul);
  FX_FTYPE_INTX (ull);


  /* Register every built-in from builtins.def under its lower-cased
     "__builtin_avr_" name and remember the FUNCTION_DECL in avr_bdesc[]
     so that avr_builtin_decl / avr_expand_builtin can find it by ID.  */

#define DEF_BUILTIN(NAME, N_ARGS, TYPE, CODE, LIBNAME)                  \
  {                                                                     \
    int id = AVR_BUILTIN_ ## NAME;                                      \
    const char *Name = "__builtin_avr_" #NAME;                          \
    char *name = (char*) alloca (1 + strlen (Name));                    \
                                                                        \
    gcc_assert (id < AVR_BUILTIN_COUNT);                                \
    avr_bdesc[id].fndecl                                                \
      = add_builtin_function (avr_tolower (name, Name), TYPE, id,       \
                              BUILT_IN_MD, LIBNAME, NULL_TREE);         \
  }
#include "builtins.def"
#undef DEF_BUILTIN

  avr_init_builtin_int24 ();
}
13871 
13872 
/* Subroutine of avr_expand_builtin to expand vanilla builtins
   with non-void result and 1 ... 3 arguments.  */

static rtx
avr_default_expand_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat, xop[3];
  int n_args = call_expr_nargs (exp);
  machine_mode tmode = insn_data[icode].operand[0].mode;

  gcc_assert (n_args >= 1 && n_args <= 3);

  /* Make sure TARGET is usable as output operand 0 of the insn;
     otherwise get a fresh pseudo in the insn's result mode.  */

  if (target == NULL_RTX
      || GET_MODE (target) != tmode
      || !insn_data[icode].operand[0].predicate (target, tmode))
    {
      target = gen_reg_rtx (tmode);
    }

  /* Expand each argument and massage it into the mode the insn's
     respective input operand expects.  */

  for (int n = 0; n < n_args; n++)
    {
      tree arg = CALL_EXPR_ARG (exp, n);
      rtx op = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      machine_mode opmode = GET_MODE (op);
      machine_mode mode = insn_data[icode].operand[n + 1].mode;

      /* SImode value (or a mode-less constant) where the insn wants
         HImode: just use the low part.  */

      if ((opmode == SImode || opmode == VOIDmode) && mode == HImode)
        {
          opmode = HImode;
          op = gen_lowpart (HImode, op);
        }

      /* In case the insn wants input operands in modes different from
         the result, abort.  */

      gcc_assert (opmode == mode || opmode == VOIDmode);

      /* Reload the operand into a register if the insn's predicate
         rejects it as-is.  */

      if (!insn_data[icode].operand[n + 1].predicate (op, mode))
        op = copy_to_mode_reg (mode, op);

      xop[n] = op;
    }

  switch (n_args)
    {
    case 1: pat = GEN_FCN (icode) (target, xop[0]); break;
    case 2: pat = GEN_FCN (icode) (target, xop[0], xop[1]); break;
    case 3: pat = GEN_FCN (icode) (target, xop[0], xop[1], xop[2]); break;

    default:
      gcc_unreachable();
    }

  /* The generator may return NULL when its C condition fails.  */

  if (pat == NULL_RTX)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}
13933 
13934 
/* Implement `TARGET_EXPAND_BUILTIN'.  */
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
avr_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
  unsigned int id = DECL_FUNCTION_CODE (fndecl);
  const struct avr_builtin_description *d = &avr_bdesc[id];
  tree arg0;
  rtx op0;

  gcc_assert (id < AVR_BUILTIN_COUNT);

  /* Built-ins that need special treatment.  Cases that `break' fall
     through to the vanilla expansion at the bottom.  */

  switch (id)
    {
    case AVR_BUILTIN_NOP:
      emit_insn (gen_nopv (GEN_INT (1)));
      return 0;

    case AVR_BUILTIN_DELAY_CYCLES:
      {
        /* Argument must be a compile-time constant; the delay sequence
           is synthesized from its value.  */

        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          error ("%s expects a compile time integer constant", bname);
        else
          avr_expand_delay_cycles (op0);

        return NULL_RTX;
      }

    case AVR_BUILTIN_NOPS:
      {
        /* Likewise: number of NOPs must be a compile-time constant.  */

        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          error ("%s expects a compile time integer constant", bname);
        else
          avr_expand_nops (op0);

        return NULL_RTX;
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        /* The map (1st argument) must be a compile-time constant;
           otherwise diagnose and bail out.  */

        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          {
            error ("%s expects a compile time long integer constant"
                   " as first argument", bname);
            return target;
          }

        break;
      }

    case AVR_BUILTIN_ROUNDHR:   case AVR_BUILTIN_ROUNDUHR:
    case AVR_BUILTIN_ROUNDR:    case AVR_BUILTIN_ROUNDUR:
    case AVR_BUILTIN_ROUNDLR:   case AVR_BUILTIN_ROUNDULR:
    case AVR_BUILTIN_ROUNDLLR:  case AVR_BUILTIN_ROUNDULLR:

    case AVR_BUILTIN_ROUNDHK:   case AVR_BUILTIN_ROUNDUHK:
    case AVR_BUILTIN_ROUNDK:    case AVR_BUILTIN_ROUNDUK:
    case AVR_BUILTIN_ROUNDLK:   case AVR_BUILTIN_ROUNDULK:
    case AVR_BUILTIN_ROUNDLLK:  case AVR_BUILTIN_ROUNDULLK:

      /* Warn about odd rounding.  Rounding points >= FBIT will have
         no effect.  */

      if (TREE_CODE (CALL_EXPR_ARG (exp, 1)) != INTEGER_CST)
        break;

      int rbit = (int) TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1));

      if (rbit >= (int) GET_MODE_FBIT (mode))
        {
          warning (OPT_Wextra, "rounding to %d bits has no effect for "
                   "fixed-point value with %d fractional bits",
                   rbit, GET_MODE_FBIT (mode));

          return expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX, mode,
                              EXPAND_NORMAL);
        }
      else if (rbit <= - (int) GET_MODE_IBIT (mode))
        {
          warning (0, "rounding result will always be 0");
          return CONST0_RTX (mode);
        }

      /* The rounding points RP satisfies now:  -IBIT < RP < FBIT.

         TR 18037 only specifies results for  RP > 0.  However, the
         remaining cases of  -IBIT < RP <= 0  can easily be supported
         without any additional overhead.  */

      break; /* round */
    }

  /* No fold found and no insn:  Call support function from libgcc.  */

  if (d->icode == CODE_FOR_nothing
      && DECL_ASSEMBLER_NAME (get_callee_fndecl (exp)) != NULL_TREE)
    {
      return expand_call (exp, target, ignore);
    }

  /* No special treatment needed: vanilla expand.  */

  gcc_assert (d->icode != CODE_FOR_nothing);
  gcc_assert (d->n_args == call_expr_nargs (exp));

  if (d->n_args == 0)
    {
      emit_insn ((GEN_FCN (d->icode)) (target));
      return NULL_RTX;
    }

  return avr_default_expand_builtin (d->icode, exp, target);
}
14067 
14068 
14069 /* Helper for `avr_fold_builtin' that folds  absfx (FIXED_CST).  */
14070 
14071 static tree
14072 avr_fold_absfx (tree tval)
14073 {
14074   if (FIXED_CST != TREE_CODE (tval))
14075     return NULL_TREE;
14076 
14077   /* Our fixed-points have no padding:  Use double_int payload directly.  */
14078 
14079   FIXED_VALUE_TYPE fval = TREE_FIXED_CST (tval);
14080   unsigned int bits = GET_MODE_BITSIZE (fval.mode);
14081   double_int ival = fval.data.sext (bits);
14082 
14083   if (!ival.is_negative())
14084     return tval;
14085 
14086   /* ISO/IEC TR 18037, 7.18a.6.2:  The absfx functions are saturating.  */
14087 
14088   fval.data = (ival == double_int::min_value (bits, false).sext (bits))
14089     ? double_int::max_value (bits, false)
14090     : -ival;
14091 
14092   return build_fixed (TREE_TYPE (tval), fval);
14093 }
14094 
14095 
/* Implement `TARGET_FOLD_BUILTIN'.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  /* Only fold when optimizing.  */

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        /* Nibble swap is a rotate-left by 4.  */

        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_ABSHR:
    case AVR_BUILTIN_ABSR:
    case AVR_BUILTIN_ABSLR:
    case AVR_BUILTIN_ABSLLR:

    case AVR_BUILTIN_ABSHK:
    case AVR_BUILTIN_ABSK:
    case AVR_BUILTIN_ABSLK:
    case AVR_BUILTIN_ABSLLK:
      /* GCC is not good with folding ABS for fixed-point.  Do it by hand.  */

      return avr_fold_absfx (arg[0]);

    case AVR_BUILTIN_BITSHR:    case AVR_BUILTIN_HRBITS:
    case AVR_BUILTIN_BITSHK:    case AVR_BUILTIN_HKBITS:
    case AVR_BUILTIN_BITSUHR:   case AVR_BUILTIN_UHRBITS:
    case AVR_BUILTIN_BITSUHK:   case AVR_BUILTIN_UHKBITS:

    case AVR_BUILTIN_BITSR:     case AVR_BUILTIN_RBITS:
    case AVR_BUILTIN_BITSK:     case AVR_BUILTIN_KBITS:
    case AVR_BUILTIN_BITSUR:    case AVR_BUILTIN_URBITS:
    case AVR_BUILTIN_BITSUK:    case AVR_BUILTIN_UKBITS:

    case AVR_BUILTIN_BITSLR:    case AVR_BUILTIN_LRBITS:
    case AVR_BUILTIN_BITSLK:    case AVR_BUILTIN_LKBITS:
    case AVR_BUILTIN_BITSULR:   case AVR_BUILTIN_ULRBITS:
    case AVR_BUILTIN_BITSULK:   case AVR_BUILTIN_ULKBITS:

    case AVR_BUILTIN_BITSLLR:   case AVR_BUILTIN_LLRBITS:
    case AVR_BUILTIN_BITSLLK:   case AVR_BUILTIN_LLKBITS:
    case AVR_BUILTIN_BITSULLR:  case AVR_BUILTIN_ULLRBITS:
    case AVR_BUILTIN_BITSULLK:  case AVR_BUILTIN_ULLKBITS:

      /* bitsfx / fxbits just reinterpret the representation; fold to
         a view-convert between the equally-sized types.  */

      gcc_assert (TYPE_PRECISION (val_type)
                  == TYPE_PRECISION (TREE_TYPE (arg[0])));

      return build1 (VIEW_CONVERT_EXPR, val_type, arg[0]);

    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        unsigned int map;
        bool changed = false;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin.  */

            break;
          }

        tmap = wide_int_to_tree (map_type, arg[0]);
        map = TREE_INT_CST_LOW (tmap);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted. If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (size_t i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    if (bits & (1 << mi))     mask_ior |=  (1 << i);
                    else                      mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            /* Masked merge:  (tbits ^ tval) & ~mask_f ^ tval picks bits
               from TBITS where the map has no F and from TVAL elsewhere.  */

            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try to decomposing map to reduce overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %x\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        /* Search avr_map_op[] for the cheapest operation G such that
           doing G first makes the remaining insertion cheaper.  */

        for (size_t i = 0; i < ARRAY_SIZE (avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %x) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = wide_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
14288 
14289 
14290 
14291 /* Initialize the GCC target structure.  */
14292 
14293 #undef  TARGET_ASM_ALIGNED_HI_OP
14294 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
14295 #undef  TARGET_ASM_ALIGNED_SI_OP
14296 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
14297 #undef  TARGET_ASM_UNALIGNED_HI_OP
14298 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
14299 #undef  TARGET_ASM_UNALIGNED_SI_OP
14300 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
14301 #undef  TARGET_ASM_INTEGER
14302 #define TARGET_ASM_INTEGER avr_assemble_integer
14303 #undef  TARGET_ASM_FILE_START
14304 #define TARGET_ASM_FILE_START avr_file_start
14305 #undef  TARGET_ASM_FILE_END
14306 #define TARGET_ASM_FILE_END avr_file_end
14307 
14308 #undef  TARGET_ASM_FUNCTION_END_PROLOGUE
14309 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
14310 #undef  TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
14311 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
14312 
14313 #undef  TARGET_FUNCTION_VALUE
14314 #define TARGET_FUNCTION_VALUE avr_function_value
14315 #undef  TARGET_LIBCALL_VALUE
14316 #define TARGET_LIBCALL_VALUE avr_libcall_value
14317 #undef  TARGET_FUNCTION_VALUE_REGNO_P
14318 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
14319 
14320 #undef  TARGET_ATTRIBUTE_TABLE
14321 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
14322 #undef  TARGET_INSERT_ATTRIBUTES
14323 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
/* Target hook vector setup.  Each #undef/#define pair below overrides a
   default target hook with its AVR-specific implementation (the avr_*
   functions defined earlier in this file).  TARGET_INITIALIZER, used for
   the targetm definition at the end of this file, expands to an
   initializer that picks up whatever these macros name, so the pairs
   must stay adjacent and precede the targetm definition.  */

/* Section handling and assembly output.  */
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

/* Cost model hooks used by the RTL optimizers and register allocator.  */
#undef  TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
/* Argument passing / calling convention hooks.  */
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION avr_set_current_function

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

/* All arguments are always named on AVR; hook_bool_CUMULATIVE_ARGS_true
   is a generic always-true stub from hooks.c.  */
#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef  TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE avr_conditional_register_usage

#undef  TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef  TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

/* Frame/stack layout hooks.  */
#undef  TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args

#undef TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN avr_warn_func_return

#undef  TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef  TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

/* Target-specific built-in functions.  */
#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL avr_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef  TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef  TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST avr_build_builtin_va_list

/* AVR supports ISO/IEC TR 18037 fixed-point types unconditionally.  */
#undef  TARGET_FIXED_POINT_SUPPORTED_P
#define TARGET_FIXED_POINT_SUPPORTED_P hook_bool_void_true

#undef  TARGET_CONVERT_TO_TYPE
#define TARGET_CONVERT_TO_TYPE avr_convert_to_type

/* The AVR port still uses classic reload, not LRA.  */
#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

/* Named address space support (__flash, __memx, ...).  */
#undef  TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef  TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef  TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef  TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef  TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P  \
  avr_addr_space_legitimate_address_p

#undef  TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

#undef  TARGET_ADDR_SPACE_DIAGNOSE_USAGE
#define TARGET_ADDR_SPACE_DIAGNOSE_USAGE avr_addr_space_diagnose_usage

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p

/* Operand printing for the %-codes used in insn output templates.  */
#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

#undef TARGET_USE_BY_PIECES_INFRASTRUCTURE_P
#define TARGET_USE_BY_PIECES_INFRASTRUCTURE_P \
  avr_use_by_pieces_infrastructure_p

#undef  TARGET_LEGITIMATE_COMBINED_INSN
#define TARGET_LEGITIMATE_COMBINED_INSN avr_legitimate_combined_insn
14459 
/* The single global target hook vector for this backend: initialized
   from the TARGET_* macro overrides above via TARGET_INITIALIZER
   (generated into target-def.h).  */
struct gcc_target targetm = TARGET_INITIALIZER;
14461 
14462 
14463 #include "gt-avr.h"
14464