xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/config/arm/thumb1.md (revision d16b7486a53dcb8072b60ec6fcb4373a2d0c27b7)
1;; ARM Thumb-1 Machine Description
2;; Copyright (C) 2007-2020 Free Software Foundation, Inc.
3;;
4;; This file is part of GCC.
5;;
6;; GCC is free software; you can redistribute it and/or modify it
7;; under the terms of the GNU General Public License as published by
8;; the Free Software Foundation; either version 3, or (at your option)
9;; any later version.
10;;
11;; GCC is distributed in the hope that it will be useful, but
12;; WITHOUT ANY WARRANTY; without even the implied warranty of
13;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14;; General Public License for more details.
15;;
16;; You should have received a copy of the GNU General Public License
17;; along with GCC; see the file COPYING3.  If not see
18;; <http://www.gnu.org/licenses/>.  */
19
20
21;;---------------------------------------------------------------------------
22;; Insn patterns
23;;
24
25;; Beware of splitting Thumb1 patterns that output multiple
26;; assembly instructions, in particular instruction such as SBC and
27;; ADC which consume flags.  For example, in the pattern thumb_subdi3
28;; below, the output SUB implicitly sets the flags (assembled to SUBS)
29;; and then the Carry flag is used by SBC to compute the correct
30;; result.  If we split thumb_subdi3 pattern into two separate RTL
31;; insns (using define_insn_and_split), the scheduler might place
32;; other RTL insns between SUB and SBC, possibly modifying the Carry
33;; flag used by SBC.  This might happen because most Thumb1 patterns
34;; for flag-setting instructions do not have explicit RTL for setting
35;; or clobbering the flags.  Instead, they have the attribute "conds"
36;; with value "set" or "clob".  However, this attribute is not used to
37;; identify dependencies and therefore the scheduler might reorder
;; these instructions.  Currently, this problem cannot happen because
;; there are no separate Thumb1 patterns for individual instructions
40;; that consume flags (except conditional execution, which is treated
41;; differently).  In particular there is no Thumb1 armv6-m pattern for
42;; sbc or adc.
43
44
45
;; Load the address of a SYMBOL_REF into a low register when the
;; literal pool is disabled (-mpure-code style): the 32-bit value is
;; built a byte at a time with the :upper8_15:/:upper0_7:/:lower8_15:/
;; :lower0_7: relocation operators, interleaved with 8-bit left
;; shifts.  Seven 2-byte instructions -> length 14.
(define_insn "thumb1_movsi_symbol_ref"
  [(set (match_operand:SI 0 "register_operand" "=l")
	(match_operand:SI 1 "general_operand" ""))
   ]
  "TARGET_THUMB1
   && arm_disable_literal_pool
   && GET_CODE (operands[1]) == SYMBOL_REF"
  "*
  output_asm_insn (\"movs\\t%0, #:upper8_15:%1\", operands);
  output_asm_insn (\"lsls\\t%0, #8\", operands);
  output_asm_insn (\"adds\\t%0, #:upper0_7:%1\", operands);
  output_asm_insn (\"lsls\\t%0, #8\", operands);
  output_asm_insn (\"adds\\t%0, #:lower8_15:%1\", operands);
  output_asm_insn (\"lsls\\t%0, #8\", operands);
  output_asm_insn (\"adds\\t%0, #:lower0_7:%1\", operands);
  return \"\";
  "
  [(set_attr "length" "14")
   (set_attr "conds" "clob")]
)
66
;; Split a move of a large CONST_INT when literal pools are disabled
;; and MOVW is unavailable; thumb1_gen_const_int emits an arithmetic
;; instruction sequence instead.  Constraint I covers the constants a
;; single MOVS can handle, so those are excluded.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "immediate_operand" ""))]
  "TARGET_THUMB1
   && arm_disable_literal_pool
   && GET_CODE (operands[1]) == CONST_INT
   && !TARGET_HAVE_MOVT
   && !satisfies_constraint_I (operands[1])"
  [(clobber (const_int 0))]
  "
    thumb1_gen_const_int (operands[0], INTVAL (operands[1]));
    DONE;
  "
)
81
;; 64-bit add: ADDS sets the carry which the following ADCS consumes,
;; so this must stay a single insn (see the note at the top of the
;; file about not splitting flag-consuming sequences).
(define_insn "*thumb1_adddi3"
  [(set (match_operand:DI          0 "register_operand" "=l")
	(plus:DI (match_operand:DI 1 "register_operand" "%0")
		 (match_operand:DI 2 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))
  ]
  "TARGET_THUMB1"
  "adds\\t%Q0, %Q0, %Q2\;adcs\\t%R0, %R0, %R2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
93
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic additions in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
;;
;; 32-bit add.  Alternatives 0-6 are single instructions; 7-9 (Pa/Pb/Pc
;; constants) are emitted as "#" and split after reload into two adds.
(define_insn_and_split "*thumb1_addsi3"
  [(set (match_operand:SI          0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
	(plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
		 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
  "TARGET_THUMB1"
  "*
   static const char * const asms[] =
   {
     \"adds\\t%0, %0, %2\",
     \"subs\\t%0, %0, #%n2\",
     \"adds\\t%0, %1, %2\",
     \"add\\t%0, %0, %2\",
     \"add\\t%0, %0, %2\",
     \"add\\t%0, %1, %2\",
     \"add\\t%0, %1, %2\",
     \"#\",
     \"#\",
     \"#\"
   };
   /* A negative immediate in the 3-operand forms is printed as a
      subtract of its negation (#%n2).  */
   if ((which_alternative == 2 || which_alternative == 6)
       && CONST_INT_P (operands[2])
       && INTVAL (operands[2]) < 0)
     return (which_alternative == 2) ? \"subs\\t%0, %1, #%n2\" : \"sub\\t%0, %1, #%n2\";
   return asms[which_alternative];
  "
  "&& reload_completed && CONST_INT_P (operands[2])
   && ((operands[1] != stack_pointer_rtx
        && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
       || (operands[1] == stack_pointer_rtx
 	   && INTVAL (operands[2]) > 1020))"
  [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
  {
    /* Split an out-of-range constant into a first add of the largest
       in-range amount (255, -255, or 1020 for SP) plus the rest.  */
    HOST_WIDE_INT offset = INTVAL (operands[2]);
    if (operands[1] == stack_pointer_rtx)
      offset -= 1020;
    else
      {
        if (offset > 255)
	  offset = 255;
	else if (offset < -255)
	  offset = -255;
      }
    operands[3] = GEN_INT (offset);
    operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
  }
  [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")
   (set_attr "type" "alus_imm,alus_imm,alus_sreg,alus_sreg,alus_sreg,
		     alus_sreg,alus_sreg,multiple,multiple,multiple")]
)
148
;; Reloading and elimination of the frame pointer can
;; sometimes cause this optimization to be missed.
;;
;; Fold "mov rN, #imm; add rN, rN, sp" into a single "add rN, sp, #imm"
;; when the constant is a word-aligned SP-relative offset (< 1024).
(define_peephole2
  [(set (match_operand:SI 0 "arm_general_register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))
   (set (match_dup 0)
	(plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
  "TARGET_THUMB1
   && UINTVAL (operands[1]) < 1024
   && (UINTVAL (operands[1]) & 3) == 0"
  [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
  ""
)
162
;; 64-bit subtract: SUBS sets the carry which SBCS consumes, so the
;; pair must not be split (see the file-top comment).
(define_insn "*thumb_subdi3"
  [(set (match_operand:DI           0 "register_operand" "=l")
	(minus:DI (match_operand:DI 1 "register_operand"  "0")
		  (match_operand:DI 2 "register_operand"  "l")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_THUMB1"
  "subs\\t%Q0, %Q0, %Q2\;sbcs\\t%R0, %R0, %R2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
173
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic subtractions in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
;;
;; 32-bit subtract of a register or small immediate (constraint Pd).
(define_insn "thumb1_subsi3_insn"
  [(set (match_operand:SI           0 "register_operand" "=l")
	(minus:SI (match_operand:SI 1 "register_operand" "l")
		  (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
  "TARGET_THUMB1"
  "subs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "alus_sreg")]
)
188
;; Unfortunately on Thumb the '&'/'0' trick can fail when operands
;; 1 and 2 are the same, because reload will make operand 0 match
;; operand 1 without realizing that this conflicts with operand 2.  We fix
;; this by adding another alternative to match this case, and then `reload'
;; it ourselves.  This alternative must come first.
;;
;; Pre-v6 multiply: MULS requires destination == one source, so the
;; first two alternatives copy operand 1 into the destination first.
(define_insn "*thumb_mulsi3"
  [(set (match_operand:SI          0 "register_operand" "=&l,&l,&l")
	(mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
		 (match_operand:SI 2 "register_operand" "l,l,l")))]
 "TARGET_THUMB1 && !arm_arch6"
  "@
   movs\\t%0, %1\;muls\\t%0, %2
   mov\\t%0, %1\;muls\\t%0, %2
   muls\\t%0, %2"
  [(set_attr "length" "4,4,2")
   (set_attr "type" "muls")]
)
206
;; v6 multiply: the destination is tied to one of the sources, so no
;; copy is needed; multiplication is commutative, so either operand
;; may be the tied one.
(define_insn "*thumb_mulsi3_v6"
  [(set (match_operand:SI          0 "register_operand" "=l,l,l")
	(mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
		 (match_operand:SI 2 "register_operand" "l,0,0")))]
  "TARGET_THUMB1 && arm_arch6"
  "@
   muls\\t%0, %2
   muls\\t%0, %1
   muls\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "muls")]
)
219
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic bitwise ANDs and NANDs in sync.md and to the logic for bind_old_new
;; in arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
;;
;; Bitwise AND; two-operand ANDS with the destination tied to operand 1.
(define_insn "*thumb1_andsi3_insn"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(and:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "ands\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "type"  "logic_imm")
   (set_attr "conds" "set")])
233
;; Implement zero_extract (unsigned bitfield extraction) as a left
;; shift to the top of the word followed by a logical right shift,
;; using a scratch register for the intermediate value.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
			 (match_operand:SI 2 "const_int_operand" "")
			 (match_operand:SI 3 "const_int_operand" "")))
   (clobber (match_operand:SI 4 "s_register_operand" ""))]
  "TARGET_THUMB1"
  [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
  "{
     /* Width/position are rewritten into the two shift amounts:
	left by (32 - width - pos), right by (32 - width).  */
     HOST_WIDE_INT temp = INTVAL (operands[2]);

     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
     operands[3] = GEN_INT (32 - temp);
   }"
)
250
;; Implement sign_extract (signed bitfield extraction) as a left shift
;; followed by an arithmetic right shift; no scratch is needed since
;; the destination can hold the intermediate value.
(define_split
  [(set (match_operand:SI 0 "s_register_operand" "")
	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
			 (match_operand:SI 2 "const_int_operand" "")
			 (match_operand:SI 3 "const_int_operand" "")))]
  "TARGET_THUMB1"
  [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
  "{
     /* Left by (32 - width - pos), then arithmetic right by (32 - width).  */
     HOST_WIDE_INT temp = INTVAL (operands[2]);

     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
     operands[3] = GEN_INT (32 - temp);
   }"
)
266
;; Bit clear: dest = dest AND NOT src, mapping directly onto BICS.
;; Note the inverted operand is operand 1 and the tied operand is 2.
(define_insn "thumb1_bicsi3"
  [(set (match_operand:SI                 0 "register_operand" "=l")
	(and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
		(match_operand:SI         2 "register_operand" "0")))]
  "TARGET_THUMB1"
  "bics\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")]
)
277
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic inclusive ORs in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
;;
;; Bitwise inclusive OR; two-operand ORRS, destination tied to operand 1.
(define_insn "*thumb1_iorsi3_insn"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(ior:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "orrs\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")])
291
;; Changes to the constraints of this pattern must be propagated to those of
;; atomic exclusive ORs in sync.md and to the logic for bind_old_new in
;; arm_split_atomic_op in arm.c.  These must be at least as strict as the
;; constraints here and aim to be as permissive.
;;
;; Bitwise exclusive OR; two-operand EORS, destination tied to operand 1.
(define_insn "*thumb1_xorsi3_insn"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(xor:SI (match_operand:SI 1 "register_operand" "%0")
		(match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "eors\\t%0, %2"
  [(set_attr "length" "2")
   (set_attr "conds" "set")
   (set_attr "type" "logics_reg")]
)
306
;; Left shift: immediate-count form (constraint N) or register-count
;; form with the destination tied to the shifted value.
(define_insn "*thumb1_ashlsi3"
  [(set (match_operand:SI            0 "register_operand" "=l,l")
	(ashift:SI (match_operand:SI 1 "register_operand" "l,0")
		   (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "lsls\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])
316
;; Arithmetic right shift; same alternative structure as the left shift.
(define_insn "*thumb1_ashrsi3"
  [(set (match_operand:SI              0 "register_operand" "=l,l")
	(ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "asrs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])
326
;; Logical right shift; same alternative structure as the left shift.
(define_insn "*thumb1_lshrsi3"
  [(set (match_operand:SI              0 "register_operand" "=l,l")
	(lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
  "TARGET_THUMB1"
  "lsrs\\t%0, %1, %2"
  [(set_attr "length" "2")
   (set_attr "type" "shift_imm,shift_reg")
   (set_attr "conds" "set")])
336
;; Rotate right: register count only (Thumb-1 has no immediate ROR),
;; destination tied to the rotated value.
(define_insn "*thumb1_rotrsi3"
  [(set (match_operand:SI              0 "register_operand" "=l")
	(rotatert:SI (match_operand:SI 1 "register_operand" "0")
		     (match_operand:SI 2 "register_operand" "l")))]
  "TARGET_THUMB1"
  "rors\\t%0, %0, %2"
  [(set_attr "type" "shift_reg")
   (set_attr "length" "2")]
)
346
;; 64-bit negate.  The RSBS/SBCS pair relies on the carry flag, so the
;; sequence stays one insn; the early-clobber (&) keeps the destination
;; from overlapping the source since %R0 is written before %Q1/%R1 are
;; fully consumed.
(define_insn "*thumb1_negdi2"
  [(set (match_operand:DI 0 "register_operand" "=&l")
	(neg:DI (match_operand:DI 1 "register_operand" "l")))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_THUMB1"
  "movs\\t%R0, #0\;rsbs\\t%Q0, %Q1, #0\;sbcs\\t%R0, %R1"
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)
356
;; 32-bit negate via reverse-subtract from zero.
(define_insn "*thumb1_negsi2"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(neg:SI (match_operand:SI 1 "register_operand" "l")))]
  "TARGET_THUMB1"
  "rsbs\\t%0, %1, #0"
  [(set_attr "length" "2")
   (set_attr "type" "alu_imm")]
)
365
;; Absolute value, split after reload into the classic branch-free
;; sequence: t = x >> 31 (all ones if negative), then (x + t) ^ t.
(define_insn_and_split "*thumb1_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(abs:SI (match_operand:SI 1 "s_register_operand" "l")))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  ""
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)
380
;; Negated absolute value: like abssi2 but with (t - x) ^ t, where
;; t = x >> 31.
(define_insn_and_split "*thumb1_neg_abssi2"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
   (clobber (match_scratch:SI 2 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
   (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
  ""
  [(set_attr "length" "6")
   (set_attr "type" "multiple")]
)
395
;; Bitwise NOT.
(define_insn "*thumb1_one_cmplsi2"
  [(set (match_operand:SI         0 "register_operand" "=l")
	(not:SI (match_operand:SI 1 "register_operand"  "l")))]
  "TARGET_THUMB1"
  "mvns\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "mvn_reg")]
)
404
;; Zero-extend HI -> SI.  Register alternative uses UXTH on v6+ and is
;; split ("#") otherwise; the memory alternative uses LDRH, with a
;; fix-up when reload produced an SP-based index (SP is not a valid
;; LDRH base+index register).
(define_insn "*thumb1_zero_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1"
{
  rtx mem;

  if (which_alternative == 0 && arm_arch6)
    return "uxth\t%0, %1";
  if (which_alternative == 0)
    return "#";

  mem = XEXP (operands[1], 0);

  if (GET_CODE (mem) == CONST)
    mem = XEXP (mem, 0);

  if (GET_CODE (mem) == PLUS)
    {
      rtx a = XEXP (mem, 0);

      /* This can happen due to bugs in reload.  */
      if (REG_P (a) && REGNO (a) == SP_REGNUM)
        {
          rtx ops[2];
          ops[0] = operands[0];
          ops[1] = a;

          /* Copy SP into the destination and rewrite the address to
	     use that copy as the base instead.  */
          output_asm_insn ("mov\t%0, %1", ops);

          XEXP (mem, 0) = operands[0];
       }
    }

  return "ldrh\t%0, %1";
}
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
				       (const_int 2) (const_int 4))
			 (const_int 4)])
   (set_attr "type" "extend,load_byte")]
)
447
;; Zero-extend QI -> SI without v6: register form is split ("#") into
;; a shift pair elsewhere; memory form is a plain LDRB.
(define_insn "*thumb1_zero_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && !arm_arch6"
  "@
   #
   ldrb\\t%0, %1"
  [(set_attr "length" "4,2")
   (set_attr "type" "alu_shift_reg,load_byte")
   (set_attr "pool_range" "*,32")]
)
459
;; Zero-extend QI -> SI with v6: UXTB for registers, LDRB for memory.
(define_insn "*thumb1_zero_extendqisi2_v6"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
  "TARGET_THUMB1 && arm_arch6"
  "@
   uxtb\\t%0, %1
   ldrb\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "extend,load_byte")]
)
470
;; We used to have an early-clobber on the scratch register here.
;; However, there's a bug somewhere in reload which means that this
;; can be partially ignored during spill allocation if the memory
;; address also needs reloading; this causes us to die later on when
;; we try to verify the operands.  Fortunately, we don't really need
;; the early-clobber: we can always use operand 0 if operand 2
;; overlaps the address.
;;
;; Sign-extend HI -> SI.  Register form uses SXTH on v6+ or is split
;; otherwise.  LDRSH only supports a base+index register address, so
;; other memory forms load the offset into a scratch register first
;; (constant-pool LABEL_REF addresses use a plain LDR instead).
(define_insn "thumb1_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l")
	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
   (clobber (match_scratch:SI 2 "=X,l"))]
  "TARGET_THUMB1"
  "*
  {
    rtx ops[4];
    rtx mem;

    if (which_alternative == 0 && !arm_arch6)
      return \"#\";
    if (which_alternative == 0)
      return \"sxth\\t%0, %1\";

    mem = XEXP (operands[1], 0);

    /* This code used to try to use 'V', and fix the address only if it was
       offsettable, but this fails for e.g. REG+48 because 48 is outside the
       range of QImode offsets, and offsettable_address_p does a QImode
       address check.  */

    if (GET_CODE (mem) == CONST)
      mem = XEXP (mem, 0);

    if (GET_CODE (mem) == LABEL_REF)
      return \"ldr\\t%0, %1\";

    if (GET_CODE (mem) == PLUS)
      {
        rtx a = XEXP (mem, 0);
        rtx b = XEXP (mem, 1);

        if (GET_CODE (a) == LABEL_REF
	    && CONST_INT_P (b))
          return \"ldr\\t%0, %1\";

        /* reg + reg addresses are directly encodable in LDRSH.  */
        if (REG_P (b))
          return \"ldrsh\\t%0, %1\";

        ops[1] = a;
        ops[2] = b;
      }
    else
      {
        ops[1] = mem;
        ops[2] = const0_rtx;
      }

    gcc_assert (REG_P (ops[1]));

    ops[0] = operands[0];
    /* If the scratch overlaps the base register, fall back to using
       the destination itself as the index register.  */
    if (reg_mentioned_p (operands[2], ops[1]))
      ops[3] = ops[0];
    else
      ops[3] = operands[2];
    output_asm_insn (\"movs\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
    return \"\";
  }"
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
					(const_int 2) (const_int 4))
			  (const_int 4)])
   (set_attr "type" "extend,load_byte")
   (set_attr "pool_range" "*,1018")]
)
544
;; Split a sign-extending QImode load whose address LDRSB cannot
;; encode: rewrite it as "dest = offset; ldrsb dest, [base, dest]".
;; reg+reg addresses need no split; fully non-register PLUS operands
;; cannot be handled and FAIL.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
  "TARGET_THUMB1 && reload_completed"
  [(set (match_dup 0) (match_dup 2))
   (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
{
  rtx addr = XEXP (operands[1], 0);

  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
    /* No split necessary.  */
    FAIL;

  if (GET_CODE (addr) == PLUS
      && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
    FAIL;

  if (reg_overlap_mentioned_p (operands[0], addr))
    {
      /* The destination is part of the address: do a byte move into
	 its low part first, then sign-extend in place.  */
      rtx t = gen_lowpart (QImode, operands[0]);
      emit_move_insn (t, operands[1]);
      emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
      DONE;
    }

  if (REG_P (addr))
    {
      addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
      operands[2] = const0_rtx;
    }
  else if (GET_CODE (addr) != PLUS)
    FAIL;
  else if (REG_P (XEXP (addr, 0)))
    {
      operands[2] = XEXP (addr, 1);
      addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
    }
  else
    {
      operands[2] = XEXP (addr, 0);
      addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
    }

  operands[3] = change_address (operands[1], QImode, addr);
})
594
;; Combine "r0 += C; r2 = 0; r3 = sext(mem[r0 + r2])" into
;; "r2 = C; r3 = sext(mem[r0 + r2])", saving the add when both
;; address registers die at (or are overwritten by) the load.
(define_peephole2
  [(set (match_operand:SI 0 "register_operand" "")
	(plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
   (set (match_operand:SI 2 "register_operand" "") (const_int 0))
   (set (match_operand:SI 3 "register_operand" "")
	(sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
  "TARGET_THUMB1
   && GET_CODE (XEXP (operands[4], 0)) == PLUS
   && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
   && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
   && (peep2_reg_dead_p (3, operands[0])
       || rtx_equal_p (operands[0], operands[3]))
   && (peep2_reg_dead_p (3, operands[2])
       || rtx_equal_p (operands[2], operands[3]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
{
  rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
  operands[4] = change_address (operands[4], QImode, addr);
})
615
;; Sign-extend QI -> SI.  SXTB on v6+ for registers; LDRSB only for
;; reg+reg addresses; everything else is emitted as "#" and handled by
;; the split above.
(define_insn "thumb1_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=l,l,l")
	(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
  "TARGET_THUMB1"
{
  rtx addr;

  if (which_alternative == 0 && arm_arch6)
    return "sxtb\\t%0, %1";
  if (which_alternative == 0)
    return "#";

  addr = XEXP (operands[1], 0);
  if (GET_CODE (addr) == PLUS
      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
    return "ldrsb\\t%0, %1";

  return "#";
}
  [(set_attr_alternative "length"
			 [(if_then_else (eq_attr "is_arch6" "yes")
					(const_int 2) (const_int 4))
			  (const_int 2)
			  (if_then_else (eq_attr "is_arch6" "yes")
					(const_int 4) (const_int 6))])
   (set_attr "type" "extend,load_byte,load_byte")]
)
643
;;; ??? This should have alternatives for constants.
;;; ??? This was originally identical to the movdf_insn pattern.
;;; ??? The 'i' constraint looks funny, but it should always be replaced by
;;; thumb_reorg with a memory reference.
;;
;; 64-bit move.  Alternatives: 0 reg-reg (low regs), 1-3 immediates,
;; 4/5 LDMIA/STMIA, 6 load from address (via helper), 7 store via two
;; STRs, 8 reg-reg through high regs.
(define_insn "*thumb1_movdi_insn"
  [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,r,l,>,l, m,*r")
	(match_operand:DI 1 "general_operand"      "l, I,J,j,>,l,mi,l,*r"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], DImode)
       || register_operand (operands[1], DImode))"
  "*
  {
  switch (which_alternative)
    {
    default:
    case 0:
      /* Order the two word copies so an overlapping source word is
	 read before it is overwritten.  */
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"add\\t%0,  %1,  #0\;add\\t%H0, %H1, #0\";
      return   \"add\\t%H0, %H1, #0\;add\\t%0,  %1,  #0\";
    case 1:
      return \"movs\\t%Q0, %1\;movs\\t%R0, #0\";
    case 2:
      /* Constraint J is a negative 8-bit constant: load its negation,
	 negate, then sign-extend into the high word.  */
      operands[1] = GEN_INT (- INTVAL (operands[1]));
      return \"movs\\t%Q0, %1\;rsbs\\t%Q0, %Q0, #0\;asrs\\t%R0, %Q0, #31\";
    case 3:
      gcc_assert (TARGET_HAVE_MOVT);
      /* Fixed: the high word must be printed with the %R operand
	 modifier; the previous template emitted the literal text
	 \"R0\" instead of the register.  */
      return \"movw\\t%Q0, %L1\;movs\\t%R0, #0\";
    case 4:
      return \"ldmia\\t%1, {%0, %H0}\";
    case 5:
      return \"stmia\\t%0, {%1, %H1}\";
    case 6:
      return thumb_load_double_from_address (operands);
    case 7:
      /* Store as two word STRs; build the address of the high word.  */
      operands[2] = gen_rtx_MEM (SImode,
			     plus_constant (Pmode, XEXP (operands[0], 0), 4));
      output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
      return \"\";
    case 8:
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
      return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
    }
  }"
  [(set_attr "length" "4,4,6,6,2,2,6,4,4")
   (set_attr "type" "multiple,multiple,multiple,multiple,load_8,store_8,load_8,store_8,multiple")
   (set_attr "arch" "t1,t1,t1,v8mb,t1,t1,t1,t1,t1")
   (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")]
)
693
;; 32-bit move.  Alternative 7 is the pure-code path: the constant is
;; assembled byte by byte (MOVS/ADDS/LSLS) instead of being loaded
;; from the literal pool.
(define_insn "*thumb1_movsi_insn"
  [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,r,l,l,l,>,l, l, m,*l*h*k")
	(match_operand:SI 1 "general_operand"      "l, I,j,J,K,>,l,i, mi,l,*l*h*k"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], SImode)
       || register_operand (operands[1], SImode))"
{
  switch (which_alternative)
    {
      default:
      case 0: return "movs\t%0, %1";
      case 1: return "movs\t%0, %1";
      case 2: return "movw\t%0, %1";
      case 3: return "#";
      case 4: return "#";
      case 5: return "ldmia\t%1, {%0}";
      case 6: return "stmia\t%0, {%1}";
      case 7:
      /* pure-code alternative: build the constant byte by byte,
	 instead of loading it from a constant pool.  */
	{
	  int i;
	  HOST_WIDE_INT op1 = INTVAL (operands[1]);
	  bool mov_done_p = false;
	  rtx ops[2];
	  ops[0] = operands[0];

	  /* Emit upper 3 bytes if needed.  */
	  for (i = 0; i < 3; i++)
	    {
	       int byte = (op1 >> (8 * (3 - i))) & 0xff;

	      if (byte)
		{
		  ops[1] = GEN_INT (byte);
		  /* First non-zero byte is a MOVS; later ones are
		     ADDS into the already-shifted value.  */
		  if (mov_done_p)
		    output_asm_insn ("adds\t%0, %1", ops);
		  else
		    output_asm_insn ("movs\t%0, %1", ops);
		  mov_done_p = true;
		}

	      if (mov_done_p)
		output_asm_insn ("lsls\t%0, #8", ops);
	    }

	  /* Emit lower byte if needed.  */
	  ops[1] = GEN_INT (op1 & 0xff);
	  if (!mov_done_p)
	    output_asm_insn ("movs\t%0, %1", ops);
	  else if (op1 & 0xff)
	    output_asm_insn ("adds\t%0, %1", ops);
	  return "";
	}
      case 8: return "ldr\t%0, %1";
      case 9: return "str\t%1, %0";
      case 10: return "mov\t%0, %1";
    }
}
  [(set_attr "length" "2,2,4,4,4,2,2,14,2,2,2")
   (set_attr "type" "mov_reg,mov_imm,mov_imm,multiple,multiple,load_4,store_4,alu_sreg,load_4,store_4,mov_reg")
   (set_attr "pool_range" "*,*,*,*,*,*,*, *,1018,*,*")
   (set_attr "arch" "t1,t1,v8mb,t1,t1,t1,t1,t1,t1,t1,t1")
   (set_attr "required_for_purecode" "no,no,no,no,no,no,no,yes,no,no,no")
   (set_attr "conds" "set,clob,nocond,*,*,nocond,nocond,nocond,nocond,nocond,nocond")])
759
; Split the load of 64-bit constant into two loads for high and low 32-bit parts respectively
; to see if we can load them in fewer instructions or fewer cycles.
; For the small 64-bit integer constants that satisfy constraint J, the instruction pattern
; thumb1_movdi_insn has a better way to handle them.
(define_split
  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
       (match_operand:ANY64 1 "immediate_operand" ""))]
  "TARGET_THUMB1 && reload_completed && !satisfies_constraint_J (operands[1])"
  [(set (match_dup 0) (match_dup 1))
   (set (match_dup 2) (match_dup 3))]
  "
  operands[2] = gen_highpart (SImode, operands[0]);
  operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
                                  operands[1]);
  operands[0] = gen_lowpart (SImode, operands[0]);
  operands[1] = gen_lowpart (SImode, operands[1]);
  "
)
778
;; Constraint J (negative 8-bit constant): load the positive negation
;; with MOVS, then negate, avoiding a literal-pool load.
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (neg:SI (match_dup 2)))]
  "
  {
    operands[1] = GEN_INT (- INTVAL (operands[1]));
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
  }"
)
791
;; Constraint K (an 8-bit constant shifted left): load the unshifted
;; byte with MOVS, then shift it into place.  Skipped when MOVW can
;; load the value directly (constraint j).
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_K (operands[1])
   && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
  "
  {
    unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
    unsigned HOST_WIDE_INT mask = 0xff;
    int i;

    /* Find the smallest left shift that places the 8-bit mask over
       all set bits of the value.  */
    for (i = 0; i < 25; i++)
      if ((val & (mask << i)) == val)
        break;

    /* Don't split if the shift is zero.  */
    if (i == 0)
      FAIL;

    operands[1] = GEN_INT (val >> i);
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
    operands[3] = GEN_INT (i);
  }"
)
818
;; For thumb1 split imm move [256-510] into mov [1-255] and add #255
(define_split
  [(set (match_operand:SI 0 "register_operand" "")
	(match_operand:SI 1 "const_int_operand" ""))]
  "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])
   && !(TARGET_HAVE_MOVT && satisfies_constraint_j (operands[1]))"
  [(set (match_dup 2) (match_dup 1))
   (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
  "
  {
    operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
    operands[3] = GEN_INT (255);
  }"
)
834
;; 16-bit move.  The load alternative (case 1) must work around SP
;; appearing as the base of a reg+reg LDRH address, which the
;; instruction cannot encode.
(define_insn "*thumb1_movhi_insn"
  [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l,r")
	(match_operand:HI 1 "general_operand"       "l,m,l,k*h,*r,I,n"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], HImode)
       || register_operand (operands[1], HImode))"
  "*
  switch (which_alternative)
    {
    case 0: return \"adds	%0, %1, #0\";
    case 2: return \"strh	%1, %0\";
    case 3: return \"mov	%0, %1\";
    case 4: return \"mov	%0, %1\";
    case 5: return \"movs	%0, %1\";
    case 6: gcc_assert (TARGET_HAVE_MOVT);
	    return \"movw	%0, %L1\";
    default: gcc_unreachable ();
    case 1:
      /* The stack pointer can end up being taken as an index register.
          Catch this case here and deal with it.  */
      if (GET_CODE (XEXP (operands[1], 0)) == PLUS
	  && REG_P (XEXP (XEXP (operands[1], 0), 0))
	  && REGNO    (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
        {
	  rtx ops[2];
          ops[0] = operands[0];
          ops[1] = XEXP (XEXP (operands[1], 0), 0);

          /* Copy SP into the destination and use it as the base.  */
          output_asm_insn (\"mov	%0, %1\", ops);

          XEXP (XEXP (operands[1], 0), 0) = operands[0];

	}
      return \"ldrh	%0, %1\";
    }"
  [(set_attr "length" "2,4,2,2,2,2,4")
   (set_attr "type" "alus_imm,load_4,store_4,mov_reg,mov_reg,mov_imm,mov_imm")
   (set_attr "arch" "t1,t1,t1,t1,t1,t1,v8mb")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob,nocond")])
874
;; Store a low-register HImode value to a strict (directly encodable)
;; address by delegating to the normal movhi expander; any other case
;; is currently unimplemented and aborts.
(define_expand "thumb_movhi_clobber"
  [(set (match_operand:HI     0 "memory_operand")
	(match_operand:HI     1 "register_operand"))
   (clobber (match_operand:DI 2 "register_operand"))]
  "TARGET_THUMB1"
  "
  if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
      && REGNO (operands[1]) <= LAST_LO_REGNUM)
    {
      emit_insn (gen_movhi (operands[0], operands[1]));
      DONE;
    }
  /* XXX Fixme, need to handle other cases here as well.  */
  gcc_unreachable ();
  "
)
891
;; QImode move.  Same alternative layout as the HImode pattern minus the
;; MOVW case: ADDS #0 reg copy (clobbers flags), LDRB/STRB, hi-reg MOVs,
;; and immediate MOVS.  One operand must be a register.
(define_insn "*thumb1_movqi_insn"
  [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,l*r,*h,l")
	(match_operand:QI 1 "general_operand"       "l,m,l,k*h,*r,I"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], QImode)
       || register_operand (operands[1], QImode))"
  "@
   adds\\t%0, %1, #0
   ldrb\\t%0, %1
   strb\\t%1, %0
   mov\\t%0, %1
   mov\\t%0, %1
   movs\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "alu_imm,load_4,store_4,mov_reg,mov_imm,mov_imm")
   (set_attr "pool_range" "*,32,*,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
909
;; HFmode (half-precision float) move.  Loads distinguish constant-pool
;; references (word LDR) from plain memory (LDRH).  A literal constant
;; (alternative 2) is materialised with MOVS/LSLS/ADDS from its 16-bit
;; target image.
(define_insn "*thumb1_movhf"
  [(set (match_operand:HF     0 "nonimmediate_operand" "=l,l,l,m,*r,*h")
	(match_operand:HF     1 "general_operand"      "l, m,F,l,*h,*r"))]
  "TARGET_THUMB1
   && (	  s_register_operand (operands[0], HFmode)
       || s_register_operand (operands[1], HFmode))"
  "*
  switch (which_alternative)
    {
    case 0:
      return \"movs\\t%0, %1\";
    case 1:
      {
	rtx addr;
	gcc_assert (MEM_P (operands[1]));
	addr = XEXP (operands[1], 0);
	if (GET_CODE (addr) == LABEL_REF
	    || (GET_CODE (addr) == CONST
		&& GET_CODE (XEXP (addr, 0)) == PLUS
		&& GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
		&& CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
	  {
	    /* Constant pool entry.  */
	    return \"ldr\\t%0, %1\";
	  }
	return \"ldrh\\t%0, %1\";
      }
    case 2:
    {
      /* Build the 16-bit image of the constant: high byte first (shifted
	 into place), then the low byte added in.  Skip the shift when the
	 high byte is zero.  */
      int bits;
      int high;
      rtx ops[3];

      bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
			     HFmode);
      ops[0] = operands[0];
      high = (bits >> 8) & 0xff;
      ops[1] = GEN_INT (high);
      ops[2] = GEN_INT (bits & 0xff);
      if (high != 0)
	output_asm_insn (\"movs\\t%0, %1\;lsls\\t%0, #8\;adds\\t%0, %2\", ops);
      else
	output_asm_insn (\"movs\\t%0, %2\", ops);

      return \"\";
    }
    case 3: return \"strh\\t%1, %0\";
    default: return \"mov\\t%0, %1\";
    }
  "
  [(set_attr "length" "2,2,6,2,2,2")
   (set_attr "type" "mov_reg,load_4,mov_reg,store_4,mov_reg,mov_reg")
   (set_attr "pool_range" "*,1018,*,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond")])
964
;;; ??? This should have alternatives for constants.
;; SFmode move.  Covers reg copy (ADDS #0, clobbers flags), LDMIA/STMIA
;; through a register address (">" constraint), LDR/STR, and hi-reg MOVs.
(define_insn "*thumb1_movsf_insn"
  [(set (match_operand:SF     0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
	(match_operand:SF     1 "general_operand"      "l, >,l,mF,l,*h,*r"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], SFmode)
       || register_operand (operands[1], SFmode))"
  "@
   adds\\t%0, %1, #0
   ldmia\\t%1, {%0}
   stmia\\t%0, {%1}
   ldr\\t%0, %1
   str\\t%1, %0
   mov\\t%0, %1
   mov\\t%0, %1"
  [(set_attr "length" "2")
   (set_attr "type" "alus_imm,load_4,store_4,load_4,store_4,mov_reg,mov_reg")
   (set_attr "pool_range" "*,*,*,1018,*,*,*")
   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
)
985
;;; ??? This should have alternatives for constants.
;;; ??? This was originally identical to the movdi_insn pattern.
;;; ??? The 'F' constraint looks funny, but it should always be replaced by
;;; thumb_reorg with a memory reference.
;; DFmode move as a register pair.  Register copies order the two word
;; moves so the source pair is not overwritten before it is read
;; (cases 0 and 5).
(define_insn "*thumb_movdf_insn"
  [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
	(match_operand:DF 1 "general_operand"      "l, >,l,mF,l,*r"))]
  "TARGET_THUMB1
   && (   register_operand (operands[0], DFmode)
       || register_operand (operands[1], DFmode))"
  "*
  switch (which_alternative)
    {
    default:
    case 0:
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"adds\\t%0, %1, #0\;adds\\t%H0, %H1, #0\";
      return \"adds\\t%H0, %H1, #0\;adds\\t%0, %1, #0\";
    case 1:
      return \"ldmia\\t%1, {%0, %H0}\";
    case 2:
      return \"stmia\\t%0, {%1, %H1}\";
    case 3:
      return thumb_load_double_from_address (operands);
    case 4:
      operands[2] = gen_rtx_MEM (SImode,
				 plus_constant (Pmode,
						XEXP (operands[0], 0), 4));
      output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
      return \"\";
    case 5:
      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
	return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
      return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
    }
  "
  [(set_attr "length" "4,2,2,6,4,4")
   (set_attr "type" "multiple,load_8,store_8,load_8,store_8,multiple")
   (set_attr "pool_range" "*,*,*,1018,*,*")]
)
1026
1027
1028;; Thumb block-move insns
1029
;; Copy three words from [op3] to [op2], leaving both pointers advanced
;; by 12 in op0/op1.  Emitted via thumb_output_move_mem_multiple, which
;; uses the three scratch registers for an LDMIA/STMIA pair.
(define_insn "cpymem12b"
  [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
	(mem:SI (match_operand:SI 3 "register_operand" "1")))
   (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
	(mem:SI (plus:SI (match_dup 3) (const_int 4))))
   (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
	(mem:SI (plus:SI (match_dup 3) (const_int 8))))
   (set (match_operand:SI 0 "register_operand" "=l")
	(plus:SI (match_dup 2) (const_int 12)))
   (set (match_operand:SI 1 "register_operand" "=l")
	(plus:SI (match_dup 3) (const_int 12)))
   (clobber (match_scratch:SI 4 "=&l"))
   (clobber (match_scratch:SI 5 "=&l"))
   (clobber (match_scratch:SI 6 "=&l"))]
  "TARGET_THUMB1"
  "* return thumb_output_move_mem_multiple (3, operands);"
  [(set_attr "length" "4")
   ; This isn't entirely accurate...  It loads as well, but in terms of
   ; scheduling the following insn it is better to consider it as a store
   (set_attr "type" "store_12")]
)
1051
;; Two-word variant of cpymem12b: copy 8 bytes and advance both
;; pointers by 8, using two scratch registers.
(define_insn "cpymem8b"
  [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
	(mem:SI (match_operand:SI 3 "register_operand" "1")))
   (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
	(mem:SI (plus:SI (match_dup 3) (const_int 4))))
   (set (match_operand:SI 0 "register_operand" "=l")
	(plus:SI (match_dup 2) (const_int 8)))
   (set (match_operand:SI 1 "register_operand" "=l")
	(plus:SI (match_dup 3) (const_int 8)))
   (clobber (match_scratch:SI 4 "=&l"))
   (clobber (match_scratch:SI 5 "=&l"))]
  "TARGET_THUMB1"
  "* return thumb_output_move_mem_multiple (2, operands);"
  [(set_attr "length" "4")
   ; This isn't entirely accurate...  It loads as well, but in terms of
   ; scheduling the following insn it is better to consider it as a store
   (set_attr "type" "store_8")]
)
1070
1071
;; A pattern to recognize a special situation and optimize for it.
;; On the thumb, zero-extension from memory is preferable to sign-extension
;; due to the available addressing modes.  Hence, convert a signed comparison
;; with zero into an unsigned comparison with 127 if possible.
;; Rewrite (mem_byte < 0) / (mem_byte >= 0) as an unsigned compare of the
;; zero-extended byte against 127: GE becomes LEU, LT becomes GTU.
(define_expand "cbranchqi4"
  [(set (pc) (if_then_else
	      (match_operator 0 "lt_ge_comparison_operator"
	       [(match_operand:QI 1 "memory_operand")
	        (match_operand:QI 2 "const0_operand")])
	      (label_ref (match_operand 3 "" ""))
	      (pc)))]
  "TARGET_THUMB1"
{
  rtx xops[4];
  xops[1] = gen_reg_rtx (SImode);
  emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
  xops[2] = GEN_INT (127);
  xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
			    VOIDmode, xops[1], xops[2]);
  xops[3] = operands[3];
  emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
  DONE;
})
1095
1096;; A pattern for the CB(N)Z instruction added in ARMv8-M Baseline profile,
1097;; adapted from cbranchsi4_insn.  Modifying cbranchsi4_insn instead leads to
1098;; code generation difference for ARMv6-M because the minimum length of the
1099;; instruction becomes 2 even for ARMv6-M due to a limitation in genattrtab's
1100;; handling of PC in the length condition.
;; Emit CB(N)Z when the branch is short enough (length 2); otherwise fall
;; back to the CMP #0 + conditional-branch scheme, reusing already-live
;; condition flags when the tracked thumb1_cc_* state allows it.
(define_insn "thumb1_cbz"
  [(set (pc) (if_then_else
	      (match_operator 0 "equality_operator"
	       [(match_operand:SI 1 "s_register_operand" "l")
		(const_int 0)])
	      (label_ref (match_operand 2 "" ""))
	      (pc)))]
  "TARGET_THUMB1 && TARGET_HAVE_CBZ"
{
  if (get_attr_length (insn) == 2)
    {
      if (GET_CODE (operands[0]) == EQ)
	return "cbz\t%1, %l2";
      else
	return "cbnz\t%1, %l2";
    }
  else
    {
      /* Check whether the flags from an earlier compare are still valid
	 for this comparison; if not, a fresh CMP is emitted below.
	 NOTE(review): operands[2] here is the branch label, not the
	 compared value (const 0); the cc_op1 check/record below therefore
	 looks conservative rather than exact -- confirm against
	 cbranchsi4_insn, where operands[2] is the compare operand.  */
      rtx t = cfun->machine->thumb1_cc_insn;
      if (t != NULL_RTX)
	{
	  if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
	      || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
	    t = NULL_RTX;
	  if (cfun->machine->thumb1_cc_mode == CC_NZmode)
	    {
	      if (!nz_comparison_operator (operands[0], VOIDmode))
		t = NULL_RTX;
	    }
	  else if (cfun->machine->thumb1_cc_mode != CCmode)
	    t = NULL_RTX;
	}
      if (t == NULL_RTX)
	{
	  output_asm_insn ("cmp\t%1, #0", operands);
	  cfun->machine->thumb1_cc_insn = insn;
	  cfun->machine->thumb1_cc_op0 = operands[1];
	  cfun->machine->thumb1_cc_op1 = operands[2];
	  cfun->machine->thumb1_cc_mode = CCmode;
	}
      else
	/* Ensure we emit the right type of condition code on the jump.  */
	XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
					     CC_REGNUM);

      switch (get_attr_length (insn))
	{
	case 4:  return "b%d0\t%l2";
	case 6:  return "b%D0\t.LCB%=;b\t%l2\t%@long jump\n.LCB%=:";
	case 8:  return "b%D0\t.LCB%=;bl\t%l2\t%@far jump\n.LCB%=:";
	default: gcc_unreachable ();
	}
    }
}
  [(set (attr "far_jump")
	(if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
	    (const_string "no")))
   ;; Length 2 only for the forward range CB(N)Z can encode.
   (set (attr "length")
	(if_then_else
	    (and (ge (minus (match_dup 2) (pc)) (const_int 2))
		 (le (minus (match_dup 2) (pc)) (const_int 128)))
	    (const_int 2)
	    (if_then_else
		(and (ge (minus (match_dup 2) (pc)) (const_int -250))
		     (le (minus (match_dup 2) (pc)) (const_int 256)))
		(const_int 4)
		(if_then_else
		    (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
			 (le (minus (match_dup 2) (pc)) (const_int 2048)))
		    (const_int 6)
		    (const_int 8)))))
   (set (attr "type")
	(if_then_else
	    (eq_attr "length" "2")
	    (const_string "branch")
	    (const_string "multiple")))]
)
1180
1181;; Changes to the constraints of this pattern must be propagated to those of
1182;; atomic compare_and_swap splitters in sync.md.  These must be at least as
1183;; strict as the constraints here and aim to be as permissive.
;; Compare-and-branch.  Skips the CMP when the tracked thumb1_cc_* state
;; shows the flags already hold this comparison; otherwise emits CMP and
;; records the new flag state.  Branch form chosen by distance: short
;; conditional branch, inverted branch over B, or inverted branch over BL
;; (far jump).
(define_insn "cbranchsi4_insn"
  [(set (pc) (if_then_else
	      (match_operator 0 "arm_comparison_operator"
	       [(match_operand:SI 1 "s_register_operand" "l,l*h")
	        (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
	      (label_ref (match_operand 3 "" ""))
	      (pc)))]
  "TARGET_THUMB1"
{
  rtx t = cfun->machine->thumb1_cc_insn;
  if (t != NULL_RTX)
    {
      if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
	  || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
	t = NULL_RTX;
      if (cfun->machine->thumb1_cc_mode == CC_NZmode)
	{
	  if (!nz_comparison_operator (operands[0], VOIDmode))
	    t = NULL_RTX;
	}
      else if (cfun->machine->thumb1_cc_mode != CCmode)
	t = NULL_RTX;
    }
  if (t == NULL_RTX)
    {
      output_asm_insn ("cmp\t%1, %2", operands);
      cfun->machine->thumb1_cc_insn = insn;
      cfun->machine->thumb1_cc_op0 = operands[1];
      cfun->machine->thumb1_cc_op1 = operands[2];
      cfun->machine->thumb1_cc_mode = CCmode;
    }
  else
    /* Ensure we emit the right type of condition code on the jump.  */
    XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
					 CC_REGNUM);

  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d0\\t%l3\";
    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
    }
}
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
        (if_then_else
	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
	         (le (minus (match_dup 3) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
1244
1245;; An expander which makes use of the cbranchsi4_scratch insn, but can
1246;; be used safely after RA.
;; Wrapper expander around cbranchsi4_scratch: the scratch register is a
;; real operand (not match_scratch), so the pattern is safe to generate
;; after register allocation.
(define_expand "cbranchsi4_neg_late"
  [(parallel [
     (set (pc) (if_then_else
		(match_operator 4 "arm_comparison_operator"
		 [(match_operand:SI 1 "s_register_operand")
		  (match_operand:SI 2 "thumb1_cmpneg_operand")])
		(label_ref (match_operand 3 "" ""))
		(pc)))
     (clobber (match_operand:SI 0 "s_register_operand"))
  ])]
  "TARGET_THUMB1"
)
1259
1260;; Changes to the constraints of this pattern must be propagated to those of
1261;; atomic compare_and_swap splitters in sync.md.  These must be at least as
1262;; strict as the constraints here and aim to be as permissive.
;; Compare against a negated constant: ADDS scratch, %1, #-op2 sets the
;; flags for the comparison, then a conditional branch is emitted, with
;; the usual long/far-jump fallbacks by distance.
(define_insn "cbranchsi4_scratch"
  [(set (pc) (if_then_else
	      (match_operator 4 "arm_comparison_operator"
	       [(match_operand:SI 1 "s_register_operand" "l,0")
	        (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
	      (label_ref (match_operand 3 "" ""))
	      (pc)))
   (clobber (match_scratch:SI 0 "=l,l"))]
  "TARGET_THUMB1"
  "*
  output_asm_insn (\"adds\\t%0, %1, #%n2\", operands);

  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d4\\t%l3\";
    case 6:  return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
    }
  "
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
        (if_then_else
	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
	         (le (minus (match_dup 3) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
1299
;; Equality branch on (reg == -reg2): emitted as CMN %1, %2 followed by
;; the distance-selected conditional branch.
(define_insn "*negated_cbranchsi4"
  [(set (pc)
	(if_then_else
	 (match_operator 0 "equality_operator"
	  [(match_operand:SI 1 "s_register_operand" "l")
	   (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
	 (label_ref (match_operand 3 "" ""))
	 (pc)))]
  "TARGET_THUMB1"
  "*
  output_asm_insn (\"cmn\\t%1, %2\", operands);
  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d0\\t%l3\";
    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
    }
  "
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
        (if_then_else
	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
	         (le (minus (match_dup 3) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
1335
;; Branch on a single bit: shift the tested bit into the sign position
;; with LSLS (scratch gets bit op2 of op1 as its sign bit, flags set),
;; then branch on the equality condition.
(define_insn "*tbit_cbranch"
  [(set (pc)
	(if_then_else
	 (match_operator 0 "equality_operator"
	  [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
			    (const_int 1)
			    (match_operand:SI 2 "const_int_operand" "i"))
	   (const_int 0)])
	 (label_ref (match_operand 3 "" ""))
	 (pc)))
   (clobber (match_scratch:SI 4 "=l"))]
  "TARGET_THUMB1"
  "*
  {
  rtx op[3];
  op[0] = operands[4];
  op[1] = operands[1];
  /* Shift count that moves bit number op2 into bit 31.  */
  op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));

  output_asm_insn (\"lsls\\t%0, %1, %2\", op);
  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d0\\t%l3\";
    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
    }
  }"
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
        (if_then_else
	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
	         (le (minus (match_dup 3) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
1380
;; Branch on the low op2 bits of op1 being (non)zero: LSLS shifts the
;; unwanted high bits out, setting Z from the remaining bits, then the
;; equality branch is taken as usual.
(define_insn "*tlobits_cbranch"
  [(set (pc)
	(if_then_else
	 (match_operator 0 "equality_operator"
	  [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
			    (match_operand:SI 2 "const_int_operand" "i")
			    (const_int 0))
	   (const_int 0)])
	 (label_ref (match_operand 3 "" ""))
	 (pc)))
   (clobber (match_scratch:SI 4 "=l"))]
  "TARGET_THUMB1"
  "*
  {
  rtx op[3];
  op[0] = operands[4];
  op[1] = operands[1];
  /* Shift count that discards everything above the low op2 bits.  */
  op[2] = GEN_INT (32 - INTVAL (operands[2]));

  output_asm_insn (\"lsls\\t%0, %1, %2\", op);
  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d0\\t%l3\";
    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
    }
  }"
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
        (if_then_else
	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
	         (le (minus (match_dup 3) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
1425
;; Branch on (op0 & op1) being (non)zero, emitted as TST followed by the
;; distance-selected conditional branch.
(define_insn "*tstsi3_cbranch"
  [(set (pc)
	(if_then_else
	 (match_operator 3 "equality_operator"
	  [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
		   (match_operand:SI 1 "s_register_operand" "l"))
	   (const_int 0)])
	 (label_ref (match_operand 2 "" ""))
	 (pc)))]
  "TARGET_THUMB1"
  "*
  {
  output_asm_insn (\"tst\\t%0, %1\", operands);
  switch (get_attr_length (insn))
    {
    case 4:  return \"b%d3\\t%l2\";
    case 6:  return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
    default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
    }
  }"
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
        (if_then_else
	    (and (ge (minus (match_dup 2) (pc)) (const_int -250))
	         (le (minus (match_dup 2) (pc)) (const_int 256)))
	    (const_int 4)
	    (if_then_else
	        (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
		     (le (minus (match_dup 2) (pc)) (const_int 2048)))
		(const_int 6)
		(const_int 8))))
   (set_attr "type" "multiple")]
)
1463
;; Combined decrement-and-branch: op0 = op2 - 1, branching on the
;; (in)equality of op2 with zero.  Since SUBS sets the flags from the
;; subtraction, the original (op2 != 0) test is re-expressed as an
;; unsigned (op2 >= 1) / (op2 < 1) condition on those flags.
;; Alternatives handle a low-reg destination directly, a hi-reg
;; destination via SUBS+MOV, and memory destinations via SUBS+STR
;; (each extra insn adds 2 to the branch-length calculation).
(define_insn "*cbranchne_decr1"
  [(set (pc)
	(if_then_else (match_operator 3 "equality_operator"
		       [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
		        (const_int 0)])
		      (label_ref (match_operand 4 "" ""))
		      (pc)))
   (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
	(plus:SI (match_dup 2) (const_int -1)))
   (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
  "TARGET_THUMB1"
  "*
   {
     rtx cond[2];
     cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
				? GEU : LTU),
			       VOIDmode, operands[2], const1_rtx);
     cond[1] = operands[4];

     if (which_alternative == 0)
       output_asm_insn (\"subs\\t%0, %2, #1\", operands);
     else if (which_alternative == 1)
       {
	 /* We must provide an alternative for a hi reg because reload
	    cannot handle output reloads on a jump instruction, but we
	    can't subtract into that.  Fortunately a mov from lo to hi
	    does not clobber the condition codes.  */
	 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
	 output_asm_insn (\"mov\\t%0, %1\", operands);
       }
     else
       {
	 /* Similarly, but the target is memory.  */
	 output_asm_insn (\"subs\\t%1, %2, #1\", operands);
	 output_asm_insn (\"str\\t%1, %0\", operands);
       }

     switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
       {
	 case 4:
	   output_asm_insn (\"b%d0\\t%l1\", cond);
	   return \"\";
	 case 6:
	   output_asm_insn (\"b%D0\\t.LCB%=\", cond);
	   return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
	 default:
	   output_asm_insn (\"b%D0\\t.LCB%=\", cond);
	   return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
       }
   }
  "
  [(set (attr "far_jump")
        (if_then_else
	    (ior (and (eq (symbol_ref ("which_alternative"))
	                  (const_int 0))
		      (eq_attr "length" "8"))
		 (eq_attr "length" "10"))
	    (const_string "yes")
            (const_string "no")))
   (set_attr_alternative "length"
      [
       ;; Alternative 0
       (if_then_else
	 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
	      (le (minus (match_dup 4) (pc)) (const_int 256)))
	 (const_int 4)
	 (if_then_else
	   (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
		(le (minus (match_dup 4) (pc)) (const_int 2048)))
	   (const_int 6)
	   (const_int 8)))
       ;; Alternative 1
       (if_then_else
	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
	      (le (minus (match_dup 4) (pc)) (const_int 256)))
	 (const_int 6)
	 (if_then_else
	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
		(le (minus (match_dup 4) (pc)) (const_int 2048)))
	   (const_int 8)
	   (const_int 10)))
       ;; Alternative 2
       (if_then_else
	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
	      (le (minus (match_dup 4) (pc)) (const_int 256)))
	 (const_int 6)
	 (if_then_else
	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
		(le (minus (match_dup 4) (pc)) (const_int 2048)))
	   (const_int 8)
	   (const_int 10)))
       ;; Alternative 3
       (if_then_else
	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
	      (le (minus (match_dup 4) (pc)) (const_int 256)))
	 (const_int 6)
	 (if_then_else
	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
		(le (minus (match_dup 4) (pc)) (const_int 2048)))
	   (const_int 8)
	   (const_int 10)))])
   (set_attr "type" "multiple")]
)
1567
;; Combined add-and-branch: op0 = op2 + op3, branching on the sign/zero
;; of the sum (restricted to EQ/NE/GE/LT, which the N and Z flags of
;; ADDS/SUBS encode directly).  Alternatives 0-1 add straight into the
;; destination; 2-3 add into a scratch then MOV to a hi reg; 4-5 add
;; into a scratch then STR to memory (the extra insn adds 2 to the
;; length computation).
(define_insn "*addsi3_cbranch"
  [(set (pc)
	(if_then_else
	 (match_operator 4 "arm_comparison_operator"
	  [(plus:SI
	    (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
	    (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
	   (const_int 0)])
	 (label_ref (match_operand 5 "" ""))
	 (pc)))
   (set
    (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
    (plus:SI (match_dup 2) (match_dup 3)))
   (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
  "TARGET_THUMB1
   && (GET_CODE (operands[4]) == EQ
       || GET_CODE (operands[4]) == NE
       || GET_CODE (operands[4]) == GE
       || GET_CODE (operands[4]) == LT)"
  "*
   {
     rtx cond[3];

     cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
     cond[1] = operands[2];
     cond[2] = operands[3];

     /* A negative constant addend is emitted as SUBS of its negation.  */
     if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
       output_asm_insn (\"subs\\t%0, %1, #%n2\", cond);
     else
       output_asm_insn (\"adds\\t%0, %1, %2\", cond);

     if (which_alternative >= 2
	 && which_alternative < 4)
       output_asm_insn (\"mov\\t%0, %1\", operands);
     else if (which_alternative >= 4)
       output_asm_insn (\"str\\t%1, %0\", operands);

     switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
       {
	 case 4:
	   return \"b%d4\\t%l5\";
	 case 6:
	   return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
	 default:
	   return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
       }
   }
  "
  [(set (attr "far_jump")
        (if_then_else
	    (ior (and (lt (symbol_ref ("which_alternative"))
	                  (const_int 2))
		      (eq_attr "length" "8"))
		 (eq_attr "length" "10"))
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
     (if_then_else
       (lt (symbol_ref ("which_alternative"))
		       (const_int 2))
       (if_then_else
	 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
	      (le (minus (match_dup 5) (pc)) (const_int 256)))
	 (const_int 4)
	 (if_then_else
	   (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
		(le (minus (match_dup 5) (pc)) (const_int 2048)))
	   (const_int 6)
	   (const_int 8)))
       (if_then_else
	 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
	      (le (minus (match_dup 5) (pc)) (const_int 256)))
	 (const_int 6)
	 (if_then_else
	   (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
		(le (minus (match_dup 5) (pc)) (const_int 2048)))
	   (const_int 8)
	   (const_int 10)))))
   (set_attr "type" "multiple")]
)
1649
;; Branch on the sign/zero of (op1 + op2) without keeping the sum.
;; Per alternative: 0 CMP against the negated constant, 1 CMN with a
;; register, 2 ADDS/SUBS into a scratch, 3 ADDS/SUBS in place (op1
;; tied to the scratch).  Conditions restricted to EQ/NE/GE/LT.
(define_insn "*addsi3_cbranch_scratch"
  [(set (pc)
	(if_then_else
	 (match_operator 3 "arm_comparison_operator"
	  [(plus:SI
	    (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
	    (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
	   (const_int 0)])
	 (label_ref (match_operand 4 "" ""))
	 (pc)))
   (clobber (match_scratch:SI 0 "=X,X,l,l"))]
  "TARGET_THUMB1
   && (GET_CODE (operands[3]) == EQ
       || GET_CODE (operands[3]) == NE
       || GET_CODE (operands[3]) == GE
       || GET_CODE (operands[3]) == LT)"
  "*
   {
     switch (which_alternative)
       {
       case 0:
	 output_asm_insn (\"cmp\t%1, #%n2\", operands);
	 break;
       case 1:
	 output_asm_insn (\"cmn\t%1, %2\", operands);
	 break;
       case 2:
	 if (INTVAL (operands[2]) < 0)
	   output_asm_insn (\"subs\t%0, %1, %2\", operands);
	 else
	   output_asm_insn (\"adds\t%0, %1, %2\", operands);
	 break;
       case 3:
	 if (INTVAL (operands[2]) < 0)
	   output_asm_insn (\"subs\t%0, %0, %2\", operands);
	 else
	   output_asm_insn (\"adds\t%0, %0, %2\", operands);
	 break;
       }

     switch (get_attr_length (insn))
       {
	 case 4:
	   return \"b%d3\\t%l4\";
	 case 6:
	   return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
	 default:
	   return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
       }
   }
  "
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "8")
	    (const_string "yes")
            (const_string "no")))
   (set (attr "length")
       (if_then_else
	 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
	      (le (minus (match_dup 4) (pc)) (const_int 256)))
	 (const_int 4)
	 (if_then_else
	   (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
		(le (minus (match_dup 4) (pc)) (const_int 2048)))
	   (const_int 6)
	   (const_int 8))))
   (set_attr "type" "multiple")]
)
1718
;; DImode compare against zero: ORRS of the low (%Q0) and high (%R0)
;; words into a scratch sets Z exactly when the 64-bit value is zero.
(define_insn "*thumb_cmpdi_zero"
  [(set (reg:CC_Z CC_REGNUM)
	(compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
		      (const_int 0)))
   (clobber (match_scratch:SI 1 "=l"))]
  "TARGET_THUMB1"
  "orrs\\t%1, %Q0, %R0"
  [(set_attr "conds" "set")
   (set_attr "length" "2")
   (set_attr "type" "logics_reg")]
)
1730
;; Expand op0 = (op1 == 0), allocating the SImode scratch the matching
;; insn pattern below clobbers.
(define_expand "cstoresi_eq0_thumb1"
  [(parallel
    [(set (match_operand:SI 0 "s_register_operand")
	  (eq:SI (match_operand:SI 1 "s_register_operand")
		 (const_int 0)))
     (clobber (match_dup:SI 2))])]
  "TARGET_THUMB1"
  "operands[2] = gen_reg_rtx (SImode);"
)
1740
;; Expand op0 = (op1 != 0), allocating the SImode scratch the matching
;; insn pattern below clobbers.
(define_expand "cstoresi_ne0_thumb1"
  [(parallel
    [(set (match_operand:SI 0 "s_register_operand")
	  (ne:SI (match_operand:SI 1 "s_register_operand")
		 (const_int 0)))
     (clobber (match_dup:SI 2))])]
  "TARGET_THUMB1"
  "operands[2] = gen_reg_rtx (SImode);"
)
1750
;; op0 = (op1 == 0) via the RSBS/ADCS carry trick: RSBS produces carry
;; iff op1 is zero, ADCS then folds the carry in so the result is 1 for
;; zero input and 0 otherwise.  Second alternative uses the scratch when
;; the destination is tied to the source.
(define_insn "*cstoresi_eq0_thumb1_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
	(eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
	       (const_int 0)))
   (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
  "TARGET_THUMB1"
  "@
   rsbs\\t%0, %1, #0\;adcs\\t%0, %0, %1
   rsbs\\t%2, %1, #0\;adcs\\t%0, %1, %2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
1763
;; op0 = (op1 != 0): SUBS #1 borrows exactly when op1 is zero, so the
;; following SBCS leaves 1 for nonzero input and 0 for zero input.
(define_insn "*cstoresi_ne0_thumb1_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(ne:SI (match_operand:SI 1 "s_register_operand" "0")
	       (const_int 0)))
   (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
  "TARGET_THUMB1"
  "subs\\t%2, %1, #1\;sbcs\\t%0, %1, %2"
  [(set_attr "length" "4")]
)
1773
1774;; Used as part of the expansion of thumb ltu and gtu sequences
;; op0 = -(op1 <u op2): CMP sets the borrow, SBCS %0,%0,%0 then yields
;; all-ones (-1) when op1 < op2 unsigned, else 0.  Used when expanding
;; the thumb ltu and gtu sequences.
(define_insn "cstoresi_nltu_thumb1"
  [(set (match_operand:SI 0 "s_register_operand" "=l,l")
        (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
			(match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
  "TARGET_THUMB1"
  "cmp\\t%1, %2\;sbcs\\t%0, %0, %0"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
1784
;; op0 = (op1 <u op2), split into the negated form above followed by a
;; negation of the -1/0 result back to 1/0.
(define_insn_and_split "cstoresi_ltu_thumb1"
  [(set (match_operand:SI 0 "s_register_operand" "=l,l")
        (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
		(match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
  "TARGET_THUMB1"
  "#"
  "TARGET_THUMB1"
  [(set (match_dup 3)
	(neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
   (set (match_dup 0) (neg:SI (match_dup 3)))]
  "operands[3] = gen_reg_rtx (SImode);"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
1799
;; Used as part of the expansion of thumb les sequence.
;; op0 = op1 + op2 + (op3 >=u op4): CMP sets the carry flag exactly
;; when op3 >=u op4 (no borrow), and ADCS folds it into the addition.
;; op1 is tied to op0 and commutative with op2 ("%0").
(define_insn "thumb1_addsi3_addgeu"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
        (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
			  (match_operand:SI 2 "s_register_operand" "l"))
		 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
			 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
  "TARGET_THUMB1"
  "cmp\\t%3, %4\;adcs\\t%0, %1, %2"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
1812
1813
;; Unconditional jump.  Emits a 2-byte "b" when the length attribute
;; below resolves to 2, i.e. when the target lies within -2044..2048
;; bytes of pc (the asymmetric window accounts for pc read-ahead);
;; otherwise falls back to a 4-byte "bl" far jump, tracked by the
;; far_jump attribute.
(define_insn "*thumb_jump"
  [(set (pc)
	(label_ref (match_operand 0 "" "")))]
  "TARGET_THUMB1"
  "*
  if (get_attr_length (insn) == 2)
    return \"b\\t%l0\";
  return \"bl\\t%l0\\t%@ far jump\";
  "
  [(set (attr "far_jump")
        (if_then_else
	    (eq_attr "length" "4")
	    (const_string "yes")
	    (const_string "no")))
   (set (attr "length")
        (if_then_else
	    (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
		 (le (minus (match_dup 0) (pc)) (const_int 2048)))
  	    (const_int 2)
	    (const_int 4)))
   (set_attr "type" "branch")]
)
1836
;; Call through a register using BLX, available from ARMv5T onwards
;; (arm_arch5t).  Not used for sibling calls.
(define_insn "*call_reg_thumb1_v5"
  [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
	 (match_operand 1 "" ""))
   (use (match_operand 2 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && arm_arch5t && !SIBLING_CALL_P (insn)"
  "blx\\t%0"
  [(set_attr "length" "2")
   (set_attr "type" "call")]
)
1847
;; CMSE non-secure call: the pattern requires the target address to be
;; in r4 (R4_REGNUM), and the actual security-state transition is done
;; by the library helper __gnu_cmse_nonsecure_call.  Not used for
;; sibling calls.
(define_insn "*nonsecure_call_reg_thumb1_v5"
  [(call (unspec:SI [(mem:SI (reg:SI R4_REGNUM))]
		    UNSPEC_NONSECURE_MEM)
	 (match_operand 0 "" ""))
   (use (match_operand 1 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && use_cmse && !SIBLING_CALL_P (insn)"
  "bl\\t__gnu_cmse_nonsecure_call"
  [(set_attr "length" "4")
   (set_attr "type" "call")]
)
1859
;; Call through a register without BLX (pre-ARMv5T).  Without caller
;; interworking, thumb_call_via_reg produces the call sequence.  With
;; caller interworking enabled, the call goes through a libgcc
;; __interwork_*_call_via_%0 helper: the plain variant when operand 1
;; is zero, otherwise the r7 variant when a frame pointer is needed and
;; the r11 variant when it is not.
(define_insn "*call_reg_thumb1"
  [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
	 (match_operand 1 "" ""))
   (use (match_operand 2 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && !arm_arch5t && !SIBLING_CALL_P (insn)"
  "*
  {
    if (!TARGET_CALLER_INTERWORKING)
      return thumb_call_via_reg (operands[0]);
    else if (operands[1] == const0_rtx)
      return \"bl\\t%__interwork_call_via_%0\";
    else if (frame_pointer_needed)
      return \"bl\\t%__interwork_r7_call_via_%0\";
    else
      return \"bl\\t%__interwork_r11_call_via_%0\";
  }"
  [(set_attr "type" "call")]
)
1879
;; Value-returning variant of *call_reg_thumb1_v5: call through a
;; register using BLX (ARMv5T+), with the result stored in operand 0.
(define_insn "*call_value_reg_thumb1_v5"
  [(set (match_operand 0 "" "")
	(call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
	      (match_operand 2 "" "")))
   (use (match_operand 3 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && arm_arch5t"
  "blx\\t%1"
  [(set_attr "length" "2")
   (set_attr "type" "call")]
)
1891
;; Value-returning variant of the CMSE non-secure call: target address
;; in r4, transition via the __gnu_cmse_nonsecure_call library helper,
;; result in operand 0.
(define_insn "*nonsecure_call_value_reg_thumb1_v5"
  [(set (match_operand 0 "" "")
	(call (unspec:SI
	       [(mem:SI (reg:SI R4_REGNUM))]
	       UNSPEC_NONSECURE_MEM)
	      (match_operand 1 "" "")))
   (use (match_operand 2 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && use_cmse"
  "bl\\t__gnu_cmse_nonsecure_call"
  [(set_attr "length" "4")
   (set_attr "type" "call")]
)
1905
;; Value-returning variant of *call_reg_thumb1 (pre-ARMv5T, no BLX).
;; Helper selection mirrors that pattern: thumb_call_via_reg without
;; caller interworking, otherwise a libgcc __interwork_*_call_via_%1
;; helper chosen from operand 2 and frame_pointer_needed.
(define_insn "*call_value_reg_thumb1"
  [(set (match_operand 0 "" "")
	(call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
	      (match_operand 2 "" "")))
   (use (match_operand 3 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1 && !arm_arch5t"
  "*
  {
    if (!TARGET_CALLER_INTERWORKING)
      return thumb_call_via_reg (operands[1]);
    else if (operands[2] == const0_rtx)
      return \"bl\\t%__interwork_call_via_%1\";
    else if (frame_pointer_needed)
      return \"bl\\t%__interwork_r7_call_via_%1\";
    else
      return \"bl\\t%__interwork_r11_call_via_%1\";
  }"
  [(set_attr "type" "call")]
)
1926
;; Direct call to a symbol ("bl"), used only when the target is a
;; SYMBOL_REF that does not require a long call.
(define_insn "*call_insn"
  [(call (mem:SI (match_operand:SI 0 "" ""))
	 (match_operand:SI 1 "" ""))
   (use (match_operand 2 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1
   && GET_CODE (operands[0]) == SYMBOL_REF
   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
  "bl\\t%a0"
  [(set_attr "length" "4")
   (set_attr "type" "call")]
)
1939
;; Value-returning variant of *call_insn: direct "bl" to a SYMBOL_REF
;; that does not require a long call, result in operand 0.
(define_insn "*call_value_insn"
  [(set (match_operand 0 "" "")
	(call (mem:SI (match_operand 1 "" ""))
	      (match_operand 2 "" "")))
   (use (match_operand 3 "" ""))
   (clobber (reg:SI LR_REGNUM))]
  "TARGET_THUMB1
   && GET_CODE (operands[1]) == SYMBOL_REF
   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
  "bl\\t%a1"
  [(set_attr "length" "4")
   (set_attr "type" "call")]
)
1953
;; PIC case-statement dispatch.  Operand 0 is the (already offset)
;; index, operand 1 the upper bound, operand 2 the jump-table label and
;; operand 3 the default label.  First branch to the default label when
;; the index is above the bound (unsigned), then copy the index into r0
;; — where thumb1_casesi_dispatch expects it — and emit the dispatch
;; insn.
(define_expand "thumb1_casesi_internal_pic"
  [(match_operand:SI 0 "s_register_operand")
   (match_operand:SI 1 "thumb1_cmp_operand")
   (match_operand 2 "" "")
   (match_operand 3 "" "")]
  "TARGET_THUMB1"
  {
    rtx reg0;
    rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
    emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
				    operands[3]));
    reg0 = gen_rtx_REG (SImode, 0);
    emit_move_insn (reg0, operands[0]);
    emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
    DONE;
  }
)
1971
;; The jump-table dispatch itself.  The case index is in r0 (placed
;; there by thumb1_casesi_internal_pic above); operand 0 is the table
;; label.  The assembly sequence is produced by thumb1_output_casesi,
;; and the sequence clobbers ip and lr.
(define_insn "thumb1_casesi_dispatch"
  [(parallel [(set (pc) (unspec [(reg:SI 0)
				 (label_ref (match_operand 0 "" ""))
;;				 (label_ref (match_operand 1 "" ""))
]
			 UNSPEC_THUMB1_CASESI))
	      (clobber (reg:SI IP_REGNUM))
              (clobber (reg:SI LR_REGNUM))])]
  "TARGET_THUMB1"
  "* return thumb1_output_casesi(operands);"
  [(set_attr "length" "4")
   (set_attr "type" "multiple")]
)
1985
;; NB Never uses BX.
;; Indirect jump through a register via "mov pc, rN", which keeps the
;; processor in Thumb state (no interworking).
(define_insn "*thumb1_indirect_jump"
  [(set (pc)
	(match_operand:SI 0 "register_operand" "l*r"))]
  "TARGET_THUMB1"
  "mov\\tpc, %0"
  [(set_attr "conds" "clob")
   (set_attr "length" "2")
   (set_attr "type" "branch")]
)
1996
1997
;; Interworking entry sequence emitted at the start of a function;
;; the assembly text is produced by thumb1_output_interwork.
(define_insn "prologue_thumb1_interwork"
  [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
  "TARGET_THUMB1"
  "* return thumb1_output_interwork ();"
  [(set_attr "length" "8")
   (set_attr "type" "multiple")]
)
2005
;; Function epilogue kept as a single volatile insn; the assembly is
;; produced late by thumb1_unexpanded_epilogue.
(define_insn "*epilogue_insns"
  [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
  "TARGET_THUMB1"
  "*
    return thumb1_unexpanded_epilogue ();
  "
  ; Length is absolute worst case, when using CMSE and if this is an entry
  ; function an extra 4 (MSR) bytes will be added.
  [(set (attr "length")
	(if_then_else
	 (match_test "IS_CMSE_ENTRY (arm_current_func_type ())")
	 (const_int 48)
	 (const_int 44)))
   (set_attr "type" "block")
   ;; We don't clobber the conditions, but the potential length of this
   ;; operation is sufficient to make conditionalizing the sequence
   ;; unlikely to be profitable.
   (set_attr "conds" "clob")]
)
2025
;; Miscellaneous Thumb patterns
;; Table jump.  For PIC the table holds label-relative offsets, so the
;; table base address (a reference to the table label, operand 1) is
;; added to the computed offset in operand 0 before jumping.
(define_expand "tablejump"
  [(parallel [(set (pc) (match_operand:SI 0 "register_operand"))
	      (use (label_ref (match_operand 1 "" "")))])]
  "TARGET_THUMB1"
  "
  if (flag_pic)
    {
      /* Hopefully, CSE will eliminate this copy.  */
      rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
      rtx reg2 = gen_reg_rtx (SImode);

      emit_insn (gen_addsi3 (reg2, operands[0], reg1));
      operands[0] = reg2;
    }
  "
)
2043
;; Copy the program counter into a low register.
(define_insn "*thumb1_movpc_insn"
  [(set (match_operand:SI 0 "s_register_operand" "=l")
	(reg:SI PC_REGNUM))]
  "TARGET_THUMB1"
  "mov\\t%0, pc"
  [(set_attr "length" "2")
   (set_attr "conds"  "nocond")
   (set_attr "type"   "mov_reg")]
)
2053
;; NB never uses BX.
;; The jump half of a table dispatch: "mov pc, rN" to the address
;; computed from the table, with a use of the table label so the table
;; itself is kept alive.
(define_insn "*thumb1_tablejump"
  [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
   (use (label_ref (match_operand 1 "" "")))]
  "TARGET_THUMB1"
  "mov\\t%|pc, %0"
  [(set_attr "length" "2")
   (set_attr "type" "branch")]
)
2063
;; Install the return address for an exception-handler return.  Kept as
;; a single insn until after reload, then split into the sequence
;; generated by thumb_set_return_address, with operand 1 serving as an
;; earlyclobber scratch register.
(define_insn_and_split "thumb_eh_return"
  [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
		    VUNSPEC_EH_RETURN)
   (clobber (match_scratch:SI 1 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "
  {
    thumb_set_return_address (operands[0], operands[1]);
    DONE;
  }"
  [(set_attr "type" "mov_reg")]
)
2079
;; DO NOT SPLIT THIS PATTERN.  It is important for security reasons that the
;; canary value does not live beyond the end of this sequence.
;; Stack-protector check: load the guard value through the address in
;; op2, load the canary slot (op1) into op2, XOR the two into op0 (zero
;; iff they match), and immediately zero op2 so the guard value does
;; not survive in a register.
(define_insn "thumb1_stack_protect_test_insn"
  [(set (match_operand:SI 0 "register_operand" "=&l")
	(unspec:SI [(match_operand:SI 1 "memory_operand" "m")
		    (mem:SI (match_operand:SI 2 "register_operand" "+l"))]
	 UNSPEC_SP_TEST))
   (clobber (match_dup 2))]
  "TARGET_THUMB1"
  "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0\;movs\t%2, #0"
  [(set_attr "length" "10")
   (set_attr "conds" "clob")
   (set_attr "type" "multiple")]
)
2094
2095