1;;- Machine description for ARM for GNU compiler
2;;  Copyright (C) 1991-2013 Free Software Foundation, Inc.
3;;  Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4;;  and Martin Simmons (@harleqn.co.uk).
5;;  More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7;; This file is part of GCC.
8
9;; GCC is free software; you can redistribute it and/or modify it
10;; under the terms of the GNU General Public License as published
11;; by the Free Software Foundation; either version 3, or (at your
12;; option) any later version.
13
14;; GCC is distributed in the hope that it will be useful, but WITHOUT
15;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16;; or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
17;; License for more details.
18
19;; You should have received a copy of the GNU General Public License
20;; along with GCC; see the file COPYING3.  If not see
21;; <http://www.gnu.org/licenses/>.
22
23;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
24
25
26;;---------------------------------------------------------------------------
27;; Constants
28
29;; Register numbers -- All machine registers should be defined here
30(define_constants
31  [(R0_REGNUM         0)	; First CORE register
32   (R1_REGNUM	      1)	; Second CORE register
33   (IP_REGNUM	     12)	; Scratch register
34   (SP_REGNUM	     13)	; Stack pointer
35   (LR_REGNUM        14)	; Return address register
36   (PC_REGNUM	     15)	; Program counter
37   (LAST_ARM_REGNUM  15)	;
38   (CC_REGNUM       100)	; Condition code pseudo register
39   (VFPCC_REGNUM    101)	; VFP Condition code pseudo register
40  ]
41)
42;; 3rd operand to select_dominance_cc_mode
43(define_constants
44  [(DOM_CC_X_AND_Y  0)
45   (DOM_CC_NX_OR_Y  1)
46   (DOM_CC_X_OR_Y   2)
47  ]
48)
49;; conditional compare combination
50(define_constants
51  [(CMP_CMP 0)
52   (CMN_CMP 1)
53   (CMP_CMN 2)
54   (CMN_CMN 3)
55   (NUM_OF_COND_CMP 4)
56  ]
57)
58
59
60;;---------------------------------------------------------------------------
61;; Attributes
62
63;; Processor type.  This is created automatically from arm-cores.def.
64(include "arm-tune.md")
65
66; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
67; generating ARM code.  This is used to control the length of some insn
68; patterns that share the same RTL in both ARM and Thumb code.
69(define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
70
71; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
72(define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
73
74; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
75(define_attr "is_thumb1" "no,yes" (const (symbol_ref "thumb1_code")))
76
77;; Operand number of an input operand that is shifted.  Zero if the
78;; given instruction does not shift one of its input operands.
79(define_attr "shift" "" (const_int 0))
80
81; Floating Point Unit.  If we only have floating point emulation, then there
82; is no point in scheduling the floating point insns.  (Well, for best
83; performance we should try and group them together).
84(define_attr "fpu" "none,vfp"
85  (const (symbol_ref "arm_fpu_attr")))
86
87; LENGTH of an instruction (in bytes)
88(define_attr "length" ""
89  (const_int 4))
90
91; The architecture which supports the instruction (or alternative).
92; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
93; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode.  "v6"
94; for ARM or Thumb-2 with arm_arch6, and "nov6" for ARM without
95; arm_arch6.  "v6t2" for Thumb-2 with arm_arch6.  This attribute is
96; used to compute attribute "enabled"; use "any" to enable an
97; alternative in all cases.
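;
; For example, an alternative that is only valid in Thumb-2 state can be
; tagged per alternative, as *arm_addsi3 below does for its addw and subw
; forms.  An illustrative fragment (the string needs one entry per
; alternative of the pattern it is attached to):
;
;   (set_attr "arch" "t2,*")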
98
99(define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,onlya8,neon_onlya8,nota8,neon_nota8,iwmmxt,iwmmxt2"
100  (const_string "any"))
101
102(define_attr "arch_enabled" "no,yes"
103  (cond [(eq_attr "arch" "any")
104	 (const_string "yes")
105
106	 (and (eq_attr "arch" "a")
107	      (match_test "TARGET_ARM"))
108	 (const_string "yes")
109
110	 (and (eq_attr "arch" "t")
111	      (match_test "TARGET_THUMB"))
112	 (const_string "yes")
113
114	 (and (eq_attr "arch" "t1")
115	      (match_test "TARGET_THUMB1"))
116	 (const_string "yes")
117
118	 (and (eq_attr "arch" "t2")
119	      (match_test "TARGET_THUMB2"))
120	 (const_string "yes")
121
122	 (and (eq_attr "arch" "32")
123	      (match_test "TARGET_32BIT"))
124	 (const_string "yes")
125
126	 (and (eq_attr "arch" "v6")
127	      (match_test "TARGET_32BIT && arm_arch6"))
128	 (const_string "yes")
129
130	 (and (eq_attr "arch" "nov6")
131	      (match_test "TARGET_32BIT && !arm_arch6"))
132	 (const_string "yes")
133
134	 (and (eq_attr "arch" "v6t2")
135	      (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
136	 (const_string "yes")
137
138	 (and (eq_attr "arch" "onlya8")
139	      (eq_attr "tune" "cortexa8"))
140	 (const_string "yes")
141
142	 (and (eq_attr "arch" "neon_onlya8")
143	      (eq_attr "tune" "cortexa8")
144	      (match_test "TARGET_NEON"))
145	 (const_string "yes")
146
147	 (and (eq_attr "arch" "nota8")
148	      (not (eq_attr "tune" "cortexa8")))
149	 (const_string "yes")
150
151	 (and (eq_attr "arch" "neon_nota8")
152	      (not (eq_attr "tune" "cortexa8"))
153	      (match_test "TARGET_NEON"))
154	 (const_string "yes")
155
156	 (and (eq_attr "arch" "iwmmxt2")
157	      (match_test "TARGET_REALLY_IWMMXT2"))
158	 (const_string "yes")]
159
160	(const_string "no")))
161
162(define_attr "opt" "any,speed,size"
163  (const_string "any"))
164
165(define_attr "opt_enabled" "no,yes"
166  (cond [(eq_attr "opt" "any")
167         (const_string "yes")
168
169	 (and (eq_attr "opt" "speed")
170	      (match_test "optimize_function_for_speed_p (cfun)"))
171	 (const_string "yes")
172
173	 (and (eq_attr "opt" "size")
174	      (match_test "optimize_function_for_size_p (cfun)"))
175	 (const_string "yes")]
176	(const_string "no")))
177
178; Allows an insn to disable certain alternatives for reasons other than
179; arch support.
180(define_attr "insn_enabled" "no,yes"
181  (const_string "yes"))
182
183; Enable all alternatives that are both arch_enabled and insn_enabled.
184 (define_attr "enabled" "no,yes"
185   (cond [(eq_attr "insn_enabled" "no")
186	  (const_string "no")
187
188	  (eq_attr "arch_enabled" "no")
189	  (const_string "no")
190
191	  (eq_attr "opt_enabled" "no")
192	  (const_string "no")]
193	 (const_string "yes")))
194
195; POOL_RANGE is how far away from a constant pool entry that this insn
196; can be placed.  If the distance is zero, then this insn will never
197; reference the pool.
198; Note that for Thumb constant pools the PC value is rounded down to the
199; nearest multiple of four.  Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
200; Thumb insns) should be set to <max_range> - 2.
201; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
202; before its address.  It is set to <max_range> - (8 + <data_size>).
203(define_attr "arm_pool_range" "" (const_int 0))
204(define_attr "thumb2_pool_range" "" (const_int 0))
205(define_attr "arm_neg_pool_range" "" (const_int 0))
206(define_attr "thumb2_neg_pool_range" "" (const_int 0))
207
208(define_attr "pool_range" ""
209  (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
210	(attr "arm_pool_range")))
211(define_attr "neg_pool_range" ""
212  (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
213	(attr "arm_neg_pool_range")))
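; Patterns that can load from the literal pool override these per
; alternative; for example (illustrative values only, not taken from a
; real pattern in this file):
;
;   (set_attr "pool_range" "*,4096")
;   (set_attr "neg_pool_range" "*,4084")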
214
215; An assembler sequence may clobber the condition codes without us knowing.
216; If such an insn references the pool, then we have no way of knowing how,
217; so use the most conservative value for pool_range.
218(define_asm_attributes
219 [(set_attr "conds" "clob")
220  (set_attr "length" "4")
221  (set_attr "pool_range" "250")])
222
223;; The instruction used to implement a particular pattern.  This
224;; information is used by pipeline descriptions to provide accurate
225;; scheduling information.
226
227(define_attr "insn"
228        "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,sat,other"
229        (const_string "other"))
230
231; TYPE attribute is used to detect floating point instructions which, if
232; running on a co-processor, can run in parallel with other basic instructions.
233; If write-buffer scheduling is enabled then it can also be used in the
234; scheduling of writes.
235
236; Classification of each insn
237; Note: vfp.md has different meanings for some of these, and some further
238; types as well.  See that file for details.
239; simple_alu_imm  a simple alu instruction that doesn't hit memory or fp
240;               regs or have a shifted source operand and has an immediate
241;               operand. This currently only tracks very basic immediate
242;               alu operations.
243; alu_reg       any alu instruction that doesn't hit memory or fp
244;               regs or have a shifted source operand
245;               and does not have an immediate operand. This is
246;               also the default
247; simple_alu_shift covers UXTH, UXTB, SXTH, SXTB
248; alu_shift	any data instruction that doesn't hit memory or fp
249;		regs, but has a source operand shifted by a constant
250; alu_shift_reg	any data instruction that doesn't hit memory or fp
251;		regs, but has a source operand shifted by a register value
252; mult		a multiply instruction
253; block		blockage insn, this blocks all functional units
254; float		a floating point arithmetic operation (subject to expansion)
255; fdivd		DFmode floating point division
256; fdivs		SFmode floating point division
257; f_load[sd]	A single/double load from memory. Used for VFP unit.
258; f_store[sd]	A single/double store to memory. Used for VFP unit.
259; f_flag	a transfer of co-processor flags to the CPSR
260; f_2_r		transfer float to core (no memory needed)
261; r_2_f		transfer core to float
262; f_cvt		convert floating<->integral
263; branch	a branch
264; call		a subroutine call
265; load_byte	load byte(s) from memory to arm registers
266; load1		load 1 word from memory to arm registers
267; load2         load 2 words from memory to arm registers
268; load3         load 3 words from memory to arm registers
269; load4         load 4 words from memory to arm registers
270; store1	store 1 word to memory from arm registers
271; store2	store 2 words
272; store3	store 3 words
273; store4	store 4 (or more) words
274;
275
276(define_attr "type"
277 "simple_alu_imm,\
278  alu_reg,\
279  simple_alu_shift,\
280  alu_shift,\
281  alu_shift_reg,\
282  mult,\
283  block,\
284  float,\
285  fdivd,\
286  fdivs,\
287  fmuls,\
288  fmuld,\
289  fmacs,\
290  fmacd,\
291  ffmas,\
292  ffmad,\
293  f_rints,\
294  f_rintd,\
295  f_minmaxs,\
296  f_minmaxd,\
297  f_flag,\
298  f_loads,\
299  f_loadd,\
300  f_stores,\
301  f_stored,\
302  f_2_r,\
303  r_2_f,\
304  f_cvt,\
305  branch,\
306  call,\
307  load_byte,\
308  load1,\
309  load2,\
310  load3,\
311  load4,\
312  store1,\
313  store2,\
314  store3,\
315  store4,\
316  fconsts,\
317  fconstd,\
318  fadds,\
319  faddd,\
320  ffariths,\
321  ffarithd,\
322  fcmps,\
323  fcmpd,\
324  fcpys"
325 (if_then_else
326    (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,\
327	     	     umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
328    (const_string "mult")
329    (const_string "alu_reg")))
330
331; Is this an (integer side) multiply with a 64-bit result?
332(define_attr "mul64" "no,yes"
333  (if_then_else
334    (eq_attr "insn"
335     "smlalxy,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
336    (const_string "yes")
337    (const_string "no")))
338
339; wtype for WMMX insn scheduling purposes.
340(define_attr "wtype"
341        "none,wor,wxor,wand,wandn,wmov,tmcrr,tmrrc,wldr,wstr,tmcr,tmrc,wadd,wsub,wmul,wmac,wavg2,tinsr,textrm,wshufh,wcmpeq,wcmpgt,wmax,wmin,wpack,wunpckih,wunpckil,wunpckeh,wunpckel,wror,wsra,wsrl,wsll,wmadd,tmia,tmiaph,tmiaxy,tbcst,tmovmsk,wacc,waligni,walignr,tandc,textrc,torc,torvsc,wsad,wabs,wabsdiff,waddsubhx,wsubaddhx,wavg4,wmulw,wqmulm,wqmulwm,waddbhus,wqmiaxy,wmiaxy,wmiawxy,wmerge" (const_string "none"))
342
343; Load scheduling, set from the arm_ld_sched variable
344; initialized by arm_option_override()
345(define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
346
347;; Classification of NEON instructions for scheduling purposes.
348(define_attr "neon_type"
349   "neon_int_1,\
350   neon_int_2,\
351   neon_int_3,\
352   neon_int_4,\
353   neon_int_5,\
354   neon_vqneg_vqabs,\
355   neon_vmov,\
356   neon_vaba,\
357   neon_vsma,\
358   neon_vaba_qqq,\
359   neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
360   neon_mul_qqq_8_16_32_ddd_32,\
361   neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
362   neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
363   neon_mla_qqq_8_16,\
364   neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
365   neon_mla_qqq_32_qqd_32_scalar,\
366   neon_mul_ddd_16_scalar_32_16_long_scalar,\
367   neon_mul_qqd_32_scalar,\
368   neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
369   neon_shift_1,\
370   neon_shift_2,\
371   neon_shift_3,\
372   neon_vshl_ddd,\
373   neon_vqshl_vrshl_vqrshl_qqq,\
374   neon_vsra_vrsra,\
375   neon_fp_vadd_ddd_vabs_dd,\
376   neon_fp_vadd_qqq_vabs_qq,\
377   neon_fp_vsum,\
378   neon_fp_vmul_ddd,\
379   neon_fp_vmul_qqd,\
380   neon_fp_vmla_ddd,\
381   neon_fp_vmla_qqq,\
382   neon_fp_vmla_ddd_scalar,\
383   neon_fp_vmla_qqq_scalar,\
384   neon_fp_vrecps_vrsqrts_ddd,\
385   neon_fp_vrecps_vrsqrts_qqq,\
386   neon_bp_simple,\
387   neon_bp_2cycle,\
388   neon_bp_3cycle,\
389   neon_ldr,\
390   neon_str,\
391   neon_vld1_1_2_regs,\
392   neon_vld1_3_4_regs,\
393   neon_vld2_2_regs_vld1_vld2_all_lanes,\
394   neon_vld2_4_regs,\
395   neon_vld3_vld4,\
396   neon_vst1_1_2_regs_vst2_2_regs,\
397   neon_vst1_3_4_regs,\
398   neon_vst2_4_regs_vst3_vst4,\
399   neon_vst3_vst4,\
400   neon_vld1_vld2_lane,\
401   neon_vld3_vld4_lane,\
402   neon_vst1_vst2_lane,\
403   neon_vst3_vst4_lane,\
404   neon_vld3_vld4_all_lanes,\
405   neon_mcr,\
406   neon_mcr_2_mcrr,\
407   neon_mrc,\
408   neon_mrrc,\
409   neon_ldm_2,\
410   neon_stm_2,\
411   none"
412 (const_string "none"))
413
414; condition codes: this one is used by final_prescan_insn to speed up
415; conditionalizing instructions.  It saves having to scan the rtl to see if
416; it uses or alters the condition codes.
417;
418; USE means that the condition codes are used by the insn in the process of
419;   outputting code; this means (at present) that we can't use the insn in
420;   inlined branches.
421;
422; SET means that the purpose of the insn is to set the condition codes in a
423;   well defined manner.
424;
425; CLOB means that the condition codes are altered in an undefined manner, if
426;   they are altered at all.
427;
428; UNCONDITIONAL means the instruction cannot be conditionally executed and
429;   that the instruction does not use or alter the condition codes.
430;
431; NOCOND means that the instruction does not use or alter the condition
432;   codes but can be converted into a conditionally executed instruction.
433
434(define_attr "conds" "use,set,clob,unconditional,nocond"
435	(if_then_else
436	 (ior (eq_attr "is_thumb1" "yes")
437	      (eq_attr "type" "call"))
438	 (const_string "clob")
439	 (if_then_else (eq_attr "neon_type" "none")
440	  (const_string "nocond")
441	  (const_string "unconditional"))))
442
443; Predicable means that the insn can be conditionally executed based on
444; an automatically added predicate (additional patterns are generated by
445; gen...).  We default to 'no' because no Thumb patterns match this rule
446; and not all ARM patterns do.
447(define_attr "predicable" "no,yes" (const_string "no"))
448
449; Only model the write buffer for ARM6 and ARM7.  Earlier processors don't
450; have one.  Later ones, such as StrongARM, have write-back caches, so don't
451; suffer blockages enough to warrant modelling this (and it can adversely
452; affect the schedule).
453(define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
454
455; WRITE_CONFLICT implies that a read following an unrelated write is likely
456; to stall the processor.  Used with model_wbuf above.
457(define_attr "write_conflict" "no,yes"
458  (if_then_else (eq_attr "type"
459		 "block,call,load1")
460		(const_string "yes")
461		(const_string "no")))
462
463; Classify the insns into those that take one cycle and those that take more
464; than one on the main cpu execution unit.
465(define_attr "core_cycles" "single,multi"
466  (if_then_else (eq_attr "type"
467		 "simple_alu_imm,alu_reg,\
468                  simple_alu_shift,alu_shift,\
469                  float,fdivd,fdivs")
470		(const_string "single")
471	        (const_string "multi")))
472
473;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
474;; distant label.  Only applicable to Thumb code.
475(define_attr "far_jump" "yes,no" (const_string "no"))
476
477
478;; The number of machine instructions this pattern expands to.
479;; Used for Thumb-2 conditional execution.
480(define_attr "ce_count" "" (const_int 1))
481
482;;---------------------------------------------------------------------------
483;; Unspecs
484
485(include "unspecs.md")
486
487;;---------------------------------------------------------------------------
488;; Mode iterators
489
490(include "iterators.md")
491
492;;---------------------------------------------------------------------------
493;; Predicates
494
495(include "predicates.md")
496(include "constraints.md")
497
498;;---------------------------------------------------------------------------
499;; Pipeline descriptions
500
501(define_attr "tune_cortexr4" "yes,no"
502  (const (if_then_else
503	  (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
504	  (const_string "yes")
505	  (const_string "no"))))
506
507;; True if the generic scheduling description should be used.
508
509(define_attr "generic_sched" "yes,no"
510  (const (if_then_else
511          (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa5,cortexa7,cortexa8,cortexa9,cortexa15,cortexm4,marvell_pj4")
512	       (eq_attr "tune_cortexr4" "yes"))
513          (const_string "no")
514          (const_string "yes"))))
515
516(define_attr "generic_vfp" "yes,no"
517  (const (if_then_else
518	  (and (eq_attr "fpu" "vfp")
519	       (eq_attr "tune" "!arm1020e,arm1022e,cortexa5,cortexa7,cortexa8,cortexa9,cortexm4,marvell_pj4")
520	       (eq_attr "tune_cortexr4" "no"))
521	  (const_string "yes")
522	  (const_string "no"))))
523
524(include "marvell-f-iwmmxt.md")
525(include "arm-generic.md")
526(include "arm926ejs.md")
527(include "arm1020e.md")
528(include "arm1026ejs.md")
529(include "arm1136jfs.md")
530(include "fa526.md")
531(include "fa606te.md")
532(include "fa626te.md")
533(include "fmp626.md")
534(include "fa726te.md")
535(include "cortex-a5.md")
536(include "cortex-a7.md")
537(include "cortex-a8.md")
538(include "cortex-a9.md")
539(include "cortex-a15.md")
540(include "cortex-r4.md")
541(include "cortex-r4f.md")
542(include "cortex-m4.md")
543(include "cortex-m4-fpu.md")
544(include "vfp11.md")
545(include "marvell-pj4.md")
546
547
548;;---------------------------------------------------------------------------
549;; Insn patterns
550;;
551;; Addition insns.
552
553;; Note: For DImode insns, there is normally no reason why operands should
554;; not be in the same register; what we don't want is for something being
555;; written to partially overlap something that is an input.
556
557(define_expand "adddi3"
558 [(parallel
559   [(set (match_operand:DI           0 "s_register_operand" "")
560	  (plus:DI (match_operand:DI 1 "s_register_operand" "")
561	           (match_operand:DI 2 "arm_adddi_operand"  "")))
562    (clobber (reg:CC CC_REGNUM))])]
563  "TARGET_EITHER"
564  "
565  if (TARGET_THUMB1)
566    {
567      if (!REG_P (operands[1]))
568        operands[1] = force_reg (DImode, operands[1]);
569      if (!REG_P (operands[2]))
570        operands[2] = force_reg (DImode, operands[2]);
571     }
572  "
573)
574
575(define_insn "*thumb1_adddi3"
576  [(set (match_operand:DI          0 "register_operand" "=l")
577	(plus:DI (match_operand:DI 1 "register_operand" "%0")
578		 (match_operand:DI 2 "register_operand" "l")))
579   (clobber (reg:CC CC_REGNUM))
580  ]
581  "TARGET_THUMB1"
582  "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
583  [(set_attr "length" "4")]
584)
585
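;; 64-bit addition in the core registers.  After reload this splits into an
;; adds of the low words (setting the carry flag) followed by an
;; add-with-carry of the high words.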
586(define_insn_and_split "*arm_adddi3"
587  [(set (match_operand:DI          0 "s_register_operand" "=&r,&r,&r,&r,&r")
588	(plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0, r, 0, r")
589		 (match_operand:DI 2 "arm_adddi_operand"  "r,  0, r, Dd, Dd")))
590   (clobber (reg:CC CC_REGNUM))]
591  "TARGET_32BIT && !TARGET_NEON"
592  "#"
593  "TARGET_32BIT && reload_completed
594   && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))"
595  [(parallel [(set (reg:CC_C CC_REGNUM)
596		   (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
597				 (match_dup 1)))
598	      (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
599   (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (match_dup 5))
600			       (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
601  "
602  {
603    operands[3] = gen_highpart (SImode, operands[0]);
604    operands[0] = gen_lowpart (SImode, operands[0]);
605    operands[4] = gen_highpart (SImode, operands[1]);
606    operands[1] = gen_lowpart (SImode, operands[1]);
607    operands[5] = gen_highpart_mode (SImode, DImode, operands[2]);
608    operands[2] = gen_lowpart (SImode, operands[2]);
609  }"
610  [(set_attr "conds" "clob")
611   (set_attr "length" "8")]
612)
613
614(define_insn_and_split "*adddi_sesidi_di"
615  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
616	(plus:DI (sign_extend:DI
617		  (match_operand:SI 2 "s_register_operand" "r,r"))
618		 (match_operand:DI 1 "s_register_operand" "0,r")))
619   (clobber (reg:CC CC_REGNUM))]
620  "TARGET_32BIT"
621  "#"
622  "TARGET_32BIT && reload_completed"
623  [(parallel [(set (reg:CC_C CC_REGNUM)
624		   (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
625				 (match_dup 1)))
626	      (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
627   (set (match_dup 3) (plus:SI (plus:SI (ashiftrt:SI (match_dup 2)
628						     (const_int 31))
629					(match_dup 4))
630			       (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
631  "
632  {
633    operands[3] = gen_highpart (SImode, operands[0]);
634    operands[0] = gen_lowpart (SImode, operands[0]);
635    operands[4] = gen_highpart (SImode, operands[1]);
636    operands[1] = gen_lowpart (SImode, operands[1]);
637    operands[2] = gen_lowpart (SImode, operands[2]);
638  }"
639  [(set_attr "conds" "clob")
640   (set_attr "length" "8")]
641)
642
643(define_insn_and_split "*adddi_zesidi_di"
644  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
645	(plus:DI (zero_extend:DI
646		  (match_operand:SI 2 "s_register_operand" "r,r"))
647		 (match_operand:DI 1 "s_register_operand" "0,r")))
648   (clobber (reg:CC CC_REGNUM))]
649  "TARGET_32BIT"
650  "#"
651  "TARGET_32BIT && reload_completed"
652  [(parallel [(set (reg:CC_C CC_REGNUM)
653		   (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
654				 (match_dup 1)))
655	      (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
656   (set (match_dup 3) (plus:SI (plus:SI (match_dup 4) (const_int 0))
657			       (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
658  "
659  {
660    operands[3] = gen_highpart (SImode, operands[0]);
661    operands[0] = gen_lowpart (SImode, operands[0]);
662    operands[4] = gen_highpart (SImode, operands[1]);
663    operands[1] = gen_lowpart (SImode, operands[1]);
664    operands[2] = gen_lowpart (SImode, operands[2]);
665  }"
666  [(set_attr "conds" "clob")
667   (set_attr "length" "8")]
668)
669
670(define_expand "addsi3"
671  [(set (match_operand:SI          0 "s_register_operand" "")
672	(plus:SI (match_operand:SI 1 "s_register_operand" "")
673		 (match_operand:SI 2 "reg_or_int_operand" "")))]
674  "TARGET_EITHER"
675  "
676  if (TARGET_32BIT && CONST_INT_P (operands[2]))
677    {
678      arm_split_constant (PLUS, SImode, NULL_RTX,
679	                  INTVAL (operands[2]), operands[0], operands[1],
680			  optimize && can_create_pseudo_p ());
681      DONE;
682    }
683  "
684)
685
686; If there is a scratch available, this will be faster than synthesizing the
687; addition.
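; For example (illustrative), an addend such as 0xfffffeff is not a valid
; ARM immediate and neither is its negation, but its bitwise NOT (0x100)
; is, so this becomes "mvn rT, #256" followed by "add rD, rN, rT".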
688(define_peephole2
689  [(match_scratch:SI 3 "r")
690   (set (match_operand:SI          0 "arm_general_register_operand" "")
691	(plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
692		 (match_operand:SI 2 "const_int_operand"  "")))]
693  "TARGET_32BIT &&
694   !(const_ok_for_arm (INTVAL (operands[2]))
695     || const_ok_for_arm (-INTVAL (operands[2])))
696    && const_ok_for_arm (~INTVAL (operands[2]))"
697  [(set (match_dup 3) (match_dup 2))
698   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
699  ""
700)
701
702;; The r/r/k alternative is required when reloading the address
703;;  (plus (reg rN) (reg sp)) into (reg rN).  In this case reload will
704;; put the duplicated register first, and not try the commutative version.
705(define_insn_and_split "*arm_addsi3"
706  [(set (match_operand:SI          0 "s_register_operand" "=rk, r,k, r,r, k, r, k,k,r, k, r")
707	(plus:SI (match_operand:SI 1 "s_register_operand" "%0, rk,k, r,rk,k, rk,k,r,rk,k, rk")
708		 (match_operand:SI 2 "reg_or_int_operand" "rk, rI,rI,k,Pj,Pj,L, L,L,PJ,PJ,?n")))]
709  "TARGET_32BIT"
710  "@
711   add%?\\t%0, %0, %2
712   add%?\\t%0, %1, %2
713   add%?\\t%0, %1, %2
714   add%?\\t%0, %2, %1
715   addw%?\\t%0, %1, %2
716   addw%?\\t%0, %1, %2
717   sub%?\\t%0, %1, #%n2
718   sub%?\\t%0, %1, #%n2
719   sub%?\\t%0, %1, #%n2
720   subw%?\\t%0, %1, #%n2
721   subw%?\\t%0, %1, #%n2
722   #"
723  "TARGET_32BIT
724   && CONST_INT_P (operands[2])
725   && !const_ok_for_op (INTVAL (operands[2]), PLUS)
726   && (reload_completed || !arm_eliminable_register (operands[1]))"
727  [(clobber (const_int 0))]
728  "
729  arm_split_constant (PLUS, SImode, curr_insn,
730	              INTVAL (operands[2]), operands[0],
731		      operands[1], 0);
732  DONE;
733  "
734  [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,16")
735   (set_attr "predicable" "yes")
736   (set_attr "arch" "t2,*,*,*,t2,t2,*,*,a,t2,t2,*")
737   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
738		      (const_string "simple_alu_imm")
739		      (const_string "alu_reg")))
740 ]
741)
742
743(define_insn_and_split "*thumb1_addsi3"
744  [(set (match_operand:SI          0 "register_operand" "=l,l,l,*rk,*hk,l,k,l,l,l")
745	(plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,k,k,0,l,k")
746		 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,M,O,Pa,Pb,Pc")))]
747  "TARGET_THUMB1"
748  "*
749   static const char * const asms[] =
750   {
751     \"add\\t%0, %0, %2\",
752     \"sub\\t%0, %0, #%n2\",
753     \"add\\t%0, %1, %2\",
754     \"add\\t%0, %0, %2\",
755     \"add\\t%0, %0, %2\",
756     \"add\\t%0, %1, %2\",
757     \"add\\t%0, %1, %2\",
758     \"#\",
759     \"#\",
760     \"#\"
761   };
762   if ((which_alternative == 2 || which_alternative == 6)
763       && CONST_INT_P (operands[2])
764       && INTVAL (operands[2]) < 0)
765     return \"sub\\t%0, %1, #%n2\";
766   return asms[which_alternative];
767  "
768  "&& reload_completed && CONST_INT_P (operands[2])
769   && ((operands[1] != stack_pointer_rtx
770        && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255))
771       || (operands[1] == stack_pointer_rtx
772 	   && INTVAL (operands[2]) > 1020))"
773  [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
774   (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
775  {
776    HOST_WIDE_INT offset = INTVAL (operands[2]);
777    if (operands[1] == stack_pointer_rtx)
778      offset -= 1020;
779    else
780      {
781        if (offset > 255)
782	  offset = 255;
783	else if (offset < -255)
784	  offset = -255;
785      }
786    operands[3] = GEN_INT (offset);
787    operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
788  }
789  [(set_attr "length" "2,2,2,2,2,2,2,4,4,4")]
790)
791
792;; Reloading and elimination of the frame pointer can
793;; sometimes cause this optimization to be missed.
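;; For example (illustrative), "mov r0, #4" followed by "add r0, r0, sp"
;; is folded back into a single "add r0, sp, #4".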
794(define_peephole2
795  [(set (match_operand:SI 0 "arm_general_register_operand" "")
796	(match_operand:SI 1 "const_int_operand" ""))
797   (set (match_dup 0)
798	(plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
799  "TARGET_THUMB1
800   && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
801   && (INTVAL (operands[1]) & 3) == 0"
802  [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
803  ""
804)
805
806(define_insn "addsi3_compare0"
807  [(set (reg:CC_NOOV CC_REGNUM)
808	(compare:CC_NOOV
809	 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
810		  (match_operand:SI 2 "arm_add_operand"    "I,L,r"))
811	 (const_int 0)))
812   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
813	(plus:SI (match_dup 1) (match_dup 2)))]
814  "TARGET_ARM"
815  "@
816   add%.\\t%0, %1, %2
817   sub%.\\t%0, %1, #%n2
818   add%.\\t%0, %1, %2"
819  [(set_attr "conds" "set")
820   (set_attr "type" "simple_alu_imm, simple_alu_imm, *")]
821)
822
823(define_insn "*addsi3_compare0_scratch"
824  [(set (reg:CC_NOOV CC_REGNUM)
825	(compare:CC_NOOV
826	 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
827		  (match_operand:SI 1 "arm_add_operand"    "I,L, r"))
828	 (const_int 0)))]
829  "TARGET_ARM"
830  "@
831   cmn%?\\t%0, %1
832   cmp%?\\t%0, #%n1
833   cmn%?\\t%0, %1"
834  [(set_attr "conds" "set")
835   (set_attr "predicable" "yes")
836   (set_attr "type" "simple_alu_imm, simple_alu_imm, *")
837   ]
838)
839
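;; Only the Z flag is needed here (CC_Z): "cmn %1, %0" computes %1 + %0,
;; which is zero exactly when %1 == -%0.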
840(define_insn "*compare_negsi_si"
841  [(set (reg:CC_Z CC_REGNUM)
842	(compare:CC_Z
843	 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
844	 (match_operand:SI 1 "s_register_operand" "r")))]
845  "TARGET_32BIT"
846  "cmn%?\\t%1, %0"
847  [(set_attr "conds" "set")
848   (set_attr "predicable" "yes")]
849)
850
851;; This is the canonicalization of addsi3_compare0_for_combiner when the
852;; addend is a constant.
853(define_insn "*cmpsi2_addneg"
854  [(set (reg:CC CC_REGNUM)
855	(compare:CC
856	 (match_operand:SI 1 "s_register_operand" "r,r")
857	 (match_operand:SI 2 "arm_addimm_operand" "L,I")))
858   (set (match_operand:SI 0 "s_register_operand" "=r,r")
859	(plus:SI (match_dup 1)
860		 (match_operand:SI 3 "arm_addimm_operand" "I,L")))]
861  "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
862  "@
863   add%.\\t%0, %1, %3
864   sub%.\\t%0, %1, #%n3"
865  [(set_attr "conds" "set")]
866)
867
868;; Convert the sequence
869;;  sub  rd, rn, #1
870;;  cmn  rd, #1	(equivalent to cmp rd, #-1)
871;;  bne  dest
872;; into
873;;  subs rd, rn, #1
874;;  bcs  dest	((unsigned)rn >= 1)
875;; similarly for the beq variant using bcc.
876;; This is a common looping idiom (while (n--))
877(define_peephole2
878  [(set (match_operand:SI 0 "arm_general_register_operand" "")
879	(plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
880		 (const_int -1)))
881   (set (match_operand 2 "cc_register" "")
882	(compare (match_dup 0) (const_int -1)))
883   (set (pc)
884	(if_then_else (match_operator 3 "equality_operator"
885		       [(match_dup 2) (const_int 0)])
886		      (match_operand 4 "" "")
887		      (match_operand 5 "" "")))]
888  "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
889  [(parallel[
890    (set (match_dup 2)
891	 (compare:CC
892	  (match_dup 1) (const_int 1)))
893    (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
894   (set (pc)
895	(if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
896		      (match_dup 4)
897		      (match_dup 5)))]
898  "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
899   operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
900				  ? GEU : LTU),
901				 VOIDmode,
902				 operands[2], const0_rtx);"
903)
904
905;; The next four insns work because they compare the result with one of
906;; the operands, and we know that the use of the condition code is
907;; either GEU or LTU, so we can use the carry flag from the addition
908;; instead of doing the compare a second time.
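;; For example, after "adds r0, r1, r2" the carry flag is exactly what
;; "cmn r1, r2" would have produced, so a following GEU/LTU test can be
;; taken straight from the addition.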
909(define_insn "*addsi3_compare_op1"
910  [(set (reg:CC_C CC_REGNUM)
911	(compare:CC_C
912	 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
913		  (match_operand:SI 2 "arm_add_operand" "I,L,r"))
914	 (match_dup 1)))
915   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
916	(plus:SI (match_dup 1) (match_dup 2)))]
917  "TARGET_32BIT"
918  "@
919   add%.\\t%0, %1, %2
920   sub%.\\t%0, %1, #%n2
921   add%.\\t%0, %1, %2"
922  [(set_attr "conds" "set")
923   (set_attr "type"  "simple_alu_imm,simple_alu_imm,*")]
924)
925
926(define_insn "*addsi3_compare_op2"
927  [(set (reg:CC_C CC_REGNUM)
928	(compare:CC_C
929	 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
930		  (match_operand:SI 2 "arm_add_operand" "I,L,r"))
931	 (match_dup 2)))
932   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
933	(plus:SI (match_dup 1) (match_dup 2)))]
934  "TARGET_32BIT"
935  "@
936   add%.\\t%0, %1, %2
937   add%.\\t%0, %1, %2
938   sub%.\\t%0, %1, #%n2"
939  [(set_attr "conds" "set")
940   (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
941)
942
943(define_insn "*compare_addsi2_op0"
944  [(set (reg:CC_C CC_REGNUM)
945	(compare:CC_C
946	 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
947		  (match_operand:SI 1 "arm_add_operand" "I,L,r"))
948	 (match_dup 0)))]
949  "TARGET_32BIT"
950  "@
951   cmn%?\\t%0, %1
952   cmp%?\\t%0, #%n1
953   cmn%?\\t%0, %1"
954  [(set_attr "conds" "set")
955   (set_attr "predicable" "yes")
956   (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
957)
958
959(define_insn "*compare_addsi2_op1"
960  [(set (reg:CC_C CC_REGNUM)
961	(compare:CC_C
962	 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
963		  (match_operand:SI 1 "arm_add_operand" "I,L,r"))
964	 (match_dup 1)))]
965  "TARGET_32BIT"
966  "@
967   cmn%?\\t%0, %1
968   cmp%?\\t%0, #%n1
969   cmn%?\\t%0, %1"
970  [(set_attr "conds" "set")
971   (set_attr "predicable" "yes")
972   (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
973)
974
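;; Add-with-carry.  For the "K" alternative the constant is only valid in
;; complemented form, so the output is "sbc %0, %1, #%B2":
;; rn - ~c - (1 - C) computes the same value as rn + c + C.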
975(define_insn "*addsi3_carryin_<optab>"
976  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
977	(plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r,r")
978			  (match_operand:SI 2 "arm_not_operand" "rI,K"))
979		 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
980  "TARGET_32BIT"
981  "@
982   adc%?\\t%0, %1, %2
983   sbc%?\\t%0, %1, #%B2"
984  [(set_attr "conds" "use")]
985)
986
987(define_insn "*addsi3_carryin_alt2_<optab>"
988  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
989	(plus:SI (plus:SI (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))
990			  (match_operand:SI 1 "s_register_operand" "%r,r"))
991		 (match_operand:SI 2 "arm_rhs_operand" "rI,K")))]
992  "TARGET_32BIT"
993  "@
994   adc%?\\t%0, %1, %2
995   sbc%?\\t%0, %1, #%B2"
996  [(set_attr "conds" "use")]
997)
998
999(define_insn "*addsi3_carryin_shift_<optab>"
1000  [(set (match_operand:SI 0 "s_register_operand" "=r")
1001	(plus:SI (plus:SI
1002		  (match_operator:SI 2 "shift_operator"
1003		    [(match_operand:SI 3 "s_register_operand" "r")
1004		     (match_operand:SI 4 "reg_or_int_operand" "rM")])
1005		  (match_operand:SI 1 "s_register_operand" "r"))
1006		 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))]
1007  "TARGET_32BIT"
1008  "adc%?\\t%0, %1, %3%S2"
1009  [(set_attr "conds" "use")
1010   (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1011		      (const_string "alu_shift")
1012		      (const_string "alu_shift_reg")))]
1013)
1014
1015(define_insn "*addsi3_carryin_clobercc_<optab>"
1016  [(set (match_operand:SI 0 "s_register_operand" "=r")
1017	(plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1018			  (match_operand:SI 2 "arm_rhs_operand" "rI"))
1019		 (LTUGEU:SI (reg:<cnb> CC_REGNUM) (const_int 0))))
1020   (clobber (reg:CC CC_REGNUM))]
1021   "TARGET_32BIT"
1022   "adc%.\\t%0, %1, %2"
1023   [(set_attr "conds" "set")]
1024)
1025
1026(define_expand "incscc"
1027  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1028        (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1029                    [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1030                 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1031  "TARGET_32BIT"
1032  ""
1033)
1034
1035(define_insn "*arm_incscc"
1036  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1037        (plus:SI (match_operator:SI 2 "arm_comparison_operator"
1038                    [(match_operand:CC 3 "cc_register" "") (const_int 0)])
1039                 (match_operand:SI 1 "s_register_operand" "0,?r")))]
1040  "TARGET_ARM"
1041  "@
1042  add%d2\\t%0, %1, #1
1043  mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
1044  [(set_attr "conds" "use")
1045   (set_attr "length" "4,8")]
1046)
1047
1048; Transform ((x << y) - 1) to ~(~(x - 1) << y), where x is a constant.
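; (In two's complement ~(x - 1) == -x, so
;  ~(~(x - 1) << y) == ~(-(x << y)) == (x << y) - 1.)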
1049(define_split
1050  [(set (match_operand:SI 0 "s_register_operand" "")
1051	(plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1052			    (match_operand:SI 2 "s_register_operand" ""))
1053		 (const_int -1)))
1054   (clobber (match_operand:SI 3 "s_register_operand" ""))]
1055  "TARGET_32BIT"
1056  [(set (match_dup 3) (match_dup 1))
1057   (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1058  "
1059  operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1060")
1061
1062(define_expand "addsf3"
1063  [(set (match_operand:SF          0 "s_register_operand" "")
1064	(plus:SF (match_operand:SF 1 "s_register_operand" "")
1065		 (match_operand:SF 2 "s_register_operand" "")))]
1066  "TARGET_32BIT && TARGET_HARD_FLOAT"
1067  "
1068")
1069
1070(define_expand "adddf3"
1071  [(set (match_operand:DF          0 "s_register_operand" "")
1072	(plus:DF (match_operand:DF 1 "s_register_operand" "")
1073		 (match_operand:DF 2 "s_register_operand" "")))]
1074  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1075  "
1076")
1077
1078(define_expand "subdi3"
1079 [(parallel
1080   [(set (match_operand:DI            0 "s_register_operand" "")
1081	  (minus:DI (match_operand:DI 1 "s_register_operand" "")
1082	            (match_operand:DI 2 "s_register_operand" "")))
1083    (clobber (reg:CC CC_REGNUM))])]
1084  "TARGET_EITHER"
1085  "
1086  if (TARGET_THUMB1)
1087    {
1088      if (!REG_P (operands[1]))
1089        operands[1] = force_reg (DImode, operands[1]);
1090      if (!REG_P (operands[2]))
1091        operands[2] = force_reg (DImode, operands[2]);
1092     }
1093  "
1094)
1095
1096(define_insn "*arm_subdi3"
1097  [(set (match_operand:DI           0 "s_register_operand" "=&r,&r,&r")
1098	(minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
1099		  (match_operand:DI 2 "s_register_operand" "r,0,0")))
1100   (clobber (reg:CC CC_REGNUM))]
1101  "TARGET_32BIT && !TARGET_NEON"
1102  "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1103  [(set_attr "conds" "clob")
1104   (set_attr "length" "8")]
1105)
1106
1107(define_insn "*thumb_subdi3"
1108  [(set (match_operand:DI           0 "register_operand" "=l")
1109	(minus:DI (match_operand:DI 1 "register_operand"  "0")
1110		  (match_operand:DI 2 "register_operand"  "l")))
1111   (clobber (reg:CC CC_REGNUM))]
1112  "TARGET_THUMB1"
1113  "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1114  [(set_attr "length" "4")]
1115)
1116
1117(define_insn "*subdi_di_zesidi"
1118  [(set (match_operand:DI           0 "s_register_operand" "=&r,&r")
1119	(minus:DI (match_operand:DI 1 "s_register_operand"  "0,r")
1120		  (zero_extend:DI
1121		   (match_operand:SI 2 "s_register_operand"  "r,r"))))
1122   (clobber (reg:CC CC_REGNUM))]
1123  "TARGET_32BIT"
1124  "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1125  [(set_attr "conds" "clob")
1126   (set_attr "length" "8")]
1127)
1128
1129(define_insn "*subdi_di_sesidi"
1130  [(set (match_operand:DI            0 "s_register_operand" "=&r,&r")
1131	(minus:DI (match_operand:DI  1 "s_register_operand"  "0,r")
1132		  (sign_extend:DI
1133		   (match_operand:SI 2 "s_register_operand"  "r,r"))))
1134   (clobber (reg:CC CC_REGNUM))]
1135  "TARGET_32BIT"
1136  "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1137  [(set_attr "conds" "clob")
1138   (set_attr "length" "8")]
1139)
1140
1141(define_insn "*subdi_zesidi_di"
1142  [(set (match_operand:DI            0 "s_register_operand" "=&r,&r")
1143	(minus:DI (zero_extend:DI
1144		   (match_operand:SI 2 "s_register_operand"  "r,r"))
1145		  (match_operand:DI  1 "s_register_operand" "0,r")))
1146   (clobber (reg:CC CC_REGNUM))]
1147  "TARGET_ARM"
1148  "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1149  [(set_attr "conds" "clob")
1150   (set_attr "length" "8")]
1151)
1152
1153(define_insn "*subdi_sesidi_di"
1154  [(set (match_operand:DI            0 "s_register_operand" "=&r,&r")
1155	(minus:DI (sign_extend:DI
1156		   (match_operand:SI 2 "s_register_operand"   "r,r"))
1157		  (match_operand:DI  1 "s_register_operand"  "0,r")))
1158   (clobber (reg:CC CC_REGNUM))]
1159  "TARGET_ARM"
1160  "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1161  [(set_attr "conds" "clob")
1162   (set_attr "length" "8")]
1163)
1164
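;; Both operands are zero-extended, so the high word of the result is just
;; the borrow from the low-word subtraction: "sbc %R0, %1, %1" computes
;; %1 - %1 - !C, i.e. 0 when there was no borrow and -1 when there was.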
1165(define_insn "*subdi_zesidi_zesidi"
1166  [(set (match_operand:DI            0 "s_register_operand" "=r")
1167	(minus:DI (zero_extend:DI
1168		   (match_operand:SI 1 "s_register_operand"  "r"))
1169		  (zero_extend:DI
1170		   (match_operand:SI 2 "s_register_operand"  "r"))))
1171   (clobber (reg:CC CC_REGNUM))]
1172  "TARGET_32BIT"
1173  "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1174  [(set_attr "conds" "clob")
1175   (set_attr "length" "8")]
1176)
1177
1178(define_expand "subsi3"
1179  [(set (match_operand:SI           0 "s_register_operand" "")
1180	(minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1181		  (match_operand:SI 2 "s_register_operand" "")))]
1182  "TARGET_EITHER"
1183  "
1184  if (CONST_INT_P (operands[1]))
1185    {
1186      if (TARGET_32BIT)
1187        {
1188          arm_split_constant (MINUS, SImode, NULL_RTX,
1189	                      INTVAL (operands[1]), operands[0],
1190	  		      operands[2], optimize && can_create_pseudo_p ());
1191          DONE;
1192	}
1193      else /* TARGET_THUMB1 */
1194        operands[1] = force_reg (SImode, operands[1]);
1195    }
1196  "
1197)
1198
1199(define_insn "thumb1_subsi3_insn"
1200  [(set (match_operand:SI           0 "register_operand" "=l")
1201	(minus:SI (match_operand:SI 1 "register_operand" "l")
1202		  (match_operand:SI 2 "reg_or_int_operand" "lPd")))]
1203  "TARGET_THUMB1"
1204  "sub\\t%0, %1, %2"
1205  [(set_attr "length" "2")
1206   (set_attr "conds" "set")])
1207
1208; ??? Check Thumb-2 split length
1209(define_insn_and_split "*arm_subsi3_insn"
1210  [(set (match_operand:SI           0 "s_register_operand" "=r,r,r,rk,r")
1211	(minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,r,r,k,?n")
1212		  (match_operand:SI 2 "reg_or_int_operand" "r,I,r,r, r")))]
1213  "TARGET_32BIT"
1214  "@
1215   rsb%?\\t%0, %2, %1
1216   sub%?\\t%0, %1, %2
1217   sub%?\\t%0, %1, %2
1218   sub%?\\t%0, %1, %2
1219   #"
1220  "&& (CONST_INT_P (operands[1])
1221       && !const_ok_for_arm (INTVAL (operands[1])))"
1222  [(clobber (const_int 0))]
1223  "
1224  arm_split_constant (MINUS, SImode, curr_insn,
1225                      INTVAL (operands[1]), operands[0], operands[2], 0);
1226  DONE;
1227  "
1228  [(set_attr "length" "4,4,4,4,16")
1229   (set_attr "predicable" "yes")
1230   (set_attr "type"  "*,simple_alu_imm,*,*,*")]
1231)
1232
1233(define_peephole2
1234  [(match_scratch:SI 3 "r")
1235   (set (match_operand:SI 0 "arm_general_register_operand" "")
1236	(minus:SI (match_operand:SI 1 "const_int_operand" "")
1237		  (match_operand:SI 2 "arm_general_register_operand" "")))]
1238  "TARGET_32BIT
1239   && !const_ok_for_arm (INTVAL (operands[1]))
1240   && const_ok_for_arm (~INTVAL (operands[1]))"
1241  [(set (match_dup 3) (match_dup 1))
1242   (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1243  ""
1244)
1245
1246(define_insn "*subsi3_compare0"
1247  [(set (reg:CC_NOOV CC_REGNUM)
1248	(compare:CC_NOOV
1249	 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1250		   (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1251	 (const_int 0)))
1252   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1253	(minus:SI (match_dup 1) (match_dup 2)))]
1254  "TARGET_32BIT"
1255  "@
1256   sub%.\\t%0, %1, %2
1257   sub%.\\t%0, %1, %2
1258   rsb%.\\t%0, %2, %1"
1259  [(set_attr "conds" "set")
1260   (set_attr "type"  "simple_alu_imm,*,*")]
1261)
1262
1263(define_insn "*subsi3_compare"
1264  [(set (reg:CC CC_REGNUM)
1265	(compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1266		    (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1267   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1268	(minus:SI (match_dup 1) (match_dup 2)))]
1269  "TARGET_32BIT"
1270  "@
1271   sub%.\\t%0, %1, %2
1272   sub%.\\t%0, %1, %2
1273   rsb%.\\t%0, %2, %1"
1274  [(set_attr "conds" "set")
1275   (set_attr "type" "simple_alu_imm,*,*")]
1276)
1277
1278(define_expand "decscc"
1279  [(set (match_operand:SI            0 "s_register_operand" "=r,r")
1280        (minus:SI (match_operand:SI  1 "s_register_operand" "0,?r")
1281		  (match_operator:SI 2 "arm_comparison_operator"
1282                   [(match_operand   3 "cc_register" "") (const_int 0)])))]
1283  "TARGET_32BIT"
1284  ""
1285)
1286
1287(define_insn "*arm_decscc"
1288  [(set (match_operand:SI            0 "s_register_operand" "=r,r")
1289        (minus:SI (match_operand:SI  1 "s_register_operand" "0,?r")
1290		  (match_operator:SI 2 "arm_comparison_operator"
1291                   [(match_operand   3 "cc_register" "") (const_int 0)])))]
1292  "TARGET_ARM"
1293  "@
1294   sub%d2\\t%0, %1, #1
1295   mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1296  [(set_attr "conds" "use")
1297   (set_attr "length" "*,8")
1298   (set_attr "type" "simple_alu_imm,*")]
1299)
1300
1301(define_expand "subsf3"
1302  [(set (match_operand:SF           0 "s_register_operand" "")
1303	(minus:SF (match_operand:SF 1 "s_register_operand" "")
1304		  (match_operand:SF 2 "s_register_operand" "")))]
1305  "TARGET_32BIT && TARGET_HARD_FLOAT"
1306  "
1307")
1308
1309(define_expand "subdf3"
1310  [(set (match_operand:DF           0 "s_register_operand" "")
1311	(minus:DF (match_operand:DF 1 "s_register_operand" "")
1312		  (match_operand:DF 2 "s_register_operand" "")))]
1313  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1314  "
1315")
1316
1317
1318;; Multiplication insns
1319
1320(define_expand "mulsi3"
1321  [(set (match_operand:SI          0 "s_register_operand" "")
1322	(mult:SI (match_operand:SI 2 "s_register_operand" "")
1323		 (match_operand:SI 1 "s_register_operand" "")))]
1324  "TARGET_EITHER"
1325  ""
1326)
1327
1328;; Use `&' and then `0' to prevent operands 0 and 1 from being the same
1329(define_insn "*arm_mulsi3"
1330  [(set (match_operand:SI          0 "s_register_operand" "=&r,&r")
1331	(mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1332		 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1333  "TARGET_32BIT && !arm_arch6"
1334  "mul%?\\t%0, %2, %1"
1335  [(set_attr "insn" "mul")
1336   (set_attr "predicable" "yes")]
1337)
1338
1339(define_insn "*arm_mulsi3_v6"
1340  [(set (match_operand:SI          0 "s_register_operand" "=r")
1341	(mult:SI (match_operand:SI 1 "s_register_operand" "r")
1342		 (match_operand:SI 2 "s_register_operand" "r")))]
1343  "TARGET_32BIT && arm_arch6"
1344  "mul%?\\t%0, %1, %2"
1345  [(set_attr "insn" "mul")
1346   (set_attr "predicable" "yes")]
1347)
1348
1349; Unfortunately, with the Thumb the '&'/'0' trick can fail when operands
1350; 1 and 2 are the same, because reload will make operand 0 match
1351; operand 1 without realizing that this conflicts with operand 2.  We fix
1352; this by adding another alternative to match this case, and then `reload'
1353; it ourselves.  This alternative must come first.
1354(define_insn "*thumb_mulsi3"
1355  [(set (match_operand:SI          0 "register_operand" "=&l,&l,&l")
1356	(mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1357		 (match_operand:SI 2 "register_operand" "l,l,l")))]
1358  "TARGET_THUMB1 && !arm_arch6"
1359  "*
1360  if (which_alternative < 2)
1361    return \"mov\\t%0, %1\;mul\\t%0, %2\";
1362  else
1363    return \"mul\\t%0, %2\";
1364  "
1365  [(set_attr "length" "4,4,2")
1366   (set_attr "insn" "mul")]
1367)
1368
1369(define_insn "*thumb_mulsi3_v6"
1370  [(set (match_operand:SI          0 "register_operand" "=l,l,l")
1371	(mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1372		 (match_operand:SI 2 "register_operand" "l,0,0")))]
1373  "TARGET_THUMB1 && arm_arch6"
1374  "@
1375   mul\\t%0, %2
1376   mul\\t%0, %1
1377   mul\\t%0, %1"
1378  [(set_attr "length" "2")
1379   (set_attr "insn" "mul")]
1380)
1381
1382(define_insn "*mulsi3_compare0"
1383  [(set (reg:CC_NOOV CC_REGNUM)
1384	(compare:CC_NOOV (mult:SI
1385			  (match_operand:SI 2 "s_register_operand" "r,r")
1386			  (match_operand:SI 1 "s_register_operand" "%0,r"))
1387			 (const_int 0)))
1388   (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1389	(mult:SI (match_dup 2) (match_dup 1)))]
1390  "TARGET_ARM && !arm_arch6"
1391  "mul%.\\t%0, %2, %1"
1392  [(set_attr "conds" "set")
1393   (set_attr "insn" "muls")]
1394)
1395
1396(define_insn "*mulsi3_compare0_v6"
1397  [(set (reg:CC_NOOV CC_REGNUM)
1398	(compare:CC_NOOV (mult:SI
1399			  (match_operand:SI 2 "s_register_operand" "r")
1400			  (match_operand:SI 1 "s_register_operand" "r"))
1401			 (const_int 0)))
1402   (set (match_operand:SI 0 "s_register_operand" "=r")
1403	(mult:SI (match_dup 2) (match_dup 1)))]
1404  "TARGET_ARM && arm_arch6 && optimize_size"
1405  "mul%.\\t%0, %2, %1"
1406  [(set_attr "conds" "set")
1407   (set_attr "insn" "muls")]
1408)
1409
1410(define_insn "*mulsi_compare0_scratch"
1411  [(set (reg:CC_NOOV CC_REGNUM)
1412	(compare:CC_NOOV (mult:SI
1413			  (match_operand:SI 2 "s_register_operand" "r,r")
1414			  (match_operand:SI 1 "s_register_operand" "%0,r"))
1415			 (const_int 0)))
1416   (clobber (match_scratch:SI 0 "=&r,&r"))]
1417  "TARGET_ARM && !arm_arch6"
1418  "mul%.\\t%0, %2, %1"
1419  [(set_attr "conds" "set")
1420   (set_attr "insn" "muls")]
1421)
1422
1423(define_insn "*mulsi_compare0_scratch_v6"
1424  [(set (reg:CC_NOOV CC_REGNUM)
1425	(compare:CC_NOOV (mult:SI
1426			  (match_operand:SI 2 "s_register_operand" "r")
1427			  (match_operand:SI 1 "s_register_operand" "r"))
1428			 (const_int 0)))
1429   (clobber (match_scratch:SI 0 "=r"))]
1430  "TARGET_ARM && arm_arch6 && optimize_size"
1431  "mul%.\\t%0, %2, %1"
1432  [(set_attr "conds" "set")
1433   (set_attr "insn" "muls")]
1434)
1435
1436;; Unnamed templates to match MLA instruction.
1437
1438(define_insn "*mulsi3addsi"
1439  [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1440	(plus:SI
1441	  (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1442		   (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1443	  (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1444  "TARGET_32BIT && !arm_arch6"
1445  "mla%?\\t%0, %2, %1, %3"
1446  [(set_attr "insn" "mla")
1447   (set_attr "predicable" "yes")]
1448)
1449
1450(define_insn "*mulsi3addsi_v6"
1451  [(set (match_operand:SI 0 "s_register_operand" "=r")
1452	(plus:SI
1453	  (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1454		   (match_operand:SI 1 "s_register_operand" "r"))
1455	  (match_operand:SI 3 "s_register_operand" "r")))]
1456  "TARGET_32BIT && arm_arch6"
1457  "mla%?\\t%0, %2, %1, %3"
1458  [(set_attr "insn" "mla")
1459   (set_attr "predicable" "yes")]
1460)
1461
1462(define_insn "*mulsi3addsi_compare0"
1463  [(set (reg:CC_NOOV CC_REGNUM)
1464	(compare:CC_NOOV
1465	 (plus:SI (mult:SI
1466		   (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1467		   (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1468		  (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1469	 (const_int 0)))
1470   (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1471	(plus:SI (mult:SI (match_dup 2) (match_dup 1))
1472		 (match_dup 3)))]
1473  "TARGET_ARM && arm_arch6"
1474  "mla%.\\t%0, %2, %1, %3"
1475  [(set_attr "conds" "set")
1476   (set_attr "insn" "mlas")]
1477)
1478
1479(define_insn "*mulsi3addsi_compare0_v6"
1480  [(set (reg:CC_NOOV CC_REGNUM)
1481	(compare:CC_NOOV
1482	 (plus:SI (mult:SI
1483		   (match_operand:SI 2 "s_register_operand" "r")
1484		   (match_operand:SI 1 "s_register_operand" "r"))
1485		  (match_operand:SI 3 "s_register_operand" "r"))
1486	 (const_int 0)))
1487   (set (match_operand:SI 0 "s_register_operand" "=r")
1488	(plus:SI (mult:SI (match_dup 2) (match_dup 1))
1489		 (match_dup 3)))]
1490  "TARGET_ARM && arm_arch6 && optimize_size"
1491  "mla%.\\t%0, %2, %1, %3"
1492  [(set_attr "conds" "set")
1493   (set_attr "insn" "mlas")]
1494)
1495
1496(define_insn "*mulsi3addsi_compare0_scratch"
1497  [(set (reg:CC_NOOV CC_REGNUM)
1498	(compare:CC_NOOV
1499	 (plus:SI (mult:SI
1500		   (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1501		   (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1502		  (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1503	 (const_int 0)))
1504   (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1505  "TARGET_ARM && !arm_arch6"
1506  "mla%.\\t%0, %2, %1, %3"
1507  [(set_attr "conds" "set")
1508   (set_attr "insn" "mlas")]
1509)
1510
1511(define_insn "*mulsi3addsi_compare0_scratch_v6"
1512  [(set (reg:CC_NOOV CC_REGNUM)
1513	(compare:CC_NOOV
1514	 (plus:SI (mult:SI
1515		   (match_operand:SI 2 "s_register_operand" "r")
1516		   (match_operand:SI 1 "s_register_operand" "r"))
1517		  (match_operand:SI 3 "s_register_operand" "r"))
1518	 (const_int 0)))
1519   (clobber (match_scratch:SI 0 "=r"))]
1520  "TARGET_ARM && arm_arch6 && optimize_size"
1521  "mla%.\\t%0, %2, %1, %3"
1522  [(set_attr "conds" "set")
1523   (set_attr "insn" "mlas")]
1524)
1525
1526(define_insn "*mulsi3subsi"
1527  [(set (match_operand:SI 0 "s_register_operand" "=r")
1528	(minus:SI
1529	  (match_operand:SI 3 "s_register_operand" "r")
1530	  (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1531		   (match_operand:SI 1 "s_register_operand" "r"))))]
1532  "TARGET_32BIT && arm_arch_thumb2"
1533  "mls%?\\t%0, %2, %1, %3"
1534  [(set_attr "insn" "mla")
1535   (set_attr "predicable" "yes")]
1536)
1537
1538(define_expand "maddsidi4"
1539  [(set (match_operand:DI 0 "s_register_operand" "")
1540	(plus:DI
1541	 (mult:DI
1542	  (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1543	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1544	 (match_operand:DI 3 "s_register_operand" "")))]
1545  "TARGET_32BIT && arm_arch3m"
1546  "")
1547
1548(define_insn "*mulsidi3adddi"
1549  [(set (match_operand:DI 0 "s_register_operand" "=&r")
1550	(plus:DI
1551	 (mult:DI
1552	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1553	  (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1554	 (match_operand:DI 1 "s_register_operand" "0")))]
1555  "TARGET_32BIT && arm_arch3m && !arm_arch6"
1556  "smlal%?\\t%Q0, %R0, %3, %2"
1557  [(set_attr "insn" "smlal")
1558   (set_attr "predicable" "yes")]
1559)
1560
1561(define_insn "*mulsidi3adddi_v6"
1562  [(set (match_operand:DI 0 "s_register_operand" "=r")
1563	(plus:DI
1564	 (mult:DI
1565	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1566	  (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1567	 (match_operand:DI 1 "s_register_operand" "0")))]
1568  "TARGET_32BIT && arm_arch6"
1569  "smlal%?\\t%Q0, %R0, %3, %2"
1570  [(set_attr "insn" "smlal")
1571   (set_attr "predicable" "yes")]
1572)
1573
1574;; 32x32->64 widening multiply.
1575;; As with mulsi3, the only difference between the v3-5 and v6+
1576;; versions of these patterns is the requirement that the output not
1577;; overlap the inputs, but that still means we have to have a named
1578;; expander and two different starred insns.
1579
1580(define_expand "mulsidi3"
1581  [(set (match_operand:DI 0 "s_register_operand" "")
1582	(mult:DI
1583	 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1584	 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1585  "TARGET_32BIT && arm_arch3m"
1586  ""
1587)
1588
1589(define_insn "*mulsidi3_nov6"
1590  [(set (match_operand:DI 0 "s_register_operand" "=&r")
1591	(mult:DI
1592	 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1593	 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1594  "TARGET_32BIT && arm_arch3m && !arm_arch6"
1595  "smull%?\\t%Q0, %R0, %1, %2"
1596  [(set_attr "insn" "smull")
1597   (set_attr "predicable" "yes")]
1598)
1599
1600(define_insn "*mulsidi3_v6"
1601  [(set (match_operand:DI 0 "s_register_operand" "=r")
1602	(mult:DI
1603	 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1604	 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1605  "TARGET_32BIT && arm_arch6"
1606  "smull%?\\t%Q0, %R0, %1, %2"
1607  [(set_attr "insn" "smull")
1608   (set_attr "predicable" "yes")]
1609)
1610
1611(define_expand "umulsidi3"
1612  [(set (match_operand:DI 0 "s_register_operand" "")
1613	(mult:DI
1614	 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1615	 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1616  "TARGET_32BIT && arm_arch3m"
1617  ""
1618)
1619
1620(define_insn "*umulsidi3_nov6"
1621  [(set (match_operand:DI 0 "s_register_operand" "=&r")
1622	(mult:DI
1623	 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1624	 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1625  "TARGET_32BIT && arm_arch3m && !arm_arch6"
1626  "umull%?\\t%Q0, %R0, %1, %2"
1627  [(set_attr "insn" "umull")
1628   (set_attr "predicable" "yes")]
1629)
1630
1631(define_insn "*umulsidi3_v6"
1632  [(set (match_operand:DI 0 "s_register_operand" "=r")
1633	(mult:DI
1634	 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1635	 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1636  "TARGET_32BIT && arm_arch6"
1637  "umull%?\\t%Q0, %R0, %1, %2"
1638  [(set_attr "insn" "umull")
1639   (set_attr "predicable" "yes")]
1640)
1641
1642(define_expand "umaddsidi4"
1643  [(set (match_operand:DI 0 "s_register_operand" "")
1644	(plus:DI
1645	 (mult:DI
1646	  (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1647	  (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1648	 (match_operand:DI 3 "s_register_operand" "")))]
1649  "TARGET_32BIT && arm_arch3m"
1650  "")
1651
1652(define_insn "*umulsidi3adddi"
1653  [(set (match_operand:DI 0 "s_register_operand" "=&r")
1654	(plus:DI
1655	 (mult:DI
1656	  (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1657	  (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1658	 (match_operand:DI 1 "s_register_operand" "0")))]
1659  "TARGET_32BIT && arm_arch3m && !arm_arch6"
1660  "umlal%?\\t%Q0, %R0, %3, %2"
1661  [(set_attr "insn" "umlal")
1662   (set_attr "predicable" "yes")]
1663)
1664
1665(define_insn "*umulsidi3adddi_v6"
1666  [(set (match_operand:DI 0 "s_register_operand" "=r")
1667	(plus:DI
1668	 (mult:DI
1669	  (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1670	  (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1671	 (match_operand:DI 1 "s_register_operand" "0")))]
1672  "TARGET_32BIT && arm_arch6"
1673  "umlal%?\\t%Q0, %R0, %3, %2"
1674  [(set_attr "insn" "umlal")
1675   (set_attr "predicable" "yes")]
1676)
1677
1678(define_expand "smulsi3_highpart"
1679  [(parallel
1680    [(set (match_operand:SI 0 "s_register_operand" "")
1681	  (truncate:SI
1682	   (lshiftrt:DI
1683	    (mult:DI
1684	     (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1685	     (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1686	    (const_int 32))))
1687     (clobber (match_scratch:SI 3 ""))])]
1688  "TARGET_32BIT && arm_arch3m"
1689  ""
1690)
1691
1692(define_insn "*smulsi3_highpart_nov6"
1693  [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1694	(truncate:SI
1695	 (lshiftrt:DI
1696	  (mult:DI
1697	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1698	   (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1699	  (const_int 32))))
1700   (clobber (match_scratch:SI 3 "=&r,&r"))]
1701  "TARGET_32BIT && arm_arch3m && !arm_arch6"
1702  "smull%?\\t%3, %0, %2, %1"
1703  [(set_attr "insn" "smull")
1704   (set_attr "predicable" "yes")]
1705)
1706
1707(define_insn "*smulsi3_highpart_v6"
1708  [(set (match_operand:SI 0 "s_register_operand" "=r")
1709	(truncate:SI
1710	 (lshiftrt:DI
1711	  (mult:DI
1712	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1713	   (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1714	  (const_int 32))))
1715   (clobber (match_scratch:SI 3 "=r"))]
1716  "TARGET_32BIT && arm_arch6"
1717  "smull%?\\t%3, %0, %2, %1"
1718  [(set_attr "insn" "smull")
1719   (set_attr "predicable" "yes")]
1720)
1721
1722(define_expand "umulsi3_highpart"
1723  [(parallel
1724    [(set (match_operand:SI 0 "s_register_operand" "")
1725	  (truncate:SI
1726	   (lshiftrt:DI
1727	    (mult:DI
1728	     (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1729	      (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1730	    (const_int 32))))
1731     (clobber (match_scratch:SI 3 ""))])]
1732  "TARGET_32BIT && arm_arch3m"
1733  ""
1734)
1735
1736(define_insn "*umulsi3_highpart_nov6"
1737  [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1738	(truncate:SI
1739	 (lshiftrt:DI
1740	  (mult:DI
1741	   (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1742	   (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1743	  (const_int 32))))
1744   (clobber (match_scratch:SI 3 "=&r,&r"))]
1745  "TARGET_32BIT && arm_arch3m && !arm_arch6"
1746  "umull%?\\t%3, %0, %2, %1"
1747  [(set_attr "insn" "umull")
1748   (set_attr "predicable" "yes")]
1749)
1750
1751(define_insn "*umulsi3_highpart_v6"
1752  [(set (match_operand:SI 0 "s_register_operand" "=r")
1753	(truncate:SI
1754	 (lshiftrt:DI
1755	  (mult:DI
1756	   (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1757	   (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1758	  (const_int 32))))
1759   (clobber (match_scratch:SI 3 "=r"))]
1760  "TARGET_32BIT && arm_arch6"
1761  "umull%?\\t%3, %0, %2, %1"
1762  [(set_attr "insn" "umull")
1763   (set_attr "predicable" "yes")]
1764)
1765
1766(define_insn "mulhisi3"
1767  [(set (match_operand:SI 0 "s_register_operand" "=r")
1768	(mult:SI (sign_extend:SI
1769		  (match_operand:HI 1 "s_register_operand" "%r"))
1770		 (sign_extend:SI
1771		  (match_operand:HI 2 "s_register_operand" "r"))))]
1772  "TARGET_DSP_MULTIPLY"
1773  "smulbb%?\\t%0, %1, %2"
1774  [(set_attr "insn" "smulxy")
1775   (set_attr "predicable" "yes")]
1776)
1777
1778(define_insn "*mulhisi3tb"
1779  [(set (match_operand:SI 0 "s_register_operand" "=r")
1780	(mult:SI (ashiftrt:SI
1781		  (match_operand:SI 1 "s_register_operand" "r")
1782		  (const_int 16))
1783		 (sign_extend:SI
1784		  (match_operand:HI 2 "s_register_operand" "r"))))]
1785  "TARGET_DSP_MULTIPLY"
1786  "smultb%?\\t%0, %1, %2"
1787  [(set_attr "insn" "smulxy")
1788   (set_attr "predicable" "yes")]
1789)
1790
1791(define_insn "*mulhisi3bt"
1792  [(set (match_operand:SI 0 "s_register_operand" "=r")
1793	(mult:SI (sign_extend:SI
1794		  (match_operand:HI 1 "s_register_operand" "r"))
1795		 (ashiftrt:SI
1796		  (match_operand:SI 2 "s_register_operand" "r")
1797		  (const_int 16))))]
1798  "TARGET_DSP_MULTIPLY"
1799  "smulbt%?\\t%0, %1, %2"
1800  [(set_attr "insn" "smulxy")
1801   (set_attr "predicable" "yes")]
1802)
1803
1804(define_insn "*mulhisi3tt"
1805  [(set (match_operand:SI 0 "s_register_operand" "=r")
1806	(mult:SI (ashiftrt:SI
1807		  (match_operand:SI 1 "s_register_operand" "r")
1808		  (const_int 16))
1809		 (ashiftrt:SI
1810		  (match_operand:SI 2 "s_register_operand" "r")
1811		  (const_int 16))))]
1812  "TARGET_DSP_MULTIPLY"
1813  "smultt%?\\t%0, %1, %2"
1814  [(set_attr "insn" "smulxy")
1815   (set_attr "predicable" "yes")]
1816)
1817
1818(define_insn "maddhisi4"
1819  [(set (match_operand:SI 0 "s_register_operand" "=r")
1820	(plus:SI (mult:SI (sign_extend:SI
1821			   (match_operand:HI 1 "s_register_operand" "r"))
1822			  (sign_extend:SI
1823			   (match_operand:HI 2 "s_register_operand" "r")))
1824		 (match_operand:SI 3 "s_register_operand" "r")))]
1825  "TARGET_DSP_MULTIPLY"
1826  "smlabb%?\\t%0, %1, %2, %3"
1827  [(set_attr "insn" "smlaxy")
1828   (set_attr "predicable" "yes")]
1829)
1830
1831;; Note: there is no maddhisi4bt pattern because this one is the canonical form.
1832(define_insn "*maddhisi4tb"
1833  [(set (match_operand:SI 0 "s_register_operand" "=r")
1834	(plus:SI (mult:SI (ashiftrt:SI
1835			   (match_operand:SI 1 "s_register_operand" "r")
1836			   (const_int 16))
1837			  (sign_extend:SI
1838			   (match_operand:HI 2 "s_register_operand" "r")))
1839		 (match_operand:SI 3 "s_register_operand" "r")))]
1840  "TARGET_DSP_MULTIPLY"
1841  "smlatb%?\\t%0, %1, %2, %3"
1842  [(set_attr "insn" "smlaxy")
1843   (set_attr "predicable" "yes")]
1844)
1845
1846(define_insn "*maddhisi4tt"
1847  [(set (match_operand:SI 0 "s_register_operand" "=r")
1848	(plus:SI (mult:SI (ashiftrt:SI
1849			   (match_operand:SI 1 "s_register_operand" "r")
1850			   (const_int 16))
1851			  (ashiftrt:SI
1852			   (match_operand:SI 2 "s_register_operand" "r")
1853			   (const_int 16)))
1854		 (match_operand:SI 3 "s_register_operand" "r")))]
1855  "TARGET_DSP_MULTIPLY"
1856  "smlatt%?\\t%0, %1, %2, %3"
1857  [(set_attr "insn" "smlaxy")
1858   (set_attr "predicable" "yes")]
1859)
1860
1861(define_insn "maddhidi4"
1862  [(set (match_operand:DI 0 "s_register_operand" "=r")
1863	(plus:DI
1864	  (mult:DI (sign_extend:DI
1865	 	    (match_operand:HI 1 "s_register_operand" "r"))
1866		   (sign_extend:DI
1867		    (match_operand:HI 2 "s_register_operand" "r")))
1868	  (match_operand:DI 3 "s_register_operand" "0")))]
1869  "TARGET_DSP_MULTIPLY"
1870  "smlalbb%?\\t%Q0, %R0, %1, %2"
1871  [(set_attr "insn" "smlalxy")
1872   (set_attr "predicable" "yes")])
1873
1874;; Note: there is no maddhidi4bt pattern because this one is the canonical form.
1875(define_insn "*maddhidi4tb"
1876  [(set (match_operand:DI 0 "s_register_operand" "=r")
1877	(plus:DI
1878	  (mult:DI (sign_extend:DI
1879		    (ashiftrt:SI
1880		     (match_operand:SI 1 "s_register_operand" "r")
1881		     (const_int 16)))
1882		   (sign_extend:DI
1883		    (match_operand:HI 2 "s_register_operand" "r")))
1884	  (match_operand:DI 3 "s_register_operand" "0")))]
1885  "TARGET_DSP_MULTIPLY"
1886  "smlaltb%?\\t%Q0, %R0, %1, %2"
1887  [(set_attr "insn" "smlalxy")
1888   (set_attr "predicable" "yes")])
1889
1890(define_insn "*maddhidi4tt"
1891  [(set (match_operand:DI 0 "s_register_operand" "=r")
1892	(plus:DI
1893	  (mult:DI (sign_extend:DI
1894		    (ashiftrt:SI
1895		     (match_operand:SI 1 "s_register_operand" "r")
1896		     (const_int 16)))
1897		   (sign_extend:DI
1898		    (ashiftrt:SI
1899		     (match_operand:SI 2 "s_register_operand" "r")
1900		     (const_int 16))))
1901	  (match_operand:DI 3 "s_register_operand" "0")))]
1902  "TARGET_DSP_MULTIPLY"
1903  "smlaltt%?\\t%Q0, %R0, %1, %2"
1904  [(set_attr "insn" "smlalxy")
1905   (set_attr "predicable" "yes")])
1906
1907(define_expand "mulsf3"
1908  [(set (match_operand:SF          0 "s_register_operand" "")
1909	(mult:SF (match_operand:SF 1 "s_register_operand" "")
1910		 (match_operand:SF 2 "s_register_operand" "")))]
1911  "TARGET_32BIT && TARGET_HARD_FLOAT"
1912  "
1913")
1914
1915(define_expand "muldf3"
1916  [(set (match_operand:DF          0 "s_register_operand" "")
1917	(mult:DF (match_operand:DF 1 "s_register_operand" "")
1918		 (match_operand:DF 2 "s_register_operand" "")))]
1919  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1920  "
1921")
1922
1923;; Division insns
1924
1925(define_expand "divsf3"
1926  [(set (match_operand:SF 0 "s_register_operand" "")
1927	(div:SF (match_operand:SF 1 "s_register_operand" "")
1928		(match_operand:SF 2 "s_register_operand" "")))]
1929  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
1930  "")
1931
1932(define_expand "divdf3"
1933  [(set (match_operand:DF 0 "s_register_operand" "")
1934	(div:DF (match_operand:DF 1 "s_register_operand" "")
1935		(match_operand:DF 2 "s_register_operand" "")))]
1936  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
1937  "")
1938
1939;; Boolean and, ior, xor insns
1940
1941;; Split up double word logical operations
1942
1943;; Split up simple DImode logical operations.  Simply perform the logical
1944;; operation on the upper and lower halves of the registers.
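;; For example (register assignment chosen only for illustration), ANDing the
;; register pair r0:r1 with r2:r3 in place splits after reload into
;;   and   r0, r0, r2
;;   and   r1, r1, r3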
1945(define_split
1946  [(set (match_operand:DI 0 "s_register_operand" "")
1947	(match_operator:DI 6 "logical_binary_operator"
1948	  [(match_operand:DI 1 "s_register_operand" "")
1949	   (match_operand:DI 2 "s_register_operand" "")]))]
1950  "TARGET_32BIT && reload_completed
1951   && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
1952   && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1953  [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1954   (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1955  "
1956  {
1957    operands[3] = gen_highpart (SImode, operands[0]);
1958    operands[0] = gen_lowpart (SImode, operands[0]);
1959    operands[4] = gen_highpart (SImode, operands[1]);
1960    operands[1] = gen_lowpart (SImode, operands[1]);
1961    operands[5] = gen_highpart (SImode, operands[2]);
1962    operands[2] = gen_lowpart (SImode, operands[2]);
1963  }"
1964)
1965
1966(define_split
1967  [(set (match_operand:DI 0 "s_register_operand" "")
1968	(match_operator:DI 6 "logical_binary_operator"
1969	  [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1970	   (match_operand:DI 1 "s_register_operand" "")]))]
1971  "TARGET_32BIT && reload_completed"
1972  [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1973   (set (match_dup 3) (match_op_dup:SI 6
1974			[(ashiftrt:SI (match_dup 2) (const_int 31))
1975			 (match_dup 4)]))]
1976  "
1977  {
1978    operands[3] = gen_highpart (SImode, operands[0]);
1979    operands[0] = gen_lowpart (SImode, operands[0]);
1980    operands[4] = gen_highpart (SImode, operands[1]);
1981    operands[1] = gen_lowpart (SImode, operands[1]);
1982    operands[5] = gen_highpart (SImode, operands[2]);
1983    operands[2] = gen_lowpart (SImode, operands[2]);
1984  }"
1985)
1986
1987;; The zero extend of operand 2 means we can just copy the high part of
1988;; operand1 into operand0.
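;; (The high word of the zero_extend is known to be zero, and x IOR 0 == x.)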
1989(define_split
1990  [(set (match_operand:DI 0 "s_register_operand" "")
1991	(ior:DI
1992	  (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1993	  (match_operand:DI 1 "s_register_operand" "")))]
1994  "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1995  [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1996   (set (match_dup 3) (match_dup 4))]
1997  "
1998  {
1999    operands[4] = gen_highpart (SImode, operands[1]);
2000    operands[3] = gen_highpart (SImode, operands[0]);
2001    operands[0] = gen_lowpart (SImode, operands[0]);
2002    operands[1] = gen_lowpart (SImode, operands[1]);
2003  }"
2004)
2005
2006;; The zero extend of operand 2 means we can just copy the high part of
2007;; operand1 into operand0.
2008(define_split
2009  [(set (match_operand:DI 0 "s_register_operand" "")
2010	(xor:DI
2011	  (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
2012	  (match_operand:DI 1 "s_register_operand" "")))]
2013  "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
2014  [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
2015   (set (match_dup 3) (match_dup 4))]
2016  "
2017  {
2018    operands[4] = gen_highpart (SImode, operands[1]);
2019    operands[3] = gen_highpart (SImode, operands[0]);
2020    operands[0] = gen_lowpart (SImode, operands[0]);
2021    operands[1] = gen_lowpart (SImode, operands[1]);
2022  }"
2023)
2024
2025(define_expand "anddi3"
2026  [(set (match_operand:DI         0 "s_register_operand" "")
2027	(and:DI (match_operand:DI 1 "s_register_operand" "")
2028		(match_operand:DI 2 "neon_inv_logic_op2" "")))]
2029  "TARGET_32BIT"
2030  ""
2031)
2032
2033(define_insn "*anddi3_insn"
2034  [(set (match_operand:DI         0 "s_register_operand" "=&r,&r")
2035	(and:DI (match_operand:DI 1 "s_register_operand"  "%0,r")
2036		(match_operand:DI 2 "s_register_operand"   "r,r")))]
2037  "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2038  "#"
2039  [(set_attr "length" "8")]
2040)
2041
2042(define_insn_and_split "*anddi_zesidi_di"
2043  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2044	(and:DI (zero_extend:DI
2045		 (match_operand:SI 2 "s_register_operand" "r,r"))
2046		(match_operand:DI 1 "s_register_operand" "0,r")))]
2047  "TARGET_32BIT"
2048  "#"
2049  "TARGET_32BIT && reload_completed"
2050  ; The zero extend of operand 2 clears the high word of the output
2051  ; operand.
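  ; (x AND 0 == 0, so the high word of the result is simply zero.)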
2052  [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
2053   (set (match_dup 3) (const_int 0))]
2054  "
2055  {
2056    operands[3] = gen_highpart (SImode, operands[0]);
2057    operands[0] = gen_lowpart (SImode, operands[0]);
2058    operands[1] = gen_lowpart (SImode, operands[1]);
2059  }"
2060  [(set_attr "length" "8")]
2061)
2062
2063(define_insn "*anddi_sesdi_di"
2064  [(set (match_operand:DI          0 "s_register_operand" "=&r,&r")
2065	(and:DI (sign_extend:DI
2066		 (match_operand:SI 2 "s_register_operand" "r,r"))
2067		(match_operand:DI  1 "s_register_operand" "0,r")))]
2068  "TARGET_32BIT"
2069  "#"
2070  [(set_attr "length" "8")]
2071)
2072
2073(define_expand "andsi3"
2074  [(set (match_operand:SI         0 "s_register_operand" "")
2075	(and:SI (match_operand:SI 1 "s_register_operand" "")
2076		(match_operand:SI 2 "reg_or_int_operand" "")))]
2077  "TARGET_EITHER"
2078  "
2079  if (TARGET_32BIT)
2080    {
2081      if (CONST_INT_P (operands[2]))
2082        {
2083	  if (INTVAL (operands[2]) == 255 && arm_arch6)
2084	    {
2085	      operands[1] = convert_to_mode (QImode, operands[1], 1);
2086	      emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2087							 operands[1]));
2088	    }
2089	  else
2090	    arm_split_constant (AND, SImode, NULL_RTX,
2091				INTVAL (operands[2]), operands[0],
2092				operands[1],
2093				optimize && can_create_pseudo_p ());
2094
2095          DONE;
2096        }
2097    }
2098  else /* TARGET_THUMB1 */
2099    {
2100      if (!CONST_INT_P (operands[2]))
2101        {
2102          rtx tmp = force_reg (SImode, operands[2]);
2103	  if (rtx_equal_p (operands[0], operands[1]))
2104	    operands[2] = tmp;
2105	  else
2106	    {
2107              operands[2] = operands[1];
2108              operands[1] = tmp;
2109	    }
2110        }
2111      else
2112        {
2113          int i;
2114
2115          if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2116  	    {
2117	      operands[2] = force_reg (SImode,
2118				       GEN_INT (~INTVAL (operands[2])));
2119
2120	      emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2121
2122	      DONE;
2123	    }
2124
2125          for (i = 9; i <= 31; i++)
2126	    {
2127	      if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
2128	        {
2129	          emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2130			 	        const0_rtx));
2131	          DONE;
2132	        }
2133	      else if ((((HOST_WIDE_INT) 1) << i) - 1
2134		       == ~INTVAL (operands[2]))
2135	        {
2136	          rtx shift = GEN_INT (i);
2137	          rtx reg = gen_reg_rtx (SImode);
2138
2139	          emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2140	          emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2141
2142	          DONE;
2143	        }
2144	    }
2145
2146          operands[2] = force_reg (SImode, operands[2]);
2147        }
2148    }
2149  "
2150)
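; A hedged illustration of the Thumb-1 mask handling above (the constant and
; shift counts are examples only): "x & 0x7ffff" matches the (1 << i) - 1 test
; with i = 19 and is expected to become a shift pair, roughly
;   lsls  r0, r1, #13
;   lsrs  r0, r0, #13
; rather than materializing the constant in a register.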
2151
2152; ??? Check split length for Thumb-2
2153(define_insn_and_split "*arm_andsi3_insn"
2154  [(set (match_operand:SI         0 "s_register_operand" "=r,r,r,r")
2155	(and:SI (match_operand:SI 1 "s_register_operand" "r,r,r,r")
2156		(match_operand:SI 2 "reg_or_int_operand" "I,K,r,?n")))]
2157  "TARGET_32BIT"
2158  "@
2159   and%?\\t%0, %1, %2
2160   bic%?\\t%0, %1, #%B2
2161   and%?\\t%0, %1, %2
2162   #"
2163  "TARGET_32BIT
2164   && CONST_INT_P (operands[2])
2165   && !(const_ok_for_arm (INTVAL (operands[2]))
2166	|| const_ok_for_arm (~INTVAL (operands[2])))"
2167  [(clobber (const_int 0))]
2168  "
2169  arm_split_constant  (AND, SImode, curr_insn,
2170	               INTVAL (operands[2]), operands[0], operands[1], 0);
2171  DONE;
2172  "
2173  [(set_attr "length" "4,4,4,16")
2174   (set_attr "predicable" "yes")
2175   (set_attr "type" 	"simple_alu_imm,simple_alu_imm,*,simple_alu_imm")]
2176)
2177
2178(define_insn "*thumb1_andsi3_insn"
2179  [(set (match_operand:SI         0 "register_operand" "=l")
2180	(and:SI (match_operand:SI 1 "register_operand" "%0")
2181		(match_operand:SI 2 "register_operand" "l")))]
2182  "TARGET_THUMB1"
2183  "and\\t%0, %2"
2184  [(set_attr "length" "2")
2185   (set_attr "type"  "simple_alu_imm")
2186   (set_attr "conds" "set")])
2187
2188(define_insn "*andsi3_compare0"
2189  [(set (reg:CC_NOOV CC_REGNUM)
2190	(compare:CC_NOOV
2191	 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2192		 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2193	 (const_int 0)))
2194   (set (match_operand:SI          0 "s_register_operand" "=r,r,r")
2195	(and:SI (match_dup 1) (match_dup 2)))]
2196  "TARGET_32BIT"
2197  "@
2198   and%.\\t%0, %1, %2
2199   bic%.\\t%0, %1, #%B2
2200   and%.\\t%0, %1, %2"
2201  [(set_attr "conds" "set")
2202   (set_attr "type" "simple_alu_imm,simple_alu_imm,*")]
2203)
2204
2205(define_insn "*andsi3_compare0_scratch"
2206  [(set (reg:CC_NOOV CC_REGNUM)
2207	(compare:CC_NOOV
2208	 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2209		 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2210	 (const_int 0)))
2211   (clobber (match_scratch:SI 2 "=X,r,X"))]
2212  "TARGET_32BIT"
2213  "@
2214   tst%?\\t%0, %1
2215   bic%.\\t%2, %0, #%B1
2216   tst%?\\t%0, %1"
2217  [(set_attr "conds" "set")
2218   (set_attr "type"  "simple_alu_imm,simple_alu_imm,*")]
2219)
2220
2221(define_insn "*zeroextractsi_compare0_scratch"
2222  [(set (reg:CC_NOOV CC_REGNUM)
2223	(compare:CC_NOOV (zero_extract:SI
2224			  (match_operand:SI 0 "s_register_operand" "r")
2225		 	  (match_operand 1 "const_int_operand" "n")
2226			  (match_operand 2 "const_int_operand" "n"))
2227			 (const_int 0)))]
2228  "TARGET_32BIT
2229  && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2230      && INTVAL (operands[1]) > 0
2231      && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2232      && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2233  "*
2234  operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2235			 << INTVAL (operands[2]));
2236  output_asm_insn (\"tst%?\\t%0, %1\", operands);
2237  return \"\";
2238  "
2239  [(set_attr "conds" "set")
2240   (set_attr "predicable" "yes")
2241   (set_attr "type" "simple_alu_imm")]
2242)
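; For instance (values chosen only for illustration), testing a 3-bit field
; starting at bit 4 folds the width/position pair into the mask 7 << 4 and
; emits
;   tst   r0, #0x70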
2243
2244(define_insn_and_split "*ne_zeroextractsi"
2245  [(set (match_operand:SI 0 "s_register_operand" "=r")
2246	(ne:SI (zero_extract:SI
2247		(match_operand:SI 1 "s_register_operand" "r")
2248		(match_operand:SI 2 "const_int_operand" "n")
2249		(match_operand:SI 3 "const_int_operand" "n"))
2250	       (const_int 0)))
2251   (clobber (reg:CC CC_REGNUM))]
2252  "TARGET_32BIT
2253   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2254       && INTVAL (operands[2]) > 0
2255       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2256       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2257  "#"
2258  "TARGET_32BIT
2259   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2260       && INTVAL (operands[2]) > 0
2261       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2262       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2263  [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2264		   (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2265				    (const_int 0)))
2266	      (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2267   (set (match_dup 0)
2268	(if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2269			 (match_dup 0) (const_int 1)))]
2270  "
2271  operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2272			 << INTVAL (operands[3]));
2273  "
2274  [(set_attr "conds" "clob")
2275   (set (attr "length")
2276	(if_then_else (eq_attr "is_thumb" "yes")
2277		      (const_int 12)
2278		      (const_int 8)))]
2279)
2280
2281(define_insn_and_split "*ne_zeroextractsi_shifted"
2282  [(set (match_operand:SI 0 "s_register_operand" "=r")
2283	(ne:SI (zero_extract:SI
2284		(match_operand:SI 1 "s_register_operand" "r")
2285		(match_operand:SI 2 "const_int_operand" "n")
2286		(const_int 0))
2287	       (const_int 0)))
2288   (clobber (reg:CC CC_REGNUM))]
2289  "TARGET_ARM"
2290  "#"
2291  "TARGET_ARM"
2292  [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2293		   (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2294				    (const_int 0)))
2295	      (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2296   (set (match_dup 0)
2297	(if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2298			 (match_dup 0) (const_int 1)))]
2299  "
2300  operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2301  "
2302  [(set_attr "conds" "clob")
2303   (set_attr "length" "8")]
2304)
2305
2306(define_insn_and_split "*ite_ne_zeroextractsi"
2307  [(set (match_operand:SI 0 "s_register_operand" "=r")
2308	(if_then_else:SI (ne (zero_extract:SI
2309			      (match_operand:SI 1 "s_register_operand" "r")
2310			      (match_operand:SI 2 "const_int_operand" "n")
2311			      (match_operand:SI 3 "const_int_operand" "n"))
2312			     (const_int 0))
2313			 (match_operand:SI 4 "arm_not_operand" "rIK")
2314			 (const_int 0)))
2315   (clobber (reg:CC CC_REGNUM))]
2316  "TARGET_ARM
2317   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2318       && INTVAL (operands[2]) > 0
2319       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2320       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2321   && !reg_overlap_mentioned_p (operands[0], operands[4])"
2322  "#"
2323  "TARGET_ARM
2324   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2325       && INTVAL (operands[2]) > 0
2326       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2327       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2328   && !reg_overlap_mentioned_p (operands[0], operands[4])"
2329  [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2330		   (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2331				    (const_int 0)))
2332	      (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2333   (set (match_dup 0)
2334	(if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2335			 (match_dup 0) (match_dup 4)))]
2336  "
2337  operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2338			 << INTVAL (operands[3]));
2339  "
2340  [(set_attr "conds" "clob")
2341   (set_attr "length" "8")]
2342)
2343
2344(define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2345  [(set (match_operand:SI 0 "s_register_operand" "=r")
2346	(if_then_else:SI (ne (zero_extract:SI
2347			      (match_operand:SI 1 "s_register_operand" "r")
2348			      (match_operand:SI 2 "const_int_operand" "n")
2349			      (const_int 0))
2350			     (const_int 0))
2351			 (match_operand:SI 3 "arm_not_operand" "rIK")
2352			 (const_int 0)))
2353   (clobber (reg:CC CC_REGNUM))]
2354  "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2355  "#"
2356  "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2357  [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2358		   (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2359				    (const_int 0)))
2360	      (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2361   (set (match_dup 0)
2362	(if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2363			 (match_dup 0) (match_dup 3)))]
2364  "
2365  operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2366  "
2367  [(set_attr "conds" "clob")
2368   (set_attr "length" "8")]
2369)
2370
2371(define_split
2372  [(set (match_operand:SI 0 "s_register_operand" "")
2373	(zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2374			 (match_operand:SI 2 "const_int_operand" "")
2375			 (match_operand:SI 3 "const_int_operand" "")))
2376   (clobber (match_operand:SI 4 "s_register_operand" ""))]
2377  "TARGET_THUMB1"
2378  [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2379   (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2380  "{
2381     HOST_WIDE_INT temp = INTVAL (operands[2]);
2382
2383     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2384     operands[3] = GEN_INT (32 - temp);
2385   }"
2386)
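;; Worked example of the shift arithmetic above (purely illustrative):
;; extracting an 8-bit field that starts at bit 4 uses
;;   lsl #(32 - 8 - 4) = #20   followed by   lsr #(32 - 8) = #24.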
2387
2388;; ??? Use Thumb-2's bitfield insert/extract instructions.
2389(define_split
2390  [(set (match_operand:SI 0 "s_register_operand" "")
2391	(match_operator:SI 1 "shiftable_operator"
2392	 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2393			   (match_operand:SI 3 "const_int_operand" "")
2394			   (match_operand:SI 4 "const_int_operand" ""))
2395	  (match_operand:SI 5 "s_register_operand" "")]))
2396   (clobber (match_operand:SI 6 "s_register_operand" ""))]
2397  "TARGET_ARM"
2398  [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2399   (set (match_dup 0)
2400	(match_op_dup 1
2401	 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2402	  (match_dup 5)]))]
2403  "{
2404     HOST_WIDE_INT temp = INTVAL (operands[3]);
2405
2406     operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2407     operands[4] = GEN_INT (32 - temp);
2408   }"
2409)
2410
2411(define_split
2412  [(set (match_operand:SI 0 "s_register_operand" "")
2413	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2414			 (match_operand:SI 2 "const_int_operand" "")
2415			 (match_operand:SI 3 "const_int_operand" "")))]
2416  "TARGET_THUMB1"
2417  [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2418   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2419  "{
2420     HOST_WIDE_INT temp = INTVAL (operands[2]);
2421
2422     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2423     operands[3] = GEN_INT (32 - temp);
2424   }"
2425)
2426
2427(define_split
2428  [(set (match_operand:SI 0 "s_register_operand" "")
2429	(match_operator:SI 1 "shiftable_operator"
2430	 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2431			   (match_operand:SI 3 "const_int_operand" "")
2432			   (match_operand:SI 4 "const_int_operand" ""))
2433	  (match_operand:SI 5 "s_register_operand" "")]))
2434   (clobber (match_operand:SI 6 "s_register_operand" ""))]
2435  "TARGET_ARM"
2436  [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2437   (set (match_dup 0)
2438	(match_op_dup 1
2439	 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2440	  (match_dup 5)]))]
2441  "{
2442     HOST_WIDE_INT temp = INTVAL (operands[3]);
2443
2444     operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2445     operands[4] = GEN_INT (32 - temp);
2446   }"
2447)
2448
2449;;; ??? This pattern is bogus.  If operand3 has bits outside the range
2450;;; represented by the bitfield, then this will produce incorrect results.
2451;;; Somewhere, the value needs to be truncated.  On targets like the m68k,
2452;;; which have a real bit-field insert instruction, the truncation happens
2453;;; in the bit-field insert instruction itself.  Since arm does not have a
2454;;; bit-field insert instruction, we would have to emit code here to truncate
2455;;; the value before we insert.  This loses some of the advantage of having
2456;;; this insv pattern, so this pattern needs to be reevaluated.
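;;; As a concrete illustration of the hazard (hypothetical values): inserting
;;; the value 9 into a 3-bit field can only represent 9 & 7 == 1, so the value
;;; must be reduced modulo the field width somewhere before the insertion.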
2457
2458(define_expand "insv"
2459  [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
2460                      (match_operand 1 "general_operand" "")
2461                      (match_operand 2 "general_operand" ""))
2462        (match_operand 3 "reg_or_int_operand" ""))]
2463  "TARGET_ARM || arm_arch_thumb2"
2464  "
2465  {
2466    int start_bit = INTVAL (operands[2]);
2467    int width = INTVAL (operands[1]);
2468    HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2469    rtx target, subtarget;
2470
2471    if (arm_arch_thumb2)
2472      {
2473        if (unaligned_access && MEM_P (operands[0])
2474	    && s_register_operand (operands[3], GET_MODE (operands[3]))
2475	    && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
2476	  {
2477	    rtx base_addr;
2478
2479	    if (BYTES_BIG_ENDIAN)
2480	      start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
2481			  - start_bit;
2482
2483	    if (width == 32)
2484	      {
2485	        base_addr = adjust_address (operands[0], SImode,
2486					    start_bit / BITS_PER_UNIT);
2487		emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
2488	      }
2489	    else
2490	      {
2491	        rtx tmp = gen_reg_rtx (HImode);
2492
2493	        base_addr = adjust_address (operands[0], HImode,
2494					    start_bit / BITS_PER_UNIT);
2495		emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
2496		emit_insn (gen_unaligned_storehi (base_addr, tmp));
2497	      }
2498	    DONE;
2499	  }
2500	else if (s_register_operand (operands[0], GET_MODE (operands[0])))
2501	  {
2502	    bool use_bfi = TRUE;
2503
2504	    if (CONST_INT_P (operands[3]))
2505	      {
2506		HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2507
2508		if (val == 0)
2509		  {
2510		    emit_insn (gen_insv_zero (operands[0], operands[1],
2511					      operands[2]));
2512		    DONE;
2513		  }
2514
2515		/* See if the set can be done with a single orr instruction.  */
2516		if (val == mask && const_ok_for_arm (val << start_bit))
2517		  use_bfi = FALSE;
2518	      }
2519
2520	    if (use_bfi)
2521	      {
2522		if (!REG_P (operands[3]))
2523		  operands[3] = force_reg (SImode, operands[3]);
2524
2525		emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2526					operands[3]));
2527		DONE;
2528	      }
2529	  }
2530	else
2531	  FAIL;
2532      }
2533
2534    if (!s_register_operand (operands[0], GET_MODE (operands[0])))
2535      FAIL;
2536
2537    target = copy_rtx (operands[0]);
2538    /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2539       subreg as the final target.  */
2540    if (GET_CODE (target) == SUBREG)
2541      {
2542	subtarget = gen_reg_rtx (SImode);
2543	if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2544	    < GET_MODE_SIZE (SImode))
2545	  target = SUBREG_REG (target);
2546      }
2547    else
2548      subtarget = target;
2549
2550    if (CONST_INT_P (operands[3]))
2551      {
2552	/* Since we are inserting a known constant, we may be able to
2553	   reduce the number of bits that we have to clear so that
2554	   the mask becomes simple.  */
2555	/* ??? This code does not check to see if the new mask is actually
2556	   simpler.  It may not be.  */
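	/* Illustrative instance (values hypothetical): inserting the
	   constant 7 into a 3-bit field leaves mask2 == 0, so no bits of
	   the field need clearing; the AND below is then expected to
	   degenerate to a plain copy and only the IOR does real work.  */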
2557	rtx op1 = gen_reg_rtx (SImode);
2558	/* ??? Truncate operand3 to fit in the bitfield.  See comment before
2559	   start of this pattern.  */
2560	HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2561	HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2562
2563	emit_insn (gen_andsi3 (op1, operands[0],
2564			       gen_int_mode (~mask2, SImode)));
2565	emit_insn (gen_iorsi3 (subtarget, op1,
2566			       gen_int_mode (op3_value << start_bit, SImode)));
2567      }
2568    else if (start_bit == 0
2569	     && !(const_ok_for_arm (mask)
2570		  || const_ok_for_arm (~mask)))
2571      {
2572	/* A trick: since we are setting the bottom bits in the word,
2573	   we can shift operand[3] up, operand[0] down, OR them together
2574	   and rotate the result back again.  This takes 3 insns, and
2575	   the third might be mergeable into another op.  */
2576	/* The shift up copes with the possibility that operand[3] is
2577           wider than the bitfield.  */
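	/* Walk-through with a hypothetical 8-bit field at bit 0: the new
	   value is shifted into the top byte, the old word is shifted right
	   by 8 to drop the old field, the IOR merges the two, and rotating
	   left by 8 puts every bit back in its original position.  */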
2578	rtx op0 = gen_reg_rtx (SImode);
2579	rtx op1 = gen_reg_rtx (SImode);
2580
2581	emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2582	emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2583	emit_insn (gen_iorsi3  (op1, op1, op0));
2584	emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2585      }
2586    else if ((width + start_bit == 32)
2587	     && !(const_ok_for_arm (mask)
2588		  || const_ok_for_arm (~mask)))
2589      {
2590	/* Similar trick, but slightly less efficient.  */
2591
2592	rtx op0 = gen_reg_rtx (SImode);
2593	rtx op1 = gen_reg_rtx (SImode);
2594
2595	emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2596	emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2597	emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2598	emit_insn (gen_iorsi3 (subtarget, op1, op0));
2599      }
2600    else
2601      {
2602	rtx op0 = gen_int_mode (mask, SImode);
2603	rtx op1 = gen_reg_rtx (SImode);
2604	rtx op2 = gen_reg_rtx (SImode);
2605
2606	if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2607	  {
2608	    rtx tmp = gen_reg_rtx (SImode);
2609
2610	    emit_insn (gen_movsi (tmp, op0));
2611	    op0 = tmp;
2612	  }
2613
2614	/* Mask out any bits in operand[3] that are not needed.  */
2615	emit_insn (gen_andsi3 (op1, operands[3], op0));
2616
2617	if (CONST_INT_P (op0)
2618	    && (const_ok_for_arm (mask << start_bit)
2619		|| const_ok_for_arm (~(mask << start_bit))))
2620	  {
2621	    op0 = gen_int_mode (~(mask << start_bit), SImode);
2622	    emit_insn (gen_andsi3 (op2, operands[0], op0));
2623	  }
2624	else
2625	  {
2626	    if (CONST_INT_P (op0))
2627	      {
2628		rtx tmp = gen_reg_rtx (SImode);
2629
2630		emit_insn (gen_movsi (tmp, op0));
2631		op0 = tmp;
2632	      }
2633
2634	    if (start_bit != 0)
2635	      emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2636
2637	    emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2638	  }
2639
2640	if (start_bit != 0)
2641          emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2642
2643	emit_insn (gen_iorsi3 (subtarget, op1, op2));
2644      }
2645
2646    if (subtarget != target)
2647      {
2648	/* If TARGET is still a SUBREG, then it must be wider than a word,
2649	   so we must be careful only to set the subword we were asked to.  */
2650	if (GET_CODE (target) == SUBREG)
2651	  emit_move_insn (target, subtarget);
2652	else
2653	  emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2654      }
2655
2656    DONE;
2657  }"
2658)
2659
2660(define_insn "insv_zero"
2661  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2662                         (match_operand:SI 1 "const_int_operand" "M")
2663                         (match_operand:SI 2 "const_int_operand" "M"))
2664        (const_int 0))]
2665  "arm_arch_thumb2"
2666  "bfc%?\t%0, %2, %1"
2667  [(set_attr "length" "4")
2668   (set_attr "predicable" "yes")]
2669)
2670
2671(define_insn "insv_t2"
2672  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2673                         (match_operand:SI 1 "const_int_operand" "M")
2674                         (match_operand:SI 2 "const_int_operand" "M"))
2675        (match_operand:SI 3 "s_register_operand" "r"))]
2676  "arm_arch_thumb2"
2677  "bfi%?\t%0, %3, %2, %1"
2678  [(set_attr "length" "4")
2679   (set_attr "predicable" "yes")]
2680)
2681
2682; Constants for operand 2 will never be given to these patterns.
2683(define_insn_and_split "*anddi_notdi_di"
2684  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2685	(and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2686		(match_operand:DI 2 "s_register_operand" "r,0")))]
2687  "TARGET_32BIT"
2688  "#"
2689  "TARGET_32BIT && reload_completed
2690   && ! (TARGET_NEON && IS_VFP_REGNUM (REGNO (operands[0])))
2691   && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2692  [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2693   (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2694  "
2695  {
2696    operands[3] = gen_highpart (SImode, operands[0]);
2697    operands[0] = gen_lowpart (SImode, operands[0]);
2698    operands[4] = gen_highpart (SImode, operands[1]);
2699    operands[1] = gen_lowpart (SImode, operands[1]);
2700    operands[5] = gen_highpart (SImode, operands[2]);
2701    operands[2] = gen_lowpart (SImode, operands[2]);
2702  }"
2703  [(set_attr "length" "8")
2704   (set_attr "predicable" "yes")]
2705)
2706
2707(define_insn_and_split "*anddi_notzesidi_di"
2708  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2709	(and:DI (not:DI (zero_extend:DI
2710			 (match_operand:SI 2 "s_register_operand" "r,r")))
2711		(match_operand:DI 1 "s_register_operand" "0,?r")))]
2712  "TARGET_32BIT"
2713  "@
2714   bic%?\\t%Q0, %Q1, %2
2715   #"
2716  ; (not (zero_extend ...)) allows us to just copy the high word from
2717  ; operand1 to operand0.
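  ; (The high word of (not (zero_extend x)) is all ones, and y AND -1 == y.)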
2718  "TARGET_32BIT
2719   && reload_completed
2720   && operands[0] != operands[1]"
2721  [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2722   (set (match_dup 3) (match_dup 4))]
2723  "
2724  {
2725    operands[3] = gen_highpart (SImode, operands[0]);
2726    operands[0] = gen_lowpart (SImode, operands[0]);
2727    operands[4] = gen_highpart (SImode, operands[1]);
2728    operands[1] = gen_lowpart (SImode, operands[1]);
2729  }"
2730  [(set_attr "length" "4,8")
2731   (set_attr "predicable" "yes")]
2732)
2733
2734(define_insn_and_split "*anddi_notsesidi_di"
2735  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2736	(and:DI (not:DI (sign_extend:DI
2737			 (match_operand:SI 2 "s_register_operand" "r,r")))
2738		(match_operand:DI 1 "s_register_operand" "0,r")))]
2739  "TARGET_32BIT"
2740  "#"
2741  "TARGET_32BIT && reload_completed"
2742  [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2743   (set (match_dup 3) (and:SI (not:SI
2744				(ashiftrt:SI (match_dup 2) (const_int 31)))
2745			       (match_dup 4)))]
2746  "
2747  {
2748    operands[3] = gen_highpart (SImode, operands[0]);
2749    operands[0] = gen_lowpart (SImode, operands[0]);
2750    operands[4] = gen_highpart (SImode, operands[1]);
2751    operands[1] = gen_lowpart (SImode, operands[1]);
2752  }"
2753  [(set_attr "length" "8")
2754   (set_attr "predicable" "yes")]
2755)
2756
2757(define_insn "andsi_notsi_si"
2758  [(set (match_operand:SI 0 "s_register_operand" "=r")
2759	(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2760		(match_operand:SI 1 "s_register_operand" "r")))]
2761  "TARGET_32BIT"
2762  "bic%?\\t%0, %1, %2"
2763  [(set_attr "predicable" "yes")]
2764)
2765
2766(define_insn "thumb1_bicsi3"
2767  [(set (match_operand:SI                 0 "register_operand" "=l")
2768	(and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2769		(match_operand:SI         2 "register_operand" "0")))]
2770  "TARGET_THUMB1"
2771  "bic\\t%0, %1"
2772  [(set_attr "length" "2")
2773   (set_attr "conds" "set")])
2774
2775(define_insn "andsi_not_shiftsi_si"
2776  [(set (match_operand:SI 0 "s_register_operand" "=r")
2777	(and:SI (not:SI (match_operator:SI 4 "shift_operator"
2778			 [(match_operand:SI 2 "s_register_operand" "r")
2779			  (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2780		(match_operand:SI 1 "s_register_operand" "r")))]
2781  "TARGET_ARM"
2782  "bic%?\\t%0, %1, %2%S4"
2783  [(set_attr "predicable" "yes")
2784   (set_attr "shift" "2")
2785   (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2786		      (const_string "alu_shift")
2787		      (const_string "alu_shift_reg")))]
2788)
2789
2790(define_insn "*andsi_notsi_si_compare0"
2791  [(set (reg:CC_NOOV CC_REGNUM)
2792	(compare:CC_NOOV
2793	 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2794		 (match_operand:SI 1 "s_register_operand" "r"))
2795	 (const_int 0)))
2796   (set (match_operand:SI 0 "s_register_operand" "=r")
2797	(and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2798  "TARGET_32BIT"
2799  "bic%.\\t%0, %1, %2"
2800  [(set_attr "conds" "set")]
2801)
2802
2803(define_insn "*andsi_notsi_si_compare0_scratch"
2804  [(set (reg:CC_NOOV CC_REGNUM)
2805	(compare:CC_NOOV
2806	 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2807		 (match_operand:SI 1 "s_register_operand" "r"))
2808	 (const_int 0)))
2809   (clobber (match_scratch:SI 0 "=r"))]
2810  "TARGET_32BIT"
2811  "bic%.\\t%0, %1, %2"
2812  [(set_attr "conds" "set")]
2813)
2814
2815(define_expand "iordi3"
2816  [(set (match_operand:DI         0 "s_register_operand" "")
2817	(ior:DI (match_operand:DI 1 "s_register_operand" "")
2818		(match_operand:DI 2 "neon_logic_op2" "")))]
2819  "TARGET_32BIT"
2820  ""
2821)
2822
2823(define_insn "*iordi3_insn"
2824  [(set (match_operand:DI         0 "s_register_operand" "=&r,&r")
2825	(ior:DI (match_operand:DI 1 "s_register_operand"  "%0,r")
2826		(match_operand:DI 2 "s_register_operand"   "r,r")))]
2827  "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2828  "#"
2829  [(set_attr "length" "8")
2830   (set_attr "predicable" "yes")]
2831)
2832
2833(define_insn "*iordi_zesidi_di"
2834  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2835	(ior:DI (zero_extend:DI
2836		 (match_operand:SI 2 "s_register_operand" "r,r"))
2837		(match_operand:DI 1 "s_register_operand" "0,?r")))]
2838  "TARGET_32BIT"
2839  "@
2840   orr%?\\t%Q0, %Q1, %2
2841   #"
2842  [(set_attr "length" "4,8")
2843   (set_attr "predicable" "yes")]
2844)
2845
2846(define_insn "*iordi_sesidi_di"
2847  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2848	(ior:DI (sign_extend:DI
2849		 (match_operand:SI 2 "s_register_operand" "r,r"))
2850		(match_operand:DI 1 "s_register_operand" "0,r")))]
2851  "TARGET_32BIT"
2852  "#"
2853  [(set_attr "length" "8")
2854   (set_attr "predicable" "yes")]
2855)
2856
2857(define_expand "iorsi3"
2858  [(set (match_operand:SI         0 "s_register_operand" "")
2859	(ior:SI (match_operand:SI 1 "s_register_operand" "")
2860		(match_operand:SI 2 "reg_or_int_operand" "")))]
2861  "TARGET_EITHER"
2862  "
2863  if (CONST_INT_P (operands[2]))
2864    {
2865      if (TARGET_32BIT)
2866        {
2867          arm_split_constant (IOR, SImode, NULL_RTX,
2868	                      INTVAL (operands[2]), operands[0], operands[1],
2869			      optimize && can_create_pseudo_p ());
2870          DONE;
2871	}
2872      else /* TARGET_THUMB1 */
2873        {
2874          rtx tmp = force_reg (SImode, operands[2]);
2875	  if (rtx_equal_p (operands[0], operands[1]))
2876	    operands[2] = tmp;
2877	  else
2878	    {
2879              operands[2] = operands[1];
2880              operands[1] = tmp;
2881	    }
2882        }
2883    }
2884  "
2885)
2886
2887(define_insn_and_split "*iorsi3_insn"
2888  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
2889	(ior:SI (match_operand:SI 1 "s_register_operand" "%r,r,r,r")
2890		(match_operand:SI 2 "reg_or_int_operand" "I,K,r,?n")))]
2891  "TARGET_32BIT"
2892  "@
2893   orr%?\\t%0, %1, %2
2894   orn%?\\t%0, %1, #%B2
2895   orr%?\\t%0, %1, %2
2896   #"
2897  "TARGET_32BIT
2898   && CONST_INT_P (operands[2])
2899   && !(const_ok_for_arm (INTVAL (operands[2]))
2900        || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
2901  [(clobber (const_int 0))]
2902{
2903  arm_split_constant (IOR, SImode, curr_insn,
2904                      INTVAL (operands[2]), operands[0], operands[1], 0);
2905  DONE;
2906}
2907  [(set_attr "length" "4,4,4,16")
2908   (set_attr "arch" "32,t2,32,32")
2909   (set_attr "predicable" "yes")
2910   (set_attr "type" "simple_alu_imm,simple_alu_imm,*,*")]
2911)
2912
2913(define_insn "*thumb1_iorsi3_insn"
2914  [(set (match_operand:SI         0 "register_operand" "=l")
2915	(ior:SI (match_operand:SI 1 "register_operand" "%0")
2916		(match_operand:SI 2 "register_operand" "l")))]
2917  "TARGET_THUMB1"
2918  "orr\\t%0, %2"
2919  [(set_attr "length" "2")
2920   (set_attr "conds" "set")])
2921
2922(define_peephole2
2923  [(match_scratch:SI 3 "r")
2924   (set (match_operand:SI 0 "arm_general_register_operand" "")
2925	(ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2926		(match_operand:SI 2 "const_int_operand" "")))]
2927  "TARGET_ARM
2928   && !const_ok_for_arm (INTVAL (operands[2]))
2929   && const_ok_for_arm (~INTVAL (operands[2]))"
2930  [(set (match_dup 3) (match_dup 2))
2931   (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2932  ""
2933)
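; Example of the peephole above (constant chosen for illustration):
; "x | 0xffffff00" cannot be encoded as an ORR immediate, but its complement
; 0xff can, so the constant is expected to be loaded first, roughly
;   mvn   r3, #255
;   orr   r0, r1, r3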
2934
2935(define_insn "*iorsi3_compare0"
2936  [(set (reg:CC_NOOV CC_REGNUM)
2937	(compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
2938				 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2939			 (const_int 0)))
2940   (set (match_operand:SI 0 "s_register_operand" "=r,r")
2941	(ior:SI (match_dup 1) (match_dup 2)))]
2942  "TARGET_32BIT"
2943  "orr%.\\t%0, %1, %2"
2944  [(set_attr "conds" "set")
2945   (set_attr "type" "simple_alu_imm,*")]
2946)
2947
2948(define_insn "*iorsi3_compare0_scratch"
2949  [(set (reg:CC_NOOV CC_REGNUM)
2950	(compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r,r")
2951				 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
2952			 (const_int 0)))
2953   (clobber (match_scratch:SI 0 "=r,r"))]
2954  "TARGET_32BIT"
2955  "orr%.\\t%0, %1, %2"
2956  [(set_attr "conds" "set")
2957   (set_attr "type" "simple_alu_imm, *")]
2958)
2959
2960(define_expand "xordi3"
2961  [(set (match_operand:DI         0 "s_register_operand" "")
2962	(xor:DI (match_operand:DI 1 "s_register_operand" "")
2963		(match_operand:DI 2 "s_register_operand" "")))]
2964  "TARGET_32BIT"
2965  ""
2966)
2967
2968(define_insn "*xordi3_insn"
2969  [(set (match_operand:DI         0 "s_register_operand" "=&r,&r")
2970	(xor:DI (match_operand:DI 1 "s_register_operand"  "%0,r")
2971		(match_operand:DI 2 "s_register_operand"   "r,r")))]
2972  "TARGET_32BIT && !TARGET_IWMMXT && !TARGET_NEON"
2973  "#"
2974  [(set_attr "length" "8")
2975   (set_attr "predicable" "yes")]
2976)
2977
2978(define_insn "*xordi_zesidi_di"
2979  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2980	(xor:DI (zero_extend:DI
2981		 (match_operand:SI 2 "s_register_operand" "r,r"))
2982		(match_operand:DI 1 "s_register_operand" "0,?r")))]
2983  "TARGET_32BIT"
2984  "@
2985   eor%?\\t%Q0, %Q1, %2
2986   #"
2987  [(set_attr "length" "4,8")
2988   (set_attr "predicable" "yes")]
2989)
2990
2991(define_insn "*xordi_sesidi_di"
2992  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2993	(xor:DI (sign_extend:DI
2994		 (match_operand:SI 2 "s_register_operand" "r,r"))
2995		(match_operand:DI 1 "s_register_operand" "0,r")))]
2996  "TARGET_32BIT"
2997  "#"
2998  [(set_attr "length" "8")
2999   (set_attr "predicable" "yes")]
3000)
3001
3002(define_expand "xorsi3"
3003  [(set (match_operand:SI         0 "s_register_operand" "")
3004	(xor:SI (match_operand:SI 1 "s_register_operand" "")
3005		(match_operand:SI 2 "reg_or_int_operand" "")))]
3006  "TARGET_EITHER"
3007  "if (CONST_INT_P (operands[2]))
3008    {
3009      if (TARGET_32BIT)
3010        {
3011          arm_split_constant (XOR, SImode, NULL_RTX,
3012	                      INTVAL (operands[2]), operands[0], operands[1],
3013			      optimize && can_create_pseudo_p ());
3014          DONE;
3015	}
3016      else /* TARGET_THUMB1 */
3017        {
3018          rtx tmp = force_reg (SImode, operands[2]);
3019	  if (rtx_equal_p (operands[0], operands[1]))
3020	    operands[2] = tmp;
3021	  else
3022	    {
3023              operands[2] = operands[1];
3024              operands[1] = tmp;
3025	    }
3026        }
3027    }"
3028)
3029
3030(define_insn_and_split "*arm_xorsi3"
3031  [(set (match_operand:SI         0 "s_register_operand" "=r,r,r")
3032	(xor:SI (match_operand:SI 1 "s_register_operand" "%r,r,r")
3033		(match_operand:SI 2 "reg_or_int_operand" "I,r,?n")))]
3034  "TARGET_32BIT"
3035  "@
3036   eor%?\\t%0, %1, %2
3037   eor%?\\t%0, %1, %2
3038   #"
3039  "TARGET_32BIT
3040   && CONST_INT_P (operands[2])
3041   && !const_ok_for_arm (INTVAL (operands[2]))"
3042  [(clobber (const_int 0))]
3043{
3044  arm_split_constant (XOR, SImode, curr_insn,
3045                      INTVAL (operands[2]), operands[0], operands[1], 0);
3046  DONE;
3047}
3048  [(set_attr "length" "4,4,16")
3049   (set_attr "predicable" "yes")
3050   (set_attr "type"  "simple_alu_imm,*,*")]
3051)
3052
3053(define_insn "*thumb1_xorsi3_insn"
3054  [(set (match_operand:SI         0 "register_operand" "=l")
3055	(xor:SI (match_operand:SI 1 "register_operand" "%0")
3056		(match_operand:SI 2 "register_operand" "l")))]
3057  "TARGET_THUMB1"
3058  "eor\\t%0, %2"
3059  [(set_attr "length" "2")
3060   (set_attr "conds" "set")
3061   (set_attr "type" "simple_alu_imm")]
3062)
3063
3064(define_insn "*xorsi3_compare0"
3065  [(set (reg:CC_NOOV CC_REGNUM)
3066	(compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3067				 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3068			 (const_int 0)))
3069   (set (match_operand:SI 0 "s_register_operand" "=r,r")
3070	(xor:SI (match_dup 1) (match_dup 2)))]
3071  "TARGET_32BIT"
3072  "eor%.\\t%0, %1, %2"
3073  [(set_attr "conds" "set")
3074   (set_attr "type" "simple_alu_imm,*")]
3075)
3076
3077(define_insn "*xorsi3_compare0_scratch"
3078  [(set (reg:CC_NOOV CC_REGNUM)
3079	(compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3080				 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3081			 (const_int 0)))]
3082  "TARGET_32BIT"
3083  "teq%?\\t%0, %1"
3084  [(set_attr "conds" "set")
3085   (set_attr "type" "simple_alu_imm, *")]
3086)
3087
3088; By splitting (IOR (AND (NOT A) (NOT B)) C) into D = AND (IOR A B) (NOT C)
3089; followed by (NOT D), we can sometimes merge the final NOT into one of the
3090; following insns.
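; (Derivation, for the record: by De Morgan, (AND (NOT A) (NOT B)) is
; (NOT (IOR A B)), so the whole expression equals
; (NOT (AND (IOR A B) (NOT C))), i.e. (NOT D).)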
3091
3092(define_split
3093  [(set (match_operand:SI 0 "s_register_operand" "")
3094	(ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3095			(not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3096		(match_operand:SI 3 "arm_rhs_operand" "")))
3097   (clobber (match_operand:SI 4 "s_register_operand" ""))]
3098  "TARGET_32BIT"
3099  [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3100			      (not:SI (match_dup 3))))
3101   (set (match_dup 0) (not:SI (match_dup 4)))]
3102  ""
3103)
3104
3105(define_insn "*andsi_iorsi3_notsi"
3106  [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3107	(and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3108			(match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3109		(not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3110  "TARGET_32BIT"
3111  "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3112  [(set_attr "length" "8")
3113   (set_attr "ce_count" "2")
3114   (set_attr "predicable" "yes")]
3115)
3116
3117; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3118; insns are available?
3119(define_split
3120  [(set (match_operand:SI 0 "s_register_operand" "")
3121	(match_operator:SI 1 "logical_binary_operator"
3122	 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3123			   (match_operand:SI 3 "const_int_operand" "")
3124			   (match_operand:SI 4 "const_int_operand" ""))
3125	  (match_operator:SI 9 "logical_binary_operator"
3126	   [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3127			 (match_operand:SI 6 "const_int_operand" ""))
3128	    (match_operand:SI 7 "s_register_operand" "")])]))
3129   (clobber (match_operand:SI 8 "s_register_operand" ""))]
3130  "TARGET_32BIT
3131   && GET_CODE (operands[1]) == GET_CODE (operands[9])
3132   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3133  [(set (match_dup 8)
3134	(match_op_dup 1
3135	 [(ashift:SI (match_dup 2) (match_dup 4))
3136	  (match_dup 5)]))
3137   (set (match_dup 0)
3138	(match_op_dup 1
3139	 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3140	  (match_dup 7)]))]
3141  "
3142  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3143")
3144
3145(define_split
3146  [(set (match_operand:SI 0 "s_register_operand" "")
3147	(match_operator:SI 1 "logical_binary_operator"
3148	 [(match_operator:SI 9 "logical_binary_operator"
3149	   [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3150			 (match_operand:SI 6 "const_int_operand" ""))
3151	    (match_operand:SI 7 "s_register_operand" "")])
3152	  (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3153			   (match_operand:SI 3 "const_int_operand" "")
3154			   (match_operand:SI 4 "const_int_operand" ""))]))
3155   (clobber (match_operand:SI 8 "s_register_operand" ""))]
3156  "TARGET_32BIT
3157   && GET_CODE (operands[1]) == GET_CODE (operands[9])
3158   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3159  [(set (match_dup 8)
3160	(match_op_dup 1
3161	 [(ashift:SI (match_dup 2) (match_dup 4))
3162	  (match_dup 5)]))
3163   (set (match_dup 0)
3164	(match_op_dup 1
3165	 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3166	  (match_dup 7)]))]
3167  "
3168  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3169")
3170
3171(define_split
3172  [(set (match_operand:SI 0 "s_register_operand" "")
3173	(match_operator:SI 1 "logical_binary_operator"
3174	 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3175			   (match_operand:SI 3 "const_int_operand" "")
3176			   (match_operand:SI 4 "const_int_operand" ""))
3177	  (match_operator:SI 9 "logical_binary_operator"
3178	   [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3179			 (match_operand:SI 6 "const_int_operand" ""))
3180	    (match_operand:SI 7 "s_register_operand" "")])]))
3181   (clobber (match_operand:SI 8 "s_register_operand" ""))]
3182  "TARGET_32BIT
3183   && GET_CODE (operands[1]) == GET_CODE (operands[9])
3184   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3185  [(set (match_dup 8)
3186	(match_op_dup 1
3187	 [(ashift:SI (match_dup 2) (match_dup 4))
3188	  (match_dup 5)]))
3189   (set (match_dup 0)
3190	(match_op_dup 1
3191	 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3192	  (match_dup 7)]))]
3193  "
3194  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3195")
3196
3197(define_split
3198  [(set (match_operand:SI 0 "s_register_operand" "")
3199	(match_operator:SI 1 "logical_binary_operator"
3200	 [(match_operator:SI 9 "logical_binary_operator"
3201	   [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3202			 (match_operand:SI 6 "const_int_operand" ""))
3203	    (match_operand:SI 7 "s_register_operand" "")])
3204	  (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3205			   (match_operand:SI 3 "const_int_operand" "")
3206			   (match_operand:SI 4 "const_int_operand" ""))]))
3207   (clobber (match_operand:SI 8 "s_register_operand" ""))]
3208  "TARGET_32BIT
3209   && GET_CODE (operands[1]) == GET_CODE (operands[9])
3210   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3211  [(set (match_dup 8)
3212	(match_op_dup 1
3213	 [(ashift:SI (match_dup 2) (match_dup 4))
3214	  (match_dup 5)]))
3215   (set (match_dup 0)
3216	(match_op_dup 1
3217	 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3218	  (match_dup 7)]))]
3219  "
3220  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3221")
3222
3223
3224;; Minimum and maximum insns
3225
3226(define_expand "smaxsi3"
3227  [(parallel [
3228    (set (match_operand:SI 0 "s_register_operand" "")
3229	 (smax:SI (match_operand:SI 1 "s_register_operand" "")
3230		  (match_operand:SI 2 "arm_rhs_operand" "")))
3231    (clobber (reg:CC CC_REGNUM))])]
3232  "TARGET_32BIT"
3233  "
3234  if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3235    {
3236      /* No need for a clobber of the condition code register here.  */
3237      emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3238			      gen_rtx_SMAX (SImode, operands[1],
3239					    operands[2])));
3240      DONE;
3241    }
3242")
3243
3244(define_insn "*smax_0"
3245  [(set (match_operand:SI 0 "s_register_operand" "=r")
3246	(smax:SI (match_operand:SI 1 "s_register_operand" "r")
3247		 (const_int 0)))]
3248  "TARGET_32BIT"
3249  "bic%?\\t%0, %1, %1, asr #31"
3250  [(set_attr "predicable" "yes")]
3251)
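;; For illustration: "%1, asr #31" is 0 when %1 >= 0 and -1 otherwise, so the
;; BIC above yields %1 or 0 respectively, i.e. smax (%1, 0).  The patterns
;; below apply the same sign mask with ORR and AND to implement smax (%1, -1)
;; and smin (%1, 0) in a single insn.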
3252
3253(define_insn "*smax_m1"
3254  [(set (match_operand:SI 0 "s_register_operand" "=r")
3255	(smax:SI (match_operand:SI 1 "s_register_operand" "r")
3256		 (const_int -1)))]
3257  "TARGET_32BIT"
3258  "orr%?\\t%0, %1, %1, asr #31"
3259  [(set_attr "predicable" "yes")]
3260)
3261
3262(define_insn "*arm_smax_insn"
3263  [(set (match_operand:SI          0 "s_register_operand" "=r,r")
3264	(smax:SI (match_operand:SI 1 "s_register_operand"  "%0,?r")
3265		 (match_operand:SI 2 "arm_rhs_operand"    "rI,rI")))
3266   (clobber (reg:CC CC_REGNUM))]
3267  "TARGET_ARM"
3268  "@
3269   cmp\\t%1, %2\;movlt\\t%0, %2
3270   cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3271  [(set_attr "conds" "clob")
3272   (set_attr "length" "8,12")]
3273)
3274
3275(define_expand "sminsi3"
3276  [(parallel [
3277    (set (match_operand:SI 0 "s_register_operand" "")
3278	 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3279		  (match_operand:SI 2 "arm_rhs_operand" "")))
3280    (clobber (reg:CC CC_REGNUM))])]
3281  "TARGET_32BIT"
3282  "
3283  if (operands[2] == const0_rtx)
3284    {
3285      /* No need for a clobber of the condition code register here.  */
3286      emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3287			      gen_rtx_SMIN (SImode, operands[1],
3288					    operands[2])));
3289      DONE;
3290    }
3291")
3292
3293(define_insn "*smin_0"
3294  [(set (match_operand:SI 0 "s_register_operand" "=r")
3295	(smin:SI (match_operand:SI 1 "s_register_operand" "r")
3296		 (const_int 0)))]
3297  "TARGET_32BIT"
3298  "and%?\\t%0, %1, %1, asr #31"
3299  [(set_attr "predicable" "yes")]
3300)
3301
3302(define_insn "*arm_smin_insn"
3303  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3304	(smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3305		 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3306   (clobber (reg:CC CC_REGNUM))]
3307  "TARGET_ARM"
3308  "@
3309   cmp\\t%1, %2\;movge\\t%0, %2
3310   cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3311  [(set_attr "conds" "clob")
3312   (set_attr "length" "8,12")]
3313)
3314
3315(define_expand "umaxsi3"
3316  [(parallel [
3317    (set (match_operand:SI 0 "s_register_operand" "")
3318	 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3319		  (match_operand:SI 2 "arm_rhs_operand" "")))
3320    (clobber (reg:CC CC_REGNUM))])]
3321  "TARGET_32BIT"
3322  ""
3323)
3324
3325(define_insn "*arm_umaxsi3"
3326  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3327	(umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3328		 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3329   (clobber (reg:CC CC_REGNUM))]
3330  "TARGET_ARM"
3331  "@
3332   cmp\\t%1, %2\;movcc\\t%0, %2
3333   cmp\\t%1, %2\;movcs\\t%0, %1
3334   cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3335  [(set_attr "conds" "clob")
3336   (set_attr "length" "8,8,12")]
3337)
3338
3339(define_expand "uminsi3"
3340  [(parallel [
3341    (set (match_operand:SI 0 "s_register_operand" "")
3342	 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3343		  (match_operand:SI 2 "arm_rhs_operand" "")))
3344    (clobber (reg:CC CC_REGNUM))])]
3345  "TARGET_32BIT"
3346  ""
3347)
3348
3349(define_insn "*arm_uminsi3"
3350  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3351	(umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3352		 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3353   (clobber (reg:CC CC_REGNUM))]
3354  "TARGET_ARM"
3355  "@
3356   cmp\\t%1, %2\;movcs\\t%0, %2
3357   cmp\\t%1, %2\;movcc\\t%0, %1
3358   cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3359  [(set_attr "conds" "clob")
3360   (set_attr "length" "8,8,12")]
3361)
3362
3363(define_insn "*store_minmaxsi"
3364  [(set (match_operand:SI 0 "memory_operand" "=m")
3365	(match_operator:SI 3 "minmax_operator"
3366	 [(match_operand:SI 1 "s_register_operand" "r")
3367	  (match_operand:SI 2 "s_register_operand" "r")]))
3368   (clobber (reg:CC CC_REGNUM))]
3369  "TARGET_32BIT"
3370  "*
3371  operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3372				operands[1], operands[2]);
3373  output_asm_insn (\"cmp\\t%1, %2\", operands);
3374  if (TARGET_THUMB2)
3375    output_asm_insn (\"ite\t%d3\", operands);
3376  output_asm_insn (\"str%d3\\t%1, %0\", operands);
3377  output_asm_insn (\"str%D3\\t%2, %0\", operands);
3378  return \"\";
3379  "
3380  [(set_attr "conds" "clob")
3381   (set (attr "length")
3382	(if_then_else (eq_attr "is_thumb" "yes")
3383		      (const_int 14)
3384		      (const_int 12)))
3385   (set_attr "type" "store1")]
3386)
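;; For example, a signed-max store comes out roughly as
;;   cmp     r1, r2
;;   ite     ge              @ Thumb-2 only
;;   strge   r1, [mem]
;;   strlt   r2, [mem]
;; (illustrative sketch; minmax_code picks the actual condition).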
3387
3388; Reject the frame pointer in operand[1], since reloading this after
3389; it has been eliminated can cause carnage.
3390(define_insn "*minmax_arithsi"
3391  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3392	(match_operator:SI 4 "shiftable_operator"
3393	 [(match_operator:SI 5 "minmax_operator"
3394	   [(match_operand:SI 2 "s_register_operand" "r,r")
3395	    (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3396	  (match_operand:SI 1 "s_register_operand" "0,?r")]))
3397   (clobber (reg:CC CC_REGNUM))]
3398  "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3399  "*
3400  {
3401    enum rtx_code code = GET_CODE (operands[4]);
3402    bool need_else;
3403
3404    if (which_alternative != 0 || operands[3] != const0_rtx
3405        || (code != PLUS && code != IOR && code != XOR))
3406      need_else = true;
3407    else
3408      need_else = false;
3409
3410    operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3411				  operands[2], operands[3]);
3412    output_asm_insn (\"cmp\\t%2, %3\", operands);
3413    if (TARGET_THUMB2)
3414      {
3415	if (need_else)
3416	  output_asm_insn (\"ite\\t%d5\", operands);
3417	else
3418	  output_asm_insn (\"it\\t%d5\", operands);
3419      }
3420    output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3421    if (need_else)
3422      output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3423    return \"\";
3424  }"
3425  [(set_attr "conds" "clob")
3426   (set (attr "length")
3427	(if_then_else (eq_attr "is_thumb" "yes")
3428		      (const_int 14)
3429		      (const_int 12)))]
3430)
3431
3432(define_code_iterator SAT [smin smax])
3433(define_code_iterator SATrev [smin smax])
3434(define_code_attr SATlo [(smin "1") (smax "2")])
3435(define_code_attr SAThi [(smin "2") (smax "1")])
3436
3437(define_insn "*satsi_<SAT:code>"
3438  [(set (match_operand:SI 0 "s_register_operand" "=r")
3439        (SAT:SI (SATrev:SI (match_operand:SI 3 "s_register_operand" "r")
3440                           (match_operand:SI 1 "const_int_operand" "i"))
3441                (match_operand:SI 2 "const_int_operand" "i")))]
3442  "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
3443   && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3444{
3445  int mask;
3446  bool signed_sat;
3447  if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3448                               &mask, &signed_sat))
3449    gcc_unreachable ();
3450
3451  operands[1] = GEN_INT (mask);
3452  if (signed_sat)
3453    return "ssat%?\t%0, %1, %3";
3454  else
3455    return "usat%?\t%0, %1, %3";
3456}
3457  [(set_attr "predicable" "yes")
3458   (set_attr "insn" "sat")])
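;; For illustration: a clamp of the form smin (smax (X, -2^(N-1)), 2^(N-1)-1)
;; becomes a single "ssat Rd, #N, Rm", and smin (smax (X, 0), 2^N - 1)
;; becomes "usat Rd, #N, Rm"; arm_sat_operator_match validates the two bounds
;; and recovers N and the signedness.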
3459
3460(define_insn "*satsi_<SAT:code>_shift"
3461  [(set (match_operand:SI 0 "s_register_operand" "=r")
3462        (SAT:SI (SATrev:SI (match_operator:SI 3 "sat_shift_operator"
3463                             [(match_operand:SI 4 "s_register_operand" "r")
3464                              (match_operand:SI 5 "const_int_operand" "i")])
3465                           (match_operand:SI 1 "const_int_operand" "i"))
3466                (match_operand:SI 2 "const_int_operand" "i")))]
3467  "TARGET_32BIT && arm_arch6 && <SAT:CODE> != <SATrev:CODE>
3468   && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
3469{
3470  int mask;
3471  bool signed_sat;
3472  if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
3473                               &mask, &signed_sat))
3474    gcc_unreachable ();
3475
3476  operands[1] = GEN_INT (mask);
3477  if (signed_sat)
3478    return "ssat%?\t%0, %1, %4%S3";
3479  else
3480    return "usat%?\t%0, %1, %4%S3";
3481}
3482  [(set_attr "predicable" "yes")
3483   (set_attr "insn" "sat")
3484   (set_attr "shift" "3")
3485   (set_attr "type" "alu_shift")])
3486
3487;; Shift and rotation insns
3488
3489(define_expand "ashldi3"
3490  [(set (match_operand:DI            0 "s_register_operand" "")
3491        (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3492                   (match_operand:SI 2 "general_operand" "")))]
3493  "TARGET_32BIT"
3494  "
3495  if (TARGET_NEON)
3496    {
3497      /* Delay the decision whether to use NEON or core-regs until
3498	 register allocation.  */
3499      emit_insn (gen_ashldi3_neon (operands[0], operands[1], operands[2]));
3500      DONE;
3501    }
3502  else
3503    {
3504      /* Only the NEON case can handle in-memory shift counts.  */
3505      if (!reg_or_int_operand (operands[2], SImode))
3506        operands[2] = force_reg (SImode, operands[2]);
3507    }
3508
3509  if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
3510    ; /* No special preparation statements; expand pattern as above.  */
3511  else
3512    {
3513      rtx scratch1, scratch2;
3514
3515      if (CONST_INT_P (operands[2])
3516	  && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3517        {
3518          emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3519          DONE;
3520        }
3521
3522      /* Ideally we would use iwmmxt here if we knew that operands[1]
3523         ends up already living in an iwmmxt register.  Otherwise it is
3524         cheaper to generate the alternative code than to move values
3525         into iwmmxt registers and back.  */
3526
3527      /* If we're optimizing for size, we prefer the libgcc calls.  */
3528      if (optimize_function_for_size_p (cfun))
3529	FAIL;
3530
3531      /* Expand operation using core-registers.
3532	 'FAIL' would achieve the same thing, but this is a bit smarter.  */
3533      scratch1 = gen_reg_rtx (SImode);
3534      scratch2 = gen_reg_rtx (SImode);
3535      arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
3536				     operands[2], scratch1, scratch2);
3537      DONE;
3538    }
3539  "
3540)
3541
3542(define_insn "arm_ashldi3_1bit"
3543  [(set (match_operand:DI            0 "s_register_operand" "=r,&r")
3544        (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3545                   (const_int 1)))
3546   (clobber (reg:CC CC_REGNUM))]
3547  "TARGET_32BIT"
3548  "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3549  [(set_attr "conds" "clob")
3550   (set_attr "length" "8")]
3551)
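;; Note: "movs %Q0, %Q1, asl #1" shifts the low word left and leaves its old
;; bit 31 in the carry flag, and "adc %R0, %R1, %R1" then computes
;; high + high + carry, completing the 64-bit shift left by one.  The one-bit
;; right-shift patterns below shift the other word first with "movs ... #1"
;; and use RRX to pull the shifted-out bit in through the carry.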
3552
3553(define_expand "ashlsi3"
3554  [(set (match_operand:SI            0 "s_register_operand" "")
3555	(ashift:SI (match_operand:SI 1 "s_register_operand" "")
3556		   (match_operand:SI 2 "arm_rhs_operand" "")))]
3557  "TARGET_EITHER"
3558  "
3559  if (CONST_INT_P (operands[2])
3560      && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3561    {
3562      emit_insn (gen_movsi (operands[0], const0_rtx));
3563      DONE;
3564    }
3565  "
3566)
3567
3568(define_insn "*thumb1_ashlsi3"
3569  [(set (match_operand:SI            0 "register_operand" "=l,l")
3570	(ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3571		   (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3572  "TARGET_THUMB1"
3573  "lsl\\t%0, %1, %2"
3574  [(set_attr "length" "2")
3575   (set_attr "conds" "set")])
3576
3577(define_expand "ashrdi3"
3578  [(set (match_operand:DI              0 "s_register_operand" "")
3579        (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3580                     (match_operand:SI 2 "reg_or_int_operand" "")))]
3581  "TARGET_32BIT"
3582  "
3583  if (TARGET_NEON)
3584    {
3585      /* Delay the decision whether to use NEON or core-regs until
3586	 register allocation.  */
3587      emit_insn (gen_ashrdi3_neon (operands[0], operands[1], operands[2]));
3588      DONE;
3589    }
3590
3591  if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
3592    ; /* No special preparation statements; expand pattern as above.  */
3593  else
3594    {
3595      rtx scratch1, scratch2;
3596
3597      if (CONST_INT_P (operands[2])
3598	  && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3599        {
3600          emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3601          DONE;
3602        }
3603
3604      /* Ideally we would use iwmmxt here if we knew that operands[1]
3605         ends up already living in an iwmmxt register.  Otherwise it is
3606         cheaper to generate the alternative code than to move values
3607         into iwmmxt registers and back.  */
3608
3609      /* If we're optimizing for size, we prefer the libgcc calls.  */
3610      if (optimize_function_for_size_p (cfun))
3611	FAIL;
3612
3613      /* Expand operation using core-registers.
3614	 'FAIL' would achieve the same thing, but this is a bit smarter.  */
3615      scratch1 = gen_reg_rtx (SImode);
3616      scratch2 = gen_reg_rtx (SImode);
3617      arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
3618				     operands[2], scratch1, scratch2);
3619      DONE;
3620    }
3621  "
3622)
3623
3624(define_insn "arm_ashrdi3_1bit"
3625  [(set (match_operand:DI              0 "s_register_operand" "=r,&r")
3626        (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3627                     (const_int 1)))
3628   (clobber (reg:CC CC_REGNUM))]
3629  "TARGET_32BIT"
3630  "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3631  [(set_attr "conds" "clob")
3632   (set_attr "insn" "mov")
3633   (set_attr "length" "8")]
3634)
3635
3636(define_expand "ashrsi3"
3637  [(set (match_operand:SI              0 "s_register_operand" "")
3638	(ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3639		     (match_operand:SI 2 "arm_rhs_operand" "")))]
3640  "TARGET_EITHER"
3641  "
3642  if (CONST_INT_P (operands[2])
3643      && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3644    operands[2] = GEN_INT (31);
3645  "
3646)
3647
3648(define_insn "*thumb1_ashrsi3"
3649  [(set (match_operand:SI              0 "register_operand" "=l,l")
3650	(ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3651		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3652  "TARGET_THUMB1"
3653  "asr\\t%0, %1, %2"
3654  [(set_attr "length" "2")
3655   (set_attr "conds" "set")])
3656
3657(define_expand "lshrdi3"
3658  [(set (match_operand:DI              0 "s_register_operand" "")
3659        (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3660                     (match_operand:SI 2 "reg_or_int_operand" "")))]
3661  "TARGET_32BIT"
3662  "
3663  if (TARGET_NEON)
3664    {
3665      /* Delay the decision whether to use NEON or core-regs until
3666	 register allocation.  */
3667      emit_insn (gen_lshrdi3_neon (operands[0], operands[1], operands[2]));
3668      DONE;
3669    }
3670
3671  if (!CONST_INT_P (operands[2]) && TARGET_REALLY_IWMMXT)
3672    ; /* No special preparation statements; expand pattern as above.  */
3673  else
3674    {
3675      rtx scratch1, scratch2;
3676
3677      if (CONST_INT_P (operands[2])
3678	  && (HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3679        {
3680          emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3681          DONE;
3682        }
3683
3684      /* Ideally we would use iwmmxt here if we knew that operands[1]
3685         ends up already living in an iwmmxt register.  Otherwise it is
3686         cheaper to generate the alternative code than to move values
3687         into iwmmxt registers and back.  */
3688
3689      /* If we're optimizing for size, we prefer the libgcc calls.  */
3690      if (optimize_function_for_size_p (cfun))
3691	FAIL;
3692
3693      /* Expand operation using core-registers.
3694	 'FAIL' would achieve the same thing, but this is a bit smarter.  */
3695      scratch1 = gen_reg_rtx (SImode);
3696      scratch2 = gen_reg_rtx (SImode);
3697      arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
3698				     operands[2], scratch1, scratch2);
3699      DONE;
3700    }
3701  "
3702)
3703
3704(define_insn "arm_lshrdi3_1bit"
3705  [(set (match_operand:DI              0 "s_register_operand" "=r,&r")
3706        (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3707                     (const_int 1)))
3708   (clobber (reg:CC CC_REGNUM))]
3709  "TARGET_32BIT"
3710  "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3711  [(set_attr "conds" "clob")
3712   (set_attr "insn" "mov")
3713   (set_attr "length" "8")]
3714)
3715
3716(define_expand "lshrsi3"
3717  [(set (match_operand:SI              0 "s_register_operand" "")
3718	(lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3719		     (match_operand:SI 2 "arm_rhs_operand" "")))]
3720  "TARGET_EITHER"
3721  "
3722  if (CONST_INT_P (operands[2])
3723      && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3724    {
3725      emit_insn (gen_movsi (operands[0], const0_rtx));
3726      DONE;
3727    }
3728  "
3729)
3730
3731(define_insn "*thumb1_lshrsi3"
3732  [(set (match_operand:SI              0 "register_operand" "=l,l")
3733	(lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3734		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3735  "TARGET_THUMB1"
3736  "lsr\\t%0, %1, %2"
3737  [(set_attr "length" "2")
3738   (set_attr "conds" "set")])
3739
3740(define_expand "rotlsi3"
3741  [(set (match_operand:SI              0 "s_register_operand" "")
3742	(rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3743		     (match_operand:SI 2 "reg_or_int_operand" "")))]
3744  "TARGET_32BIT"
3745  "
3746  if (CONST_INT_P (operands[2]))
3747    operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3748  else
3749    {
3750      rtx reg = gen_reg_rtx (SImode);
3751      emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3752      operands[2] = reg;
3753    }
3754  "
3755)
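;; ARM has no rotate-left operation, so rotlsi3 is expanded as a rotate right
;; by (32 - n) % 32; e.g. a rotate left by 3 becomes a rotate right by 29,
;; and a variable count N is first turned into 32 - N with a SUB.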
3756
3757(define_expand "rotrsi3"
3758  [(set (match_operand:SI              0 "s_register_operand" "")
3759	(rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3760		     (match_operand:SI 2 "arm_rhs_operand" "")))]
3761  "TARGET_EITHER"
3762  "
3763  if (TARGET_32BIT)
3764    {
3765      if (CONST_INT_P (operands[2])
3766          && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3767        operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3768    }
3769  else /* TARGET_THUMB1 */
3770    {
3771      if (CONST_INT_P (operands [2]))
3772        operands [2] = force_reg (SImode, operands[2]);
3773    }
3774  "
3775)
3776
3777(define_insn "*thumb1_rotrsi3"
3778  [(set (match_operand:SI              0 "register_operand" "=l")
3779	(rotatert:SI (match_operand:SI 1 "register_operand" "0")
3780		     (match_operand:SI 2 "register_operand" "l")))]
3781  "TARGET_THUMB1"
3782  "ror\\t%0, %0, %2"
3783  [(set_attr "length" "2")]
3784)
3785
3786(define_insn "*arm_shiftsi3"
3787  [(set (match_operand:SI   0 "s_register_operand" "=r")
3788	(match_operator:SI  3 "shift_operator"
3789	 [(match_operand:SI 1 "s_register_operand"  "r")
3790	  (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3791  "TARGET_32BIT"
3792  "* return arm_output_shift(operands, 0);"
3793  [(set_attr "predicable" "yes")
3794   (set_attr "shift" "1")
3795   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3796		      (const_string "alu_shift")
3797		      (const_string "alu_shift_reg")))]
3798)
3799
3800(define_insn "*shiftsi3_compare0"
3801  [(set (reg:CC_NOOV CC_REGNUM)
3802	(compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3803			  [(match_operand:SI 1 "s_register_operand" "r")
3804			   (match_operand:SI 2 "arm_rhs_operand" "rM")])
3805			 (const_int 0)))
3806   (set (match_operand:SI 0 "s_register_operand" "=r")
3807	(match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3808  "TARGET_32BIT"
3809  "* return arm_output_shift(operands, 1);"
3810  [(set_attr "conds" "set")
3811   (set_attr "shift" "1")
3812   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3813		      (const_string "alu_shift")
3814		      (const_string "alu_shift_reg")))]
3815)
3816
3817(define_insn "*shiftsi3_compare0_scratch"
3818  [(set (reg:CC_NOOV CC_REGNUM)
3819	(compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3820			  [(match_operand:SI 1 "s_register_operand" "r")
3821			   (match_operand:SI 2 "arm_rhs_operand" "rM")])
3822			 (const_int 0)))
3823   (clobber (match_scratch:SI 0 "=r"))]
3824  "TARGET_32BIT"
3825  "* return arm_output_shift(operands, 1);"
3826  [(set_attr "conds" "set")
3827   (set_attr "shift" "1")]
3828)
3829
3830(define_insn "*not_shiftsi"
3831  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3832	(not:SI (match_operator:SI 3 "shift_operator"
3833		 [(match_operand:SI 1 "s_register_operand" "r,r")
3834		  (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
3835  "TARGET_32BIT"
3836  "mvn%?\\t%0, %1%S3"
3837  [(set_attr "predicable" "yes")
3838   (set_attr "shift" "1")
3839   (set_attr "insn" "mvn")
3840   (set_attr "arch" "32,a")
3841   (set_attr "type" "alu_shift,alu_shift_reg")])
3842
3843(define_insn "*not_shiftsi_compare0"
3844  [(set (reg:CC_NOOV CC_REGNUM)
3845	(compare:CC_NOOV
3846	 (not:SI (match_operator:SI 3 "shift_operator"
3847		  [(match_operand:SI 1 "s_register_operand" "r,r")
3848		   (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3849	 (const_int 0)))
3850   (set (match_operand:SI 0 "s_register_operand" "=r,r")
3851	(not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3852  "TARGET_32BIT"
3853  "mvn%.\\t%0, %1%S3"
3854  [(set_attr "conds" "set")
3855   (set_attr "shift" "1")
3856   (set_attr "insn" "mvn")
3857   (set_attr "arch" "32,a")
3858   (set_attr "type" "alu_shift,alu_shift_reg")])
3859
3860(define_insn "*not_shiftsi_compare0_scratch"
3861  [(set (reg:CC_NOOV CC_REGNUM)
3862	(compare:CC_NOOV
3863	 (not:SI (match_operator:SI 3 "shift_operator"
3864		  [(match_operand:SI 1 "s_register_operand" "r,r")
3865		   (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
3866	 (const_int 0)))
3867   (clobber (match_scratch:SI 0 "=r,r"))]
3868  "TARGET_32BIT"
3869  "mvn%.\\t%0, %1%S3"
3870  [(set_attr "conds" "set")
3871   (set_attr "shift" "1")
3872   (set_attr "insn" "mvn")
3873   (set_attr "arch" "32,a")
3874   (set_attr "type" "alu_shift,alu_shift_reg")])
3875
3876;; We don't really have extzv, but defining this using shifts helps
3877;; to reduce register pressure later on.
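;; For example, extracting an 8-bit field at bit position 4 expands to
;; roughly "lsl tmp, rX, #20" followed by "lsr rD, tmp, #24": the left shift
;; discards the bits above the field and the logical right shift discards the
;; bits below it while zero-filling from the top (illustrative sketch only).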
3878
3879(define_expand "extzv"
3880  [(set (match_operand 0 "s_register_operand" "")
3881	(zero_extract (match_operand 1 "nonimmediate_operand" "")
3882		      (match_operand 2 "const_int_operand" "")
3883		      (match_operand 3 "const_int_operand" "")))]
3884  "TARGET_THUMB1 || arm_arch_thumb2"
3885  "
3886  {
3887    HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3888    HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3889
3890    if (arm_arch_thumb2)
3891      {
3892	HOST_WIDE_INT width = INTVAL (operands[2]);
3893	HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3894
3895	if (unaligned_access && MEM_P (operands[1])
3896	    && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
3897	  {
3898	    rtx base_addr;
3899
3900	    if (BYTES_BIG_ENDIAN)
3901	      bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
3902		       - bitpos;
3903
3904	    if (width == 32)
3905              {
3906		base_addr = adjust_address (operands[1], SImode,
3907					    bitpos / BITS_PER_UNIT);
3908		emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3909              }
3910	    else
3911              {
3912		rtx dest = operands[0];
3913		rtx tmp = gen_reg_rtx (SImode);
3914
3915		/* We may get a paradoxical subreg here.  Strip it off.  */
3916		if (GET_CODE (dest) == SUBREG
3917		    && GET_MODE (dest) == SImode
3918		    && GET_MODE (SUBREG_REG (dest)) == HImode)
3919		  dest = SUBREG_REG (dest);
3920
3921		if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
3922		  FAIL;
3923
3924		base_addr = adjust_address (operands[1], HImode,
3925					    bitpos / BITS_PER_UNIT);
3926		emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
3927		emit_move_insn (gen_lowpart (SImode, dest), tmp);
3928	      }
3929	    DONE;
3930	  }
3931	else if (s_register_operand (operands[1], GET_MODE (operands[1])))
3932	  {
3933	    emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3934				     operands[3]));
3935	    DONE;
3936	  }
3937	else
3938	  FAIL;
3939      }
3940
3941    if (!s_register_operand (operands[1], GET_MODE (operands[1])))
3942      FAIL;
3943
3944    operands[3] = GEN_INT (rshift);
3945
3946    if (lshift == 0)
3947      {
3948        emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3949        DONE;
3950      }
3951
3952    emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
3953			     operands[3], gen_reg_rtx (SImode)));
3954    DONE;
3955  }"
3956)
3957
3958;; Helper for extzv, for the Thumb-1 register-shifts case.
3959
3960(define_expand "extzv_t1"
3961  [(set (match_operand:SI 4 "s_register_operand" "")
3962	(ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
3963		   (match_operand:SI 2 "const_int_operand" "")))
3964   (set (match_operand:SI 0 "s_register_operand" "")
3965	(lshiftrt:SI (match_dup 4)
3966		     (match_operand:SI 3 "const_int_operand" "")))]
3967  "TARGET_THUMB1"
3968  "")
3969
3970(define_expand "extv"
3971  [(set (match_operand 0 "s_register_operand" "")
3972	(sign_extract (match_operand 1 "nonimmediate_operand" "")
3973		      (match_operand 2 "const_int_operand" "")
3974		      (match_operand 3 "const_int_operand" "")))]
3975  "arm_arch_thumb2"
3976{
3977  HOST_WIDE_INT width = INTVAL (operands[2]);
3978  HOST_WIDE_INT bitpos = INTVAL (operands[3]);
3979
3980  if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
3981      && (bitpos % BITS_PER_UNIT)  == 0)
3982    {
3983      rtx base_addr;
3984
3985      if (BYTES_BIG_ENDIAN)
3986	bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
3987
3988      if (width == 32)
3989        {
3990	  base_addr = adjust_address (operands[1], SImode,
3991				      bitpos / BITS_PER_UNIT);
3992	  emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
3993        }
3994      else
3995        {
3996	  rtx dest = operands[0];
3997	  rtx tmp = gen_reg_rtx (SImode);
3998
3999	  /* We may get a paradoxical subreg here.  Strip it off.  */
4000	  if (GET_CODE (dest) == SUBREG
4001	      && GET_MODE (dest) == SImode
4002	      && GET_MODE (SUBREG_REG (dest)) == HImode)
4003	    dest = SUBREG_REG (dest);
4004
4005	  if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4006	    FAIL;
4007
4008	  base_addr = adjust_address (operands[1], HImode,
4009				      bitpos / BITS_PER_UNIT);
4010	  emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4011	  emit_move_insn (gen_lowpart (SImode, dest), tmp);
4012	}
4013
4014      DONE;
4015    }
4016  else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4017    FAIL;
4018  else if (GET_MODE (operands[0]) == SImode
4019	   && GET_MODE (operands[1]) == SImode)
4020    {
4021      emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4022				 operands[3]));
4023      DONE;
4024    }
4025
4026  FAIL;
4027})
4028
4029; Helper to expand register forms of extv with the proper modes.
4030
4031(define_expand "extv_regsi"
4032  [(set (match_operand:SI 0 "s_register_operand" "")
4033	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
4034			 (match_operand 2 "const_int_operand" "")
4035			 (match_operand 3 "const_int_operand" "")))]
4036  ""
4037{
4038})
4039
4040; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
4041
4042(define_insn "unaligned_loadsi"
4043  [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4044	(unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
4045		   UNSPEC_UNALIGNED_LOAD))]
4046  "unaligned_access && TARGET_32BIT"
4047  "ldr%?\t%0, %1\t@ unaligned"
4048  [(set_attr "arch" "t2,any")
4049   (set_attr "length" "2,4")
4050   (set_attr "predicable" "yes")
4051   (set_attr "type" "load1")])
4052
4053(define_insn "unaligned_loadhis"
4054  [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4055	(sign_extend:SI
4056	  (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,Uh")]
4057		     UNSPEC_UNALIGNED_LOAD)))]
4058  "unaligned_access && TARGET_32BIT"
4059  "ldr%(sh%)\t%0, %1\t@ unaligned"
4060  [(set_attr "arch" "t2,any")
4061   (set_attr "length" "2,4")
4062   (set_attr "predicable" "yes")
4063   (set_attr "type" "load_byte")])
4064
4065(define_insn "unaligned_loadhiu"
4066  [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4067	(zero_extend:SI
4068	  (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
4069		     UNSPEC_UNALIGNED_LOAD)))]
4070  "unaligned_access && TARGET_32BIT"
4071  "ldr%(h%)\t%0, %1\t@ unaligned"
4072  [(set_attr "arch" "t2,any")
4073   (set_attr "length" "2,4")
4074   (set_attr "predicable" "yes")
4075   (set_attr "type" "load_byte")])
4076
4077(define_insn "unaligned_storesi"
4078  [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
4079	(unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
4080		   UNSPEC_UNALIGNED_STORE))]
4081  "unaligned_access && TARGET_32BIT"
4082  "str%?\t%1, %0\t@ unaligned"
4083  [(set_attr "arch" "t2,any")
4084   (set_attr "length" "2,4")
4085   (set_attr "predicable" "yes")
4086   (set_attr "type" "store1")])
4087
4088(define_insn "unaligned_storehi"
4089  [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
4090	(unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
4091		   UNSPEC_UNALIGNED_STORE))]
4092  "unaligned_access && TARGET_32BIT"
4093  "str%(h%)\t%1, %0\t@ unaligned"
4094  [(set_attr "arch" "t2,any")
4095   (set_attr "length" "2,4")
4096   (set_attr "predicable" "yes")
4097   (set_attr "type" "store1")])
4098
4099(define_insn "*extv_reg"
4100  [(set (match_operand:SI 0 "s_register_operand" "=r")
4101	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4102                         (match_operand:SI 2 "const_int_operand" "M")
4103                         (match_operand:SI 3 "const_int_operand" "M")))]
4104  "arm_arch_thumb2"
4105  "sbfx%?\t%0, %1, %3, %2"
4106  [(set_attr "length" "4")
4107   (set_attr "predicable" "yes")]
4108)
4109
4110(define_insn "extzv_t2"
4111  [(set (match_operand:SI 0 "s_register_operand" "=r")
4112	(zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4113                         (match_operand:SI 2 "const_int_operand" "M")
4114                         (match_operand:SI 3 "const_int_operand" "M")))]
4115  "arm_arch_thumb2"
4116  "ubfx%?\t%0, %1, %3, %2"
4117  [(set_attr "length" "4")
4118   (set_attr "predicable" "yes")]
4119)
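;; Note on operand order: in "sbfx/ubfx Rd, Rn, #lsb, #width" the lsb comes
;; before the width, which is why the templates above print operand 3 (the
;; bit position) ahead of operand 2 (the field width).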
4120
4121
4122;; Division instructions
4123(define_insn "divsi3"
4124  [(set (match_operand:SI	  0 "s_register_operand" "=r")
4125	(div:SI (match_operand:SI 1 "s_register_operand"  "r")
4126		(match_operand:SI 2 "s_register_operand"  "r")))]
4127  "TARGET_IDIV"
4128  "sdiv%?\t%0, %1, %2"
4129  [(set_attr "predicable" "yes")
4130   (set_attr "insn" "sdiv")]
4131)
4132
4133(define_insn "udivsi3"
4134  [(set (match_operand:SI	   0 "s_register_operand" "=r")
4135	(udiv:SI (match_operand:SI 1 "s_register_operand"  "r")
4136		 (match_operand:SI 2 "s_register_operand"  "r")))]
4137  "TARGET_IDIV"
4138  "udiv%?\t%0, %1, %2"
4139  [(set_attr "predicable" "yes")
4140   (set_attr "insn" "udiv")]
4141)
4142
4143
4144;; Unary arithmetic insns
4145
4146(define_expand "negdi2"
4147 [(parallel
4148   [(set (match_operand:DI 0 "s_register_operand" "")
4149	 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
4150    (clobber (reg:CC CC_REGNUM))])]
4151  "TARGET_EITHER"
4152  {
4153    if (TARGET_NEON)
4154      {
4155        emit_insn (gen_negdi2_neon (operands[0], operands[1]));
4156	DONE;
4157      }
4158  }
4159)
4160
4161;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
4162;; The first alternative allows the common case of a *full* overlap.
4163(define_insn "*arm_negdi2"
4164  [(set (match_operand:DI         0 "s_register_operand" "=r,&r")
4165	(neg:DI (match_operand:DI 1 "s_register_operand"  "0,r")))
4166   (clobber (reg:CC CC_REGNUM))]
4167  "TARGET_ARM"
4168  "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
4169  [(set_attr "conds" "clob")
4170   (set_attr "length" "8")]
4171)
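;; This is the usual two-word subtraction from zero: RSBS computes 0 - low
;; and sets the borrow, and RSC then computes 0 - high - !carry for the upper
;; word.  The Thumb-1 variant below does the same with MOV #0, NEG and SBC.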
4172
4173(define_insn "*thumb1_negdi2"
4174  [(set (match_operand:DI 0 "register_operand" "=&l")
4175	(neg:DI (match_operand:DI 1 "register_operand" "l")))
4176   (clobber (reg:CC CC_REGNUM))]
4177  "TARGET_THUMB1"
4178  "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
4179  [(set_attr "length" "6")]
4180)
4181
4182(define_expand "negsi2"
4183  [(set (match_operand:SI         0 "s_register_operand" "")
4184	(neg:SI (match_operand:SI 1 "s_register_operand" "")))]
4185  "TARGET_EITHER"
4186  ""
4187)
4188
4189(define_insn "*arm_negsi2"
4190  [(set (match_operand:SI         0 "s_register_operand" "=r")
4191	(neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
4192  "TARGET_32BIT"
4193  "rsb%?\\t%0, %1, #0"
4194  [(set_attr "predicable" "yes")]
4195)
4196
4197(define_insn "*thumb1_negsi2"
4198  [(set (match_operand:SI         0 "register_operand" "=l")
4199	(neg:SI (match_operand:SI 1 "register_operand" "l")))]
4200  "TARGET_THUMB1"
4201  "neg\\t%0, %1"
4202  [(set_attr "length" "2")]
4203)
4204
4205(define_expand "negsf2"
4206  [(set (match_operand:SF         0 "s_register_operand" "")
4207	(neg:SF (match_operand:SF 1 "s_register_operand" "")))]
4208  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
4209  ""
4210)
4211
4212(define_expand "negdf2"
4213  [(set (match_operand:DF         0 "s_register_operand" "")
4214	(neg:DF (match_operand:DF 1 "s_register_operand" "")))]
4215  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4216  "")
4217
4218;; abssi2 doesn't really clobber the condition codes if a different register
4219;; is being set.  To keep things simple, assume during rtl manipulations that
4220;; it does, but tell the final scan operator the truth.  Similarly for
4221;; (neg (abs...))
4222
4223(define_expand "abssi2"
4224  [(parallel
4225    [(set (match_operand:SI         0 "s_register_operand" "")
4226	  (abs:SI (match_operand:SI 1 "s_register_operand" "")))
4227     (clobber (match_dup 2))])]
4228  "TARGET_EITHER"
4229  "
4230  if (TARGET_THUMB1)
4231    operands[2] = gen_rtx_SCRATCH (SImode);
4232  else
4233    operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
4234")
4235
4236(define_insn "*arm_abssi2"
4237  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4238	(abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4239   (clobber (reg:CC CC_REGNUM))]
4240  "TARGET_ARM"
4241  "@
4242   cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4243   eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
4244  [(set_attr "conds" "clob,*")
4245   (set_attr "shift" "1")
4246   (set_attr "predicable" "no, yes")
4247   (set_attr "length" "8")]
4248)
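;; The second (predicable) alternative uses the identity
;; |x| == (x ^ (x >> 31)) - (x >> 31) for 32-bit two's-complement values,
;; where "x, asr #31" is the sign mask (0 or -1); the Thumb-1 split below
;; uses the equivalent form (x + (x >> 31)) ^ (x >> 31).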
4249
4250(define_insn_and_split "*thumb1_abssi2"
4251  [(set (match_operand:SI 0 "s_register_operand" "=l")
4252	(abs:SI (match_operand:SI 1 "s_register_operand" "l")))
4253   (clobber (match_scratch:SI 2 "=&l"))]
4254  "TARGET_THUMB1"
4255  "#"
4256  "TARGET_THUMB1 && reload_completed"
4257  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4258   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
4259   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4260  ""
4261  [(set_attr "length" "6")]
4262)
4263
4264(define_insn "*arm_neg_abssi2"
4265  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4266	(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4267   (clobber (reg:CC CC_REGNUM))]
4268  "TARGET_ARM"
4269  "@
4270   cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4271   eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
4272  [(set_attr "conds" "clob,*")
4273   (set_attr "shift" "1")
4274   (set_attr "predicable" "no, yes")
4275   (set_attr "length" "8")]
4276)
4277
4278(define_insn_and_split "*thumb1_neg_abssi2"
4279  [(set (match_operand:SI 0 "s_register_operand" "=l")
4280	(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
4281   (clobber (match_scratch:SI 2 "=&l"))]
4282  "TARGET_THUMB1"
4283  "#"
4284  "TARGET_THUMB1 && reload_completed"
4285  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
4286   (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
4287   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
4288  ""
4289  [(set_attr "length" "6")]
4290)
4291
4292(define_expand "abssf2"
4293  [(set (match_operand:SF         0 "s_register_operand" "")
4294	(abs:SF (match_operand:SF 1 "s_register_operand" "")))]
4295  "TARGET_32BIT && TARGET_HARD_FLOAT"
4296  "")
4297
4298(define_expand "absdf2"
4299  [(set (match_operand:DF         0 "s_register_operand" "")
4300	(abs:DF (match_operand:DF 1 "s_register_operand" "")))]
4301  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4302  "")
4303
4304(define_expand "sqrtsf2"
4305  [(set (match_operand:SF 0 "s_register_operand" "")
4306	(sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
4307  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
4308  "")
4309
4310(define_expand "sqrtdf2"
4311  [(set (match_operand:DF 0 "s_register_operand" "")
4312	(sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
4313  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4314  "")
4315
4316(define_insn_and_split "one_cmpldi2"
4317  [(set (match_operand:DI 0 "s_register_operand"	 "=w,&r,&r,?w")
4318	(not:DI (match_operand:DI 1 "s_register_operand" " w, 0, r, w")))]
4319  "TARGET_32BIT"
4320  "@
4321   vmvn\t%P0, %P1
4322   #
4323   #
4324   vmvn\t%P0, %P1"
4325  "TARGET_32BIT && reload_completed
4326   && arm_general_register_operand (operands[0], DImode)"
4327  [(set (match_dup 0) (not:SI (match_dup 1)))
4328   (set (match_dup 2) (not:SI (match_dup 3)))]
4329  "
4330  {
4331    operands[2] = gen_highpart (SImode, operands[0]);
4332    operands[0] = gen_lowpart (SImode, operands[0]);
4333    operands[3] = gen_highpart (SImode, operands[1]);
4334    operands[1] = gen_lowpart (SImode, operands[1]);
4335  }"
4336  [(set_attr "length" "*,8,8,*")
4337   (set_attr "predicable" "no,yes,yes,no")
4338   (set_attr "neon_type" "neon_int_1,*,*,neon_int_1")
4339   (set_attr "arch" "neon_nota8,*,*,neon_onlya8")]
4340)
4341
4342(define_expand "one_cmplsi2"
4343  [(set (match_operand:SI         0 "s_register_operand" "")
4344	(not:SI (match_operand:SI 1 "s_register_operand" "")))]
4345  "TARGET_EITHER"
4346  ""
4347)
4348
4349(define_insn "*arm_one_cmplsi2"
4350  [(set (match_operand:SI         0 "s_register_operand" "=r")
4351	(not:SI (match_operand:SI 1 "s_register_operand"  "r")))]
4352  "TARGET_32BIT"
4353  "mvn%?\\t%0, %1"
4354  [(set_attr "predicable" "yes")
4355   (set_attr "insn" "mvn")]
4356)
4357
4358(define_insn "*thumb1_one_cmplsi2"
4359  [(set (match_operand:SI         0 "register_operand" "=l")
4360	(not:SI (match_operand:SI 1 "register_operand"  "l")))]
4361  "TARGET_THUMB1"
4362  "mvn\\t%0, %1"
4363  [(set_attr "length" "2")
4364   (set_attr "insn" "mvn")]
4365)
4366
4367(define_insn "*notsi_compare0"
4368  [(set (reg:CC_NOOV CC_REGNUM)
4369	(compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4370			 (const_int 0)))
4371   (set (match_operand:SI 0 "s_register_operand" "=r")
4372	(not:SI (match_dup 1)))]
4373  "TARGET_32BIT"
4374  "mvn%.\\t%0, %1"
4375  [(set_attr "conds" "set")
4376   (set_attr "insn" "mvn")]
4377)
4378
4379(define_insn "*notsi_compare0_scratch"
4380  [(set (reg:CC_NOOV CC_REGNUM)
4381	(compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4382			 (const_int 0)))
4383   (clobber (match_scratch:SI 0 "=r"))]
4384  "TARGET_32BIT"
4385  "mvn%.\\t%0, %1"
4386  [(set_attr "conds" "set")
4387   (set_attr "insn" "mvn")]
4388)
4389
4390;; Fixed <--> Floating conversion insns
4391
4392(define_expand "floatsihf2"
4393  [(set (match_operand:HF           0 "general_operand" "")
4394	(float:HF (match_operand:SI 1 "general_operand" "")))]
4395  "TARGET_EITHER"
4396  "
4397  {
4398    rtx op1 = gen_reg_rtx (SFmode);
4399    expand_float (op1, operands[1], 0);
4400    op1 = convert_to_mode (HFmode, op1, 0);
4401    emit_move_insn (operands[0], op1);
4402    DONE;
4403  }"
4404)
4405
4406(define_expand "floatdihf2"
4407  [(set (match_operand:HF           0 "general_operand" "")
4408	(float:HF (match_operand:DI 1 "general_operand" "")))]
4409  "TARGET_EITHER"
4410  "
4411  {
4412    rtx op1 = gen_reg_rtx (SFmode);
4413    expand_float (op1, operands[1], 0);
4414    op1 = convert_to_mode (HFmode, op1, 0);
4415    emit_move_insn (operands[0], op1);
4416    DONE;
4417  }"
4418)
4419
4420(define_expand "floatsisf2"
4421  [(set (match_operand:SF           0 "s_register_operand" "")
4422	(float:SF (match_operand:SI 1 "s_register_operand" "")))]
4423  "TARGET_32BIT && TARGET_HARD_FLOAT"
4424  "
4425")
4426
4427(define_expand "floatsidf2"
4428  [(set (match_operand:DF           0 "s_register_operand" "")
4429	(float:DF (match_operand:SI 1 "s_register_operand" "")))]
4430  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4431  "
4432")
4433
4434(define_expand "fix_trunchfsi2"
4435  [(set (match_operand:SI         0 "general_operand" "")
4436	(fix:SI (fix:HF (match_operand:HF 1 "general_operand"  ""))))]
4437  "TARGET_EITHER"
4438  "
4439  {
4440    rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4441    expand_fix (operands[0], op1, 0);
4442    DONE;
4443  }"
4444)
4445
4446(define_expand "fix_trunchfdi2"
4447  [(set (match_operand:DI         0 "general_operand" "")
4448	(fix:DI (fix:HF (match_operand:HF 1 "general_operand"  ""))))]
4449  "TARGET_EITHER"
4450  "
4451  {
4452    rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4453    expand_fix (operands[0], op1, 0);
4454    DONE;
4455  }"
4456)
4457
4458(define_expand "fix_truncsfsi2"
4459  [(set (match_operand:SI         0 "s_register_operand" "")
4460	(fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"  ""))))]
4461  "TARGET_32BIT && TARGET_HARD_FLOAT"
4462  "
4463")
4464
4465(define_expand "fix_truncdfsi2"
4466  [(set (match_operand:SI         0 "s_register_operand" "")
4467	(fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"  ""))))]
4468  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4469  "
4470")
4471
4472;; Truncation insns
4473
4474(define_expand "truncdfsf2"
4475  [(set (match_operand:SF  0 "s_register_operand" "")
4476	(float_truncate:SF
4477 	 (match_operand:DF 1 "s_register_operand" "")))]
4478  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4479  ""
4480)
4481
4482/* DFmode -> HFmode conversions have to go through SFmode.  */
4483(define_expand "truncdfhf2"
4484  [(set (match_operand:HF  0 "general_operand" "")
4485	(float_truncate:HF
4486 	 (match_operand:DF 1 "general_operand" "")))]
4487  "TARGET_EITHER"
4488  "
4489  {
4490    rtx op1;
4491    op1 = convert_to_mode (SFmode, operands[1], 0);
4492    op1 = convert_to_mode (HFmode, op1, 0);
4493    emit_move_insn (operands[0], op1);
4494    DONE;
4495  }"
4496)
4497
4498;; Zero and sign extension instructions.
4499
4500(define_insn "zero_extend<mode>di2"
4501  [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,w")
4502        (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>"
4503					    "<qhs_zextenddi_cstr>")))]
4504  "TARGET_32BIT <qhs_zextenddi_cond>"
4505  "#"
4506  [(set_attr "length" "8,4,8,8")
4507   (set_attr "arch" "neon_nota8,*,*,neon_onlya8")
4508   (set_attr "ce_count" "2")
4509   (set_attr "predicable" "yes")]
4510)
4511
4512(define_insn "extend<mode>di2"
4513  [(set (match_operand:DI 0 "s_register_operand" "=w,r,?r,?r,w")
4514        (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>"
4515					    "<qhs_extenddi_cstr>")))]
4516  "TARGET_32BIT <qhs_sextenddi_cond>"
4517  "#"
4518  [(set_attr "length" "8,4,8,8,8")
4519   (set_attr "ce_count" "2")
4520   (set_attr "shift" "1")
4521   (set_attr "predicable" "yes")
4522   (set_attr "arch" "neon_nota8,*,a,t,neon_onlya8")]
4523)
4524
4525;; Splits for all extensions to DImode
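;; After reload these become two SImode operations: the low word receives the
;; (possibly widened) source value, and the high word is then set to zero for
;; zero_extend, or to a copy of the sign bit (low >> 31) for sign_extend.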
4526(define_split
4527  [(set (match_operand:DI 0 "s_register_operand" "")
4528        (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4529  "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
4530  [(set (match_dup 0) (match_dup 1))]
4531{
4532  rtx lo_part = gen_lowpart (SImode, operands[0]);
4533  enum machine_mode src_mode = GET_MODE (operands[1]);
4534
4535  if (REG_P (operands[0])
4536      && !reg_overlap_mentioned_p (operands[0], operands[1]))
4537    emit_clobber (operands[0]);
4538  if (!REG_P (lo_part) || src_mode != SImode
4539      || !rtx_equal_p (lo_part, operands[1]))
4540    {
4541      if (src_mode == SImode)
4542        emit_move_insn (lo_part, operands[1]);
4543      else
4544        emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4545				gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4546      operands[1] = lo_part;
4547    }
4548  operands[0] = gen_highpart (SImode, operands[0]);
4549  operands[1] = const0_rtx;
4550})
4551
4552(define_split
4553  [(set (match_operand:DI 0 "s_register_operand" "")
4554        (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
4555  "TARGET_32BIT && reload_completed && !IS_VFP_REGNUM (REGNO (operands[0]))"
4556  [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
4557{
4558  rtx lo_part = gen_lowpart (SImode, operands[0]);
4559  enum machine_mode src_mode = GET_MODE (operands[1]);
4560
4561  if (REG_P (operands[0])
4562      && !reg_overlap_mentioned_p (operands[0], operands[1]))
4563    emit_clobber (operands[0]);
4564
4565  if (!REG_P (lo_part) || src_mode != SImode
4566      || !rtx_equal_p (lo_part, operands[1]))
4567    {
4568      if (src_mode == SImode)
4569        emit_move_insn (lo_part, operands[1]);
4570      else
4571        emit_insn (gen_rtx_SET (VOIDmode, lo_part,
4572				gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4573      operands[1] = lo_part;
4574    }
4575  operands[0] = gen_highpart (SImode, operands[0]);
4576})
4577
4578(define_expand "zero_extendhisi2"
4579  [(set (match_operand:SI 0 "s_register_operand" "")
4580	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4581  "TARGET_EITHER"
4582{
4583  if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
4584    {
4585      emit_insn (gen_movhi_bytes (operands[0], operands[1]));
4586      DONE;
4587    }
4588  if (!arm_arch6 && !MEM_P (operands[1]))
4589    {
4590      rtx t = gen_lowpart (SImode, operands[1]);
4591      rtx tmp = gen_reg_rtx (SImode);
4592      emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4593      emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
4594      DONE;
4595    }
4596})
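;; Pre-ARMv6 cores have no register-to-register HImode zero-extend, so the
;; expander above (and the split below) uses a shift pair, roughly
;; "mov tmp, rX, lsl #16" then "mov rD, tmp, lsr #16", which clears the upper
;; sixteen bits.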
4597
4598(define_split
4599  [(set (match_operand:SI 0 "s_register_operand" "")
4600	(zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
4601  "!TARGET_THUMB2 && !arm_arch6"
4602  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4603   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
4604{
4605  operands[2] = gen_lowpart (SImode, operands[1]);
4606})
4607
4608(define_insn "*thumb1_zero_extendhisi2"
4609  [(set (match_operand:SI 0 "register_operand" "=l,l")
4610	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4611  "TARGET_THUMB1"
4612{
4613  rtx mem;
4614
4615  if (which_alternative == 0 && arm_arch6)
4616    return "uxth\t%0, %1";
4617  if (which_alternative == 0)
4618    return "#";
4619
4620  mem = XEXP (operands[1], 0);
4621
4622  if (GET_CODE (mem) == CONST)
4623    mem = XEXP (mem, 0);
4624
4625  if (GET_CODE (mem) == PLUS)
4626    {
4627      rtx a = XEXP (mem, 0);
4628
4629      /* This can happen due to bugs in reload.  */
4630      if (REG_P (a) && REGNO (a) == SP_REGNUM)
4631        {
4632          rtx ops[2];
4633          ops[0] = operands[0];
4634          ops[1] = a;
4635
4636          output_asm_insn ("mov\t%0, %1", ops);
4637
4638          XEXP (mem, 0) = operands[0];
4639       }
4640    }
4641
4642  return "ldrh\t%0, %1";
4643}
4644  [(set_attr_alternative "length"
4645			 [(if_then_else (eq_attr "is_arch6" "yes")
4646				       (const_int 2) (const_int 4))
4647			 (const_int 4)])
4648   (set_attr "type" "simple_alu_shift, load_byte")]
4649)
4650
4651(define_insn "*arm_zero_extendhisi2"
4652  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4653	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4654  "TARGET_ARM && arm_arch4 && !arm_arch6"
4655  "@
4656   #
4657   ldr%(h%)\\t%0, %1"
4658  [(set_attr "type" "alu_shift,load_byte")
4659   (set_attr "predicable" "yes")]
4660)
4661
4662(define_insn "*arm_zero_extendhisi2_v6"
4663  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4664	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4665  "TARGET_ARM && arm_arch6"
4666  "@
4667   uxth%?\\t%0, %1
4668   ldr%(h%)\\t%0, %1"
4669  [(set_attr "predicable" "yes")
4670   (set_attr "type" "simple_alu_shift,load_byte")]
4671)
4672
4673(define_insn "*arm_zero_extendhisi2addsi"
4674  [(set (match_operand:SI 0 "s_register_operand" "=r")
4675	(plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4676		 (match_operand:SI 2 "s_register_operand" "r")))]
4677  "TARGET_INT_SIMD"
4678  "uxtah%?\\t%0, %2, %1"
4679  [(set_attr "type" "alu_shift")
4680   (set_attr "predicable" "yes")]
4681)
4682
4683(define_expand "zero_extendqisi2"
4684  [(set (match_operand:SI 0 "s_register_operand" "")
4685	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4686  "TARGET_EITHER"
4687{
4688  if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
4689    {
4690      emit_insn (gen_andsi3 (operands[0],
4691			     gen_lowpart (SImode, operands[1]),
4692					  GEN_INT (255)));
4693      DONE;
4694    }
4695  if (!arm_arch6 && !MEM_P (operands[1]))
4696    {
4697      rtx t = gen_lowpart (SImode, operands[1]);
4698      rtx tmp = gen_reg_rtx (SImode);
4699      emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
4700      emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
4701      DONE;
4702    }
4703})
4704
4705(define_split
4706  [(set (match_operand:SI 0 "s_register_operand" "")
4707	(zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
4708  "!arm_arch6"
4709  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
4710   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
4711{
4712  operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
4713  if (TARGET_ARM)
4714    {
4715      emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
4716      DONE;
4717    }
4718})
4719
4720(define_insn "*thumb1_zero_extendqisi2"
4721  [(set (match_operand:SI 0 "register_operand" "=l,l")
4722	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4723  "TARGET_THUMB1 && !arm_arch6"
4724  "@
4725   #
4726   ldrb\\t%0, %1"
4727  [(set_attr "length" "4,2")
4728   (set_attr "type" "alu_shift,load_byte")
4729   (set_attr "pool_range" "*,32")]
4730)
4731
4732(define_insn "*thumb1_zero_extendqisi2_v6"
4733  [(set (match_operand:SI 0 "register_operand" "=l,l")
4734	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4735  "TARGET_THUMB1 && arm_arch6"
4736  "@
4737   uxtb\\t%0, %1
4738   ldrb\\t%0, %1"
4739  [(set_attr "length" "2")
4740   (set_attr "type" "simple_alu_shift,load_byte")]
4741)
4742
4743(define_insn "*arm_zero_extendqisi2"
4744  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4745	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4746  "TARGET_ARM && !arm_arch6"
4747  "@
4748   #
4749   ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4750  [(set_attr "length" "8,4")
4751   (set_attr "type" "alu_shift,load_byte")
4752   (set_attr "predicable" "yes")]
4753)
4754
4755(define_insn "*arm_zero_extendqisi2_v6"
4756  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4757	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
4758  "TARGET_ARM && arm_arch6"
4759  "@
4760   uxtb%(%)\\t%0, %1
4761   ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4762  [(set_attr "type" "simple_alu_shift,load_byte")
4763   (set_attr "predicable" "yes")]
4764)
4765
4766(define_insn "*arm_zero_extendqisi2addsi"
4767  [(set (match_operand:SI 0 "s_register_operand" "=r")
4768	(plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4769		 (match_operand:SI 2 "s_register_operand" "r")))]
4770  "TARGET_INT_SIMD"
4771  "uxtab%?\\t%0, %2, %1"
4772  [(set_attr "predicable" "yes")
4773   (set_attr "insn" "xtab")
4774   (set_attr "type" "alu_shift")]
4775)
4776
4777(define_split
4778  [(set (match_operand:SI 0 "s_register_operand" "")
4779	(zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4780   (clobber (match_operand:SI 2 "s_register_operand" ""))]
4781  "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
4782  [(set (match_dup 2) (match_dup 1))
4783   (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4784  ""
4785)
4786
4787(define_split
4788  [(set (match_operand:SI 0 "s_register_operand" "")
4789	(zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4790   (clobber (match_operand:SI 2 "s_register_operand" ""))]
4791  "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
4792  [(set (match_dup 2) (match_dup 1))
4793   (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4794  ""
4795)
4796
4797
4798(define_split
4799  [(set (match_operand:SI 0 "s_register_operand" "")
4800	(ior_xor:SI (and:SI (ashift:SI
4801			     (match_operand:SI 1 "s_register_operand" "")
4802			     (match_operand:SI 2 "const_int_operand" ""))
4803			    (match_operand:SI 3 "const_int_operand" ""))
4804		    (zero_extend:SI
4805		     (match_operator 5 "subreg_lowpart_operator"
4806		      [(match_operand:SI 4 "s_register_operand" "")]))))]
4807  "TARGET_32BIT
4808   && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4809       == (GET_MODE_MASK (GET_MODE (operands[5]))
4810           & (GET_MODE_MASK (GET_MODE (operands[5]))
4811	      << (INTVAL (operands[2])))))"
4812  [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4813				  (match_dup 4)))
4814   (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4815  "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4816)
4817
4818(define_insn "*compareqi_eq0"
4819  [(set (reg:CC_Z CC_REGNUM)
4820	(compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4821			 (const_int 0)))]
4822  "TARGET_32BIT"
4823  "tst%?\\t%0, #255"
4824  [(set_attr "conds" "set")
4825   (set_attr "predicable" "yes")]
4826)
4827
4828(define_expand "extendhisi2"
4829  [(set (match_operand:SI 0 "s_register_operand" "")
4830	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
4831  "TARGET_EITHER"
4832{
4833  if (TARGET_THUMB1)
4834    {
4835      emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4836      DONE;
4837    }
4838  if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
4839    {
4840      emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4841      DONE;
4842    }
4843
4844  if (!arm_arch6 && !MEM_P (operands[1]))
4845    {
4846      rtx t = gen_lowpart (SImode, operands[1]);
4847      rtx tmp = gen_reg_rtx (SImode);
4848      emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
4849      emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
4850      DONE;
4851    }
4852})
4853
4854(define_split
4855  [(parallel
4856    [(set (match_operand:SI 0 "register_operand" "")
4857	  (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
4858     (clobber (match_scratch:SI 2 ""))])]
4859  "!arm_arch6"
4860  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4861   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4862{
4863  operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4864})
4865
4866;; We used to have an early-clobber on the scratch register here.
4867;; However, there's a bug somewhere in reload which means that this
4868;; can be partially ignored during spill allocation if the memory
4869;; address also needs reloading; this causes us to die later on when
4870;; we try to verify the operands.  Fortunately, we don't really need
4871;; the early-clobber: we can always use operand 0 if operand 2
4872;; overlaps the address.
4873(define_insn "thumb1_extendhisi2"
4874  [(set (match_operand:SI 0 "register_operand" "=l,l")
4875	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4876   (clobber (match_scratch:SI 2 "=X,l"))]
4877  "TARGET_THUMB1"
4878  "*
4879  {
4880    rtx ops[4];
4881    rtx mem;
4882
4883    if (which_alternative == 0 && !arm_arch6)
4884      return \"#\";
4885    if (which_alternative == 0)
4886      return \"sxth\\t%0, %1\";
4887
4888    mem = XEXP (operands[1], 0);
4889
4890    /* This code used to try to use 'V', and fix the address only if it was
4891       offsettable, but this fails for e.g. REG+48 because 48 is outside the
4892       range of QImode offsets, and offsettable_address_p does a QImode
4893       address check.  */
4894
4895    if (GET_CODE (mem) == CONST)
4896      mem = XEXP (mem, 0);
4897
4898    if (GET_CODE (mem) == LABEL_REF)
4899      return \"ldr\\t%0, %1\";
4900
4901    if (GET_CODE (mem) == PLUS)
4902      {
4903        rtx a = XEXP (mem, 0);
4904        rtx b = XEXP (mem, 1);
4905
4906        if (GET_CODE (a) == LABEL_REF
4907	    && CONST_INT_P (b))
4908          return \"ldr\\t%0, %1\";
4909
4910        if (REG_P (b))
4911          return \"ldrsh\\t%0, %1\";
4912
4913        ops[1] = a;
4914        ops[2] = b;
4915      }
4916    else
4917      {
4918        ops[1] = mem;
4919        ops[2] = const0_rtx;
4920      }
4921
4922    gcc_assert (REG_P (ops[1]));
4923
4924    ops[0] = operands[0];
4925    if (reg_mentioned_p (operands[2], ops[1]))
4926      ops[3] = ops[0];
4927    else
4928      ops[3] = operands[2];
4929    output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4930    return \"\";
4931  }"
4932  [(set_attr_alternative "length"
4933			 [(if_then_else (eq_attr "is_arch6" "yes")
4934					(const_int 2) (const_int 4))
4935			  (const_int 4)])
4936   (set_attr "type" "simple_alu_shift,load_byte")
4937   (set_attr "pool_range" "*,1018")]
4938)
4939
;; This pattern will only be used when ldrsh is not available
4941(define_expand "extendhisi2_mem"
4942  [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4943   (set (match_dup 3)
4944	(zero_extend:SI (match_dup 7)))
4945   (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4946   (set (match_operand:SI 0 "" "")
4947	(ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4948  "TARGET_ARM"
4949  "
4950  {
4951    rtx mem1, mem2;
4952    rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4953
4954    mem1 = change_address (operands[1], QImode, addr);
4955    mem2 = change_address (operands[1], QImode,
4956			   plus_constant (Pmode, addr, 1));
4957    operands[0] = gen_lowpart (SImode, operands[0]);
4958    operands[1] = mem1;
4959    operands[2] = gen_reg_rtx (SImode);
4960    operands[3] = gen_reg_rtx (SImode);
4961    operands[6] = gen_reg_rtx (SImode);
4962    operands[7] = mem2;
4963
4964    if (BYTES_BIG_ENDIAN)
4965      {
4966	operands[4] = operands[2];
4967	operands[5] = operands[3];
4968      }
4969    else
4970      {
4971	operands[4] = operands[3];
4972	operands[5] = operands[2];
4973      }
4974  }"
4975)
4976
4977(define_split
4978  [(set (match_operand:SI 0 "register_operand" "")
4979	(sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
4980  "!arm_arch6"
4981  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
4982   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
4983{
4984  operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
4985})
4986
4987(define_insn "*arm_extendhisi2"
4988  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4989	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
4990  "TARGET_ARM && arm_arch4 && !arm_arch6"
4991  "@
4992   #
4993   ldr%(sh%)\\t%0, %1"
4994  [(set_attr "length" "8,4")
4995   (set_attr "type" "alu_shift,load_byte")
4996   (set_attr "predicable" "yes")]
4997)
4998
4999;; ??? Check Thumb-2 pool range
5000(define_insn "*arm_extendhisi2_v6"
5001  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5002	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5003  "TARGET_32BIT && arm_arch6"
5004  "@
5005   sxth%?\\t%0, %1
5006   ldr%(sh%)\\t%0, %1"
5007  [(set_attr "type" "simple_alu_shift,load_byte")
5008   (set_attr "predicable" "yes")]
5009)
5010
5011(define_insn "*arm_extendhisi2addsi"
5012  [(set (match_operand:SI 0 "s_register_operand" "=r")
5013	(plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5014		 (match_operand:SI 2 "s_register_operand" "r")))]
5015  "TARGET_INT_SIMD"
5016  "sxtah%?\\t%0, %2, %1"
5017)
5018
5019(define_expand "extendqihi2"
5020  [(set (match_dup 2)
5021	(ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
5022		   (const_int 24)))
5023   (set (match_operand:HI 0 "s_register_operand" "")
5024	(ashiftrt:SI (match_dup 2)
5025		     (const_int 24)))]
5026  "TARGET_ARM"
5027  "
5028  {
5029    if (arm_arch4 && MEM_P (operands[1]))
5030      {
5031	emit_insn (gen_rtx_SET (VOIDmode,
5032				operands[0],
5033				gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5034	DONE;
5035      }
5036    if (!s_register_operand (operands[1], QImode))
5037      operands[1] = copy_to_mode_reg (QImode, operands[1]);
5038    operands[0] = gen_lowpart (SImode, operands[0]);
5039    operands[1] = gen_lowpart (SImode, operands[1]);
5040    operands[2] = gen_reg_rtx (SImode);
5041  }"
5042)
5043
5044(define_insn "*arm_extendqihi_insn"
5045  [(set (match_operand:HI 0 "s_register_operand" "=r")
5046	(sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5047  "TARGET_ARM && arm_arch4"
5048  "ldr%(sb%)\\t%0, %1"
5049  [(set_attr "type" "load_byte")
5050   (set_attr "predicable" "yes")]
5051)
5052
5053(define_expand "extendqisi2"
5054  [(set (match_operand:SI 0 "s_register_operand" "")
5055	(sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")))]
5056  "TARGET_EITHER"
5057{
5058  if (!arm_arch4 && MEM_P (operands[1]))
5059    operands[1] = copy_to_mode_reg (QImode, operands[1]);
5060
5061  if (!arm_arch6 && !MEM_P (operands[1]))
5062    {
5063      rtx t = gen_lowpart (SImode, operands[1]);
5064      rtx tmp = gen_reg_rtx (SImode);
5065      emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5066      emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5067      DONE;
5068    }
5069})
5070
5071(define_split
5072  [(set (match_operand:SI 0 "register_operand" "")
5073	(sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5074  "!arm_arch6"
5075  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5076   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5077{
5078  operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5079})
5080
5081(define_insn "*arm_extendqisi"
5082  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5083	(sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5084  "TARGET_ARM && arm_arch4 && !arm_arch6"
5085  "@
5086   #
5087   ldr%(sb%)\\t%0, %1"
5088  [(set_attr "length" "8,4")
5089   (set_attr "type" "alu_shift,load_byte")
5090   (set_attr "predicable" "yes")]
5091)
5092
5093(define_insn "*arm_extendqisi_v6"
5094  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5095	(sign_extend:SI
5096	 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5097  "TARGET_ARM && arm_arch6"
5098  "@
5099   sxtb%?\\t%0, %1
5100   ldr%(sb%)\\t%0, %1"
5101  [(set_attr "type" "simple_alu_shift,load_byte")
5102   (set_attr "predicable" "yes")]
5103)
5104
5105(define_insn "*arm_extendqisi2addsi"
5106  [(set (match_operand:SI 0 "s_register_operand" "=r")
5107	(plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5108		 (match_operand:SI 2 "s_register_operand" "r")))]
5109  "TARGET_INT_SIMD"
5110  "sxtab%?\\t%0, %2, %1"
5111  [(set_attr "type" "alu_shift")
5112   (set_attr "insn" "xtab")
5113   (set_attr "predicable" "yes")]
5114)
5115
5116(define_split
5117  [(set (match_operand:SI 0 "register_operand" "")
5118	(sign_extend:SI (match_operand:QI 1 "memory_operand" "")))]
5119  "TARGET_THUMB1 && reload_completed"
5120  [(set (match_dup 0) (match_dup 2))
5121   (set (match_dup 0) (sign_extend:SI (match_dup 3)))]
5122{
5123  rtx addr = XEXP (operands[1], 0);
5124
5125  if (GET_CODE (addr) == CONST)
5126    addr = XEXP (addr, 0);
5127
5128  if (GET_CODE (addr) == PLUS
5129      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5130    /* No split necessary.  */
5131    FAIL;
5132
5133  if (GET_CODE (addr) == PLUS
5134      && !REG_P (XEXP (addr, 0)) && !REG_P (XEXP (addr, 1)))
5135    FAIL;
5136
5137  if (reg_overlap_mentioned_p (operands[0], addr))
5138    {
5139      rtx t = gen_lowpart (QImode, operands[0]);
5140      emit_move_insn (t, operands[1]);
5141      emit_insn (gen_thumb1_extendqisi2 (operands[0], t));
5142      DONE;
5143    }
5144
5145  if (REG_P (addr))
5146    {
5147      addr = gen_rtx_PLUS (Pmode, addr, operands[0]);
5148      operands[2] = const0_rtx;
5149    }
5150  else if (GET_CODE (addr) != PLUS)
5151    FAIL;
5152  else if (REG_P (XEXP (addr, 0)))
5153    {
5154      operands[2] = XEXP (addr, 1);
5155      addr = gen_rtx_PLUS (Pmode, XEXP (addr, 0), operands[0]);
5156    }
5157  else
5158    {
5159      operands[2] = XEXP (addr, 0);
5160      addr = gen_rtx_PLUS (Pmode, XEXP (addr, 1), operands[0]);
5161    }
5162
5163  operands[3] = change_address (operands[1], QImode, addr);
5164})
5165
5166(define_peephole2
5167  [(set (match_operand:SI 0 "register_operand" "")
5168	(plus:SI (match_dup 0) (match_operand 1 "const_int_operand")))
5169   (set (match_operand:SI 2 "register_operand" "") (const_int 0))
5170   (set (match_operand:SI 3 "register_operand" "")
5171	(sign_extend:SI (match_operand:QI 4 "memory_operand" "")))]
5172  "TARGET_THUMB1
5173   && GET_CODE (XEXP (operands[4], 0)) == PLUS
5174   && rtx_equal_p (operands[0], XEXP (XEXP (operands[4], 0), 0))
5175   && rtx_equal_p (operands[2], XEXP (XEXP (operands[4], 0), 1))
5176   && (peep2_reg_dead_p (3, operands[0])
5177       || rtx_equal_p (operands[0], operands[3]))
5178   && (peep2_reg_dead_p (3, operands[2])
5179       || rtx_equal_p (operands[2], operands[3]))"
5180  [(set (match_dup 2) (match_dup 1))
5181   (set (match_dup 3) (sign_extend:SI (match_dup 4)))]
5182{
5183  rtx addr = gen_rtx_PLUS (Pmode, operands[0], operands[2]);
5184  operands[4] = change_address (operands[4], QImode, addr);
5185})
5186
5187(define_insn "thumb1_extendqisi2"
5188  [(set (match_operand:SI 0 "register_operand" "=l,l,l")
5189	(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
5190  "TARGET_THUMB1"
5191{
5192  rtx addr;
5193
5194  if (which_alternative == 0 && arm_arch6)
5195    return "sxtb\\t%0, %1";
5196  if (which_alternative == 0)
5197    return "#";
5198
5199  addr = XEXP (operands[1], 0);
5200  if (GET_CODE (addr) == PLUS
5201      && REG_P (XEXP (addr, 0)) && REG_P (XEXP (addr, 1)))
5202    return "ldrsb\\t%0, %1";
5203
5204  return "#";
5205}
5206  [(set_attr_alternative "length"
5207			 [(if_then_else (eq_attr "is_arch6" "yes")
5208					(const_int 2) (const_int 4))
5209			  (const_int 2)
5210			  (if_then_else (eq_attr "is_arch6" "yes")
5211					(const_int 4) (const_int 6))])
5212   (set_attr "type" "simple_alu_shift,load_byte,load_byte")]
5213)
5214
5215(define_expand "extendsfdf2"
5216  [(set (match_operand:DF                  0 "s_register_operand" "")
5217	(float_extend:DF (match_operand:SF 1 "s_register_operand"  "")))]
5218  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5219  ""
5220)
5221
5222/* HFmode -> DFmode conversions have to go through SFmode.  */
5223(define_expand "extendhfdf2"
5224  [(set (match_operand:DF                  0 "general_operand" "")
5225	(float_extend:DF (match_operand:HF 1 "general_operand"  "")))]
5226  "TARGET_EITHER"
5227  "
5228  {
5229    rtx op1;
5230    op1 = convert_to_mode (SFmode, operands[1], 0);
5231    op1 = convert_to_mode (DFmode, op1, 0);
5232    emit_insn (gen_movdf (operands[0], op1));
5233    DONE;
5234  }"
5235)
5236
5237;; Move insns (including loads and stores)
5238
;; XXX Just some ideas about movti.
;; I don't think these are a good idea on the ARM; there just aren't
;; enough registers.
5242;;(define_expand "loadti"
5243;;  [(set (match_operand:TI 0 "s_register_operand" "")
5244;;	(mem:TI (match_operand:SI 1 "address_operand" "")))]
5245;;  "" "")
5246
5247;;(define_expand "storeti"
5248;;  [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
5249;;	(match_operand:TI 1 "s_register_operand" ""))]
5250;;  "" "")
5251
5252;;(define_expand "movti"
5253;;  [(set (match_operand:TI 0 "general_operand" "")
5254;;	(match_operand:TI 1 "general_operand" ""))]
5255;;  ""
5256;;  "
5257;;{
5258;;  rtx insn;
5259;;
5260;;  if (MEM_P (operands[0]) && MEM_P (operands[1]))
5261;;    operands[1] = copy_to_reg (operands[1]);
5262;;  if (MEM_P (operands[0]))
5263;;    insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5264;;  else if (MEM_P (operands[1]))
5265;;    insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5266;;  else
5267;;    FAIL;
5268;;
5269;;  emit_insn (insn);
5270;;  DONE;
5271;;}")
5272
5273;; Recognize garbage generated above.
5274
5275;;(define_insn ""
5276;;  [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5277;;	(match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5278;;  ""
5279;;  "*
5280;;  {
5281;;    register mem = (which_alternative < 3);
5282;;    register const char *template;
5283;;
5284;;    operands[mem] = XEXP (operands[mem], 0);
5285;;    switch (which_alternative)
5286;;      {
5287;;      case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5288;;      case 1: template = \"ldmia\\t%1!, %M0\"; break;
5289;;      case 2: template = \"ldmia\\t%1, %M0\"; break;
5290;;      case 3: template = \"stmdb\\t%0!, %M1\"; break;
5291;;      case 4: template = \"stmia\\t%0!, %M1\"; break;
5292;;      case 5: template = \"stmia\\t%0, %M1\"; break;
5293;;      }
5294;;    output_asm_insn (template, operands);
5295;;    return \"\";
5296;;  }")
5297
5298(define_expand "movdi"
5299  [(set (match_operand:DI 0 "general_operand" "")
5300	(match_operand:DI 1 "general_operand" ""))]
5301  "TARGET_EITHER"
5302  "
5303  if (can_create_pseudo_p ())
5304    {
5305      if (!REG_P (operands[0]))
5306	operands[1] = force_reg (DImode, operands[1]);
5307    }
5308  "
5309)
5310
5311(define_insn "*arm_movdi"
5312  [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
5313	(match_operand:DI 1 "di_operand"              "rDa,Db,Dc,mi,r"))]
5314  "TARGET_32BIT
5315   && !(TARGET_HARD_FLOAT && TARGET_VFP)
5316   && !TARGET_IWMMXT
5317   && (   register_operand (operands[0], DImode)
5318       || register_operand (operands[1], DImode))"
5319  "*
5320  switch (which_alternative)
5321    {
5322    case 0:
5323    case 1:
5324    case 2:
5325      return \"#\";
5326    default:
5327      return output_move_double (operands, true, NULL);
5328    }
5329  "
5330  [(set_attr "length" "8,12,16,8,8")
5331   (set_attr "type" "*,*,*,load2,store2")
5332   (set_attr "arm_pool_range" "*,*,*,1020,*")
5333   (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5334   (set_attr "thumb2_pool_range" "*,*,*,4094,*")
5335   (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
5336)
5337
5338(define_split
5339  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5340	(match_operand:ANY64 1 "const_double_operand" ""))]
5341  "TARGET_32BIT
5342   && reload_completed
5343   && (arm_const_double_inline_cost (operands[1])
5344       <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
5345  [(const_int 0)]
5346  "
5347  arm_split_constant (SET, SImode, curr_insn,
5348		      INTVAL (gen_lowpart (SImode, operands[1])),
5349		      gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5350  arm_split_constant (SET, SImode, curr_insn,
5351		      INTVAL (gen_highpart_mode (SImode,
5352						 GET_MODE (operands[0]),
5353						 operands[1])),
5354		      gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5355  DONE;
5356  "
5357)
5358
5359; If optimizing for size, or if we have load delay slots, then
5360; we want to split the constant into two separate operations.
5361; In both cases this may split a trivial part into a single data op
5362; leaving a single complex constant to load.  We can also get longer
5363; offsets in a LDR which means we get better chances of sharing the pool
5364; entries.  Finally, we can normally do a better job of scheduling
5365; LDR instructions than we can with LDM.
5366; This pattern will only match if the one above did not.
5367(define_split
5368  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5369	(match_operand:ANY64 1 "const_double_operand" ""))]
5370  "TARGET_ARM && reload_completed
5371   && arm_const_double_by_parts (operands[1])"
5372  [(set (match_dup 0) (match_dup 1))
5373   (set (match_dup 2) (match_dup 3))]
5374  "
5375  operands[2] = gen_highpart (SImode, operands[0]);
5376  operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5377				   operands[1]);
5378  operands[0] = gen_lowpart (SImode, operands[0]);
5379  operands[1] = gen_lowpart (SImode, operands[1]);
5380  "
5381)
5382
5383(define_split
5384  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5385	(match_operand:ANY64 1 "arm_general_register_operand" ""))]
5386  "TARGET_EITHER && reload_completed"
5387  [(set (match_dup 0) (match_dup 1))
5388   (set (match_dup 2) (match_dup 3))]
5389  "
5390  operands[2] = gen_highpart (SImode, operands[0]);
5391  operands[3] = gen_highpart (SImode, operands[1]);
5392  operands[0] = gen_lowpart (SImode, operands[0]);
5393  operands[1] = gen_lowpart (SImode, operands[1]);
5394
5395  /* Handle a partial overlap.  */
5396  if (rtx_equal_p (operands[0], operands[3]))
5397    {
5398      rtx tmp0 = operands[0];
5399      rtx tmp1 = operands[1];
5400
5401      operands[0] = operands[2];
5402      operands[1] = operands[3];
5403      operands[2] = tmp0;
5404      operands[3] = tmp1;
5405    }
5406  "
5407)
5408
;; We can't actually do base+index doubleword loads if the index and
;; destination overlap.  Split here so that we at least have a chance to
;; schedule.
5412(define_split
5413  [(set (match_operand:DI 0 "s_register_operand" "")
5414	(mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5415			 (match_operand:SI 2 "s_register_operand" ""))))]
5416  "TARGET_LDRD
5417  && reg_overlap_mentioned_p (operands[0], operands[1])
5418  && reg_overlap_mentioned_p (operands[0], operands[2])"
5419  [(set (match_dup 4)
5420	(plus:SI (match_dup 1)
5421		 (match_dup 2)))
5422   (set (match_dup 0)
5423	(mem:DI (match_dup 4)))]
5424  "
5425  operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
5426  "
5427)
5428
5429;;; ??? This should have alternatives for constants.
5430;;; ??? This was originally identical to the movdf_insn pattern.
5431;;; ??? The 'i' constraint looks funny, but it should always be replaced by
5432;;; thumb_reorg with a memory reference.
5433(define_insn "*thumb1_movdi_insn"
5434  [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
5435	(match_operand:DI 1 "general_operand"      "l, I,J,>,l,mi,l,*r"))]
5436  "TARGET_THUMB1
5437   && (   register_operand (operands[0], DImode)
5438       || register_operand (operands[1], DImode))"
5439  "*
5440  {
5441  switch (which_alternative)
5442    {
5443    default:
5444    case 0:
5445      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5446	return \"add\\t%0,  %1,  #0\;add\\t%H0, %H1, #0\";
5447      return   \"add\\t%H0, %H1, #0\;add\\t%0,  %1,  #0\";
5448    case 1:
5449      return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5450    case 2:
5451      operands[1] = GEN_INT (- INTVAL (operands[1]));
5452      return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5453    case 3:
5454      return \"ldmia\\t%1, {%0, %H0}\";
5455    case 4:
5456      return \"stmia\\t%0, {%1, %H1}\";
5457    case 5:
5458      return thumb_load_double_from_address (operands);
5459    case 6:
5460      operands[2] = gen_rtx_MEM (SImode,
5461			     plus_constant (Pmode, XEXP (operands[0], 0), 4));
5462      output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5463      return \"\";
5464    case 7:
5465      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5466	return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5467      return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5468    }
5469  }"
5470  [(set_attr "length" "4,4,6,2,2,6,4,4")
5471   (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5472   (set_attr "insn" "*,mov,*,*,*,*,*,mov")
5473   (set_attr "pool_range" "*,*,*,*,*,1018,*,*")]
5474)
5475
5476(define_expand "movsi"
5477  [(set (match_operand:SI 0 "general_operand" "")
5478        (match_operand:SI 1 "general_operand" ""))]
5479  "TARGET_EITHER"
5480  "
5481  {
5482  rtx base, offset, tmp;
5483
5484  if (TARGET_32BIT)
5485    {
5486      /* Everything except mem = const or mem = mem can be done easily.  */
5487      if (MEM_P (operands[0]))
5488        operands[1] = force_reg (SImode, operands[1]);
5489      if (arm_general_register_operand (operands[0], SImode)
5490	  && CONST_INT_P (operands[1])
5491          && !(const_ok_for_arm (INTVAL (operands[1]))
5492               || const_ok_for_arm (~INTVAL (operands[1]))))
5493        {
5494           arm_split_constant (SET, SImode, NULL_RTX,
5495	                       INTVAL (operands[1]), operands[0], NULL_RTX,
5496			       optimize && can_create_pseudo_p ());
5497          DONE;
5498        }
5499    }
5500  else /* TARGET_THUMB1...  */
5501    {
5502      if (can_create_pseudo_p ())
5503        {
5504          if (!REG_P (operands[0]))
5505	    operands[1] = force_reg (SImode, operands[1]);
5506        }
5507    }
5508
5509  if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5510    {
5511      split_const (operands[1], &base, &offset);
5512      if (GET_CODE (base) == SYMBOL_REF
5513	  && !offset_within_block_p (base, INTVAL (offset)))
5514	{
5515	  tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5516	  emit_move_insn (tmp, base);
5517	  emit_insn (gen_addsi3 (operands[0], tmp, offset));
5518	  DONE;
5519	}
5520    }
5521
  /* Recognize the case where operands[1] is a reference to thread-local
     data and load its address to a register.  */
5524  if (arm_tls_referenced_p (operands[1]))
5525    {
5526      rtx tmp = operands[1];
5527      rtx addend = NULL;
5528
5529      if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5530        {
5531          addend = XEXP (XEXP (tmp, 0), 1);
5532          tmp = XEXP (XEXP (tmp, 0), 0);
5533        }
5534
5535      gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5536      gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5537
5538      tmp = legitimize_tls_address (tmp,
5539				    !can_create_pseudo_p () ? operands[0] : 0);
5540      if (addend)
5541        {
5542          tmp = gen_rtx_PLUS (SImode, tmp, addend);
5543          tmp = force_operand (tmp, operands[0]);
5544        }
5545      operands[1] = tmp;
5546    }
5547  else if (flag_pic
5548	   && (CONSTANT_P (operands[1])
5549	       || symbol_mentioned_p (operands[1])
5550	       || label_mentioned_p (operands[1])))
5551      operands[1] = legitimize_pic_address (operands[1], SImode,
5552					    (!can_create_pseudo_p ()
5553					     ? operands[0]
5554					     : 0));
5555  }
5556  "
5557)
5558
5559;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5560;; LO_SUM adds in the high bits.  Fortunately these are opaque operations
5561;; so this does not matter.
5562(define_insn "*arm_movt"
5563  [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5564	(lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5565		   (match_operand:SI 2 "general_operand"      "i")))]
5566  "arm_arch_thumb2"
5567  "movt%?\t%0, #:upper16:%c2"
5568  [(set_attr "predicable" "yes")
5569   (set_attr "length" "4")]
5570)
5571
5572(define_insn "*arm_movsi_insn"
5573  [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5574	(match_operand:SI 1 "general_operand"      "rk, I,K,j,mi,rk"))]
5575  "TARGET_ARM && ! TARGET_IWMMXT
5576   && !(TARGET_HARD_FLOAT && TARGET_VFP)
5577   && (   register_operand (operands[0], SImode)
5578       || register_operand (operands[1], SImode))"
5579  "@
5580   mov%?\\t%0, %1
5581   mov%?\\t%0, %1
5582   mvn%?\\t%0, #%B1
5583   movw%?\\t%0, %1
5584   ldr%?\\t%0, %1
5585   str%?\\t%1, %0"
5586  [(set_attr "type" "*,simple_alu_imm,simple_alu_imm,simple_alu_imm,load1,store1")
5587   (set_attr "insn" "mov,mov,mvn,mov,*,*")
5588   (set_attr "predicable" "yes")
5589   (set_attr "pool_range" "*,*,*,*,4096,*")
5590   (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5591)
5592
5593(define_split
5594  [(set (match_operand:SI 0 "arm_general_register_operand" "")
5595	(match_operand:SI 1 "const_int_operand" ""))]
5596  "TARGET_32BIT
5597  && (!(const_ok_for_arm (INTVAL (operands[1]))
5598        || const_ok_for_arm (~INTVAL (operands[1]))))"
5599  [(clobber (const_int 0))]
5600  "
5601  arm_split_constant (SET, SImode, NULL_RTX,
5602                      INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5603  DONE;
5604  "
5605)
5606
;; Split symbol_refs at a later stage (after cprop), instead of generating
;; the movt/movw pair directly at expand time.  Otherwise the corresponding
;; HIGH and LO_SUM would be merged back into a memory load by cprop.  When
;; the target prefers movt/movw over a load from the constant pool, this
;; gives better performance.
5612(define_split
5613  [(set (match_operand:SI 0 "arm_general_register_operand" "")
5614       (match_operand:SI 1 "general_operand" ""))]
5615  "TARGET_32BIT
5616   && TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5617   && !flag_pic && !target_word_relocations
5618   && !arm_tls_referenced_p (operands[1])"
5619  [(clobber (const_int 0))]
5620{
5621  arm_emit_movpair (operands[0], operands[1]);
5622  DONE;
5623})
5624
5625(define_insn "*thumb1_movsi_insn"
5626  [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*l*h*k")
5627	(match_operand:SI 1 "general_operand"      "l, I,J,K,>,l,mi,l,*l*h*k"))]
5628  "TARGET_THUMB1
5629   && (   register_operand (operands[0], SImode)
5630       || register_operand (operands[1], SImode))"
5631  "@
5632   mov	%0, %1
5633   mov	%0, %1
5634   #
5635   #
5636   ldmia\\t%1, {%0}
5637   stmia\\t%0, {%1}
5638   ldr\\t%0, %1
5639   str\\t%1, %0
5640   mov\\t%0, %1"
5641  [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5642   (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5643   (set_attr "pool_range" "*,*,*,*,*,*,1018,*,*")
5644   (set_attr "conds" "set,clob,*,*,nocond,nocond,nocond,nocond,nocond")])
5645
5646(define_split
5647  [(set (match_operand:SI 0 "register_operand" "")
5648	(match_operand:SI 1 "const_int_operand" ""))]
5649  "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5650  [(set (match_dup 2) (match_dup 1))
5651   (set (match_dup 0) (neg:SI (match_dup 2)))]
5652  "
5653  {
5654    operands[1] = GEN_INT (- INTVAL (operands[1]));
5655    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5656  }"
5657)
5658
5659(define_split
5660  [(set (match_operand:SI 0 "register_operand" "")
5661	(match_operand:SI 1 "const_int_operand" ""))]
5662  "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5663  [(set (match_dup 2) (match_dup 1))
5664   (set (match_dup 0) (ashift:SI (match_dup 2) (match_dup 3)))]
5665  "
5666  {
5667    unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5668    unsigned HOST_WIDE_INT mask = 0xff;
5669    int i;
5670
5671    for (i = 0; i < 25; i++)
5672      if ((val & (mask << i)) == val)
5673        break;
5674
5675    /* Don't split if the shift is zero.  */
5676    if (i == 0)
5677      FAIL;
5678
5679    operands[1] = GEN_INT (val >> i);
5680    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5681    operands[3] = GEN_INT (i);
5682  }"
5683)
5684
;; For Thumb-1, split an immediate move of a value in [256, 510] into a mov
;; of [1, 255] followed by an add of #255.
5686(define_split
5687  [(set (match_operand:SI 0 "register_operand" "")
5688	(match_operand:SI 1 "const_int_operand" ""))]
5689  "TARGET_THUMB1 && satisfies_constraint_Pe (operands[1])"
5690  [(set (match_dup 2) (match_dup 1))
5691   (set (match_dup 0) (plus:SI (match_dup 2) (match_dup 3)))]
5692  "
5693  {
5694    operands[1] = GEN_INT (INTVAL (operands[1]) - 255);
5695    operands[2] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5696    operands[3] = GEN_INT (255);
5697  }"
5698)
5699
5700;; When generating pic, we need to load the symbol offset into a register.
5701;; So that the optimizer does not confuse this with a normal symbol load
5702;; we use an unspec.  The offset will be loaded from a constant pool entry,
5703;; since that is the only type of relocation we can use.
5704
5705;; Wrap calculation of the whole PIC address in a single pattern for the
5706;; benefit of optimizers, particularly, PRE and HOIST.  Calculation of
5707;; a PIC address involves two loads from memory, so we want to CSE it
5708;; as often as possible.
5709;; This pattern will be split into one of the pic_load_addr_* patterns
5710;; and a move after GCSE optimizations.
5711;;
5712;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
5713(define_expand "calculate_pic_address"
5714  [(set (match_operand:SI 0 "register_operand" "")
5715	(mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5716			 (unspec:SI [(match_operand:SI 2 "" "")]
5717				    UNSPEC_PIC_SYM))))]
5718  "flag_pic"
5719)
5720
5721;; Split calculate_pic_address into pic_load_addr_* and a move.
5722(define_split
5723  [(set (match_operand:SI 0 "register_operand" "")
5724	(mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
5725			 (unspec:SI [(match_operand:SI 2 "" "")]
5726				    UNSPEC_PIC_SYM))))]
5727  "flag_pic"
5728  [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
5729   (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
5730  "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
5731)
5732
5733;; operand1 is the memory address to go into
5734;; pic_load_addr_32bit.
5735;; operand2 is the PIC label to be emitted
5736;; from pic_add_dot_plus_eight.
5737;; We do this to allow hoisting of the entire insn.
5738(define_insn_and_split "pic_load_addr_unified"
5739  [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
5740	(unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
5741		    (match_operand:SI 2 "" "")]
5742		    UNSPEC_PIC_UNIFIED))]
5743 "flag_pic"
5744 "#"
5745 "&& reload_completed"
5746 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
5747  (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
5748       		     		 (match_dup 2)] UNSPEC_PIC_BASE))]
5749 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
5750 [(set_attr "type" "load1,load1,load1")
5751  (set_attr "pool_range" "4096,4094,1022")
5752  (set_attr "neg_pool_range" "4084,0,0")
5753  (set_attr "arch"  "a,t2,t1")
5754  (set_attr "length" "8,6,4")]
5755)
5756
5757;; The rather odd constraints on the following are to force reload to leave
5758;; the insn alone, and to force the minipool generation pass to then move
5759;; the GOT symbol to memory.
5760
5761(define_insn "pic_load_addr_32bit"
5762  [(set (match_operand:SI 0 "s_register_operand" "=r")
5763	(unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5764  "TARGET_32BIT && flag_pic"
5765  "ldr%?\\t%0, %1"
5766  [(set_attr "type" "load1")
5767   (set (attr "pool_range")
5768	(if_then_else (eq_attr "is_thumb" "no")
5769		      (const_int 4096)
5770		      (const_int 4094)))
5771   (set (attr "neg_pool_range")
5772	(if_then_else (eq_attr "is_thumb" "no")
5773		      (const_int 4084)
5774		      (const_int 0)))]
5775)
5776
5777(define_insn "pic_load_addr_thumb1"
5778  [(set (match_operand:SI 0 "s_register_operand" "=l")
5779	(unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5780  "TARGET_THUMB1 && flag_pic"
5781  "ldr\\t%0, %1"
5782  [(set_attr "type" "load1")
5783   (set (attr "pool_range") (const_int 1018))]
5784)
5785
5786(define_insn "pic_add_dot_plus_four"
5787  [(set (match_operand:SI 0 "register_operand" "=r")
5788	(unspec:SI [(match_operand:SI 1 "register_operand" "0")
5789		    (const_int 4)
5790		    (match_operand 2 "" "")]
5791		   UNSPEC_PIC_BASE))]
5792  "TARGET_THUMB"
5793  "*
5794  (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5795				     INTVAL (operands[2]));
5796  return \"add\\t%0, %|pc\";
5797  "
5798  [(set_attr "length" "2")]
5799)
5800
5801(define_insn "pic_add_dot_plus_eight"
5802  [(set (match_operand:SI 0 "register_operand" "=r")
5803	(unspec:SI [(match_operand:SI 1 "register_operand" "r")
5804		    (const_int 8)
5805		    (match_operand 2 "" "")]
5806		   UNSPEC_PIC_BASE))]
5807  "TARGET_ARM"
5808  "*
5809    (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5810				       INTVAL (operands[2]));
5811    return \"add%?\\t%0, %|pc, %1\";
5812  "
5813  [(set_attr "predicable" "yes")]
5814)
5815
5816(define_insn "tls_load_dot_plus_eight"
5817  [(set (match_operand:SI 0 "register_operand" "=r")
5818	(mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5819			    (const_int 8)
5820			    (match_operand 2 "" "")]
5821			   UNSPEC_PIC_BASE)))]
5822  "TARGET_ARM"
5823  "*
5824    (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5825				       INTVAL (operands[2]));
5826    return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5827  "
5828  [(set_attr "predicable" "yes")]
5829)
5830
5831;; PIC references to local variables can generate pic_add_dot_plus_eight
5832;; followed by a load.  These sequences can be crunched down to
5833;; tls_load_dot_plus_eight by a peephole.
5834
5835(define_peephole2
5836  [(set (match_operand:SI 0 "register_operand" "")
5837	(unspec:SI [(match_operand:SI 3 "register_operand" "")
5838		    (const_int 8)
5839		    (match_operand 1 "" "")]
5840		   UNSPEC_PIC_BASE))
5841   (set (match_operand:SI 2 "arm_general_register_operand" "")
5842	(mem:SI (match_dup 0)))]
5843  "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5844  [(set (match_dup 2)
5845	(mem:SI (unspec:SI [(match_dup 3)
5846			    (const_int 8)
5847			    (match_dup 1)]
5848			   UNSPEC_PIC_BASE)))]
5849  ""
5850)
5851
5852(define_insn "pic_offset_arm"
5853  [(set (match_operand:SI 0 "register_operand" "=r")
5854	(mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5855			 (unspec:SI [(match_operand:SI 2 "" "X")]
5856				    UNSPEC_PIC_OFFSET))))]
5857  "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5858  "ldr%?\\t%0, [%1,%2]"
5859  [(set_attr "type" "load1")]
5860)
5861
5862(define_expand "builtin_setjmp_receiver"
5863  [(label_ref (match_operand 0 "" ""))]
5864  "flag_pic"
5865  "
5866{
5867  /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5868     register.  */
5869  if (arm_pic_register != INVALID_REGNUM)
5870    arm_load_pic_register (1UL << 3);
5871  DONE;
5872}")
5873
;; If copying one reg to another we can set the condition codes according to
;; its value.  Such a move is common after a return from a subroutine when the
;; result is being tested against zero.
5877
5878(define_insn "*movsi_compare0"
5879  [(set (reg:CC CC_REGNUM)
5880	(compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5881		    (const_int 0)))
5882   (set (match_operand:SI 0 "s_register_operand" "=r,r")
5883	(match_dup 1))]
5884  "TARGET_32BIT"
5885  "@
5886   cmp%?\\t%0, #0
5887   sub%.\\t%0, %1, #0"
5888  [(set_attr "conds" "set")
5889   (set_attr "type" "simple_alu_imm,simple_alu_imm")]
5890)
5891
5892;; Subroutine to store a half word from a register into memory.
5893;; Operand 0 is the source register (HImode)
5894;; Operand 1 is the destination address in a register (SImode)
5895
5896;; In both this routine and the next, we must be careful not to spill
5897;; a memory address of reg+large_const into a separate PLUS insn, since this
5898;; can generate unrecognizable rtl.
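;;
;; As a little-endian illustration (hypothetical registers): storing the
;; halfword in r1 to the address in r2 via the storehi expansion below
;; becomes two byte stores plus a shift:
;;      strb    r1, [r2]                @ low byte
;;      mov     r3, r1, asr #8          @ high byte into the low bits of r3
;;      strb    r3, [r2, #1]            @ high byte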
5899
5900(define_expand "storehi"
5901  [;; store the low byte
5902   (set (match_operand 1 "" "") (match_dup 3))
5903   ;; extract the high byte
5904   (set (match_dup 2)
5905	(ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5906   ;; store the high byte
5907   (set (match_dup 4) (match_dup 5))]
5908  "TARGET_ARM"
5909  "
5910  {
5911    rtx op1 = operands[1];
5912    rtx addr = XEXP (op1, 0);
5913    enum rtx_code code = GET_CODE (addr);
5914
5915    if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5916	|| code == MINUS)
5917      op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5918
5919    operands[4] = adjust_address (op1, QImode, 1);
5920    operands[1] = adjust_address (operands[1], QImode, 0);
5921    operands[3] = gen_lowpart (QImode, operands[0]);
5922    operands[0] = gen_lowpart (SImode, operands[0]);
5923    operands[2] = gen_reg_rtx (SImode);
5924    operands[5] = gen_lowpart (QImode, operands[2]);
5925  }"
5926)
5927
5928(define_expand "storehi_bigend"
5929  [(set (match_dup 4) (match_dup 3))
5930   (set (match_dup 2)
5931	(ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5932   (set (match_operand 1 "" "")	(match_dup 5))]
5933  "TARGET_ARM"
5934  "
5935  {
5936    rtx op1 = operands[1];
5937    rtx addr = XEXP (op1, 0);
5938    enum rtx_code code = GET_CODE (addr);
5939
5940    if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5941	|| code == MINUS)
5942      op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5943
5944    operands[4] = adjust_address (op1, QImode, 1);
5945    operands[1] = adjust_address (operands[1], QImode, 0);
5946    operands[3] = gen_lowpart (QImode, operands[0]);
5947    operands[0] = gen_lowpart (SImode, operands[0]);
5948    operands[2] = gen_reg_rtx (SImode);
5949    operands[5] = gen_lowpart (QImode, operands[2]);
5950  }"
5951)
5952
5953;; Subroutine to store a half word integer constant into memory.
5954(define_expand "storeinthi"
5955  [(set (match_operand 0 "" "")
5956	(match_operand 1 "" ""))
5957   (set (match_dup 3) (match_dup 2))]
5958  "TARGET_ARM"
5959  "
5960  {
5961    HOST_WIDE_INT value = INTVAL (operands[1]);
5962    rtx addr = XEXP (operands[0], 0);
5963    rtx op0 = operands[0];
5964    enum rtx_code code = GET_CODE (addr);
5965
5966    if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
5967	|| code == MINUS)
5968      op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5969
5970    operands[1] = gen_reg_rtx (SImode);
5971    if (BYTES_BIG_ENDIAN)
5972      {
5973	emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5974	if ((value & 255) == ((value >> 8) & 255))
5975	  operands[2] = operands[1];
5976	else
5977	  {
5978	    operands[2] = gen_reg_rtx (SImode);
5979	    emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5980	  }
5981      }
5982    else
5983      {
5984	emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5985	if ((value & 255) == ((value >> 8) & 255))
5986	  operands[2] = operands[1];
5987	else
5988	  {
5989	    operands[2] = gen_reg_rtx (SImode);
5990	    emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5991	  }
5992      }
5993
5994    operands[3] = adjust_address (op0, QImode, 1);
5995    operands[0] = adjust_address (operands[0], QImode, 0);
5996    operands[2] = gen_lowpart (QImode, operands[2]);
5997    operands[1] = gen_lowpart (QImode, operands[1]);
5998  }"
5999)
6000
6001(define_expand "storehi_single_op"
6002  [(set (match_operand:HI 0 "memory_operand" "")
6003	(match_operand:HI 1 "general_operand" ""))]
6004  "TARGET_32BIT && arm_arch4"
6005  "
6006  if (!s_register_operand (operands[1], HImode))
6007    operands[1] = copy_to_mode_reg (HImode, operands[1]);
6008  "
6009)
6010
6011(define_expand "movhi"
6012  [(set (match_operand:HI 0 "general_operand" "")
6013	(match_operand:HI 1 "general_operand" ""))]
6014  "TARGET_EITHER"
6015  "
6016  if (TARGET_ARM)
6017    {
6018      if (can_create_pseudo_p ())
6019        {
6020          if (MEM_P (operands[0]))
6021	    {
6022	      if (arm_arch4)
6023	        {
6024	          emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6025	          DONE;
6026	        }
6027	      if (CONST_INT_P (operands[1]))
6028	        emit_insn (gen_storeinthi (operands[0], operands[1]));
6029	      else
6030	        {
6031	          if (MEM_P (operands[1]))
6032		    operands[1] = force_reg (HImode, operands[1]);
6033	          if (BYTES_BIG_ENDIAN)
6034		    emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6035	          else
6036		   emit_insn (gen_storehi (operands[1], operands[0]));
6037	        }
6038	      DONE;
6039	    }
6040          /* Sign extend a constant, and keep it in an SImode reg.  */
6041          else if (CONST_INT_P (operands[1]))
6042	    {
6043	      rtx reg = gen_reg_rtx (SImode);
6044	      HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6045
6046	      /* If the constant is already valid, leave it alone.  */
6047	      if (!const_ok_for_arm (val))
6048	        {
6049	          /* If setting all the top bits will make the constant
6050		     loadable in a single instruction, then set them.
6051		     Otherwise, sign extend the number.  */
6052
6053	          if (const_ok_for_arm (~(val | ~0xffff)))
6054		    val |= ~0xffff;
6055	          else if (val & 0x8000)
6056		    val |= ~0xffff;
6057	        }
6058
6059	      emit_insn (gen_movsi (reg, GEN_INT (val)));
6060	      operands[1] = gen_lowpart (HImode, reg);
6061	    }
6062	  else if (arm_arch4 && optimize && can_create_pseudo_p ()
6063		   && MEM_P (operands[1]))
6064	    {
6065	      rtx reg = gen_reg_rtx (SImode);
6066
6067	      emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6068	      operands[1] = gen_lowpart (HImode, reg);
6069	    }
6070          else if (!arm_arch4)
6071	    {
6072	      if (MEM_P (operands[1]))
6073	        {
6074		  rtx base;
6075		  rtx offset = const0_rtx;
6076		  rtx reg = gen_reg_rtx (SImode);
6077
6078		  if ((REG_P (base = XEXP (operands[1], 0))
6079		       || (GET_CODE (base) == PLUS
6080			   && (CONST_INT_P (offset = XEXP (base, 1)))
6081                           && ((INTVAL(offset) & 1) != 1)
6082			   && REG_P (base = XEXP (base, 0))))
6083		      && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6084		    {
6085		      rtx new_rtx;
6086
6087		      new_rtx = widen_memory_access (operands[1], SImode,
6088						     ((INTVAL (offset) & ~3)
6089						      - INTVAL (offset)));
6090		      emit_insn (gen_movsi (reg, new_rtx));
6091		      if (((INTVAL (offset) & 2) != 0)
6092			  ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6093			{
6094			  rtx reg2 = gen_reg_rtx (SImode);
6095
6096			  emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6097			  reg = reg2;
6098			}
6099		    }
6100		  else
6101		    emit_insn (gen_movhi_bytes (reg, operands[1]));
6102
6103		  operands[1] = gen_lowpart (HImode, reg);
6104	       }
6105	   }
6106        }
6107      /* Handle loading a large integer during reload.  */
6108      else if (CONST_INT_P (operands[1])
6109	       && !const_ok_for_arm (INTVAL (operands[1]))
6110	       && !const_ok_for_arm (~INTVAL (operands[1])))
6111        {
6112          /* Writing a constant to memory needs a scratch, which should
6113	     be handled with SECONDARY_RELOADs.  */
6114          gcc_assert (REG_P (operands[0]));
6115
6116          operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6117          emit_insn (gen_movsi (operands[0], operands[1]));
6118          DONE;
6119       }
6120    }
6121  else if (TARGET_THUMB2)
6122    {
6123      /* Thumb-2 can do everything except mem=mem and mem=const easily.  */
6124      if (can_create_pseudo_p ())
6125	{
6126	  if (!REG_P (operands[0]))
6127	    operands[1] = force_reg (HImode, operands[1]);
6128          /* Zero extend a constant, and keep it in an SImode reg.  */
6129          else if (CONST_INT_P (operands[1]))
6130	    {
6131	      rtx reg = gen_reg_rtx (SImode);
6132	      HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6133
6134	      emit_insn (gen_movsi (reg, GEN_INT (val)));
6135	      operands[1] = gen_lowpart (HImode, reg);
6136	    }
6137	}
6138    }
6139  else /* TARGET_THUMB1 */
6140    {
6141      if (can_create_pseudo_p ())
6142        {
6143	  if (CONST_INT_P (operands[1]))
6144	    {
6145	      rtx reg = gen_reg_rtx (SImode);
6146
6147	      emit_insn (gen_movsi (reg, operands[1]));
6148	      operands[1] = gen_lowpart (HImode, reg);
6149	    }
6150
6151          /* ??? We shouldn't really get invalid addresses here, but this can
6152	     happen if we are passed a SP (never OK for HImode/QImode) or
6153	     virtual register (also rejected as illegitimate for HImode/QImode)
6154	     relative address.  */
6155          /* ??? This should perhaps be fixed elsewhere, for instance, in
6156	     fixup_stack_1, by checking for other kinds of invalid addresses,
6157	     e.g. a bare reference to a virtual register.  This may confuse the
6158	     alpha though, which must handle this case differently.  */
6159          if (MEM_P (operands[0])
6160	      && !memory_address_p (GET_MODE (operands[0]),
6161				    XEXP (operands[0], 0)))
6162	    operands[0]
6163	      = replace_equiv_address (operands[0],
6164				       copy_to_reg (XEXP (operands[0], 0)));
6165
6166          if (MEM_P (operands[1])
6167	      && !memory_address_p (GET_MODE (operands[1]),
6168				    XEXP (operands[1], 0)))
6169	    operands[1]
6170	      = replace_equiv_address (operands[1],
6171				       copy_to_reg (XEXP (operands[1], 0)));
6172
6173	  if (MEM_P (operands[1]) && optimize > 0)
6174	    {
6175	      rtx reg = gen_reg_rtx (SImode);
6176
6177	      emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6178	      operands[1] = gen_lowpart (HImode, reg);
6179	    }
6180
6181          if (MEM_P (operands[0]))
6182	    operands[1] = force_reg (HImode, operands[1]);
6183        }
6184      else if (CONST_INT_P (operands[1])
6185	        && !satisfies_constraint_I (operands[1]))
6186        {
6187	  /* Handle loading a large integer during reload.  */
6188
6189          /* Writing a constant to memory needs a scratch, which should
6190	     be handled with SECONDARY_RELOADs.  */
6191          gcc_assert (REG_P (operands[0]));
6192
6193          operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6194          emit_insn (gen_movsi (operands[0], operands[1]));
6195          DONE;
6196        }
6197    }
6198  "
6199)
6200
6201(define_insn "*thumb1_movhi_insn"
6202  [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6203	(match_operand:HI 1 "general_operand"       "l,m,l,*h,*r,I"))]
6204  "TARGET_THUMB1
6205   && (   register_operand (operands[0], HImode)
6206       || register_operand (operands[1], HImode))"
6207  "*
6208  switch (which_alternative)
6209    {
6210    case 0: return \"add	%0, %1, #0\";
6211    case 2: return \"strh	%1, %0\";
6212    case 3: return \"mov	%0, %1\";
6213    case 4: return \"mov	%0, %1\";
6214    case 5: return \"mov	%0, %1\";
6215    default: gcc_unreachable ();
6216    case 1:
6217      /* The stack pointer can end up being taken as an index register.
6218          Catch this case here and deal with it.  */
6219      if (GET_CODE (XEXP (operands[1], 0)) == PLUS
6220	  && REG_P (XEXP (XEXP (operands[1], 0), 0))
6221	  && REGNO    (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
6222        {
6223	  rtx ops[2];
6224          ops[0] = operands[0];
6225          ops[1] = XEXP (XEXP (operands[1], 0), 0);
6226
6227          output_asm_insn (\"mov	%0, %1\", ops);
6228
6229          XEXP (XEXP (operands[1], 0), 0) = operands[0];
6230
6231	}
6232      return \"ldrh	%0, %1\";
6233    }"
6234  [(set_attr "length" "2,4,2,2,2,2")
6235   (set_attr "type" "*,load1,store1,*,*,*")
6236   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
6237
6238
6239(define_expand "movhi_bytes"
6240  [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6241   (set (match_dup 3)
6242	(zero_extend:SI (match_dup 6)))
6243   (set (match_operand:SI 0 "" "")
6244	 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6245  "TARGET_ARM"
6246  "
6247  {
6248    rtx mem1, mem2;
6249    rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6250
6251    mem1 = change_address (operands[1], QImode, addr);
6252    mem2 = change_address (operands[1], QImode,
6253			   plus_constant (Pmode, addr, 1));
6254    operands[0] = gen_lowpart (SImode, operands[0]);
6255    operands[1] = mem1;
6256    operands[2] = gen_reg_rtx (SImode);
6257    operands[3] = gen_reg_rtx (SImode);
6258    operands[6] = mem2;
6259
6260    if (BYTES_BIG_ENDIAN)
6261      {
6262	operands[4] = operands[2];
6263	operands[5] = operands[3];
6264      }
6265    else
6266      {
6267	operands[4] = operands[3];
6268	operands[5] = operands[2];
6269      }
6270  }"
6271)
6272
6273(define_expand "movhi_bigend"
6274  [(set (match_dup 2)
6275	(rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
6276		   (const_int 16)))
6277   (set (match_dup 3)
6278	(ashiftrt:SI (match_dup 2) (const_int 16)))
6279   (set (match_operand:HI 0 "s_register_operand" "")
6280	(match_dup 4))]
6281  "TARGET_ARM"
6282  "
6283  operands[2] = gen_reg_rtx (SImode);
6284  operands[3] = gen_reg_rtx (SImode);
6285  operands[4] = gen_lowpart (HImode, operands[3]);
6286  "
6287)
6288
;; Pattern to recognize the insn generated by the default case above
6290(define_insn "*movhi_insn_arch4"
6291  [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
6292	(match_operand:HI 1 "general_operand"      "rI,K,n,r,mi"))]
6293  "TARGET_ARM
6294   && arm_arch4
6295   && (register_operand (operands[0], HImode)
6296       || register_operand (operands[1], HImode))"
6297  "@
6298   mov%?\\t%0, %1\\t%@ movhi
6299   mvn%?\\t%0, #%B1\\t%@ movhi
6300   movw%?\\t%0, %L1\\t%@ movhi
6301   str%(h%)\\t%1, %0\\t%@ movhi
6302   ldr%(h%)\\t%0, %1\\t%@ movhi"
6303  [(set_attr "predicable" "yes")
6304   (set_attr "insn" "mov,mvn,mov,*,*")
6305   (set_attr "pool_range" "*,*,*,*,256")
6306   (set_attr "neg_pool_range" "*,*,*,*,244")
6307   (set_attr "arch" "*,*,v6t2,*,*")
6308   (set_attr_alternative "type"
6309                         [(if_then_else (match_operand 1 "const_int_operand" "")
6310                                        (const_string "simple_alu_imm" )
6311                                        (const_string "*"))
6312                          (const_string "simple_alu_imm")
6313                          (const_string "simple_alu_imm")
6314                          (const_string "store1")
6315                          (const_string "load1")])]
6316)
6317
6318(define_insn "*movhi_bytes"
6319  [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
6320	(match_operand:HI 1 "arm_rhs_operand"  "I,r,K"))]
6321  "TARGET_ARM"
6322  "@
6323   mov%?\\t%0, %1\\t%@ movhi
6324   mov%?\\t%0, %1\\t%@ movhi
6325   mvn%?\\t%0, #%B1\\t%@ movhi"
6326  [(set_attr "predicable" "yes")
6327   (set_attr "insn" "mov, mov,mvn")
6328   (set_attr "type" "simple_alu_imm,*,simple_alu_imm")]
6329)
6330
6331(define_expand "thumb_movhi_clobber"
6332  [(set (match_operand:HI     0 "memory_operand"   "")
6333	(match_operand:HI     1 "register_operand" ""))
6334   (clobber (match_operand:DI 2 "register_operand" ""))]
6335  "TARGET_THUMB1"
6336  "
6337  if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
6338      && REGNO (operands[1]) <= LAST_LO_REGNUM)
6339    {
6340      emit_insn (gen_movhi (operands[0], operands[1]));
6341      DONE;
6342    }
6343  /* XXX Fixme, need to handle other cases here as well.  */
6344  gcc_unreachable ();
6345  "
6346)
6347
;; We use a DImode scratch because we may occasionally need an additional
;; temporary if the address isn't offsettable -- push_reload doesn't seem
;; to take any notice of the "o" constraint on the reload_memory_operand operand.
6351(define_expand "reload_outhi"
6352  [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6353	      (match_operand:HI 1 "s_register_operand"        "r")
6354	      (match_operand:DI 2 "s_register_operand"        "=&l")])]
6355  "TARGET_EITHER"
6356  "if (TARGET_ARM)
6357     arm_reload_out_hi (operands);
6358   else
6359     thumb_reload_out_hi (operands);
6360  DONE;
6361  "
6362)
6363
6364(define_expand "reload_inhi"
6365  [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6366	      (match_operand:HI 1 "arm_reload_memory_operand" "o")
6367	      (match_operand:DI 2 "s_register_operand" "=&r")])]
6368  "TARGET_EITHER"
6369  "
6370  if (TARGET_ARM)
6371    arm_reload_in_hi (operands);
6372  else
6373    thumb_reload_out_hi (operands);
6374  DONE;
6375")
6376
6377(define_expand "movqi"
6378  [(set (match_operand:QI 0 "general_operand" "")
6379        (match_operand:QI 1 "general_operand" ""))]
6380  "TARGET_EITHER"
6381  "
  /* Everything except mem = const or mem = mem can be done easily.  */
6383
6384  if (can_create_pseudo_p ())
6385    {
6386      if (CONST_INT_P (operands[1]))
6387	{
6388	  rtx reg = gen_reg_rtx (SImode);
6389
6390	  /* For thumb we want an unsigned immediate, then we are more likely
6391	     to be able to use a movs insn.  */
6392	  if (TARGET_THUMB)
6393	    operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6394
6395	  emit_insn (gen_movsi (reg, operands[1]));
6396	  operands[1] = gen_lowpart (QImode, reg);
6397	}
6398
6399      if (TARGET_THUMB)
6400	{
6401          /* ??? We shouldn't really get invalid addresses here, but this can
6402	     happen if we are passed a SP (never OK for HImode/QImode) or
6403	     virtual register (also rejected as illegitimate for HImode/QImode)
6404	     relative address.  */
6405          /* ??? This should perhaps be fixed elsewhere, for instance, in
6406	     fixup_stack_1, by checking for other kinds of invalid addresses,
6407	     e.g. a bare reference to a virtual register.  This may confuse the
6408	     alpha though, which must handle this case differently.  */
6409          if (MEM_P (operands[0])
6410	      && !memory_address_p (GET_MODE (operands[0]),
6411		  		     XEXP (operands[0], 0)))
6412	    operands[0]
6413	      = replace_equiv_address (operands[0],
6414				       copy_to_reg (XEXP (operands[0], 0)));
6415          if (MEM_P (operands[1])
6416	      && !memory_address_p (GET_MODE (operands[1]),
6417				    XEXP (operands[1], 0)))
6418	     operands[1]
6419	       = replace_equiv_address (operands[1],
6420					copy_to_reg (XEXP (operands[1], 0)));
6421	}
6422
6423      if (MEM_P (operands[1]) && optimize > 0)
6424	{
6425	  rtx reg = gen_reg_rtx (SImode);
6426
6427	  emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6428	  operands[1] = gen_lowpart (QImode, reg);
6429	}
6430
6431      if (MEM_P (operands[0]))
6432	operands[1] = force_reg (QImode, operands[1]);
6433    }
6434  else if (TARGET_THUMB
6435	   && CONST_INT_P (operands[1])
6436	   && !satisfies_constraint_I (operands[1]))
6437    {
6438      /* Handle loading a large integer during reload.  */
6439
6440      /* Writing a constant to memory needs a scratch, which should
6441	 be handled with SECONDARY_RELOADs.  */
6442      gcc_assert (REG_P (operands[0]));
6443
6444      operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6445      emit_insn (gen_movsi (operands[0], operands[1]));
6446      DONE;
6447    }
6448  "
6449)
6450
6451
6452(define_insn "*arm_movqi_insn"
6453  [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,Uu,r,m")
6454	(match_operand:QI 1 "general_operand" "r,I,K,Uu,l,m,r"))]
6455  "TARGET_32BIT
6456   && (   register_operand (operands[0], QImode)
6457       || register_operand (operands[1], QImode))"
6458  "@
6459   mov%?\\t%0, %1
6460   mov%?\\t%0, %1
6461   mvn%?\\t%0, #%B1
6462   ldr%(b%)\\t%0, %1
6463   str%(b%)\\t%1, %0
6464   ldr%(b%)\\t%0, %1
6465   str%(b%)\\t%1, %0"
6466  [(set_attr "type" "*,simple_alu_imm,simple_alu_imm,load1,store1,load1,store1")
6467   (set_attr "insn" "mov,mov,mvn,*,*,*,*")
6468   (set_attr "predicable" "yes")
6469   (set_attr "arch" "any,any,any,t2,t2,any,any")
6470   (set_attr "length" "4,4,4,2,2,4,4")]
6471)
6472
6473(define_insn "*thumb1_movqi_insn"
6474  [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
6475	(match_operand:QI 1 "general_operand"      "l, m,l,*h,*r,I"))]
6476  "TARGET_THUMB1
6477   && (   register_operand (operands[0], QImode)
6478       || register_operand (operands[1], QImode))"
6479  "@
6480   add\\t%0, %1, #0
6481   ldrb\\t%0, %1
6482   strb\\t%1, %0
6483   mov\\t%0, %1
6484   mov\\t%0, %1
6485   mov\\t%0, %1"
6486  [(set_attr "length" "2")
6487   (set_attr "type" "simple_alu_imm,load1,store1,*,*,simple_alu_imm")
6488   (set_attr "insn" "*,*,*,mov,mov,mov")
6489   (set_attr "pool_range" "*,32,*,*,*,*")
6490   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,clob")])
6491
6492;; HFmode moves
6493(define_expand "movhf"
6494  [(set (match_operand:HF 0 "general_operand" "")
6495	(match_operand:HF 1 "general_operand" ""))]
6496  "TARGET_EITHER"
6497  "
6498  if (TARGET_32BIT)
6499    {
6500      if (MEM_P (operands[0]))
6501        operands[1] = force_reg (HFmode, operands[1]);
6502    }
6503  else /* TARGET_THUMB1 */
6504    {
6505      if (can_create_pseudo_p ())
6506        {
6507           if (!REG_P (operands[0]))
6508	     operands[1] = force_reg (HFmode, operands[1]);
6509        }
6510    }
6511  "
6512)
6513
6514(define_insn "*arm32_movhf"
6515  [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6516	(match_operand:HF 1 "general_operand"	   " m,r,r,F"))]
6517  "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
6518   && (	  s_register_operand (operands[0], HFmode)
6519       || s_register_operand (operands[1], HFmode))"
6520  "*
6521  switch (which_alternative)
6522    {
6523    case 0:	/* ARM register from memory */
6524      return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
6525    case 1:	/* memory from ARM register */
6526      return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
6527    case 2:	/* ARM register from ARM register */
6528      return \"mov%?\\t%0, %1\\t%@ __fp16\";
6529    case 3:	/* ARM register from constant */
6530      {
6531	REAL_VALUE_TYPE r;
6532	long bits;
6533	rtx ops[4];
6534
6535	REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6536	bits = real_to_target (NULL, &r, HFmode);
6537	ops[0] = operands[0];
6538	ops[1] = GEN_INT (bits);
6539	ops[2] = GEN_INT (bits & 0xff00);
6540	ops[3] = GEN_INT (bits & 0x00ff);
6541
6542	if (arm_arch_thumb2)
6543	  output_asm_insn (\"movw%?\\t%0, %1\", ops);
6544	else
6545	  output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6546	return \"\";
6547       }
6548    default:
6549      gcc_unreachable ();
6550    }
6551  "
6552  [(set_attr "conds" "unconditional")
6553   (set_attr "type" "load1,store1,*,*")
6554   (set_attr "insn" "*,*,mov,mov")
6555   (set_attr "length" "4,4,4,8")
6556   (set_attr "predicable" "yes")]
6557)
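;; As a rough illustration of the constant alternative above (register and
;; constant chosen arbitrarily, immediates shown in hex): a constant whose
;; 16-bit image is 0x3CCD would come out as
;;   movw	r0, #0x3CCD		@ when arm_arch_thumb2
;; or otherwise be split via the two byte masks computed above into
;;   mov	r0, #0x3C00
;;   orr	r0, r0, #0xCD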
6558
6559(define_insn "*thumb1_movhf"
6560  [(set (match_operand:HF     0 "nonimmediate_operand" "=l,l,m,*r,*h")
6561	(match_operand:HF     1 "general_operand"      "l,mF,l,*h,*r"))]
6562  "TARGET_THUMB1
6563   && (	  s_register_operand (operands[0], HFmode)
6564       || s_register_operand (operands[1], HFmode))"
6565  "*
6566  switch (which_alternative)
6567    {
6568    case 1:
6569      {
6570	rtx addr;
6571	gcc_assert (MEM_P (operands[1]));
6572	addr = XEXP (operands[1], 0);
6573	if (GET_CODE (addr) == LABEL_REF
6574	    || (GET_CODE (addr) == CONST
6575		&& GET_CODE (XEXP (addr, 0)) == PLUS
6576		&& GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6577		&& CONST_INT_P (XEXP (XEXP (addr, 0), 1))))
6578	  {
6579	    /* Constant pool entry.  */
6580	    return \"ldr\\t%0, %1\";
6581	  }
6582	return \"ldrh\\t%0, %1\";
6583      }
6584    case 2: return \"strh\\t%1, %0\";
6585    default: return \"mov\\t%0, %1\";
6586    }
6587  "
6588  [(set_attr "length" "2")
6589   (set_attr "type" "*,load1,store1,*,*")
6590   (set_attr "insn" "mov,*,*,mov,mov")
6591   (set_attr "pool_range" "*,1018,*,*,*")
6592   (set_attr "conds" "clob,nocond,nocond,nocond,nocond")])
6593
6594(define_expand "movsf"
6595  [(set (match_operand:SF 0 "general_operand" "")
6596	(match_operand:SF 1 "general_operand" ""))]
6597  "TARGET_EITHER"
6598  "
6599  if (TARGET_32BIT)
6600    {
6601      if (MEM_P (operands[0]))
6602        operands[1] = force_reg (SFmode, operands[1]);
6603    }
6604  else /* TARGET_THUMB1 */
6605    {
6606      if (can_create_pseudo_p ())
6607        {
6608           if (!REG_P (operands[0]))
6609	     operands[1] = force_reg (SFmode, operands[1]);
6610        }
6611    }
6612  "
6613)
6614
6615;; Rewrite a move of a floating-point constant into a core register as
6616;; the equivalent SImode operation.
6617(define_split
6618  [(set (match_operand:SF 0 "arm_general_register_operand" "")
6619	(match_operand:SF 1 "immediate_operand" ""))]
6620  "TARGET_EITHER
6621   && reload_completed
6622   && CONST_DOUBLE_P (operands[1])"
6623  [(set (match_dup 2) (match_dup 3))]
6624  "
6625  operands[2] = gen_lowpart (SImode, operands[0]);
6626  operands[3] = gen_lowpart (SImode, operands[1]);
6627  if (operands[2] == 0 || operands[3] == 0)
6628    FAIL;
6629  "
6630)
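;; For instance, after reload this turns (roughly)
;;   (set (reg:SF r0) (const_double:SF 1.0))
;; into the integer move
;;   (set (reg:SI r0) (const_int 0x3f800000))
;; 0x3f800000 being the IEEE single-precision image of 1.0, which the
;; ordinary SImode move patterns then handle.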
6631
6632(define_insn "*arm_movsf_soft_insn"
6633  [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6634	(match_operand:SF 1 "general_operand"  "r,mE,r"))]
6635  "TARGET_32BIT
6636   && TARGET_SOFT_FLOAT
6637   && (!MEM_P (operands[0])
6638       || register_operand (operands[1], SFmode))"
6639  "@
6640   mov%?\\t%0, %1
6641   ldr%?\\t%0, %1\\t%@ float
6642   str%?\\t%1, %0\\t%@ float"
6643  [(set_attr "predicable" "yes")
6644   (set_attr "type" "*,load1,store1")
6645   (set_attr "insn" "mov,*,*")
6646   (set_attr "arm_pool_range" "*,4096,*")
6647   (set_attr "thumb2_pool_range" "*,4094,*")
6648   (set_attr "arm_neg_pool_range" "*,4084,*")
6649   (set_attr "thumb2_neg_pool_range" "*,0,*")]
6650)
6651
6652;;; ??? This should have alternatives for constants.
6653(define_insn "*thumb1_movsf_insn"
6654  [(set (match_operand:SF     0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6655	(match_operand:SF     1 "general_operand"      "l, >,l,mF,l,*h,*r"))]
6656  "TARGET_THUMB1
6657   && (   register_operand (operands[0], SFmode)
6658       || register_operand (operands[1], SFmode))"
6659  "@
6660   add\\t%0, %1, #0
6661   ldmia\\t%1, {%0}
6662   stmia\\t%0, {%1}
6663   ldr\\t%0, %1
6664   str\\t%1, %0
6665   mov\\t%0, %1
6666   mov\\t%0, %1"
6667  [(set_attr "length" "2")
6668   (set_attr "type" "*,load1,store1,load1,store1,*,*")
6669   (set_attr "pool_range" "*,*,*,1018,*,*,*")
6670   (set_attr "insn" "*,*,*,*,*,mov,mov")
6671   (set_attr "conds" "clob,nocond,nocond,nocond,nocond,nocond,nocond")]
6672)
6673
6674(define_expand "movdf"
6675  [(set (match_operand:DF 0 "general_operand" "")
6676	(match_operand:DF 1 "general_operand" ""))]
6677  "TARGET_EITHER"
6678  "
6679  if (TARGET_32BIT)
6680    {
6681      if (MEM_P (operands[0]))
6682        operands[1] = force_reg (DFmode, operands[1]);
6683    }
6684  else /* TARGET_THUMB */
6685    {
6686      if (can_create_pseudo_p ())
6687        {
6688          if (!REG_P (operands[0]))
6689	    operands[1] = force_reg (DFmode, operands[1]);
6690        }
6691    }
6692  "
6693)
6694
6695;; Reloading a DFmode value stored in integer regs to memory can require a
6696;; scratch reg.
6697(define_expand "reload_outdf"
6698  [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6699   (match_operand:DF 1 "s_register_operand" "r")
6700   (match_operand:SI 2 "s_register_operand" "=&r")]
6701  "TARGET_THUMB2"
6702  "
6703  {
6704    enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6705
6706    if (code == REG)
6707      operands[2] = XEXP (operands[0], 0);
6708    else if (code == POST_INC || code == PRE_DEC)
6709      {
6710	operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6711	operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6712	emit_insn (gen_movdi (operands[0], operands[1]));
6713	DONE;
6714      }
6715    else if (code == PRE_INC)
6716      {
6717	rtx reg = XEXP (XEXP (operands[0], 0), 0);
6718
6719	emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6720	operands[2] = reg;
6721      }
6722    else if (code == POST_DEC)
6723      operands[2] = XEXP (XEXP (operands[0], 0), 0);
6724    else
6725      emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6726			     XEXP (XEXP (operands[0], 0), 1)));
6727
6728    emit_insn (gen_rtx_SET (VOIDmode,
6729			    replace_equiv_address (operands[0], operands[2]),
6730			    operands[1]));
6731
6732    if (code == POST_DEC)
6733      emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6734
6735    DONE;
6736  }"
6737)
6738
6739(define_insn "*movdf_soft_insn"
6740  [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6741	(match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6742  "TARGET_32BIT && TARGET_SOFT_FLOAT
6743   && (   register_operand (operands[0], DFmode)
6744       || register_operand (operands[1], DFmode))"
6745  "*
6746  switch (which_alternative)
6747    {
6748    case 0:
6749    case 1:
6750    case 2:
6751      return \"#\";
6752    default:
6753      return output_move_double (operands, true, NULL);
6754    }
6755  "
6756  [(set_attr "length" "8,12,16,8,8")
6757   (set_attr "type" "*,*,*,load2,store2")
6758   (set_attr "arm_pool_range" "*,*,*,1020,*")
6759   (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6760   (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6761   (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6762)
6763
6764;;; ??? This should have alternatives for constants.
6765;;; ??? This was originally identical to the movdi_insn pattern.
6766;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6767;;; thumb_reorg with a memory reference.
6768(define_insn "*thumb_movdf_insn"
6769  [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6770	(match_operand:DF 1 "general_operand"      "l, >,l,mF,l,*r"))]
6771  "TARGET_THUMB1
6772   && (   register_operand (operands[0], DFmode)
6773       || register_operand (operands[1], DFmode))"
6774  "*
6775  switch (which_alternative)
6776    {
6777    default:
6778    case 0:
6779      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6780	return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6781      return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6782    case 1:
6783      return \"ldmia\\t%1, {%0, %H0}\";
6784    case 2:
6785      return \"stmia\\t%0, {%1, %H1}\";
6786    case 3:
6787      return thumb_load_double_from_address (operands);
6788    case 4:
6789      operands[2] = gen_rtx_MEM (SImode,
6790				 plus_constant (Pmode,
6791						XEXP (operands[0], 0), 4));
6792      output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6793      return \"\";
6794    case 5:
6795      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6796	return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6797      return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6798    }
6799  "
6800  [(set_attr "length" "4,2,2,6,4,4")
6801   (set_attr "type" "*,load2,store2,load2,store2,*")
6802   (set_attr "insn" "*,*,*,*,*,mov")
6803   (set_attr "pool_range" "*,*,*,1018,*,*")]
6804)
6805
6806
6807;; load- and store-multiple insns
6808;; The ARM can load/store any set of registers, provided that they are in
6809;; ascending order, but these expanders assume a contiguous set.
6810
6811(define_expand "load_multiple"
6812  [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6813                          (match_operand:SI 1 "" ""))
6814                     (use (match_operand:SI 2 "" ""))])]
6815  "TARGET_32BIT"
6816{
6817  HOST_WIDE_INT offset = 0;
6818
6819  /* Support only fixed-point (core) registers.  */
6820  if (!CONST_INT_P (operands[2])
6821      || INTVAL (operands[2]) > 14
6822      || INTVAL (operands[2]) < 2
6823      || !MEM_P (operands[1])
6824      || !REG_P (operands[0])
6825      || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6826      || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6827    FAIL;
6828
6829  operands[3]
6830    = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
6831			     INTVAL (operands[2]),
6832			     force_reg (SImode, XEXP (operands[1], 0)),
6833			     FALSE, operands[1], &offset);
6834})
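;; As an illustration (register numbers arbitrary): a request to load three
;; consecutive registers starting at r4 from the address held in r0 expands
;; to a single block transfer along the lines of
;;   ldm	r0, {r4, r5, r6}
;; store_multiple below is the mirror image using stm.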
6835
6836(define_expand "store_multiple"
6837  [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6838                          (match_operand:SI 1 "" ""))
6839                     (use (match_operand:SI 2 "" ""))])]
6840  "TARGET_32BIT"
6841{
6842  HOST_WIDE_INT offset = 0;
6843
6844  /* Support only fixed-point (core) registers.  */
6845  if (!CONST_INT_P (operands[2])
6846      || INTVAL (operands[2]) > 14
6847      || INTVAL (operands[2]) < 2
6848      || !REG_P (operands[1])
6849      || !MEM_P (operands[0])
6850      || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6851      || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6852    FAIL;
6853
6854  operands[3]
6855    = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
6856			      INTVAL (operands[2]),
6857			      force_reg (SImode, XEXP (operands[0], 0)),
6858			      FALSE, operands[0], &offset);
6859})
6860
6861
6862;; Move a block of memory if it is word aligned and MORE than 2 words long.
6863;; We could let this apply to smaller blocks as well, but it clobbers so
6864;; many registers that there is then probably a better way.
6865
6866(define_expand "movmemqi"
6867  [(match_operand:BLK 0 "general_operand" "")
6868   (match_operand:BLK 1 "general_operand" "")
6869   (match_operand:SI 2 "const_int_operand" "")
6870   (match_operand:SI 3 "const_int_operand" "")]
6871  "TARGET_EITHER"
6872  "
6873  if (TARGET_32BIT)
6874    {
6875      if (arm_gen_movmemqi (operands))
6876        DONE;
6877      FAIL;
6878    }
6879  else /* TARGET_THUMB1 */
6880    {
6881      if (   INTVAL (operands[3]) != 4
6882          || INTVAL (operands[2]) > 48)
6883        FAIL;
6884
6885      thumb_expand_movmemqi (operands);
6886      DONE;
6887    }
6888  "
6889)
6890
6891;; Thumb block-move insns
6892
6893(define_insn "movmem12b"
6894  [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6895	(mem:SI (match_operand:SI 3 "register_operand" "1")))
6896   (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6897	(mem:SI (plus:SI (match_dup 3) (const_int 4))))
6898   (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6899	(mem:SI (plus:SI (match_dup 3) (const_int 8))))
6900   (set (match_operand:SI 0 "register_operand" "=l")
6901	(plus:SI (match_dup 2) (const_int 12)))
6902   (set (match_operand:SI 1 "register_operand" "=l")
6903	(plus:SI (match_dup 3) (const_int 12)))
6904   (clobber (match_scratch:SI 4 "=&l"))
6905   (clobber (match_scratch:SI 5 "=&l"))
6906   (clobber (match_scratch:SI 6 "=&l"))]
6907  "TARGET_THUMB1"
6908  "* return thumb_output_move_mem_multiple (3, operands);"
6909  [(set_attr "length" "4")
6910   ; This isn't entirely accurate...  It loads as well, but in terms of
6911   ; scheduling the following insn it is better to consider it as a store
6912   (set_attr "type" "store3")]
6913)
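;; thumb_output_move_mem_multiple copies through the scratch registers,
;; emitting, roughly (scratch numbering arbitrary),
;;   ldmia	r1!, {r3, r4, r5}
;;   stmia	r0!, {r3, r4, r5}
;; leaving both pointers just past the copied block, as the
;; (plus ... (const_int 12)) sets above require.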
6914
6915(define_insn "movmem8b"
6916  [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6917	(mem:SI (match_operand:SI 3 "register_operand" "1")))
6918   (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6919	(mem:SI (plus:SI (match_dup 3) (const_int 4))))
6920   (set (match_operand:SI 0 "register_operand" "=l")
6921	(plus:SI (match_dup 2) (const_int 8)))
6922   (set (match_operand:SI 1 "register_operand" "=l")
6923	(plus:SI (match_dup 3) (const_int 8)))
6924   (clobber (match_scratch:SI 4 "=&l"))
6925   (clobber (match_scratch:SI 5 "=&l"))]
6926  "TARGET_THUMB1"
6927  "* return thumb_output_move_mem_multiple (2, operands);"
6928  [(set_attr "length" "4")
6929   ; This isn't entirely accurate...  It loads as well, but in terms of
6930   ; scheduling the following insn it is better to consider it as a store
6931   (set_attr "type" "store2")]
6932)
6933
6934
6935
6936;; Compare & branch insns
6937;; The range calculations are as follows:
6938;; For forward branches, the address calculation returns the address of
6939;; the next instruction.  This is 2 beyond the branch instruction.
6940;; For backward branches, the address calculation returns the address of
6941;; the first instruction in this pattern (cmp).  This is 2 before the branch
6942;; instruction for the shortest sequence, and 4 before the branch instruction
6943;; if we have to jump around an unconditional branch.
6944;; The PC offset (+4) must then be added to the basic branch range.
6945;; So for forward branches we have
6946;;   (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6947;; And for backward branches we have
6948;;   (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6949;;
6950;; For a 'b'       pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6951;; For a 'b<cond>' pos_range = 254,  neg_range = -256  giving (-250 ->256).
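;; Worked example for 'b<cond>': forwards 254 - 2 + 4 = 256, backwards
;; -256 - (-2) + 4 = -250, giving the (-250, 256) window tested in the
;; length attributes below.  Patterns that emit an extra 2-byte insn ahead
;; of the branch (a mov or str, say) tighten the backward bounds to -248
;; and -2038, as in the multi-alternative compare-and-branch patterns
;; further down.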
6952
6953(define_expand "cbranchsi4"
6954  [(set (pc) (if_then_else
6955	      (match_operator 0 "expandable_comparison_operator"
6956	       [(match_operand:SI 1 "s_register_operand" "")
6957	        (match_operand:SI 2 "nonmemory_operand" "")])
6958	      (label_ref (match_operand 3 "" ""))
6959	      (pc)))]
6960  "TARGET_EITHER"
6961  "
6962  if (!TARGET_THUMB1)
6963    {
6964      if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
6965        FAIL;
6966      emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6967				      operands[3]));
6968      DONE;
6969    }
6970  if (thumb1_cmpneg_operand (operands[2], SImode))
6971    {
6972      emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6973					      operands[3], operands[0]));
6974      DONE;
6975    }
6976  if (!thumb1_cmp_operand (operands[2], SImode))
6977    operands[2] = force_reg (SImode, operands[2]);
6978  ")
6979
6980;; A pattern to recognize a special situation and optimize for it.
6981;; On the thumb, zero-extension from memory is preferable to sign-extension
6982;; due to the available addressing modes.  Hence, convert a signed comparison
6983;; with zero into an unsigned comparison with 127 if possible.
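;; For example, a test such as
;;   if (*(signed char *) p >= 0) ...
;; is handled by zero-extending the byte with ldrb and branching on
;; (unsigned) x <= 127: the zero-extended byte lies in [0, 255] and is
;; non-negative as a signed char exactly when it is at most 127.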
6984(define_expand "cbranchqi4"
6985  [(set (pc) (if_then_else
6986	      (match_operator 0 "lt_ge_comparison_operator"
6987	       [(match_operand:QI 1 "memory_operand" "")
6988	        (match_operand:QI 2 "const0_operand" "")])
6989	      (label_ref (match_operand 3 "" ""))
6990	      (pc)))]
6991  "TARGET_THUMB1"
6992{
6993  rtx xops[4];
6994  xops[1] = gen_reg_rtx (SImode);
6995  emit_insn (gen_zero_extendqisi2 (xops[1], operands[1]));
6996  xops[2] = GEN_INT (127);
6997  xops[0] = gen_rtx_fmt_ee (GET_CODE (operands[0]) == GE ? LEU : GTU,
6998			    VOIDmode, xops[1], xops[2]);
6999  xops[3] = operands[3];
7000  emit_insn (gen_cbranchsi4 (xops[0], xops[1], xops[2], xops[3]));
7001  DONE;
7002})
7003
7004(define_expand "cbranchsf4"
7005  [(set (pc) (if_then_else
7006	      (match_operator 0 "expandable_comparison_operator"
7007	       [(match_operand:SF 1 "s_register_operand" "")
7008	        (match_operand:SF 2 "arm_float_compare_operand" "")])
7009	      (label_ref (match_operand 3 "" ""))
7010	      (pc)))]
7011  "TARGET_32BIT && TARGET_HARD_FLOAT"
7012  "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7013				   operands[3])); DONE;"
7014)
7015
7016(define_expand "cbranchdf4"
7017  [(set (pc) (if_then_else
7018	      (match_operator 0 "expandable_comparison_operator"
7019	       [(match_operand:DF 1 "s_register_operand" "")
7020	        (match_operand:DF 2 "arm_float_compare_operand" "")])
7021	      (label_ref (match_operand 3 "" ""))
7022	      (pc)))]
7023  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7024  "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7025				   operands[3])); DONE;"
7026)
7027
7028(define_expand "cbranchdi4"
7029  [(set (pc) (if_then_else
7030	      (match_operator 0 "expandable_comparison_operator"
7031	       [(match_operand:DI 1 "s_register_operand" "")
7032	        (match_operand:DI 2 "cmpdi_operand" "")])
7033	      (label_ref (match_operand 3 "" ""))
7034	      (pc)))]
7035  "TARGET_32BIT"
7036  "{
7037     if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7038       FAIL;
7039     emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7040				       operands[3]));
7041     DONE;
7042   }"
7043)
7044
7045(define_insn "cbranchsi4_insn"
7046  [(set (pc) (if_then_else
7047	      (match_operator 0 "arm_comparison_operator"
7048	       [(match_operand:SI 1 "s_register_operand" "l,l*h")
7049	        (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
7050	      (label_ref (match_operand 3 "" ""))
7051	      (pc)))]
7052  "TARGET_THUMB1"
7053{
7054  rtx t = cfun->machine->thumb1_cc_insn;
7055  if (t != NULL_RTX)
7056    {
7057      if (!rtx_equal_p (cfun->machine->thumb1_cc_op0, operands[1])
7058	  || !rtx_equal_p (cfun->machine->thumb1_cc_op1, operands[2]))
7059	t = NULL_RTX;
7060      if (cfun->machine->thumb1_cc_mode == CC_NOOVmode)
7061	{
7062	  if (!noov_comparison_operator (operands[0], VOIDmode))
7063	    t = NULL_RTX;
7064	}
7065      else if (cfun->machine->thumb1_cc_mode != CCmode)
7066	t = NULL_RTX;
7067    }
7068  if (t == NULL_RTX)
7069    {
7070      output_asm_insn ("cmp\t%1, %2", operands);
7071      cfun->machine->thumb1_cc_insn = insn;
7072      cfun->machine->thumb1_cc_op0 = operands[1];
7073      cfun->machine->thumb1_cc_op1 = operands[2];
7074      cfun->machine->thumb1_cc_mode = CCmode;
7075    }
7076  else
7077    /* Ensure we emit the right type of condition code on the jump.  */
7078    XEXP (operands[0], 0) = gen_rtx_REG (cfun->machine->thumb1_cc_mode,
7079					 CC_REGNUM);
7080
7081  switch (get_attr_length (insn))
7082    {
7083    case 4:  return \"b%d0\\t%l3\";
7084    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7085    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7086    }
7087}
7088  [(set (attr "far_jump")
7089        (if_then_else
7090	    (eq_attr "length" "8")
7091	    (const_string "yes")
7092            (const_string "no")))
7093   (set (attr "length")
7094        (if_then_else
7095	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7096	         (le (minus (match_dup 3) (pc)) (const_int 256)))
7097	    (const_int 4)
7098	    (if_then_else
7099	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7100		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
7101		(const_int 6)
7102		(const_int 8))))]
7103)
7104
7105(define_insn "cbranchsi4_scratch"
7106  [(set (pc) (if_then_else
7107	      (match_operator 4 "arm_comparison_operator"
7108	       [(match_operand:SI 1 "s_register_operand" "l,0")
7109	        (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
7110	      (label_ref (match_operand 3 "" ""))
7111	      (pc)))
7112   (clobber (match_scratch:SI 0 "=l,l"))]
7113  "TARGET_THUMB1"
7114  "*
7115  output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
7116
7117  switch (get_attr_length (insn))
7118    {
7119    case 4:  return \"b%d4\\t%l3\";
7120    case 6:  return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7121    default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7122    }
7123  "
7124  [(set (attr "far_jump")
7125        (if_then_else
7126	    (eq_attr "length" "8")
7127	    (const_string "yes")
7128            (const_string "no")))
7129   (set (attr "length")
7130        (if_then_else
7131	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7132	         (le (minus (match_dup 3) (pc)) (const_int 256)))
7133	    (const_int 4)
7134	    (if_then_else
7135	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7136		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
7137		(const_int 6)
7138		(const_int 8))))]
7139)
7140
7141(define_insn "*negated_cbranchsi4"
7142  [(set (pc)
7143	(if_then_else
7144	 (match_operator 0 "equality_operator"
7145	  [(match_operand:SI 1 "s_register_operand" "l")
7146	   (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
7147	 (label_ref (match_operand 3 "" ""))
7148	 (pc)))]
7149  "TARGET_THUMB1"
7150  "*
7151  output_asm_insn (\"cmn\\t%1, %2\", operands);
7152  switch (get_attr_length (insn))
7153    {
7154    case 4:  return \"b%d0\\t%l3\";
7155    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7156    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7157    }
7158  "
7159  [(set (attr "far_jump")
7160        (if_then_else
7161	    (eq_attr "length" "8")
7162	    (const_string "yes")
7163            (const_string "no")))
7164   (set (attr "length")
7165        (if_then_else
7166	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7167	         (le (minus (match_dup 3) (pc)) (const_int 256)))
7168	    (const_int 4)
7169	    (if_then_else
7170	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7171		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
7172		(const_int 6)
7173		(const_int 8))))]
7174)
7175
7176(define_insn "*tbit_cbranch"
7177  [(set (pc)
7178	(if_then_else
7179	 (match_operator 0 "equality_operator"
7180	  [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7181			    (const_int 1)
7182			    (match_operand:SI 2 "const_int_operand" "i"))
7183	   (const_int 0)])
7184	 (label_ref (match_operand 3 "" ""))
7185	 (pc)))
7186   (clobber (match_scratch:SI 4 "=l"))]
7187  "TARGET_THUMB1"
7188  "*
7189  {
7190  rtx op[3];
7191  op[0] = operands[4];
7192  op[1] = operands[1];
7193  op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
7194
7195  output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7196  switch (get_attr_length (insn))
7197    {
7198    case 4:  return \"b%d0\\t%l3\";
7199    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7200    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7201    }
7202  }"
7203  [(set (attr "far_jump")
7204        (if_then_else
7205	    (eq_attr "length" "8")
7206	    (const_string "yes")
7207            (const_string "no")))
7208   (set (attr "length")
7209        (if_then_else
7210	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7211	         (le (minus (match_dup 3) (pc)) (const_int 256)))
7212	    (const_int 4)
7213	    (if_then_else
7214	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7215		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
7216		(const_int 6)
7217		(const_int 8))))]
7218)
7219
7220(define_insn "*tlobits_cbranch"
7221  [(set (pc)
7222	(if_then_else
7223	 (match_operator 0 "equality_operator"
7224	  [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
7225			    (match_operand:SI 2 "const_int_operand" "i")
7226			    (const_int 0))
7227	   (const_int 0)])
7228	 (label_ref (match_operand 3 "" ""))
7229	 (pc)))
7230   (clobber (match_scratch:SI 4 "=l"))]
7231  "TARGET_THUMB1"
7232  "*
7233  {
7234  rtx op[3];
7235  op[0] = operands[4];
7236  op[1] = operands[1];
7237  op[2] = GEN_INT (32 - INTVAL (operands[2]));
7238
7239  output_asm_insn (\"lsl\\t%0, %1, %2\", op);
7240  switch (get_attr_length (insn))
7241    {
7242    case 4:  return \"b%d0\\t%l3\";
7243    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7244    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7245    }
7246  }"
7247  [(set (attr "far_jump")
7248        (if_then_else
7249	    (eq_attr "length" "8")
7250	    (const_string "yes")
7251            (const_string "no")))
7252   (set (attr "length")
7253        (if_then_else
7254	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7255	         (le (minus (match_dup 3) (pc)) (const_int 256)))
7256	    (const_int 4)
7257	    (if_then_else
7258	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7259		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
7260		(const_int 6)
7261		(const_int 8))))]
7262)
7263
7264(define_insn "*tstsi3_cbranch"
7265  [(set (pc)
7266	(if_then_else
7267	 (match_operator 3 "equality_operator"
7268	  [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
7269		   (match_operand:SI 1 "s_register_operand" "l"))
7270	   (const_int 0)])
7271	 (label_ref (match_operand 2 "" ""))
7272	 (pc)))]
7273  "TARGET_THUMB1"
7274  "*
7275  {
7276  output_asm_insn (\"tst\\t%0, %1\", operands);
7277  switch (get_attr_length (insn))
7278    {
7279    case 4:  return \"b%d3\\t%l2\";
7280    case 6:  return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
7281    default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
7282    }
7283  }"
7284  [(set (attr "far_jump")
7285        (if_then_else
7286	    (eq_attr "length" "8")
7287	    (const_string "yes")
7288            (const_string "no")))
7289   (set (attr "length")
7290        (if_then_else
7291	    (and (ge (minus (match_dup 2) (pc)) (const_int -250))
7292	         (le (minus (match_dup 2) (pc)) (const_int 256)))
7293	    (const_int 4)
7294	    (if_then_else
7295	        (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
7296		     (le (minus (match_dup 2) (pc)) (const_int 2048)))
7297		(const_int 6)
7298		(const_int 8))))]
7299)
7300
7301(define_insn "*cbranchne_decr1"
7302  [(set (pc)
7303	(if_then_else (match_operator 3 "equality_operator"
7304		       [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7305		        (const_int 0)])
7306		      (label_ref (match_operand 4 "" ""))
7307		      (pc)))
7308   (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7309	(plus:SI (match_dup 2) (const_int -1)))
7310   (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7311  "TARGET_THUMB1"
7312  "*
7313   {
7314     rtx cond[2];
7315     cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7316				? GEU : LTU),
7317			       VOIDmode, operands[2], const1_rtx);
7318     cond[1] = operands[4];
7319
7320     if (which_alternative == 0)
7321       output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7322     else if (which_alternative == 1)
7323       {
7324	 /* We must provide an alternative for a hi reg because reload
7325	    cannot handle output reloads on a jump instruction, but we
7326	    can't subtract into that.  Fortunately a mov from lo to hi
7327	    does not clobber the condition codes.  */
7328	 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7329	 output_asm_insn (\"mov\\t%0, %1\", operands);
7330       }
7331     else
7332       {
7333	 /* Similarly, but the target is memory.  */
7334	 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7335	 output_asm_insn (\"str\\t%1, %0\", operands);
7336       }
7337
7338     switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7339       {
7340	 case 4:
7341	   output_asm_insn (\"b%d0\\t%l1\", cond);
7342	   return \"\";
7343	 case 6:
7344	   output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7345	   return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7346	 default:
7347	   output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7348	   return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7349       }
7350   }
7351  "
7352  [(set (attr "far_jump")
7353        (if_then_else
7354	    (ior (and (eq (symbol_ref ("which_alternative"))
7355	                  (const_int 0))
7356		      (eq_attr "length" "8"))
7357		 (eq_attr "length" "10"))
7358	    (const_string "yes")
7359            (const_string "no")))
7360   (set_attr_alternative "length"
7361      [
7362       ;; Alternative 0
7363       (if_then_else
7364	 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7365	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7366	 (const_int 4)
7367	 (if_then_else
7368	   (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7369		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7370	   (const_int 6)
7371	   (const_int 8)))
7372       ;; Alternative 1
7373       (if_then_else
7374	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7375	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7376	 (const_int 6)
7377	 (if_then_else
7378	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7379		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7380	   (const_int 8)
7381	   (const_int 10)))
7382       ;; Alternative 2
7383       (if_then_else
7384	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7385	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7386	 (const_int 6)
7387	 (if_then_else
7388	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7389		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7390	   (const_int 8)
7391	   (const_int 10)))
7392       ;; Alternative 3
7393       (if_then_else
7394	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7395	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7396	 (const_int 6)
7397	 (if_then_else
7398	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7399		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7400	   (const_int 8)
7401	   (const_int 10)))])]
7402)
7403
7404(define_insn "*addsi3_cbranch"
7405  [(set (pc)
7406	(if_then_else
7407	 (match_operator 4 "arm_comparison_operator"
7408	  [(plus:SI
7409	    (match_operand:SI 2 "s_register_operand" "%0,l,*l,1,1,1")
7410	    (match_operand:SI 3 "reg_or_int_operand" "IJ,lL,*l,lIJ,lIJ,lIJ"))
7411	   (const_int 0)])
7412	 (label_ref (match_operand 5 "" ""))
7413	 (pc)))
7414   (set
7415    (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7416    (plus:SI (match_dup 2) (match_dup 3)))
7417   (clobber (match_scratch:SI 1 "=X,X,l,l,&l,&l"))]
7418  "TARGET_THUMB1
7419   && (GET_CODE (operands[4]) == EQ
7420       || GET_CODE (operands[4]) == NE
7421       || GET_CODE (operands[4]) == GE
7422       || GET_CODE (operands[4]) == LT)"
7423  "*
7424   {
7425     rtx cond[3];
7426
7427     cond[0] = (which_alternative < 2) ? operands[0] : operands[1];
7428     cond[1] = operands[2];
7429     cond[2] = operands[3];
7430
7431     if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
7432       output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7433     else
7434       output_asm_insn (\"add\\t%0, %1, %2\", cond);
7435
7436     if (which_alternative >= 2
7437	 && which_alternative < 4)
7438       output_asm_insn (\"mov\\t%0, %1\", operands);
7439     else if (which_alternative >= 4)
7440       output_asm_insn (\"str\\t%1, %0\", operands);
7441
7442     switch (get_attr_length (insn) - ((which_alternative >= 2) ? 2 : 0))
7443       {
7444	 case 4:
7445	   return \"b%d4\\t%l5\";
7446	 case 6:
7447	   return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7448	 default:
7449	   return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7450       }
7451   }
7452  "
7453  [(set (attr "far_jump")
7454        (if_then_else
7455	    (ior (and (lt (symbol_ref ("which_alternative"))
7456	                  (const_int 2))
7457		      (eq_attr "length" "8"))
7458		 (eq_attr "length" "10"))
7459	    (const_string "yes")
7460            (const_string "no")))
7461   (set (attr "length")
7462     (if_then_else
7463       (lt (symbol_ref ("which_alternative"))
7464		       (const_int 2))
7465       (if_then_else
7466	 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7467	      (le (minus (match_dup 5) (pc)) (const_int 256)))
7468	 (const_int 4)
7469	 (if_then_else
7470	   (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7471		(le (minus (match_dup 5) (pc)) (const_int 2048)))
7472	   (const_int 6)
7473	   (const_int 8)))
7474       (if_then_else
7475	 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7476	      (le (minus (match_dup 5) (pc)) (const_int 256)))
7477	 (const_int 6)
7478	 (if_then_else
7479	   (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7480		(le (minus (match_dup 5) (pc)) (const_int 2048)))
7481	   (const_int 8)
7482	   (const_int 10)))))]
7483)
7484
7485(define_insn "*addsi3_cbranch_scratch"
7486  [(set (pc)
7487	(if_then_else
7488	 (match_operator 3 "arm_comparison_operator"
7489	  [(plus:SI
7490	    (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7491	    (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7492	   (const_int 0)])
7493	 (label_ref (match_operand 4 "" ""))
7494	 (pc)))
7495   (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7496  "TARGET_THUMB1
7497   && (GET_CODE (operands[3]) == EQ
7498       || GET_CODE (operands[3]) == NE
7499       || GET_CODE (operands[3]) == GE
7500       || GET_CODE (operands[3]) == LT)"
7501  "*
7502   {
7503     switch (which_alternative)
7504       {
7505       case 0:
7506	 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7507	 break;
7508       case 1:
7509	 output_asm_insn (\"cmn\t%1, %2\", operands);
7510	 break;
7511       case 2:
7512	 if (INTVAL (operands[2]) < 0)
7513	   output_asm_insn (\"sub\t%0, %1, %2\", operands);
7514	 else
7515	   output_asm_insn (\"add\t%0, %1, %2\", operands);
7516	 break;
7517       case 3:
7518	 if (INTVAL (operands[2]) < 0)
7519	   output_asm_insn (\"sub\t%0, %0, %2\", operands);
7520	 else
7521	   output_asm_insn (\"add\t%0, %0, %2\", operands);
7522	 break;
7523       }
7524
7525     switch (get_attr_length (insn))
7526       {
7527	 case 4:
7528	   return \"b%d3\\t%l4\";
7529	 case 6:
7530	   return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7531	 default:
7532	   return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7533       }
7534   }
7535  "
7536  [(set (attr "far_jump")
7537        (if_then_else
7538	    (eq_attr "length" "8")
7539	    (const_string "yes")
7540            (const_string "no")))
7541   (set (attr "length")
7542       (if_then_else
7543	 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7544	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7545	 (const_int 4)
7546	 (if_then_else
7547	   (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7548		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7549	   (const_int 6)
7550	   (const_int 8))))]
7551)
7552
7553
7554;; Comparison and test insns
7555
7556(define_insn "*arm_cmpsi_insn"
7557  [(set (reg:CC CC_REGNUM)
7558	(compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r")
7559		    (match_operand:SI 1 "arm_add_operand"    "Py,r,rI,L")))]
7560  "TARGET_32BIT"
7561  "@
7562   cmp%?\\t%0, %1
7563   cmp%?\\t%0, %1
7564   cmp%?\\t%0, %1
7565   cmn%?\\t%0, #%n1"
7566  [(set_attr "conds" "set")
7567   (set_attr "arch" "t2,t2,any,any")
7568   (set_attr "length" "2,2,4,4")
7569   (set_attr "predicable" "yes")
7570   (set_attr "type" "*,*,*,simple_alu_imm")]
7571)
7572
7573(define_insn "*cmpsi_shiftsi"
7574  [(set (reg:CC CC_REGNUM)
7575	(compare:CC (match_operand:SI   0 "s_register_operand" "r,r")
7576		    (match_operator:SI  3 "shift_operator"
7577		     [(match_operand:SI 1 "s_register_operand" "r,r")
7578		      (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
7579  "TARGET_32BIT"
7580  "cmp%?\\t%0, %1%S3"
7581  [(set_attr "conds" "set")
7582   (set_attr "shift" "1")
7583   (set_attr "arch" "32,a")
7584   (set_attr "type" "alu_shift,alu_shift_reg")])
7585
7586(define_insn "*cmpsi_shiftsi_swp"
7587  [(set (reg:CC_SWP CC_REGNUM)
7588	(compare:CC_SWP (match_operator:SI 3 "shift_operator"
7589			 [(match_operand:SI 1 "s_register_operand" "r,r")
7590			  (match_operand:SI 2 "shift_amount_operand" "M,rM")])
7591			(match_operand:SI 0 "s_register_operand" "r,r")))]
7592  "TARGET_32BIT"
7593  "cmp%?\\t%0, %1%S3"
7594  [(set_attr "conds" "set")
7595   (set_attr "shift" "1")
7596   (set_attr "arch" "32,a")
7597   (set_attr "type" "alu_shift,alu_shift_reg")])
7598
7599(define_insn "*arm_cmpsi_negshiftsi_si"
7600  [(set (reg:CC_Z CC_REGNUM)
7601	(compare:CC_Z
7602	 (neg:SI (match_operator:SI 1 "shift_operator"
7603		    [(match_operand:SI 2 "s_register_operand" "r")
7604		     (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7605	 (match_operand:SI 0 "s_register_operand" "r")))]
7606  "TARGET_ARM"
7607  "cmn%?\\t%0, %2%S1"
7608  [(set_attr "conds" "set")
7609   (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7610				    (const_string "alu_shift")
7611				    (const_string "alu_shift_reg")))
7612   (set_attr "predicable" "yes")]
7613)
7614
7615;; DImode comparisons.  The generic code generates branches that
7616;; if-conversion cannot reduce to a conditional compare, so we do
7617;; that directly.
7618
7619(define_insn "*arm_cmpdi_insn"
7620  [(set (reg:CC_NCV CC_REGNUM)
7621	(compare:CC_NCV (match_operand:DI 0 "s_register_operand" "r")
7622			(match_operand:DI 1 "arm_di_operand"	   "rDi")))
7623   (clobber (match_scratch:SI 2 "=r"))]
7624  "TARGET_32BIT"
7625  "cmp\\t%Q0, %Q1\;sbcs\\t%2, %R0, %R1"
7626  [(set_attr "conds" "set")
7627   (set_attr "length" "8")]
7628)
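;; In the sequence above, CMP subtracts the low words (setting the flags),
;; and SBCS then subtracts the high words with borrow into the scratch
;; register, so the resulting N, V and C flags describe the whole 64-bit
;; comparison (hence CC_NCV mode -- Z is not meaningful here).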
7629
7630(define_insn "*arm_cmpdi_unsigned"
7631  [(set (reg:CC_CZ CC_REGNUM)
7632	(compare:CC_CZ (match_operand:DI 0 "s_register_operand" "r,r")
7633		       (match_operand:DI 1 "arm_di_operand"	"rDi,rDi")))]
7634  "TARGET_32BIT"
7635  "cmp\\t%R0, %R1\;it eq\;cmpeq\\t%Q0, %Q1"
7636  [(set_attr "conds" "set")
7637   (set_attr "arch" "a,t2")
7638   (set_attr "length" "8,10")]
7639)
7640
7641(define_insn "*arm_cmpdi_zero"
7642  [(set (reg:CC_Z CC_REGNUM)
7643	(compare:CC_Z (match_operand:DI 0 "s_register_operand" "r")
7644		      (const_int 0)))
7645   (clobber (match_scratch:SI 1 "=r"))]
7646  "TARGET_32BIT"
7647  "orr%.\\t%1, %Q0, %R0"
7648  [(set_attr "conds" "set")]
7649)
7650
7651(define_insn "*thumb_cmpdi_zero"
7652  [(set (reg:CC_Z CC_REGNUM)
7653	(compare:CC_Z (match_operand:DI 0 "s_register_operand" "l")
7654		      (const_int 0)))
7655   (clobber (match_scratch:SI 1 "=l"))]
7656  "TARGET_THUMB1"
7657  "orr\\t%1, %Q0, %R0"
7658  [(set_attr "conds" "set")
7659   (set_attr "length" "2")]
7660)
7661
7662; This insn allows redundant compares to be removed by cse; nothing should
7663; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7664; is deleted later on. The match_dup will match the mode here, so that
7665; mode changes of the condition codes aren't lost by this even though we don't
7666; specify what they are.
7667
7668(define_insn "*deleted_compare"
7669  [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7670  "TARGET_32BIT"
7671  "\\t%@ deleted compare"
7672  [(set_attr "conds" "set")
7673   (set_attr "length" "0")]
7674)
7675
7676
7677;; Conditional branch insns
7678
7679(define_expand "cbranch_cc"
7680  [(set (pc)
7681	(if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7682					    (match_operand 2 "" "")])
7683		      (label_ref (match_operand 3 "" ""))
7684		      (pc)))]
7685  "TARGET_32BIT"
7686  "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7687				      operands[1], operands[2], NULL_RTX);
7688   operands[2] = const0_rtx;"
7689)
7690
7691;;
7692;; Patterns to match conditional branch insns.
7693;;
7694
7695(define_insn "arm_cond_branch"
7696  [(set (pc)
7697	(if_then_else (match_operator 1 "arm_comparison_operator"
7698		       [(match_operand 2 "cc_register" "") (const_int 0)])
7699		      (label_ref (match_operand 0 "" ""))
7700		      (pc)))]
7701  "TARGET_32BIT"
7702  "*
7703  if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7704    {
7705      arm_ccfsm_state += 2;
7706      return \"\";
7707    }
7708  return \"b%d1\\t%l0\";
7709  "
7710  [(set_attr "conds" "use")
7711   (set_attr "type" "branch")
7712   (set (attr "length")
7713	(if_then_else
7714	   (and (match_test "TARGET_THUMB2")
7715		(and (ge (minus (match_dup 0) (pc)) (const_int -250))
7716		     (le (minus (match_dup 0) (pc)) (const_int 256))))
7717	   (const_int 2)
7718	   (const_int 4)))]
7719)
7720
7721(define_insn "*arm_cond_branch_reversed"
7722  [(set (pc)
7723	(if_then_else (match_operator 1 "arm_comparison_operator"
7724		       [(match_operand 2 "cc_register" "") (const_int 0)])
7725		      (pc)
7726		      (label_ref (match_operand 0 "" ""))))]
7727  "TARGET_32BIT"
7728  "*
7729  if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7730    {
7731      arm_ccfsm_state += 2;
7732      return \"\";
7733    }
7734  return \"b%D1\\t%l0\";
7735  "
7736  [(set_attr "conds" "use")
7737   (set_attr "type" "branch")
7738   (set (attr "length")
7739	(if_then_else
7740	   (and (match_test "TARGET_THUMB2")
7741		(and (ge (minus (match_dup 0) (pc)) (const_int -250))
7742		     (le (minus (match_dup 0) (pc)) (const_int 256))))
7743	   (const_int 2)
7744	   (const_int 4)))]
7745)
7746
7747
7748
7749; scc insns
7750
7751(define_expand "cstore_cc"
7752  [(set (match_operand:SI 0 "s_register_operand" "")
7753	(match_operator:SI 1 "" [(match_operand 2 "" "")
7754				 (match_operand 3 "" "")]))]
7755  "TARGET_32BIT"
7756  "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7757				      operands[2], operands[3], NULL_RTX);
7758   operands[3] = const0_rtx;"
7759)
7760
7761(define_insn "*mov_scc"
7762  [(set (match_operand:SI 0 "s_register_operand" "=r")
7763	(match_operator:SI 1 "arm_comparison_operator"
7764	 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7765  "TARGET_ARM"
7766  "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7767  [(set_attr "conds" "use")
7768   (set_attr "insn" "mov")
7769   (set_attr "length" "8")]
7770)
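;; For example, with an EQ operator this emits the predicated pair
;; (destination register arbitrary)
;;   movne	r0, #0
;;   moveq	r0, #1
;; since %D1 is the reversed condition and %d1 the condition itself.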
7771
7772(define_insn "*mov_negscc"
7773  [(set (match_operand:SI 0 "s_register_operand" "=r")
7774	(neg:SI (match_operator:SI 1 "arm_comparison_operator"
7775		 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7776  "TARGET_ARM"
7777  "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7778  [(set_attr "conds" "use")
7779   (set_attr "insn" "mov")
7780   (set_attr "length" "8")]
7781)
7782
7783(define_insn "*mov_notscc"
7784  [(set (match_operand:SI 0 "s_register_operand" "=r")
7785	(not:SI (match_operator:SI 1 "arm_comparison_operator"
7786		 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7787  "TARGET_ARM"
7788  "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7789  [(set_attr "conds" "use")
7790   (set_attr "insn" "mov")
7791   (set_attr "length" "8")]
7792)
7793
7794(define_expand "cstoresi4"
7795  [(set (match_operand:SI 0 "s_register_operand" "")
7796	(match_operator:SI 1 "expandable_comparison_operator"
7797	 [(match_operand:SI 2 "s_register_operand" "")
7798	  (match_operand:SI 3 "reg_or_int_operand" "")]))]
7799  "TARGET_32BIT || TARGET_THUMB1"
7800  "{
7801  rtx op3, scratch, scratch2;
7802
7803  if (!TARGET_THUMB1)
7804    {
7805      if (!arm_add_operand (operands[3], SImode))
7806	operands[3] = force_reg (SImode, operands[3]);
7807      emit_insn (gen_cstore_cc (operands[0], operands[1],
7808				operands[2], operands[3]));
7809      DONE;
7810    }
7811
7812  if (operands[3] == const0_rtx)
7813    {
7814      switch (GET_CODE (operands[1]))
7815	{
7816	case EQ:
7817	  emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7818	  break;
7819
7820	case NE:
7821	  emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7822	  break;
7823
7824	case LE:
7825          scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7826				  NULL_RTX, 0, OPTAB_WIDEN);
7827          scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7828				  NULL_RTX, 0, OPTAB_WIDEN);
7829          expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7830			operands[0], 1, OPTAB_WIDEN);
7831	  break;
7832
7833        case GE:
7834          scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7835				 NULL_RTX, 1);
7836          expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7837			operands[0], 1, OPTAB_WIDEN);
7838          break;
7839
7840        case GT:
7841          scratch = expand_binop (SImode, ashr_optab, operands[2],
7842				  GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7843          scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7844				  NULL_RTX, 0, OPTAB_WIDEN);
7845          expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7846			0, OPTAB_WIDEN);
7847          break;
7848
7849	/* LT is handled by generic code.  No need for unsigned with 0.  */
7850	default:
7851	  FAIL;
7852	}
7853      DONE;
7854    }
7855
7856  switch (GET_CODE (operands[1]))
7857    {
7858    case EQ:
7859      scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7860			      NULL_RTX, 0, OPTAB_WIDEN);
7861      emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7862      break;
7863
7864    case NE:
7865      scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7866			      NULL_RTX, 0, OPTAB_WIDEN);
7867      emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7868      break;
7869
7870    case LE:
7871      op3 = force_reg (SImode, operands[3]);
7872
7873      scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7874			      NULL_RTX, 1, OPTAB_WIDEN);
7875      scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7876			      NULL_RTX, 0, OPTAB_WIDEN);
7877      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7878					  op3, operands[2]));
7879      break;
7880
7881    case GE:
7882      op3 = operands[3];
7883      if (!thumb1_cmp_operand (op3, SImode))
7884        op3 = force_reg (SImode, op3);
7885      scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7886			      NULL_RTX, 0, OPTAB_WIDEN);
7887      scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7888			       NULL_RTX, 1, OPTAB_WIDEN);
7889      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7890					  operands[2], op3));
7891      break;
7892
7893    case LEU:
7894      op3 = force_reg (SImode, operands[3]);
7895      scratch = force_reg (SImode, const0_rtx);
7896      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7897					  op3, operands[2]));
7898      break;
7899
7900    case GEU:
7901      op3 = operands[3];
7902      if (!thumb1_cmp_operand (op3, SImode))
7903        op3 = force_reg (SImode, op3);
7904      scratch = force_reg (SImode, const0_rtx);
7905      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7906					  operands[2], op3));
7907      break;
7908
7909    case LTU:
7910      op3 = operands[3];
7911      if (!thumb1_cmp_operand (op3, SImode))
7912        op3 = force_reg (SImode, op3);
7913      scratch = gen_reg_rtx (SImode);
7914      emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7915      break;
7916
7917    case GTU:
7918      op3 = force_reg (SImode, operands[3]);
7919      scratch = gen_reg_rtx (SImode);
7920      emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7921      break;
7922
7923    /* No good sequences for GT, LT.  */
7924    default:
7925      FAIL;
7926    }
7927  DONE;
7928}")
7929
7930(define_expand "cstoresf4"
7931  [(set (match_operand:SI 0 "s_register_operand" "")
7932	(match_operator:SI 1 "expandable_comparison_operator"
7933	 [(match_operand:SF 2 "s_register_operand" "")
7934	  (match_operand:SF 3 "arm_float_compare_operand" "")]))]
7935  "TARGET_32BIT && TARGET_HARD_FLOAT"
7936  "emit_insn (gen_cstore_cc (operands[0], operands[1],
7937			     operands[2], operands[3])); DONE;"
7938)
7939
7940(define_expand "cstoredf4"
7941  [(set (match_operand:SI 0 "s_register_operand" "")
7942	(match_operator:SI 1 "expandable_comparison_operator"
7943	 [(match_operand:DF 2 "s_register_operand" "")
7944	  (match_operand:DF 3 "arm_float_compare_operand" "")]))]
7945  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7946  "emit_insn (gen_cstore_cc (operands[0], operands[1],
7947			     operands[2], operands[3])); DONE;"
7948)
7949
7950(define_expand "cstoredi4"
7951  [(set (match_operand:SI 0 "s_register_operand" "")
7952	(match_operator:SI 1 "expandable_comparison_operator"
7953	 [(match_operand:DI 2 "s_register_operand" "")
7954	  (match_operand:DI 3 "cmpdi_operand" "")]))]
7955  "TARGET_32BIT"
7956  "{
7957     if (!arm_validize_comparison (&operands[1],
7958     				   &operands[2],
7959				   &operands[3]))
7960       FAIL;
7961     emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7962		      	         operands[3]));
7963     DONE;
7964   }"
7965)
7966
7967(define_expand "cstoresi_eq0_thumb1"
7968  [(parallel
7969    [(set (match_operand:SI 0 "s_register_operand" "")
7970	  (eq:SI (match_operand:SI 1 "s_register_operand" "")
7971		 (const_int 0)))
7972     (clobber (match_dup:SI 2))])]
7973  "TARGET_THUMB1"
7974  "operands[2] = gen_reg_rtx (SImode);"
7975)
7976
7977(define_expand "cstoresi_ne0_thumb1"
7978  [(parallel
7979    [(set (match_operand:SI 0 "s_register_operand" "")
7980	  (ne:SI (match_operand:SI 1 "s_register_operand" "")
7981		 (const_int 0)))
7982     (clobber (match_dup:SI 2))])]
7983  "TARGET_THUMB1"
7984  "operands[2] = gen_reg_rtx (SImode);"
7985)
7986
7987(define_insn "*cstoresi_eq0_thumb1_insn"
7988  [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
7989	(eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
7990	       (const_int 0)))
7991   (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
7992  "TARGET_THUMB1"
7993  "@
7994   neg\\t%0, %1\;adc\\t%0, %0, %1
7995   neg\\t%2, %1\;adc\\t%0, %1, %2"
7996  [(set_attr "length" "4")]
7997)
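;; The NEG/ADC idiom works because NEG (RSBS from zero) sets the carry flag
;; exactly when its operand is zero, so the following ADC computes
;; (-x) + x + C = C, i.e. 1 for x == 0 and 0 otherwise.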
7998
7999(define_insn "*cstoresi_ne0_thumb1_insn"
8000  [(set (match_operand:SI 0 "s_register_operand" "=l")
8001	(ne:SI (match_operand:SI 1 "s_register_operand" "0")
8002	       (const_int 0)))
8003   (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8004  "TARGET_THUMB1"
8005  "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8006  [(set_attr "length" "4")]
8007)
8008
8009;; Used as part of the expansion of thumb ltu and gtu sequences.
8010(define_insn "cstoresi_nltu_thumb1"
8011  [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8012        (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8013			(match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8014  "TARGET_THUMB1"
8015  "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8016  [(set_attr "length" "4")]
8017)
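;; Here CMP leaves C = 1 exactly when %1 >= %2 (unsigned), and
;; SBC %0, %0, %0 computes %0 - %0 - (1 - C) = C - 1, i.e. 0 when
;; %1 >= %2 and -1 when %1 < %2 -- the negated LTU result.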
8018
8019(define_insn_and_split "cstoresi_ltu_thumb1"
8020  [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8021        (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8022		(match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8023  "TARGET_THUMB1"
8024  "#"
8025  "TARGET_THUMB1"
8026  [(set (match_dup 3)
8027	(neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8028   (set (match_dup 0) (neg:SI (match_dup 3)))]
8029  "operands[3] = gen_reg_rtx (SImode);"
8030  [(set_attr "length" "4")]
8031)
8032
8033;; Used as part of the expansion of thumb les sequence.
8034(define_insn "thumb1_addsi3_addgeu"
8035  [(set (match_operand:SI 0 "s_register_operand" "=l")
8036        (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8037			  (match_operand:SI 2 "s_register_operand" "l"))
8038		 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8039			 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8040  "TARGET_THUMB1"
8041  "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8042  [(set_attr "length" "4")]
8043)
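;; Likewise, CMP sets C to (%3 >= %4 unsigned) and ADC then computes
;; %1 + %2 + C, folding the GEU result into the addition.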
8044
8045
8046;; Conditional move insns
8047
8048(define_expand "movsicc"
8049  [(set (match_operand:SI 0 "s_register_operand" "")
8050	(if_then_else:SI (match_operand 1 "expandable_comparison_operator" "")
8051			 (match_operand:SI 2 "arm_not_operand" "")
8052			 (match_operand:SI 3 "arm_not_operand" "")))]
8053  "TARGET_32BIT"
8054  "
8055  {
8056    enum rtx_code code;
8057    rtx ccreg;
8058
8059    if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8060       				  &XEXP (operands[1], 1)))
8061      FAIL;
8062
8063    code = GET_CODE (operands[1]);
8064    ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8065				 XEXP (operands[1], 1), NULL_RTX);
8066    operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8067  }"
8068)
8069
8070(define_expand "movsfcc"
8071  [(set (match_operand:SF 0 "s_register_operand" "")
8072	(if_then_else:SF (match_operand 1 "expandable_comparison_operator" "")
8073			 (match_operand:SF 2 "s_register_operand" "")
8074			 (match_operand:SF 3 "s_register_operand" "")))]
8075  "TARGET_32BIT && TARGET_HARD_FLOAT"
8076  "
8077  {
8078    enum rtx_code code = GET_CODE (operands[1]);
8079    rtx ccreg;
8080
8081    if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8082       				  &XEXP (operands[1], 1)))
8083       FAIL;
8084
8085    code = GET_CODE (operands[1]);
8086    ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8087				 XEXP (operands[1], 1), NULL_RTX);
8088    operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8089  }"
8090)
8091
8092(define_expand "movdfcc"
8093  [(set (match_operand:DF 0 "s_register_operand" "")
8094	(if_then_else:DF (match_operand 1 "expandable_comparison_operator" "")
8095			 (match_operand:DF 2 "s_register_operand" "")
8096			 (match_operand:DF 3 "s_register_operand" "")))]
8097  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
8098  "
8099  {
8100    enum rtx_code code = GET_CODE (operands[1]);
8101    rtx ccreg;
8102
8103    if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8104       				  &XEXP (operands[1], 1)))
8105       FAIL;
8106    code = GET_CODE (operands[1]);
8107    ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8108				 XEXP (operands[1], 1), NULL_RTX);
8109    operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8110  }"
8111)
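;; None of the expanders above emit the conditional move themselves:
;; each emits a compare into the condition-code register (via
;; arm_gen_compare_reg) and rewrites operand 1 as a comparison of that
;; register against zero, leaving the predicated moves to the insn
;; patterns below.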
8112
8113(define_insn "*movsicc_insn"
8114  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8115	(if_then_else:SI
8116	 (match_operator 3 "arm_comparison_operator"
8117	  [(match_operand 4 "cc_register" "") (const_int 0)])
8118	 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8119	 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8120  "TARGET_ARM"
8121  "@
8122   mov%D3\\t%0, %2
8123   mvn%D3\\t%0, #%B2
8124   mov%d3\\t%0, %1
8125   mvn%d3\\t%0, #%B1
8126   mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8127   mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8128   mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8129   mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8130  [(set_attr "length" "4,4,4,4,8,8,8,8")
8131   (set_attr "conds" "use")
8132   (set_attr "insn" "mov,mvn,mov,mvn,mov,mov,mvn,mvn")
8133   (set_attr_alternative "type"
8134                         [(if_then_else (match_operand 2 "const_int_operand" "")
8135                                        (const_string "simple_alu_imm")
8136                                        (const_string "*"))
8137                          (const_string "simple_alu_imm")
8138                          (if_then_else (match_operand 1 "const_int_operand" "")
8139                                        (const_string "simple_alu_imm")
8140                                        (const_string "*"))
8141                          (const_string "simple_alu_imm")
8142                          (const_string "*")
8143                          (const_string "*")
8144                          (const_string "*")
8145                          (const_string "*")])]
8146)
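;; As an illustration (r0-r2 being example registers), when one arm of
;; the conditional is already in the destination a single predicated
;; move of the other value suffices, e.g. "movne r0, r2"; the general
;; register/register case needs two, e.g. "moveq r0, r1; movne r0, r2".
;; The K alternatives use mvn with %B, the bitwise inverse of the
;; constant.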
8147
8148(define_insn "*movsfcc_soft_insn"
8149  [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8150	(if_then_else:SF (match_operator 3 "arm_comparison_operator"
8151			  [(match_operand 4 "cc_register" "") (const_int 0)])
8152			 (match_operand:SF 1 "s_register_operand" "0,r")
8153			 (match_operand:SF 2 "s_register_operand" "r,0")))]
8154  "TARGET_ARM && TARGET_SOFT_FLOAT"
8155  "@
8156   mov%D3\\t%0, %2
8157   mov%d3\\t%0, %1"
8158  [(set_attr "conds" "use")
8159   (set_attr "insn" "mov")]
8160)
8161
8162
8163;; Jump and linkage insns
8164
8165(define_expand "jump"
8166  [(set (pc)
8167	(label_ref (match_operand 0 "" "")))]
8168  "TARGET_EITHER"
8169  ""
8170)
8171
8172(define_insn "*arm_jump"
8173  [(set (pc)
8174	(label_ref (match_operand 0 "" "")))]
8175  "TARGET_32BIT"
8176  "*
8177  {
8178    if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8179      {
8180        arm_ccfsm_state += 2;
8181        return \"\";
8182      }
8183    return \"b%?\\t%l0\";
8184  }
8185  "
8186  [(set_attr "predicable" "yes")
8187   (set (attr "length")
8188	(if_then_else
8189	   (and (match_test "TARGET_THUMB2")
8190		(and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8191		     (le (minus (match_dup 0) (pc)) (const_int 2048))))
8192	   (const_int 2)
8193	   (const_int 4)))]
8194)
8195
8196(define_insn "*thumb_jump"
8197  [(set (pc)
8198	(label_ref (match_operand 0 "" "")))]
8199  "TARGET_THUMB1"
8200  "*
8201  if (get_attr_length (insn) == 2)
8202    return \"b\\t%l0\";
8203  return \"bl\\t%l0\\t%@ far jump\";
8204  "
8205  [(set (attr "far_jump")
8206        (if_then_else
8207	    (eq_attr "length" "4")
8208	    (const_string "yes")
8209	    (const_string "no")))
8210   (set (attr "length")
8211        (if_then_else
8212	    (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8213		 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8214  	    (const_int 2)
8215	    (const_int 4)))]
8216)
8217
8218(define_expand "call"
8219  [(parallel [(call (match_operand 0 "memory_operand" "")
8220	            (match_operand 1 "general_operand" ""))
8221	      (use (match_operand 2 "" ""))
8222	      (clobber (reg:SI LR_REGNUM))])]
8223  "TARGET_EITHER"
8224  "
8225  {
8226    rtx callee, pat;
8227
8228    /* In an untyped call, we can get NULL for operand 2.  */
8229    if (operands[2] == NULL_RTX)
8230      operands[2] = const0_rtx;
8231
8232    /* Decide if we should generate indirect calls by loading the
8233       32-bit address of the callee into a register before performing the
8234       branch and link.  */
8235    callee = XEXP (operands[0], 0);
8236    if (GET_CODE (callee) == SYMBOL_REF
8237	? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8238	: !REG_P (callee))
8239      XEXP (operands[0], 0) = force_reg (Pmode, callee);
8240
8241    pat = gen_call_internal (operands[0], operands[1], operands[2]);
8242    arm_emit_call_insn (pat, XEXP (operands[0], 0));
8243    DONE;
8244  }"
8245)
8246
8247(define_expand "call_internal"
8248  [(parallel [(call (match_operand 0 "memory_operand" "")
8249	            (match_operand 1 "general_operand" ""))
8250	      (use (match_operand 2 "" ""))
8251	      (clobber (reg:SI LR_REGNUM))])])
8252
8253(define_insn "*call_reg_armv5"
8254  [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8255         (match_operand 1 "" ""))
8256   (use (match_operand 2 "" ""))
8257   (clobber (reg:SI LR_REGNUM))]
8258  "TARGET_ARM && arm_arch5"
8259  "blx%?\\t%0"
8260  [(set_attr "type" "call")]
8261)
8262
8263(define_insn "*call_reg_arm"
8264  [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8265         (match_operand 1 "" ""))
8266   (use (match_operand 2 "" ""))
8267   (clobber (reg:SI LR_REGNUM))]
8268  "TARGET_ARM && !arm_arch5"
8269  "*
8270  return output_call (operands);
8271  "
8272  ;; length is worst case, normally it is only two
8273  [(set_attr "length" "12")
8274   (set_attr "type" "call")]
8275)
8276
8277
8278;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
8279;; considered a function call by the branch predictor of some cores (PR40887).
8280;; Falls back to blx rN (*call_reg_armv5).
8281
8282(define_insn "*call_mem"
8283  [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8284	 (match_operand 1 "" ""))
8285   (use (match_operand 2 "" ""))
8286   (clobber (reg:SI LR_REGNUM))]
8287  "TARGET_ARM && !arm_arch5"
8288  "*
8289  return output_call_mem (operands);
8290  "
8291  [(set_attr "length" "12")
8292   (set_attr "type" "call")]
8293)
8294
8295(define_insn "*call_reg_thumb1_v5"
8296  [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8297	 (match_operand 1 "" ""))
8298   (use (match_operand 2 "" ""))
8299   (clobber (reg:SI LR_REGNUM))]
8300  "TARGET_THUMB1 && arm_arch5"
8301  "blx\\t%0"
8302  [(set_attr "length" "2")
8303   (set_attr "type" "call")]
8304)
8305
8306(define_insn "*call_reg_thumb1"
8307  [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8308	 (match_operand 1 "" ""))
8309   (use (match_operand 2 "" ""))
8310   (clobber (reg:SI LR_REGNUM))]
8311  "TARGET_THUMB1 && !arm_arch5"
8312  "*
8313  {
8314    if (!TARGET_CALLER_INTERWORKING)
8315      return thumb_call_via_reg (operands[0]);
8316    else if (operands[1] == const0_rtx)
8317      return \"bl\\t%__interwork_call_via_%0\";
8318    else if (frame_pointer_needed)
8319      return \"bl\\t%__interwork_r7_call_via_%0\";
8320    else
8321      return \"bl\\t%__interwork_r11_call_via_%0\";
8322  }"
8323  [(set_attr "type" "call")]
8324)
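;; Without blx, a Thumb-1 call through a register is emitted either by
;; thumb_call_via_reg or, when caller interworking is in effect, as a
;; bl to one of the __interwork*_call_via_rN helper veneers named in
;; the template above.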
8325
8326(define_expand "call_value"
8327  [(parallel [(set (match_operand       0 "" "")
8328	           (call (match_operand 1 "memory_operand" "")
8329		         (match_operand 2 "general_operand" "")))
8330	      (use (match_operand 3 "" ""))
8331	      (clobber (reg:SI LR_REGNUM))])]
8332  "TARGET_EITHER"
8333  "
8334  {
8335    rtx pat, callee;
8336
8337    /* In an untyped call, we can get NULL for operand 3.  */
8338    if (operands[3] == 0)
8339      operands[3] = const0_rtx;
8340
8341    /* Decide if we should generate indirect calls by loading the
8342       32-bit address of the callee into a register before performing the
8343       branch and link.  */
8344    callee = XEXP (operands[1], 0);
8345    if (GET_CODE (callee) == SYMBOL_REF
8346	? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8347	: !REG_P (callee))
8348      XEXP (operands[1], 0) = force_reg (Pmode, callee);
8349
8350    pat = gen_call_value_internal (operands[0], operands[1],
8351				   operands[2], operands[3]);
8352    arm_emit_call_insn (pat, XEXP (operands[1], 0));
8353    DONE;
8354  }"
8355)
8356
8357(define_expand "call_value_internal"
8358  [(parallel [(set (match_operand       0 "" "")
8359	           (call (match_operand 1 "memory_operand" "")
8360		         (match_operand 2 "general_operand" "")))
8361	      (use (match_operand 3 "" ""))
8362	      (clobber (reg:SI LR_REGNUM))])])
8363
8364(define_insn "*call_value_reg_armv5"
8365  [(set (match_operand 0 "" "")
8366        (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8367	      (match_operand 2 "" "")))
8368   (use (match_operand 3 "" ""))
8369   (clobber (reg:SI LR_REGNUM))]
8370  "TARGET_ARM && arm_arch5"
8371  "blx%?\\t%1"
8372  [(set_attr "type" "call")]
8373)
8374
8375(define_insn "*call_value_reg_arm"
8376  [(set (match_operand 0 "" "")
8377        (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8378	      (match_operand 2 "" "")))
8379   (use (match_operand 3 "" ""))
8380   (clobber (reg:SI LR_REGNUM))]
8381  "TARGET_ARM && !arm_arch5"
8382  "*
8383  return output_call (&operands[1]);
8384  "
8385  [(set_attr "length" "12")
8386   (set_attr "type" "call")]
8387)
8388
8389;; Note: see *call_mem
8390
8391(define_insn "*call_value_mem"
8392  [(set (match_operand 0 "" "")
8393	(call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8394	      (match_operand 2 "" "")))
8395   (use (match_operand 3 "" ""))
8396   (clobber (reg:SI LR_REGNUM))]
8397  "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8398  "*
8399  return output_call_mem (&operands[1]);
8400  "
8401  [(set_attr "length" "12")
8402   (set_attr "type" "call")]
8403)
8404
8405(define_insn "*call_value_reg_thumb1_v5"
8406  [(set (match_operand 0 "" "")
8407	(call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8408	      (match_operand 2 "" "")))
8409   (use (match_operand 3 "" ""))
8410   (clobber (reg:SI LR_REGNUM))]
8411  "TARGET_THUMB1 && arm_arch5"
8412  "blx\\t%1"
8413  [(set_attr "length" "2")
8414   (set_attr "type" "call")]
8415)
8416
8417(define_insn "*call_value_reg_thumb1"
8418  [(set (match_operand 0 "" "")
8419	(call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8420	      (match_operand 2 "" "")))
8421   (use (match_operand 3 "" ""))
8422   (clobber (reg:SI LR_REGNUM))]
8423  "TARGET_THUMB1 && !arm_arch5"
8424  "*
8425  {
8426    if (!TARGET_CALLER_INTERWORKING)
8427      return thumb_call_via_reg (operands[1]);
8428    else if (operands[2] == const0_rtx)
8429      return \"bl\\t%__interwork_call_via_%1\";
8430    else if (frame_pointer_needed)
8431      return \"bl\\t%__interwork_r7_call_via_%1\";
8432    else
8433      return \"bl\\t%__interwork_r11_call_via_%1\";
8434  }"
8435  [(set_attr "type" "call")]
8436)
8437
8438;; Allow calls to SYMBOL_REFs specially, as they are not valid general addresses.
8439;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8440
8441(define_insn "*call_symbol"
8442  [(call (mem:SI (match_operand:SI 0 "" ""))
8443	 (match_operand 1 "" ""))
8444   (use (match_operand 2 "" ""))
8445   (clobber (reg:SI LR_REGNUM))]
8446  "TARGET_32BIT
8447   && (GET_CODE (operands[0]) == SYMBOL_REF)
8448   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8449  "*
8450  {
8451    return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8452  }"
8453  [(set_attr "type" "call")]
8454)
8455
8456(define_insn "*call_value_symbol"
8457  [(set (match_operand 0 "" "")
8458	(call (mem:SI (match_operand:SI 1 "" ""))
8459	(match_operand:SI 2 "" "")))
8460   (use (match_operand 3 "" ""))
8461   (clobber (reg:SI LR_REGNUM))]
8462  "TARGET_32BIT
8463   && (GET_CODE (operands[1]) == SYMBOL_REF)
8464   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8465  "*
8466  {
8467    return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8468  }"
8469  [(set_attr "type" "call")]
8470)
8471
8472(define_insn "*call_insn"
8473  [(call (mem:SI (match_operand:SI 0 "" ""))
8474	 (match_operand:SI 1 "" ""))
8475   (use (match_operand 2 "" ""))
8476   (clobber (reg:SI LR_REGNUM))]
8477  "TARGET_THUMB1
8478   && GET_CODE (operands[0]) == SYMBOL_REF
8479   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8480  "bl\\t%a0"
8481  [(set_attr "length" "4")
8482   (set_attr "type" "call")]
8483)
8484
8485(define_insn "*call_value_insn"
8486  [(set (match_operand 0 "" "")
8487	(call (mem:SI (match_operand 1 "" ""))
8488	      (match_operand 2 "" "")))
8489   (use (match_operand 3 "" ""))
8490   (clobber (reg:SI LR_REGNUM))]
8491  "TARGET_THUMB1
8492   && GET_CODE (operands[1]) == SYMBOL_REF
8493   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8494  "bl\\t%a1"
8495  [(set_attr "length" "4")
8496   (set_attr "type" "call")]
8497)
8498
8499;; We may also be able to do sibcalls for Thumb, but it's much harder...
8500(define_expand "sibcall"
8501  [(parallel [(call (match_operand 0 "memory_operand" "")
8502		    (match_operand 1 "general_operand" ""))
8503	      (return)
8504	      (use (match_operand 2 "" ""))])]
8505  "TARGET_32BIT"
8506  "
8507  {
8508    if (operands[2] == NULL_RTX)
8509      operands[2] = const0_rtx;
8510  }"
8511)
8512
8513(define_expand "sibcall_value"
8514  [(parallel [(set (match_operand 0 "" "")
8515		   (call (match_operand 1 "memory_operand" "")
8516			 (match_operand 2 "general_operand" "")))
8517	      (return)
8518	      (use (match_operand 3 "" ""))])]
8519  "TARGET_32BIT"
8520  "
8521  {
8522    if (operands[3] == NULL_RTX)
8523      operands[3] = const0_rtx;
8524  }"
8525)
8526
8527(define_insn "*sibcall_insn"
8528 [(call (mem:SI (match_operand:SI 0 "" "X"))
8529	(match_operand 1 "" ""))
8530  (return)
8531  (use (match_operand 2 "" ""))]
8532  "TARGET_32BIT && GET_CODE (operands[0]) == SYMBOL_REF"
8533  "*
8534  return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8535  "
8536  [(set_attr "type" "call")]
8537)
8538
8539(define_insn "*sibcall_value_insn"
8540 [(set (match_operand 0 "" "")
8541       (call (mem:SI (match_operand:SI 1 "" "X"))
8542	     (match_operand 2 "" "")))
8543  (return)
8544  (use (match_operand 3 "" ""))]
8545  "TARGET_32BIT && GET_CODE (operands[1]) == SYMBOL_REF"
8546  "*
8547  return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8548  "
8549  [(set_attr "type" "call")]
8550)
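;; A sibling call is emitted as a plain branch (b, or b with a (PLT)
;; reloc when NEED_PLT_RELOC holds); the (return) rtx in the patterns
;; above is what marks it as a tail call rather than an ordinary jump.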
8551
8552(define_expand "return"
8553  [(return)]
8554  "(TARGET_ARM || (TARGET_THUMB2
8555                   && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8556                   && !IS_STACKALIGN (arm_current_func_type ())))
8557    && USE_RETURN_INSN (FALSE)"
8558  "
8559  {
8560    if (TARGET_THUMB2)
8561      {
8562        thumb2_expand_return ();
8563        DONE;
8564      }
8565  }
8566  "
8567)
8568
8569;; Often the return insn will be the same as loading from memory, so set the type attribute accordingly.
8570(define_insn "*arm_return"
8571  [(return)]
8572  "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8573  "*
8574  {
8575    if (arm_ccfsm_state == 2)
8576      {
8577        arm_ccfsm_state += 2;
8578        return \"\";
8579      }
8580    return output_return_instruction (const_true_rtx, true, false, false);
8581  }"
8582  [(set_attr "type" "load1")
8583   (set_attr "length" "12")
8584   (set_attr "predicable" "yes")]
8585)
8586
8587(define_insn "*cond_return"
8588  [(set (pc)
8589        (if_then_else (match_operator 0 "arm_comparison_operator"
8590		       [(match_operand 1 "cc_register" "") (const_int 0)])
8591                      (return)
8592                      (pc)))]
8593  "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8594  "*
8595  {
8596    if (arm_ccfsm_state == 2)
8597      {
8598        arm_ccfsm_state += 2;
8599        return \"\";
8600      }
8601    return output_return_instruction (operands[0], true, false, false);
8602  }"
8603  [(set_attr "conds" "use")
8604   (set_attr "length" "12")
8605   (set_attr "type" "load1")]
8606)
8607
8608(define_insn "*cond_return_inverted"
8609  [(set (pc)
8610        (if_then_else (match_operator 0 "arm_comparison_operator"
8611		       [(match_operand 1 "cc_register" "") (const_int 0)])
8612                      (pc)
8613		      (return)))]
8614  "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8615  "*
8616  {
8617    if (arm_ccfsm_state == 2)
8618      {
8619        arm_ccfsm_state += 2;
8620        return \"\";
8621      }
8622    return output_return_instruction (operands[0], true, true, false);
8623  }"
8624  [(set_attr "conds" "use")
8625   (set_attr "length" "12")
8626   (set_attr "type" "load1")]
8627)
8628
8629(define_insn "*arm_simple_return"
8630  [(simple_return)]
8631  "TARGET_ARM"
8632  "*
8633  {
8634    if (arm_ccfsm_state == 2)
8635      {
8636        arm_ccfsm_state += 2;
8637        return \"\";
8638      }
8639    return output_return_instruction (const_true_rtx, true, false, true);
8640  }"
8641  [(set_attr "type" "branch")
8642   (set_attr "length" "4")
8643   (set_attr "predicable" "yes")]
8644)
8645
8646;; Generate a sequence of instructions to determine if the processor is
8647;; in 26-bit or 32-bit mode, and return the appropriate return address
8648;; mask.
8649
8650(define_expand "return_addr_mask"
8651  [(set (match_dup 1)
8652      (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8653		       (const_int 0)))
8654   (set (match_operand:SI 0 "s_register_operand" "")
8655      (if_then_else:SI (eq (match_dup 1) (const_int 0))
8656		       (const_int -1)
8657		       (const_int 67108860)))] ; 0x03fffffc
8658  "TARGET_ARM"
8659  "
8660  operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8661  ")
8662
8663(define_insn "*check_arch2"
8664  [(set (match_operand:CC_NOOV 0 "cc_register" "")
8665      (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8666		       (const_int 0)))]
8667  "TARGET_ARM"
8668  "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8669  [(set_attr "length" "8")
8670   (set_attr "conds" "set")]
8671)
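;; The idea behind the teq pair above is that on a 26-bit processor the
;; two reads of the PC in the second teq are not identical (one of them
;; includes the PSR flag bits, at least one of which was just set by the
;; first teq), so the comparison leaves NE and the 0x03fffffc mask is
;; selected; on a 32-bit processor the reads match, giving EQ and the
;; all-ones mask.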
8672
8673;; Call subroutine returning any type.
8674
8675(define_expand "untyped_call"
8676  [(parallel [(call (match_operand 0 "" "")
8677		    (const_int 0))
8678	      (match_operand 1 "" "")
8679	      (match_operand 2 "" "")])]
8680  "TARGET_EITHER"
8681  "
8682  {
8683    int i;
8684    rtx par = gen_rtx_PARALLEL (VOIDmode,
8685				rtvec_alloc (XVECLEN (operands[2], 0)));
8686    rtx addr = gen_reg_rtx (Pmode);
8687    rtx mem;
8688    int size = 0;
8689
8690    emit_move_insn (addr, XEXP (operands[1], 0));
8691    mem = change_address (operands[1], BLKmode, addr);
8692
8693    for (i = 0; i < XVECLEN (operands[2], 0); i++)
8694      {
8695	rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8696
8697	/* Default code only uses r0 as a return value, but the callee
8698	   could be returning in up to four registers (r0-r3), so widen
8699	   to TImode here.  */
8699	if (REGNO (src) == R0_REGNUM)
8700	  src = gen_rtx_REG (TImode, R0_REGNUM);
8701
8702        XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8703						 GEN_INT (size));
8704        size += GET_MODE_SIZE (GET_MODE (src));
8705      }
8706
8707    emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8708				    const0_rtx));
8709
8710    size = 0;
8711
8712    for (i = 0; i < XVECLEN (par, 0); i++)
8713      {
8714	HOST_WIDE_INT offset = 0;
8715	rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8716
8717	if (size != 0)
8718	  emit_move_insn (addr, plus_constant (Pmode, addr, size));
8719
8720	mem = change_address (mem, GET_MODE (reg), NULL);
8721	if (REGNO (reg) == R0_REGNUM)
8722	  {
8723	    /* On thumb we have to use a write-back instruction.  */
8724	    emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8725 		       TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8726	    size = TARGET_ARM ? 16 : 0;
8727	  }
8728	else
8729	  {
8730	    emit_move_insn (mem, reg);
8731	    size = GET_MODE_SIZE (GET_MODE (reg));
8732	  }
8733      }
8734
8735    /* The optimizer does not know that the call sets the function value
8736       registers we stored in the result block.  We avoid problems by
8737       claiming that all hard registers are used and clobbered at this
8738       point.  */
8739    emit_insn (gen_blockage ());
8740
8741    DONE;
8742  }"
8743)
8744
8745(define_expand "untyped_return"
8746  [(match_operand:BLK 0 "memory_operand" "")
8747   (match_operand 1 "" "")]
8748  "TARGET_EITHER"
8749  "
8750  {
8751    int i;
8752    rtx addr = gen_reg_rtx (Pmode);
8753    rtx mem;
8754    int size = 0;
8755
8756    emit_move_insn (addr, XEXP (operands[0], 0));
8757    mem = change_address (operands[0], BLKmode, addr);
8758
8759    for (i = 0; i < XVECLEN (operands[1], 0); i++)
8760      {
8761	HOST_WIDE_INT offset = 0;
8762	rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8763
8764	if (size != 0)
8765	  emit_move_insn (addr, plus_constant (Pmode, addr, size));
8766
8767	mem = change_address (mem, GET_MODE (reg), NULL);
8768	if (REGNO (reg) == R0_REGNUM)
8769	  {
8770	    /* On thumb we have to use a write-back instruction.  */
8771	    emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8772 		       TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8773	    size = TARGET_ARM ? 16 : 0;
8774	  }
8775	else
8776	  {
8777	    emit_move_insn (reg, mem);
8778	    size = GET_MODE_SIZE (GET_MODE (reg));
8779	  }
8780      }
8781
8782    /* Emit USE insns before the return.  */
8783    for (i = 0; i < XVECLEN (operands[1], 0); i++)
8784      emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8785
8786    /* Construct the return.  */
8787    expand_naked_return ();
8788
8789    DONE;
8790  }"
8791)
8792
8793;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8794;; all of memory.  This blocks insns from being moved across this point.
8795
8796(define_insn "blockage"
8797  [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8798  "TARGET_EITHER"
8799  ""
8800  [(set_attr "length" "0")
8801   (set_attr "type" "block")]
8802)
8803
8804(define_expand "casesi"
8805  [(match_operand:SI 0 "s_register_operand" "")	; index to jump on
8806   (match_operand:SI 1 "const_int_operand" "")	; lower bound
8807   (match_operand:SI 2 "const_int_operand" "")	; total range
8808   (match_operand:SI 3 "" "")			; table label
8809   (match_operand:SI 4 "" "")]			; Out of range label
8810  "TARGET_32BIT || optimize_size || flag_pic"
8811  "
8812  {
8813    enum insn_code code;
8814    if (operands[1] != const0_rtx)
8815      {
8816	rtx reg = gen_reg_rtx (SImode);
8817
8818	emit_insn (gen_addsi3 (reg, operands[0],
8819			       gen_int_mode (-INTVAL (operands[1]),
8820			       		     SImode)));
8821	operands[0] = reg;
8822      }
8823
8824    if (TARGET_ARM)
8825      code = CODE_FOR_arm_casesi_internal;
8826    else if (TARGET_THUMB1)
8827      code = CODE_FOR_thumb1_casesi_internal_pic;
8828    else if (flag_pic)
8829      code = CODE_FOR_thumb2_casesi_internal_pic;
8830    else
8831      code = CODE_FOR_thumb2_casesi_internal;
8832
8833    if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8834      operands[2] = force_reg (SImode, operands[2]);
8835
8836    emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8837					  operands[3], operands[4]));
8838    DONE;
8839  }"
8840)
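;; A non-zero lower bound is subtracted from the index above, so the
;; dispatch patterns below only ever see a zero-based index compared
;; against the total range.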
8841
8842;; The USE in this pattern is needed to tell flow analysis that this is
8843;; a CASESI insn.  It has no other purpose.
8844(define_insn "arm_casesi_internal"
8845  [(parallel [(set (pc)
8846	       (if_then_else
8847		(leu (match_operand:SI 0 "s_register_operand" "r")
8848		     (match_operand:SI 1 "arm_rhs_operand" "rI"))
8849		(mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8850				 (label_ref (match_operand 2 "" ""))))
8851		(label_ref (match_operand 3 "" ""))))
8852	      (clobber (reg:CC CC_REGNUM))
8853	      (use (label_ref (match_dup 2)))])]
8854  "TARGET_ARM"
8855  "*
8856    if (flag_pic)
8857      return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8858    return   \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8859  "
8860  [(set_attr "conds" "clob")
8861   (set_attr "length" "12")]
8862)
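;; As an illustration, the non-PIC ARM dispatch above is roughly
;;   cmp    index, range
;;   ldrls  pc, [pc, index, asl #2]   @ load the target from the table
;;   b      default_label
;; with the table of addresses placed immediately after; the PIC form
;; instead adds the scaled index to the pc, dispatching through a table
;; of branch instructions.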
8863
8864(define_expand "thumb1_casesi_internal_pic"
8865  [(match_operand:SI 0 "s_register_operand" "")
8866   (match_operand:SI 1 "thumb1_cmp_operand" "")
8867   (match_operand 2 "" "")
8868   (match_operand 3 "" "")]
8869  "TARGET_THUMB1"
8870  {
8871    rtx reg0;
8872    rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
8873    emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
8874				    operands[3]));
8875    reg0 = gen_rtx_REG (SImode, 0);
8876    emit_move_insn (reg0, operands[0]);
8877    emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
8878    DONE;
8879  }
8880)
8881
8882(define_insn "thumb1_casesi_dispatch"
8883  [(parallel [(set (pc) (unspec [(reg:SI 0)
8884				 (label_ref (match_operand 0 "" ""))
8885;;				 (label_ref (match_operand 1 "" ""))
8886]
8887			 UNSPEC_THUMB1_CASESI))
8888	      (clobber (reg:SI IP_REGNUM))
8889              (clobber (reg:SI LR_REGNUM))])]
8890  "TARGET_THUMB1"
8891  "* return thumb1_output_casesi(operands);"
8892  [(set_attr "length" "4")]
8893)
8894
8895(define_expand "indirect_jump"
8896  [(set (pc)
8897	(match_operand:SI 0 "s_register_operand" ""))]
8898  "TARGET_EITHER"
8899  "
8900  /* Thumb-2 doesn't have mov pc, reg.  Explicitly set the low bit of the
8901     address and use bx.  */
8902  if (TARGET_THUMB2)
8903    {
8904      rtx tmp;
8905      tmp = gen_reg_rtx (SImode);
8906      emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8907      operands[0] = tmp;
8908    }
8909  "
8910)
8911
8912;; NB Never uses BX.
8913(define_insn "*arm_indirect_jump"
8914  [(set (pc)
8915	(match_operand:SI 0 "s_register_operand" "r"))]
8916  "TARGET_ARM"
8917  "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8918  [(set_attr "predicable" "yes")]
8919)
8920
8921(define_insn "*load_indirect_jump"
8922  [(set (pc)
8923	(match_operand:SI 0 "memory_operand" "m"))]
8924  "TARGET_ARM"
8925  "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8926  [(set_attr "type" "load1")
8927   (set_attr "pool_range" "4096")
8928   (set_attr "neg_pool_range" "4084")
8929   (set_attr "predicable" "yes")]
8930)
8931
8932;; NB Never uses BX.
8933(define_insn "*thumb1_indirect_jump"
8934  [(set (pc)
8935	(match_operand:SI 0 "register_operand" "l*r"))]
8936  "TARGET_THUMB1"
8937  "mov\\tpc, %0"
8938  [(set_attr "conds" "clob")
8939   (set_attr "length" "2")]
8940)
8941
8942
8943;; Misc insns
8944
8945(define_insn "nop"
8946  [(const_int 0)]
8947  "TARGET_EITHER"
8948  "*
8949  if (TARGET_UNIFIED_ASM)
8950    return \"nop\";
8951  if (TARGET_ARM)
8952    return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8953  return  \"mov\\tr8, r8\";
8954  "
8955  [(set (attr "length")
8956	(if_then_else (eq_attr "is_thumb" "yes")
8957		      (const_int 2)
8958		      (const_int 4)))]
8959)
8960
8961
8962;; Patterns to allow combination of arithmetic, cond code and shifts
8963
8964(define_insn "*arith_shiftsi"
8965  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
8966        (match_operator:SI 1 "shiftable_operator"
8967          [(match_operator:SI 3 "shift_operator"
8968             [(match_operand:SI 4 "s_register_operand" "r,r,r,r")
8969              (match_operand:SI 5 "shift_amount_operand" "M,M,M,r")])
8970           (match_operand:SI 2 "s_register_operand" "rk,rk,r,rk")]))]
8971  "TARGET_32BIT"
8972  "%i1%?\\t%0, %2, %4%S3"
8973  [(set_attr "predicable" "yes")
8974   (set_attr "shift" "4")
8975   (set_attr "arch" "a,t2,t2,a")
8976   ;; Thumb2 doesn't allow the stack pointer to be used as operand1 for
8977   ;; any operation other than add and sub.  In this case the minus
8978   ;; operation is a candidate for an rsub and hence needs to be
8979   ;; disabled.
8980   ;; We have to make sure to disable the fourth alternative if
8981   ;; the shift_operator is MULT, since otherwise the insn will
8982   ;; also match a multiply_accumulate pattern and validate_change
8983   ;; will allow a replacement of the constant with a register
8984   ;; despite the checks done in shift_operator.
8985   (set_attr_alternative "insn_enabled"
8986			 [(const_string "yes")
8987			  (if_then_else
8988			   (match_operand:SI 1 "add_operator" "")
8989			   (const_string "yes") (const_string "no"))
8990			  (const_string "yes")
8991			  (if_then_else
8992			   (match_operand:SI 3 "mult_operator" "")
8993			   (const_string "no") (const_string "yes"))])
8994   (set_attr "type" "alu_shift,alu_shift,alu_shift,alu_shift_reg")])
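;; For example (using r0, r2 and r4 as stand-ins for operands 0, 2 and
;; 4), this pattern covers combined ALU-and-shift instructions such as
;;   add r0, r2, r4, lsl #2
;; where %i1 prints the ALU mnemonic and %S3 the shift applied to
;; operand 4.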
8995
8996(define_split
8997  [(set (match_operand:SI 0 "s_register_operand" "")
8998	(match_operator:SI 1 "shiftable_operator"
8999	 [(match_operator:SI 2 "shiftable_operator"
9000	   [(match_operator:SI 3 "shift_operator"
9001	     [(match_operand:SI 4 "s_register_operand" "")
9002	      (match_operand:SI 5 "reg_or_int_operand" "")])
9003	    (match_operand:SI 6 "s_register_operand" "")])
9004	  (match_operand:SI 7 "arm_rhs_operand" "")]))
9005   (clobber (match_operand:SI 8 "s_register_operand" ""))]
9006  "TARGET_32BIT"
9007  [(set (match_dup 8)
9008	(match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9009			 (match_dup 6)]))
9010   (set (match_dup 0)
9011	(match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9012  "")
9013
9014(define_insn "*arith_shiftsi_compare0"
9015  [(set (reg:CC_NOOV CC_REGNUM)
9016        (compare:CC_NOOV
9017	 (match_operator:SI 1 "shiftable_operator"
9018	  [(match_operator:SI 3 "shift_operator"
9019	    [(match_operand:SI 4 "s_register_operand" "r,r")
9020	     (match_operand:SI 5 "shift_amount_operand" "M,r")])
9021	   (match_operand:SI 2 "s_register_operand" "r,r")])
9022	 (const_int 0)))
9023   (set (match_operand:SI 0 "s_register_operand" "=r,r")
9024	(match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9025			 (match_dup 2)]))]
9026  "TARGET_32BIT"
9027  "%i1%.\\t%0, %2, %4%S3"
9028  [(set_attr "conds" "set")
9029   (set_attr "shift" "4")
9030   (set_attr "arch" "32,a")
9031   (set_attr "type" "alu_shift,alu_shift_reg")])
9032
9033(define_insn "*arith_shiftsi_compare0_scratch"
9034  [(set (reg:CC_NOOV CC_REGNUM)
9035        (compare:CC_NOOV
9036	 (match_operator:SI 1 "shiftable_operator"
9037	  [(match_operator:SI 3 "shift_operator"
9038	    [(match_operand:SI 4 "s_register_operand" "r,r")
9039	     (match_operand:SI 5 "shift_amount_operand" "M,r")])
9040	   (match_operand:SI 2 "s_register_operand" "r,r")])
9041	 (const_int 0)))
9042   (clobber (match_scratch:SI 0 "=r,r"))]
9043  "TARGET_32BIT"
9044  "%i1%.\\t%0, %2, %4%S3"
9045  [(set_attr "conds" "set")
9046   (set_attr "shift" "4")
9047   (set_attr "arch" "32,a")
9048   (set_attr "type" "alu_shift,alu_shift_reg")])
9049
9050(define_insn "*sub_shiftsi"
9051  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9052	(minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9053		  (match_operator:SI 2 "shift_operator"
9054		   [(match_operand:SI 3 "s_register_operand" "r,r")
9055		    (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9056  "TARGET_32BIT"
9057  "sub%?\\t%0, %1, %3%S2"
9058  [(set_attr "predicable" "yes")
9059   (set_attr "shift" "3")
9060   (set_attr "arch" "32,a")
9061   (set_attr "type" "alu_shift,alu_shift_reg")])
9062
9063(define_insn "*sub_shiftsi_compare0"
9064  [(set (reg:CC_NOOV CC_REGNUM)
9065	(compare:CC_NOOV
9066	 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9067		   (match_operator:SI 2 "shift_operator"
9068		    [(match_operand:SI 3 "s_register_operand" "r,r")
9069		     (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9070	 (const_int 0)))
9071   (set (match_operand:SI 0 "s_register_operand" "=r,r")
9072	(minus:SI (match_dup 1)
9073		  (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9074  "TARGET_32BIT"
9075  "sub%.\\t%0, %1, %3%S2"
9076  [(set_attr "conds" "set")
9077   (set_attr "shift" "3")
9078   (set_attr "arch" "32,a")
9079   (set_attr "type" "alu_shift,alu_shift_reg")])
9080
9081(define_insn "*sub_shiftsi_compare0_scratch"
9082  [(set (reg:CC_NOOV CC_REGNUM)
9083	(compare:CC_NOOV
9084	 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9085		   (match_operator:SI 2 "shift_operator"
9086		    [(match_operand:SI 3 "s_register_operand" "r,r")
9087		     (match_operand:SI 4 "shift_amount_operand" "M,rM")]))
9088	 (const_int 0)))
9089   (clobber (match_scratch:SI 0 "=r,r"))]
9090  "TARGET_32BIT"
9091  "sub%.\\t%0, %1, %3%S2"
9092  [(set_attr "conds" "set")
9093   (set_attr "shift" "3")
9094   (set_attr "arch" "32,a")
9095   (set_attr "type" "alu_shift,alu_shift_reg")])
9096
9097
9098(define_insn "*and_scc"
9099  [(set (match_operand:SI 0 "s_register_operand" "=r")
9100	(and:SI (match_operator:SI 1 "arm_comparison_operator"
9101		 [(match_operand 3 "cc_register" "") (const_int 0)])
9102		(match_operand:SI 2 "s_register_operand" "r")))]
9103  "TARGET_ARM"
9104  "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9105  [(set_attr "conds" "use")
9106   (set_attr "insn" "mov")
9107   (set_attr "length" "8")]
9108)
9109
9110(define_insn "*ior_scc"
9111  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9112	(ior:SI (match_operator:SI 2 "arm_comparison_operator"
9113		 [(match_operand 3 "cc_register" "") (const_int 0)])
9114		(match_operand:SI 1 "s_register_operand" "0,?r")))]
9115  "TARGET_ARM"
9116  "@
9117   orr%d2\\t%0, %1, #1
9118   mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9119  [(set_attr "conds" "use")
9120   (set_attr "length" "4,8")]
9121)
9122
9123; A series of splitters for the compare_scc pattern below.  Note that
9124; order is important.
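; For instance, the first two splitters below turn (x < 0) into a
; single logical shift right by 31, and (x >= 0) into a mvn followed by
; the same shift, without touching the condition codes.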
9125(define_split
9126  [(set (match_operand:SI 0 "s_register_operand" "")
9127	(lt:SI (match_operand:SI 1 "s_register_operand" "")
9128	       (const_int 0)))
9129   (clobber (reg:CC CC_REGNUM))]
9130  "TARGET_32BIT && reload_completed"
9131  [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9132
9133(define_split
9134  [(set (match_operand:SI 0 "s_register_operand" "")
9135	(ge:SI (match_operand:SI 1 "s_register_operand" "")
9136	       (const_int 0)))
9137   (clobber (reg:CC CC_REGNUM))]
9138  "TARGET_32BIT && reload_completed"
9139  [(set (match_dup 0) (not:SI (match_dup 1)))
9140   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9141
9142(define_split
9143  [(set (match_operand:SI 0 "s_register_operand" "")
9144	(eq:SI (match_operand:SI 1 "s_register_operand" "")
9145	       (const_int 0)))
9146   (clobber (reg:CC CC_REGNUM))]
9147  "TARGET_32BIT && reload_completed"
9148  [(parallel
9149    [(set (reg:CC CC_REGNUM)
9150	  (compare:CC (const_int 1) (match_dup 1)))
9151     (set (match_dup 0)
9152	  (minus:SI (const_int 1) (match_dup 1)))])
9153   (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9154	      (set (match_dup 0) (const_int 0)))])
9155
9156(define_split
9157  [(set (match_operand:SI 0 "s_register_operand" "")
9158	(ne:SI (match_operand:SI 1 "s_register_operand" "")
9159	       (match_operand:SI 2 "const_int_operand" "")))
9160   (clobber (reg:CC CC_REGNUM))]
9161  "TARGET_32BIT && reload_completed"
9162  [(parallel
9163    [(set (reg:CC CC_REGNUM)
9164	  (compare:CC (match_dup 1) (match_dup 2)))
9165     (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9166   (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9167	      (set (match_dup 0) (const_int 1)))]
9168{
9169  operands[3] = GEN_INT (-INTVAL (operands[2]));
9170})
9171
9172(define_split
9173  [(set (match_operand:SI 0 "s_register_operand" "")
9174	(ne:SI (match_operand:SI 1 "s_register_operand" "")
9175	       (match_operand:SI 2 "arm_add_operand" "")))
9176   (clobber (reg:CC CC_REGNUM))]
9177  "TARGET_32BIT && reload_completed"
9178  [(parallel
9179    [(set (reg:CC_NOOV CC_REGNUM)
9180	  (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9181			   (const_int 0)))
9182     (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9183   (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9184	      (set (match_dup 0) (const_int 1)))])
9185
9186(define_insn_and_split "*compare_scc"
9187  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9188	(match_operator:SI 1 "arm_comparison_operator"
9189	 [(match_operand:SI 2 "s_register_operand" "r,r")
9190	  (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9191   (clobber (reg:CC CC_REGNUM))]
9192  "TARGET_32BIT"
9193  "#"
9194  "&& reload_completed"
9195  [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9196   (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9197   (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9198{
9199  rtx tmp1;
9200  enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9201					   operands[2], operands[3]);
9202  enum rtx_code rc = GET_CODE (operands[1]);
9203
9204  tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9205
9206  operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9207  if (mode == CCFPmode || mode == CCFPEmode)
9208    rc = reverse_condition_maybe_unordered (rc);
9209  else
9210    rc = reverse_condition (rc);
9211  operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9212})
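;; In the general case the split above becomes a compare followed by two
;; conditionally executed moves of 0 and 1 into the destination, with
;; operands 4 and 5 holding the reversed and original comparisons
;; against the CC register respectively.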
9213
9214;; Attempt to improve the sequence generated by the compare_scc splitters
9215;; so that it does not use conditional execution.
9216(define_peephole2
9217  [(set (reg:CC CC_REGNUM)
9218	(compare:CC (match_operand:SI 1 "register_operand" "")
9219		    (match_operand:SI 2 "arm_rhs_operand" "")))
9220   (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9221	      (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9222   (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9223	      (set (match_dup 0) (const_int 1)))
9224   (match_scratch:SI 3 "r")]
9225  "TARGET_32BIT"
9226  [(parallel
9227    [(set (reg:CC CC_REGNUM)
9228	  (compare:CC (match_dup 1) (match_dup 2)))
9229     (set (match_dup 3) (minus:SI (match_dup 1) (match_dup 2)))])
9230   (parallel
9231    [(set (reg:CC CC_REGNUM)
9232	  (compare:CC (const_int 0) (match_dup 3)))
9233     (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9234   (parallel
9235    [(set (match_dup 0)
9236	  (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9237		   (geu:SI (reg:CC CC_REGNUM) (const_int 0))))
9238     (clobber (reg:CC CC_REGNUM))])])
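;; The replacement sequence computes the EQ result arithmetically:
;; subtract the two operands, negate the difference (which sets the
;; carry flag only when the difference is zero) and let the final
;; add-with-carry reduce the destination to that carry, so no IT block
;; or conditional instruction is required.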
9239
9240(define_insn "*cond_move"
9241  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9242	(if_then_else:SI (match_operator 3 "equality_operator"
9243			  [(match_operator 4 "arm_comparison_operator"
9244			    [(match_operand 5 "cc_register" "") (const_int 0)])
9245			   (const_int 0)])
9246			 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9247			 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9248  "TARGET_ARM"
9249  "*
9250    if (GET_CODE (operands[3]) == NE)
9251      {
9252        if (which_alternative != 1)
9253	  output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9254        if (which_alternative != 0)
9255	  output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9256        return \"\";
9257      }
9258    if (which_alternative != 0)
9259      output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9260    if (which_alternative != 1)
9261      output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9262    return \"\";
9263  "
9264  [(set_attr "conds" "use")
9265   (set_attr "insn" "mov")
9266   (set_attr "length" "4,4,8")]
9267)
9268
9269(define_insn "*cond_arith"
9270  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9271        (match_operator:SI 5 "shiftable_operator"
9272	 [(match_operator:SI 4 "arm_comparison_operator"
9273           [(match_operand:SI 2 "s_register_operand" "r,r")
9274	    (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9275          (match_operand:SI 1 "s_register_operand" "0,?r")]))
9276   (clobber (reg:CC CC_REGNUM))]
9277  "TARGET_ARM"
9278  "*
9279    if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9280      return \"%i5\\t%0, %1, %2, lsr #31\";
9281
9282    output_asm_insn (\"cmp\\t%2, %3\", operands);
9283    if (GET_CODE (operands[5]) == AND)
9284      output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9285    else if (GET_CODE (operands[5]) == MINUS)
9286      output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9287    else if (which_alternative != 0)
9288      output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9289    return \"%i5%d4\\t%0, %1, #1\";
9290  "
9291  [(set_attr "conds" "clob")
9292   (set_attr "length" "12")]
9293)
9294
9295(define_insn "*cond_sub"
9296  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9297        (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9298		  (match_operator:SI 4 "arm_comparison_operator"
9299                   [(match_operand:SI 2 "s_register_operand" "r,r")
9300		    (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9301   (clobber (reg:CC CC_REGNUM))]
9302  "TARGET_ARM"
9303  "*
9304    output_asm_insn (\"cmp\\t%2, %3\", operands);
9305    if (which_alternative != 0)
9306      output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9307    return \"sub%d4\\t%0, %1, #1\";
9308  "
9309  [(set_attr "conds" "clob")
9310   (set_attr "length" "8,12")]
9311)
9312
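;; The *cmp_ite0, *cmp_ite1, *cmp_and and *cmp_ior patterns below fold
;; two comparisons into a compare followed by a conditionally executed
;; compare (with an it instruction before it on Thumb-2); the CC mode
;; chosen by arm_select_dominance_cc_mode then describes the combined
;; condition.  For example, "x == 0 && y == 0" can be tested as a cmp
;; of one value against zero followed by a cmpeq of the other, leaving
;; the flags usable by a following conditional instruction.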
9313(define_insn "*cmp_ite0"
9314  [(set (match_operand 6 "dominant_cc_register" "")
9315	(compare
9316	 (if_then_else:SI
9317	  (match_operator 4 "arm_comparison_operator"
9318	   [(match_operand:SI 0 "s_register_operand"
9319	        "l,l,l,r,r,r,r,r,r")
9320	    (match_operand:SI 1 "arm_add_operand"
9321	        "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9322	  (match_operator:SI 5 "arm_comparison_operator"
9323	   [(match_operand:SI 2 "s_register_operand"
9324	        "l,r,r,l,l,r,r,r,r")
9325	    (match_operand:SI 3 "arm_add_operand"
9326	        "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9327	  (const_int 0))
9328	 (const_int 0)))]
9329  "TARGET_32BIT"
9330  "*
9331  {
9332    static const char * const cmp1[NUM_OF_COND_CMP][2] =
9333    {
9334      {\"cmp%d5\\t%0, %1\",
9335       \"cmp%d4\\t%2, %3\"},
9336      {\"cmn%d5\\t%0, #%n1\",
9337       \"cmp%d4\\t%2, %3\"},
9338      {\"cmp%d5\\t%0, %1\",
9339       \"cmn%d4\\t%2, #%n3\"},
9340      {\"cmn%d5\\t%0, #%n1\",
9341       \"cmn%d4\\t%2, #%n3\"}
9342    };
9343    static const char * const cmp2[NUM_OF_COND_CMP][2] =
9344    {
9345      {\"cmp\\t%2, %3\",
9346       \"cmp\\t%0, %1\"},
9347      {\"cmp\\t%2, %3\",
9348       \"cmn\\t%0, #%n1\"},
9349      {\"cmn\\t%2, #%n3\",
9350       \"cmp\\t%0, %1\"},
9351      {\"cmn\\t%2, #%n3\",
9352       \"cmn\\t%0, #%n1\"}
9353    };
9354    static const char * const ite[2] =
9355    {
9356      \"it\\t%d5\",
9357      \"it\\t%d4\"
9358    };
9359    static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9360                                   CMP_CMP, CMN_CMP, CMP_CMP,
9361                                   CMN_CMP, CMP_CMN, CMN_CMN};
9362    int swap =
9363      comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9364
9365    output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9366    if (TARGET_THUMB2) {
9367      output_asm_insn (ite[swap], operands);
9368    }
9369    output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9370    return \"\";
9371  }"
9372  [(set_attr "conds" "set")
9373   (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9374   (set_attr_alternative "length"
9375      [(const_int 6)
9376       (const_int 8)
9377       (const_int 8)
9378       (const_int 8)
9379       (const_int 8)
9380       (if_then_else (eq_attr "is_thumb" "no")
9381           (const_int 8)
9382           (const_int 10))
9383       (if_then_else (eq_attr "is_thumb" "no")
9384           (const_int 8)
9385           (const_int 10))
9386       (if_then_else (eq_attr "is_thumb" "no")
9387           (const_int 8)
9388           (const_int 10))
9389       (if_then_else (eq_attr "is_thumb" "no")
9390           (const_int 8)
9391           (const_int 10))])]
9392)
9393
9394(define_insn "*cmp_ite1"
9395  [(set (match_operand 6 "dominant_cc_register" "")
9396	(compare
9397	 (if_then_else:SI
9398	  (match_operator 4 "arm_comparison_operator"
9399	   [(match_operand:SI 0 "s_register_operand"
9400	        "l,l,l,r,r,r,r,r,r")
9401	    (match_operand:SI 1 "arm_add_operand"
9402	        "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9403	  (match_operator:SI 5 "arm_comparison_operator"
9404	   [(match_operand:SI 2 "s_register_operand"
9405	        "l,r,r,l,l,r,r,r,r")
9406	    (match_operand:SI 3 "arm_add_operand"
9407	        "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9408	  (const_int 1))
9409	 (const_int 0)))]
9410  "TARGET_32BIT"
9411  "*
9412  {
9413    static const char * const cmp1[NUM_OF_COND_CMP][2] =
9414    {
9415      {\"cmp\\t%0, %1\",
9416       \"cmp\\t%2, %3\"},
9417      {\"cmn\\t%0, #%n1\",
9418       \"cmp\\t%2, %3\"},
9419      {\"cmp\\t%0, %1\",
9420       \"cmn\\t%2, #%n3\"},
9421      {\"cmn\\t%0, #%n1\",
9422       \"cmn\\t%2, #%n3\"}
9423    };
9424    static const char * const cmp2[NUM_OF_COND_CMP][2] =
9425    {
9426      {\"cmp%d4\\t%2, %3\",
9427       \"cmp%D5\\t%0, %1\"},
9428      {\"cmp%d4\\t%2, %3\",
9429       \"cmn%D5\\t%0, #%n1\"},
9430      {\"cmn%d4\\t%2, #%n3\",
9431       \"cmp%D5\\t%0, %1\"},
9432      {\"cmn%d4\\t%2, #%n3\",
9433       \"cmn%D5\\t%0, #%n1\"}
9434    };
9435    static const char * const ite[2] =
9436    {
9437      \"it\\t%d4\",
9438      \"it\\t%D5\"
9439    };
9440    static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9441                                   CMP_CMP, CMN_CMP, CMP_CMP,
9442                                   CMN_CMP, CMP_CMN, CMN_CMN};
9443    int swap =
9444      comparison_dominates_p (GET_CODE (operands[5]),
9445			      reverse_condition (GET_CODE (operands[4])));
9446
9447    output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9448    if (TARGET_THUMB2) {
9449      output_asm_insn (ite[swap], operands);
9450    }
9451    output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9452    return \"\";
9453  }"
9454  [(set_attr "conds" "set")
9455   (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9456   (set_attr_alternative "length"
9457      [(const_int 6)
9458       (const_int 8)
9459       (const_int 8)
9460       (const_int 8)
9461       (const_int 8)
9462       (if_then_else (eq_attr "is_thumb" "no")
9463           (const_int 8)
9464           (const_int 10))
9465       (if_then_else (eq_attr "is_thumb" "no")
9466           (const_int 8)
9467           (const_int 10))
9468       (if_then_else (eq_attr "is_thumb" "no")
9469           (const_int 8)
9470           (const_int 10))
9471       (if_then_else (eq_attr "is_thumb" "no")
9472           (const_int 8)
9473           (const_int 10))])]
9474)
9475
9476(define_insn "*cmp_and"
9477  [(set (match_operand 6 "dominant_cc_register" "")
9478	(compare
9479	 (and:SI
9480	  (match_operator 4 "arm_comparison_operator"
9481	   [(match_operand:SI 0 "s_register_operand"
9482	        "l,l,l,r,r,r,r,r,r")
9483	    (match_operand:SI 1 "arm_add_operand"
9484	        "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9485	  (match_operator:SI 5 "arm_comparison_operator"
9486	   [(match_operand:SI 2 "s_register_operand"
9487	        "l,r,r,l,l,r,r,r,r")
9488	    (match_operand:SI 3 "arm_add_operand"
9489	        "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9490	 (const_int 0)))]
9491  "TARGET_32BIT"
9492  "*
9493  {
9494    static const char *const cmp1[NUM_OF_COND_CMP][2] =
9495    {
9496      {\"cmp%d5\\t%0, %1\",
9497       \"cmp%d4\\t%2, %3\"},
9498      {\"cmn%d5\\t%0, #%n1\",
9499       \"cmp%d4\\t%2, %3\"},
9500      {\"cmp%d5\\t%0, %1\",
9501       \"cmn%d4\\t%2, #%n3\"},
9502      {\"cmn%d5\\t%0, #%n1\",
9503       \"cmn%d4\\t%2, #%n3\"}
9504    };
9505    static const char *const cmp2[NUM_OF_COND_CMP][2] =
9506    {
9507      {\"cmp\\t%2, %3\",
9508       \"cmp\\t%0, %1\"},
9509      {\"cmp\\t%2, %3\",
9510       \"cmn\\t%0, #%n1\"},
9511      {\"cmn\\t%2, #%n3\",
9512       \"cmp\\t%0, %1\"},
9513      {\"cmn\\t%2, #%n3\",
9514       \"cmn\\t%0, #%n1\"}
9515    };
9516    static const char *const ite[2] =
9517    {
9518      \"it\\t%d5\",
9519      \"it\\t%d4\"
9520    };
9521    static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9522                                   CMP_CMP, CMN_CMP, CMP_CMP,
9523                                   CMN_CMP, CMP_CMN, CMN_CMN};
9524    int swap =
9525      comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9526
9527    output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9528    if (TARGET_THUMB2) {
9529      output_asm_insn (ite[swap], operands);
9530    }
9531    output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9532    return \"\";
9533  }"
9534  [(set_attr "conds" "set")
9535   (set_attr "predicable" "no")
9536   (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9537   (set_attr_alternative "length"
9538      [(const_int 6)
9539       (const_int 8)
9540       (const_int 8)
9541       (const_int 8)
9542       (const_int 8)
9543       (if_then_else (eq_attr "is_thumb" "no")
9544           (const_int 8)
9545           (const_int 10))
9546       (if_then_else (eq_attr "is_thumb" "no")
9547           (const_int 8)
9548           (const_int 10))
9549       (if_then_else (eq_attr "is_thumb" "no")
9550           (const_int 8)
9551           (const_int 10))
9552       (if_then_else (eq_attr "is_thumb" "no")
9553           (const_int 8)
9554           (const_int 10))])]
9555)
9556
9557(define_insn "*cmp_ior"
9558  [(set (match_operand 6 "dominant_cc_register" "")
9559	(compare
9560	 (ior:SI
9561	  (match_operator 4 "arm_comparison_operator"
9562	   [(match_operand:SI 0 "s_register_operand"
9563	        "l,l,l,r,r,r,r,r,r")
9564	    (match_operand:SI 1 "arm_add_operand"
9565	        "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9566	  (match_operator:SI 5 "arm_comparison_operator"
9567	   [(match_operand:SI 2 "s_register_operand"
9568	        "l,r,r,l,l,r,r,r,r")
9569	    (match_operand:SI 3 "arm_add_operand"
9570	        "lPy,rI,L,lPy,lPy,rI,rI,L,L")]))
9571	 (const_int 0)))]
9572  "TARGET_32BIT"
9573  "*
9574  {
9575    static const char *const cmp1[NUM_OF_COND_CMP][2] =
9576    {
9577      {\"cmp\\t%0, %1\",
9578       \"cmp\\t%2, %3\"},
9579      {\"cmn\\t%0, #%n1\",
9580       \"cmp\\t%2, %3\"},
9581      {\"cmp\\t%0, %1\",
9582       \"cmn\\t%2, #%n3\"},
9583      {\"cmn\\t%0, #%n1\",
9584       \"cmn\\t%2, #%n3\"}
9585    };
9586    static const char *const cmp2[NUM_OF_COND_CMP][2] =
9587    {
9588      {\"cmp%D4\\t%2, %3\",
9589       \"cmp%D5\\t%0, %1\"},
9590      {\"cmp%D4\\t%2, %3\",
9591       \"cmn%D5\\t%0, #%n1\"},
9592      {\"cmn%D4\\t%2, #%n3\",
9593       \"cmp%D5\\t%0, %1\"},
9594      {\"cmn%D4\\t%2, #%n3\",
9595       \"cmn%D5\\t%0, #%n1\"}
9596    };
9597    static const char *const ite[2] =
9598    {
9599      \"it\\t%D4\",
9600      \"it\\t%D5\"
9601    };
9602    static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9603                                   CMP_CMP, CMN_CMP, CMP_CMP,
9604                                   CMN_CMP, CMP_CMN, CMN_CMN};
9605    int swap =
9606      comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9607
9608    output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9609    if (TARGET_THUMB2) {
9610      output_asm_insn (ite[swap], operands);
9611    }
9612    output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9613    return \"\";
9614  }
9615  "
9616  [(set_attr "conds" "set")
9617   (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9618   (set_attr_alternative "length"
9619      [(const_int 6)
9620       (const_int 8)
9621       (const_int 8)
9622       (const_int 8)
9623       (const_int 8)
9624       (if_then_else (eq_attr "is_thumb" "no")
9625           (const_int 8)
9626           (const_int 10))
9627       (if_then_else (eq_attr "is_thumb" "no")
9628           (const_int 8)
9629           (const_int 10))
9630       (if_then_else (eq_attr "is_thumb" "no")
9631           (const_int 8)
9632           (const_int 10))
9633       (if_then_else (eq_attr "is_thumb" "no")
9634           (const_int 8)
9635           (const_int 10))])]
9636)
9637
9638(define_insn_and_split "*ior_scc_scc"
9639  [(set (match_operand:SI 0 "s_register_operand" "=r")
9640	(ior:SI (match_operator:SI 3 "arm_comparison_operator"
9641		 [(match_operand:SI 1 "s_register_operand" "r")
9642		  (match_operand:SI 2 "arm_add_operand" "rIL")])
9643		(match_operator:SI 6 "arm_comparison_operator"
9644		 [(match_operand:SI 4 "s_register_operand" "r")
9645		  (match_operand:SI 5 "arm_add_operand" "rIL")])))
9646   (clobber (reg:CC CC_REGNUM))]
9647  "TARGET_32BIT
9648   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9649       != CCmode)"
9650  "#"
9651  "TARGET_32BIT && reload_completed"
9652  [(set (match_dup 7)
9653	(compare
9654	 (ior:SI
9655	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9656	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9657	 (const_int 0)))
9658   (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9659  "operands[7]
9660     = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9661						  DOM_CC_X_OR_Y),
9662		    CC_REGNUM);"
9663  [(set_attr "conds" "clob")
9664   (set_attr "length" "16")])
9665
9666; If the above pattern is followed by a CMP insn, then the compare is
9667; redundant, since we can rework the conditional instruction that follows.
9668(define_insn_and_split "*ior_scc_scc_cmp"
9669  [(set (match_operand 0 "dominant_cc_register" "")
9670	(compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9671			  [(match_operand:SI 1 "s_register_operand" "r")
9672			   (match_operand:SI 2 "arm_add_operand" "rIL")])
9673			 (match_operator:SI 6 "arm_comparison_operator"
9674			  [(match_operand:SI 4 "s_register_operand" "r")
9675			   (match_operand:SI 5 "arm_add_operand" "rIL")]))
9676		 (const_int 0)))
9677   (set (match_operand:SI 7 "s_register_operand" "=r")
9678	(ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9679		(match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9680  "TARGET_32BIT"
9681  "#"
9682  "TARGET_32BIT && reload_completed"
9683  [(set (match_dup 0)
9684	(compare
9685	 (ior:SI
9686	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9687	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9688	 (const_int 0)))
9689   (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9690  ""
9691  [(set_attr "conds" "set")
9692   (set_attr "length" "16")])
9693
9694(define_insn_and_split "*and_scc_scc"
9695  [(set (match_operand:SI 0 "s_register_operand" "=r")
9696	(and:SI (match_operator:SI 3 "arm_comparison_operator"
9697		 [(match_operand:SI 1 "s_register_operand" "r")
9698		  (match_operand:SI 2 "arm_add_operand" "rIL")])
9699		(match_operator:SI 6 "arm_comparison_operator"
9700		 [(match_operand:SI 4 "s_register_operand" "r")
9701		  (match_operand:SI 5 "arm_add_operand" "rIL")])))
9702   (clobber (reg:CC CC_REGNUM))]
9703  "TARGET_32BIT
9704   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9705       != CCmode)"
9706  "#"
9707  "TARGET_32BIT && reload_completed
9708   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9709       != CCmode)"
9710  [(set (match_dup 7)
9711	(compare
9712	 (and:SI
9713	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9714	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9715	 (const_int 0)))
9716   (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9717  "operands[7]
9718     = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9719						  DOM_CC_X_AND_Y),
9720		    CC_REGNUM);"
9721  [(set_attr "conds" "clob")
9722   (set_attr "length" "16")])
9723
9724; If the above pattern is followed by a CMP insn, then the compare is
9725; redundant, since we can rework the conditional instruction that follows.
9726(define_insn_and_split "*and_scc_scc_cmp"
9727  [(set (match_operand 0 "dominant_cc_register" "")
9728	(compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9729			  [(match_operand:SI 1 "s_register_operand" "r")
9730			   (match_operand:SI 2 "arm_add_operand" "rIL")])
9731			 (match_operator:SI 6 "arm_comparison_operator"
9732			  [(match_operand:SI 4 "s_register_operand" "r")
9733			   (match_operand:SI 5 "arm_add_operand" "rIL")]))
9734		 (const_int 0)))
9735   (set (match_operand:SI 7 "s_register_operand" "=r")
9736	(and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9737		(match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9738  "TARGET_32BIT"
9739  "#"
9740  "TARGET_32BIT && reload_completed"
9741  [(set (match_dup 0)
9742	(compare
9743	 (and:SI
9744	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9745	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9746	 (const_int 0)))
9747   (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9748  ""
9749  [(set_attr "conds" "set")
9750   (set_attr "length" "16")])
9751
9752;; If there is no dominance in the comparison, then we can still save an
9753;; instruction in the AND case, since we know that the second compare
9754;; need only zero the value if false (if true, then the value is already
9755;; correct).
9756(define_insn_and_split "*and_scc_scc_nodom"
9757  [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9758	(and:SI (match_operator:SI 3 "arm_comparison_operator"
9759		 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9760		  (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9761		(match_operator:SI 6 "arm_comparison_operator"
9762		 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9763		  (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9764   (clobber (reg:CC CC_REGNUM))]
9765  "TARGET_32BIT
9766   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9767       == CCmode)"
9768  "#"
9769  "TARGET_32BIT && reload_completed"
9770  [(parallel [(set (match_dup 0)
9771		   (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9772	      (clobber (reg:CC CC_REGNUM))])
9773   (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9774   (set (match_dup 0)
9775	(if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9776			 (match_dup 0)
9777			 (const_int 0)))]
9778  "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9779					      operands[4], operands[5]),
9780			      CC_REGNUM);
9781   operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9782				  operands[5]);"
9783  [(set_attr "conds" "clob")
9784   (set_attr "length" "20")])
9785
9786(define_split
9787  [(set (reg:CC_NOOV CC_REGNUM)
9788	(compare:CC_NOOV (ior:SI
9789			  (and:SI (match_operand:SI 0 "s_register_operand" "")
9790				  (const_int 1))
9791			  (match_operator:SI 1 "arm_comparison_operator"
9792			   [(match_operand:SI 2 "s_register_operand" "")
9793			    (match_operand:SI 3 "arm_add_operand" "")]))
9794			 (const_int 0)))
9795   (clobber (match_operand:SI 4 "s_register_operand" ""))]
9796  "TARGET_ARM"
9797  [(set (match_dup 4)
9798	(ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9799		(match_dup 0)))
9800   (set (reg:CC_NOOV CC_REGNUM)
9801	(compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9802			 (const_int 0)))]
9803  "")
9804
9805(define_split
9806  [(set (reg:CC_NOOV CC_REGNUM)
9807	(compare:CC_NOOV (ior:SI
9808			  (match_operator:SI 1 "arm_comparison_operator"
9809			   [(match_operand:SI 2 "s_register_operand" "")
9810			    (match_operand:SI 3 "arm_add_operand" "")])
9811			  (and:SI (match_operand:SI 0 "s_register_operand" "")
9812				  (const_int 1)))
9813			 (const_int 0)))
9814   (clobber (match_operand:SI 4 "s_register_operand" ""))]
9815  "TARGET_ARM"
9816  [(set (match_dup 4)
9817	(ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9818		(match_dup 0)))
9819   (set (reg:CC_NOOV CC_REGNUM)
9820	(compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9821			 (const_int 0)))]
9822  "")
9823;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9824
9825(define_insn "*negscc"
9826  [(set (match_operand:SI 0 "s_register_operand" "=r")
9827	(neg:SI (match_operator 3 "arm_comparison_operator"
9828		 [(match_operand:SI 1 "s_register_operand" "r")
9829		  (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9830   (clobber (reg:CC CC_REGNUM))]
9831  "TARGET_ARM"
9832  "*
9833  if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9834    return \"mov\\t%0, %1, asr #31\";
9835
9836  if (GET_CODE (operands[3]) == NE)
9837    return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9838
9839  output_asm_insn (\"cmp\\t%1, %2\", operands);
9840  output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9841  return \"mvn%d3\\t%0, #0\";
9842  "
9843  [(set_attr "conds" "clob")
9844   (set_attr "length" "12")]
9845)
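;; Illustrative only: the LT-against-zero special case above relies on the
;; fact that an arithmetic shift right by 31 replicates the sign bit of a
;; 32-bit value, so a single MOV with ASR #31 suffices.
;;
;;   int negscc_lt0 (int x)
;;   {
;;     return -(x < 0);   /* 0 or -1: the same value as x >> 31 (arithmetic) */
;;   }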
9846
9847(define_insn "movcond"
9848  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9849	(if_then_else:SI
9850	 (match_operator 5 "arm_comparison_operator"
9851	  [(match_operand:SI 3 "s_register_operand" "r,r,r")
9852	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9853	 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9854	 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9855   (clobber (reg:CC CC_REGNUM))]
9856  "TARGET_ARM"
9857  "*
9858  if (GET_CODE (operands[5]) == LT
9859      && (operands[4] == const0_rtx))
9860    {
9861      if (which_alternative != 1 && REG_P (operands[1]))
9862	{
9863	  if (operands[2] == const0_rtx)
9864	    return \"and\\t%0, %1, %3, asr #31\";
9865	  return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9866	}
9867      else if (which_alternative != 0 && REG_P (operands[2]))
9868	{
9869	  if (operands[1] == const0_rtx)
9870	    return \"bic\\t%0, %2, %3, asr #31\";
9871	  return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9872	}
9873      /* The only case that falls through to here is when both ops 1 & 2
9874	 are constants.  */
9875    }
9876
9877  if (GET_CODE (operands[5]) == GE
9878      && (operands[4] == const0_rtx))
9879    {
9880      if (which_alternative != 1 && REG_P (operands[1]))
9881	{
9882	  if (operands[2] == const0_rtx)
9883	    return \"bic\\t%0, %1, %3, asr #31\";
9884	  return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9885	}
9886      else if (which_alternative != 0 && REG_P (operands[2]))
9887	{
9888	  if (operands[1] == const0_rtx)
9889	    return \"and\\t%0, %2, %3, asr #31\";
9890	  return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9891	}
9892      /* The only case that falls through to here is when both ops 1 & 2
9893	 are constants.  */
9894    }
9895  if (CONST_INT_P (operands[4])
9896      && !const_ok_for_arm (INTVAL (operands[4])))
9897    output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9898  else
9899    output_asm_insn (\"cmp\\t%3, %4\", operands);
9900  if (which_alternative != 0)
9901    output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9902  if (which_alternative != 1)
9903    output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9904  return \"\";
9905  "
9906  [(set_attr "conds" "clob")
9907   (set_attr "length" "8,8,12")]
9908)
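;; Illustrative only: the ASR #31 tricks used by "movcond" turn a sign test
;; into an all-ones or all-zeros mask, so a select against zero needs
;; neither a branch nor a conditional move.
;;
;;   int select_if_negative (int a, int b)
;;   {
;;     return (a < 0) ? b : 0;   /* e.g. and r0, b, a, asr #31 */
;;   }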
9909
9910;; ??? The patterns below need checking for Thumb-2 usefulness.
9911
9912(define_insn "*ifcompare_plus_move"
9913  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9914	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
9915			  [(match_operand:SI 4 "s_register_operand" "r,r")
9916			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9917			 (plus:SI
9918			  (match_operand:SI 2 "s_register_operand" "r,r")
9919			  (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9920			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9921   (clobber (reg:CC CC_REGNUM))]
9922  "TARGET_ARM"
9923  "#"
9924  [(set_attr "conds" "clob")
9925   (set_attr "length" "8,12")]
9926)
9927
9928(define_insn "*if_plus_move"
9929  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9930	(if_then_else:SI
9931	 (match_operator 4 "arm_comparison_operator"
9932	  [(match_operand 5 "cc_register" "") (const_int 0)])
9933	 (plus:SI
9934	  (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9935	  (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9936	 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9937  "TARGET_ARM"
9938  "@
9939   add%d4\\t%0, %2, %3
9940   sub%d4\\t%0, %2, #%n3
9941   add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9942   sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9943  [(set_attr "conds" "use")
9944   (set_attr "length" "4,4,8,8")
9945   (set_attr_alternative "type"
9946                         [(if_then_else (match_operand 3 "const_int_operand" "")
9947                                        (const_string "simple_alu_imm" )
9948                                        (const_string "*"))
9949                          (const_string "simple_alu_imm")
9950                          (const_string "*")
9951                          (const_string "*")])]
9952)
9953
9954(define_insn "*ifcompare_move_plus"
9955  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9956	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
9957			  [(match_operand:SI 4 "s_register_operand" "r,r")
9958			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9959			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9960			 (plus:SI
9961			  (match_operand:SI 2 "s_register_operand" "r,r")
9962			  (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9963   (clobber (reg:CC CC_REGNUM))]
9964  "TARGET_ARM"
9965  "#"
9966  [(set_attr "conds" "clob")
9967   (set_attr "length" "8,12")]
9968)
9969
9970(define_insn "*if_move_plus"
9971  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9972	(if_then_else:SI
9973	 (match_operator 4 "arm_comparison_operator"
9974	  [(match_operand 5 "cc_register" "") (const_int 0)])
9975	 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9976	 (plus:SI
9977	  (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9978	  (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9979  "TARGET_ARM"
9980  "@
9981   add%D4\\t%0, %2, %3
9982   sub%D4\\t%0, %2, #%n3
9983   add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9984   sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9985  [(set_attr "conds" "use")
9986   (set_attr "length" "4,4,8,8")
9987   (set_attr_alternative "type"
9988                         [(if_then_else (match_operand 3 "const_int_operand" "")
9989                                        (const_string "simple_alu_imm" )
9990                                        (const_string "*"))
9991                          (const_string "simple_alu_imm")
9992                          (const_string "*")
9993                          (const_string "*")])]
9994)
9995
9996(define_insn "*ifcompare_arith_arith"
9997  [(set (match_operand:SI 0 "s_register_operand" "=r")
9998	(if_then_else:SI (match_operator 9 "arm_comparison_operator"
9999			  [(match_operand:SI 5 "s_register_operand" "r")
10000			   (match_operand:SI 6 "arm_add_operand" "rIL")])
10001			 (match_operator:SI 8 "shiftable_operator"
10002			  [(match_operand:SI 1 "s_register_operand" "r")
10003			   (match_operand:SI 2 "arm_rhs_operand" "rI")])
10004			 (match_operator:SI 7 "shiftable_operator"
10005			  [(match_operand:SI 3 "s_register_operand" "r")
10006			   (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10007   (clobber (reg:CC CC_REGNUM))]
10008  "TARGET_ARM"
10009  "#"
10010  [(set_attr "conds" "clob")
10011   (set_attr "length" "12")]
10012)
10013
10014(define_insn "*if_arith_arith"
10015  [(set (match_operand:SI 0 "s_register_operand" "=r")
10016	(if_then_else:SI (match_operator 5 "arm_comparison_operator"
10017			  [(match_operand 8 "cc_register" "") (const_int 0)])
10018			 (match_operator:SI 6 "shiftable_operator"
10019			  [(match_operand:SI 1 "s_register_operand" "r")
10020			   (match_operand:SI 2 "arm_rhs_operand" "rI")])
10021			 (match_operator:SI 7 "shiftable_operator"
10022			  [(match_operand:SI 3 "s_register_operand" "r")
10023			   (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10024  "TARGET_ARM"
10025  "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10026  [(set_attr "conds" "use")
10027   (set_attr "length" "8")]
10028)
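;; Illustrative only: once if-conversion has turned a ternary expression
;; into conditionally executed arithmetic, a source fragment like the one
;; below may match *if_arith_arith (both arms are shiftable_operator
;; operations predicated on the same condition-code register).
;;
;;   int sel_arith (int a, int b, int x, int y)
;;   {
;;     return (a < b) ? x + y : x - y;   /* e.g. cmp a,b; addlt; subge */
;;   }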
10029
10030(define_insn "*ifcompare_arith_move"
10031  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10032	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
10033			  [(match_operand:SI 2 "s_register_operand" "r,r")
10034			   (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10035			 (match_operator:SI 7 "shiftable_operator"
10036			  [(match_operand:SI 4 "s_register_operand" "r,r")
10037			   (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10038			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10039   (clobber (reg:CC CC_REGNUM))]
10040  "TARGET_ARM"
10041  "*
10042  /* If we have an operation where (op x 0) is the identity operation, the
10043     conditional operator is LT or GE, we are comparing against zero, and
10044     everything is in registers, then we can do this in two instructions.  */
10045  if (operands[3] == const0_rtx
10046      && GET_CODE (operands[7]) != AND
10047      && REG_P (operands[5])
10048      && REG_P (operands[1])
10049      && REGNO (operands[1]) == REGNO (operands[4])
10050      && REGNO (operands[4]) != REGNO (operands[0]))
10051    {
10052      if (GET_CODE (operands[6]) == LT)
10053	return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10054      else if (GET_CODE (operands[6]) == GE)
10055	return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10056    }
10057  if (CONST_INT_P (operands[3])
10058      && !const_ok_for_arm (INTVAL (operands[3])))
10059    output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10060  else
10061    output_asm_insn (\"cmp\\t%2, %3\", operands);
10062  output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10063  if (which_alternative != 0)
10064    return \"mov%D6\\t%0, %1\";
10065  return \"\";
10066  "
10067  [(set_attr "conds" "clob")
10068   (set_attr "length" "8,12")]
10069)
10070
10071(define_insn "*if_arith_move"
10072  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10073	(if_then_else:SI (match_operator 4 "arm_comparison_operator"
10074			  [(match_operand 6 "cc_register" "") (const_int 0)])
10075			 (match_operator:SI 5 "shiftable_operator"
10076			  [(match_operand:SI 2 "s_register_operand" "r,r")
10077			   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10078			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10079  "TARGET_ARM"
10080  "@
10081   %I5%d4\\t%0, %2, %3
10082   %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10083  [(set_attr "conds" "use")
10084   (set_attr "length" "4,8")
10085   (set_attr "type" "*,*")]
10086)
10087
10088(define_insn "*ifcompare_move_arith"
10089  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10090	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
10091			  [(match_operand:SI 4 "s_register_operand" "r,r")
10092			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10093			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10094			 (match_operator:SI 7 "shiftable_operator"
10095			  [(match_operand:SI 2 "s_register_operand" "r,r")
10096			   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10097   (clobber (reg:CC CC_REGNUM))]
10098  "TARGET_ARM"
10099  "*
10100  /* If we have an operation where (op x 0) is the identity operation, the
10101     conditional operator is LT or GE, we are comparing against zero, and
10102     everything is in registers, then we can do this in two instructions.  */
10103  if (operands[5] == const0_rtx
10104      && GET_CODE (operands[7]) != AND
10105      && REG_P (operands[3])
10106      && REG_P (operands[1])
10107      && REGNO (operands[1]) == REGNO (operands[2])
10108      && REGNO (operands[2]) != REGNO (operands[0]))
10109    {
10110      if (GET_CODE (operands[6]) == GE)
10111	return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10112      else if (GET_CODE (operands[6]) == LT)
10113	return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10114    }
10115
10116  if (CONST_INT_P (operands[5])
10117      && !const_ok_for_arm (INTVAL (operands[5])))
10118    output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10119  else
10120    output_asm_insn (\"cmp\\t%4, %5\", operands);
10121
10122  if (which_alternative != 0)
10123    output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10124  return \"%I7%D6\\t%0, %2, %3\";
10125  "
10126  [(set_attr "conds" "clob")
10127   (set_attr "length" "8,12")]
10128)
10129
10130(define_insn "*if_move_arith"
10131  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10132	(if_then_else:SI
10133	 (match_operator 4 "arm_comparison_operator"
10134	  [(match_operand 6 "cc_register" "") (const_int 0)])
10135	 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10136	 (match_operator:SI 5 "shiftable_operator"
10137	  [(match_operand:SI 2 "s_register_operand" "r,r")
10138	   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10139  "TARGET_ARM"
10140  "@
10141   %I5%D4\\t%0, %2, %3
10142   %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10143  [(set_attr "conds" "use")
10144   (set_attr "length" "4,8")
10145   (set_attr "type" "*,*")]
10146)
10147
10148(define_insn "*ifcompare_move_not"
10149  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10150	(if_then_else:SI
10151	 (match_operator 5 "arm_comparison_operator"
10152	  [(match_operand:SI 3 "s_register_operand" "r,r")
10153	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10154	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10155	 (not:SI
10156	  (match_operand:SI 2 "s_register_operand" "r,r"))))
10157   (clobber (reg:CC CC_REGNUM))]
10158  "TARGET_ARM"
10159  "#"
10160  [(set_attr "conds" "clob")
10161   (set_attr "length" "8,12")]
10162)
10163
10164(define_insn "*if_move_not"
10165  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10166	(if_then_else:SI
10167	 (match_operator 4 "arm_comparison_operator"
10168	  [(match_operand 3 "cc_register" "") (const_int 0)])
10169	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10170	 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10171  "TARGET_ARM"
10172  "@
10173   mvn%D4\\t%0, %2
10174   mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10175   mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10176  [(set_attr "conds" "use")
10177   (set_attr "insn" "mvn")
10178   (set_attr "length" "4,8,8")]
10179)
10180
10181(define_insn "*ifcompare_not_move"
10182  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10183	(if_then_else:SI
10184	 (match_operator 5 "arm_comparison_operator"
10185	  [(match_operand:SI 3 "s_register_operand" "r,r")
10186	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10187	 (not:SI
10188	  (match_operand:SI 2 "s_register_operand" "r,r"))
10189	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10190   (clobber (reg:CC CC_REGNUM))]
10191  "TARGET_ARM"
10192  "#"
10193  [(set_attr "conds" "clob")
10194   (set_attr "length" "8,12")]
10195)
10196
10197(define_insn "*if_not_move"
10198  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10199	(if_then_else:SI
10200	 (match_operator 4 "arm_comparison_operator"
10201	  [(match_operand 3 "cc_register" "") (const_int 0)])
10202	 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10203	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10204  "TARGET_ARM"
10205  "@
10206   mvn%d4\\t%0, %2
10207   mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10208   mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10209  [(set_attr "conds" "use")
10210   (set_attr "insn" "mvn")
10211   (set_attr "length" "4,8,8")]
10212)
10213
10214(define_insn "*ifcompare_shift_move"
10215  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10216	(if_then_else:SI
10217	 (match_operator 6 "arm_comparison_operator"
10218	  [(match_operand:SI 4 "s_register_operand" "r,r")
10219	   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10220	 (match_operator:SI 7 "shift_operator"
10221	  [(match_operand:SI 2 "s_register_operand" "r,r")
10222	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10223	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10224   (clobber (reg:CC CC_REGNUM))]
10225  "TARGET_ARM"
10226  "#"
10227  [(set_attr "conds" "clob")
10228   (set_attr "length" "8,12")]
10229)
10230
10231(define_insn "*if_shift_move"
10232  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10233	(if_then_else:SI
10234	 (match_operator 5 "arm_comparison_operator"
10235	  [(match_operand 6 "cc_register" "") (const_int 0)])
10236	 (match_operator:SI 4 "shift_operator"
10237	  [(match_operand:SI 2 "s_register_operand" "r,r,r")
10238	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10239	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10240  "TARGET_ARM"
10241  "@
10242   mov%d5\\t%0, %2%S4
10243   mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10244   mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10245  [(set_attr "conds" "use")
10246   (set_attr "shift" "2")
10247   (set_attr "length" "4,8,8")
10248   (set_attr "insn" "mov")
10249   (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10250		      (const_string "alu_shift")
10251		      (const_string "alu_shift_reg")))]
10252)
10253
10254(define_insn "*ifcompare_move_shift"
10255  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10256	(if_then_else:SI
10257	 (match_operator 6 "arm_comparison_operator"
10258	  [(match_operand:SI 4 "s_register_operand" "r,r")
10259	   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10260	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10261	 (match_operator:SI 7 "shift_operator"
10262	  [(match_operand:SI 2 "s_register_operand" "r,r")
10263	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10264   (clobber (reg:CC CC_REGNUM))]
10265  "TARGET_ARM"
10266  "#"
10267  [(set_attr "conds" "clob")
10268   (set_attr "length" "8,12")]
10269)
10270
10271(define_insn "*if_move_shift"
10272  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10273	(if_then_else:SI
10274	 (match_operator 5 "arm_comparison_operator"
10275	  [(match_operand 6 "cc_register" "") (const_int 0)])
10276	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10277	 (match_operator:SI 4 "shift_operator"
10278	  [(match_operand:SI 2 "s_register_operand" "r,r,r")
10279	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10280  "TARGET_ARM"
10281  "@
10282   mov%D5\\t%0, %2%S4
10283   mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10284   mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10285  [(set_attr "conds" "use")
10286   (set_attr "shift" "2")
10287   (set_attr "length" "4,8,8")
10288   (set_attr "insn" "mov")
10289   (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10290		      (const_string "alu_shift")
10291		      (const_string "alu_shift_reg")))]
10292)
10293
10294(define_insn "*ifcompare_shift_shift"
10295  [(set (match_operand:SI 0 "s_register_operand" "=r")
10296	(if_then_else:SI
10297	 (match_operator 7 "arm_comparison_operator"
10298	  [(match_operand:SI 5 "s_register_operand" "r")
10299	   (match_operand:SI 6 "arm_add_operand" "rIL")])
10300	 (match_operator:SI 8 "shift_operator"
10301	  [(match_operand:SI 1 "s_register_operand" "r")
10302	   (match_operand:SI 2 "arm_rhs_operand" "rM")])
10303	 (match_operator:SI 9 "shift_operator"
10304	  [(match_operand:SI 3 "s_register_operand" "r")
10305	   (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10306   (clobber (reg:CC CC_REGNUM))]
10307  "TARGET_ARM"
10308  "#"
10309  [(set_attr "conds" "clob")
10310   (set_attr "length" "12")]
10311)
10312
10313(define_insn "*if_shift_shift"
10314  [(set (match_operand:SI 0 "s_register_operand" "=r")
10315	(if_then_else:SI
10316	 (match_operator 5 "arm_comparison_operator"
10317	  [(match_operand 8 "cc_register" "") (const_int 0)])
10318	 (match_operator:SI 6 "shift_operator"
10319	  [(match_operand:SI 1 "s_register_operand" "r")
10320	   (match_operand:SI 2 "arm_rhs_operand" "rM")])
10321	 (match_operator:SI 7 "shift_operator"
10322	  [(match_operand:SI 3 "s_register_operand" "r")
10323	   (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10324  "TARGET_ARM"
10325  "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10326  [(set_attr "conds" "use")
10327   (set_attr "shift" "1")
10328   (set_attr "length" "8")
10329   (set_attr "insn" "mov")
10330   (set (attr "type") (if_then_else
10331		        (and (match_operand 2 "const_int_operand" "")
10332                             (match_operand 4 "const_int_operand" ""))
10333		      (const_string "alu_shift")
10334		      (const_string "alu_shift_reg")))]
10335)
10336
10337(define_insn "*ifcompare_not_arith"
10338  [(set (match_operand:SI 0 "s_register_operand" "=r")
10339	(if_then_else:SI
10340	 (match_operator 6 "arm_comparison_operator"
10341	  [(match_operand:SI 4 "s_register_operand" "r")
10342	   (match_operand:SI 5 "arm_add_operand" "rIL")])
10343	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10344	 (match_operator:SI 7 "shiftable_operator"
10345	  [(match_operand:SI 2 "s_register_operand" "r")
10346	   (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10347   (clobber (reg:CC CC_REGNUM))]
10348  "TARGET_ARM"
10349  "#"
10350  [(set_attr "conds" "clob")
10351   (set_attr "length" "12")]
10352)
10353
10354(define_insn "*if_not_arith"
10355  [(set (match_operand:SI 0 "s_register_operand" "=r")
10356	(if_then_else:SI
10357	 (match_operator 5 "arm_comparison_operator"
10358	  [(match_operand 4 "cc_register" "") (const_int 0)])
10359	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10360	 (match_operator:SI 6 "shiftable_operator"
10361	  [(match_operand:SI 2 "s_register_operand" "r")
10362	   (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10363  "TARGET_ARM"
10364  "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10365  [(set_attr "conds" "use")
10366   (set_attr "insn" "mvn")
10367   (set_attr "length" "8")]
10368)
10369
10370(define_insn "*ifcompare_arith_not"
10371  [(set (match_operand:SI 0 "s_register_operand" "=r")
10372	(if_then_else:SI
10373	 (match_operator 6 "arm_comparison_operator"
10374	  [(match_operand:SI 4 "s_register_operand" "r")
10375	   (match_operand:SI 5 "arm_add_operand" "rIL")])
10376	 (match_operator:SI 7 "shiftable_operator"
10377	  [(match_operand:SI 2 "s_register_operand" "r")
10378	   (match_operand:SI 3 "arm_rhs_operand" "rI")])
10379	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10380   (clobber (reg:CC CC_REGNUM))]
10381  "TARGET_ARM"
10382  "#"
10383  [(set_attr "conds" "clob")
10384   (set_attr "length" "12")]
10385)
10386
10387(define_insn "*if_arith_not"
10388  [(set (match_operand:SI 0 "s_register_operand" "=r")
10389	(if_then_else:SI
10390	 (match_operator 5 "arm_comparison_operator"
10391	  [(match_operand 4 "cc_register" "") (const_int 0)])
10392	 (match_operator:SI 6 "shiftable_operator"
10393	  [(match_operand:SI 2 "s_register_operand" "r")
10394	   (match_operand:SI 3 "arm_rhs_operand" "rI")])
10395	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10396  "TARGET_ARM"
10397  "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10398  [(set_attr "conds" "use")
10399   (set_attr "insn" "mvn")
10400   (set_attr "length" "8")]
10401)
10402
10403(define_insn "*ifcompare_neg_move"
10404  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10405	(if_then_else:SI
10406	 (match_operator 5 "arm_comparison_operator"
10407	  [(match_operand:SI 3 "s_register_operand" "r,r")
10408	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10409	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10410	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10411   (clobber (reg:CC CC_REGNUM))]
10412  "TARGET_ARM"
10413  "#"
10414  [(set_attr "conds" "clob")
10415   (set_attr "length" "8,12")]
10416)
10417
10418(define_insn "*if_neg_move"
10419  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10420	(if_then_else:SI
10421	 (match_operator 4 "arm_comparison_operator"
10422	  [(match_operand 3 "cc_register" "") (const_int 0)])
10423	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10424	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10425  "TARGET_ARM"
10426  "@
10427   rsb%d4\\t%0, %2, #0
10428   mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10429   mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10430  [(set_attr "conds" "use")
10431   (set_attr "length" "4,8,8")]
10432)
10433
10434(define_insn "*ifcompare_move_neg"
10435  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10436	(if_then_else:SI
10437	 (match_operator 5 "arm_comparison_operator"
10438	  [(match_operand:SI 3 "s_register_operand" "r,r")
10439	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10440	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10441	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10442   (clobber (reg:CC CC_REGNUM))]
10443  "TARGET_ARM"
10444  "#"
10445  [(set_attr "conds" "clob")
10446   (set_attr "length" "8,12")]
10447)
10448
10449(define_insn "*if_move_neg"
10450  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10451	(if_then_else:SI
10452	 (match_operator 4 "arm_comparison_operator"
10453	  [(match_operand 3 "cc_register" "") (const_int 0)])
10454	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10455	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10456  "TARGET_ARM"
10457  "@
10458   rsb%D4\\t%0, %2, #0
10459   mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10460   mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10461  [(set_attr "conds" "use")
10462   (set_attr "length" "4,8,8")]
10463)
10464
10465(define_insn "*arith_adjacentmem"
10466  [(set (match_operand:SI 0 "s_register_operand" "=r")
10467	(match_operator:SI 1 "shiftable_operator"
10468	 [(match_operand:SI 2 "memory_operand" "m")
10469	  (match_operand:SI 3 "memory_operand" "m")]))
10470   (clobber (match_scratch:SI 4 "=r"))]
10471  "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10472  "*
10473  {
10474    rtx ldm[3];
10475    rtx arith[4];
10476    rtx base_reg;
10477    HOST_WIDE_INT val1 = 0, val2 = 0;
10478
10479    if (REGNO (operands[0]) > REGNO (operands[4]))
10480      {
10481	ldm[1] = operands[4];
10482	ldm[2] = operands[0];
10483      }
10484    else
10485      {
10486	ldm[1] = operands[0];
10487	ldm[2] = operands[4];
10488      }
10489
10490    base_reg = XEXP (operands[2], 0);
10491
10492    if (!REG_P (base_reg))
10493      {
10494	val1 = INTVAL (XEXP (base_reg, 1));
10495	base_reg = XEXP (base_reg, 0);
10496      }
10497
10498    if (!REG_P (XEXP (operands[3], 0)))
10499      val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10500
10501    arith[0] = operands[0];
10502    arith[3] = operands[1];
10503
10504    if (val1 < val2)
10505      {
10506	arith[1] = ldm[1];
10507	arith[2] = ldm[2];
10508      }
10509    else
10510      {
10511	arith[1] = ldm[2];
10512	arith[2] = ldm[1];
10513      }
10514
10515    ldm[0] = base_reg;
10516    if (val1 != 0 && val2 != 0)
10517      {
10518	rtx ops[3];
10519
10520	if (val1 == 4 || val2 == 4)
10521	  /* Other val must be 8, since we know they are adjacent and neither
10522	     is zero.  */
10523	  output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10524	else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10525	  {
10526	    ldm[0] = ops[0] = operands[4];
10527	    ops[1] = base_reg;
10528	    ops[2] = GEN_INT (val1);
10529	    output_add_immediate (ops);
10530	    if (val1 < val2)
10531	      output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10532	    else
10533	      output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10534	  }
10535	else
10536	  {
10537	    /* Offset is out of range for a single add, so use two ldr instructions.  */
10538	    ops[0] = ldm[1];
10539	    ops[1] = base_reg;
10540	    ops[2] = GEN_INT (val1);
10541	    output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10542	    ops[0] = ldm[2];
10543	    ops[2] = GEN_INT (val2);
10544	    output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10545	  }
10546      }
10547    else if (val1 != 0)
10548      {
10549	if (val1 < val2)
10550	  output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10551	else
10552	  output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10553      }
10554    else
10555      {
10556	if (val1 < val2)
10557	  output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10558	else
10559	  output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10560      }
10561    output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10562    return \"\";
10563  }"
10564  [(set_attr "length" "12")
10565   (set_attr "predicable" "yes")
10566   (set_attr "type" "load1")]
10567)
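;; Illustrative only: *arith_adjacentmem is aimed at code that combines two
;; loads from adjacent words, which can then be fetched with a single LDM
;; (or, for out-of-range offsets, the two-LDR fallback above).
;;
;;   int sum_pair (const int *p)
;;   {
;;     return p[0] + p[1];   /* e.g. ldmia rp, {r0, r1}; add r0, r0, r1 */
;;   }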
10568
10569; This pattern is never tried by combine, so handle it as a peephole.
10570
10571(define_peephole2
10572  [(set (match_operand:SI 0 "arm_general_register_operand" "")
10573	(match_operand:SI 1 "arm_general_register_operand" ""))
10574   (set (reg:CC CC_REGNUM)
10575	(compare:CC (match_dup 1) (const_int 0)))]
10576  "TARGET_ARM"
10577  [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10578	      (set (match_dup 0) (match_dup 1))])]
10579  ""
10580)
10581
10582(define_split
10583  [(set (match_operand:SI 0 "s_register_operand" "")
10584	(and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10585		       (const_int 0))
10586		(neg:SI (match_operator:SI 2 "arm_comparison_operator"
10587			 [(match_operand:SI 3 "s_register_operand" "")
10588			  (match_operand:SI 4 "arm_rhs_operand" "")]))))
10589   (clobber (match_operand:SI 5 "s_register_operand" ""))]
10590  "TARGET_ARM"
10591  [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10592   (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10593			      (match_dup 5)))]
10594  ""
10595)
10596
10597;; This split can be used because CC_Z mode implies that the following
10598;; branch will be an equality, or an unsigned inequality, so the sign
10599;; extension is not needed.
10600
10601(define_split
10602  [(set (reg:CC_Z CC_REGNUM)
10603	(compare:CC_Z
10604	 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10605		    (const_int 24))
10606	 (match_operand 1 "const_int_operand" "")))
10607   (clobber (match_scratch:SI 2 ""))]
10608  "TARGET_ARM
10609   && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10610       == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10611  [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10612   (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10613  "
10614  operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10615  "
10616)
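;; Illustrative only (not part of the original description): the split above
;; relies on the identity  ((x << 24) cmp C)  <=>  ((x & 0xff) cmp (C >> 24))
;; for equality and unsigned comparisons whenever the low 24 bits of C are
;; zero; e.g. comparing (x << 24) with 0x12000000 tests the same condition
;; as comparing (x & 0xff) with 0x12, which is why the zero_extend form is
;; sufficient.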
10617;; ??? Check the patterns above for Thumb-2 usefulness
10618
10619(define_expand "prologue"
10620  [(clobber (const_int 0))]
10621  "TARGET_EITHER"
10622  "if (TARGET_32BIT)
10623     arm_expand_prologue ();
10624   else
10625     thumb1_expand_prologue ();
10626  DONE;
10627  "
10628)
10629
10630(define_expand "epilogue"
10631  [(clobber (const_int 0))]
10632  "TARGET_EITHER"
10633  "
10634  if (crtl->calls_eh_return)
10635    emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10636  if (TARGET_THUMB1)
10637   {
10638     thumb1_expand_epilogue ();
10639     emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10640                     gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10641   }
10642  else if (HAVE_return)
10643   {
10644     /* HAVE_return already tests USE_RETURN_INSN (FALSE).  Hence,
10645        there is no need for explicit testing again.  */
10646     emit_jump_insn (gen_return ());
10647   }
10648  else if (TARGET_32BIT)
10649   {
10650    arm_expand_epilogue (true);
10651   }
10652  DONE;
10653  "
10654)
10655
10656(define_insn "prologue_thumb1_interwork"
10657  [(unspec_volatile [(const_int 0)] VUNSPEC_THUMB1_INTERWORK)]
10658  "TARGET_THUMB1"
10659  "* return thumb1_output_interwork ();"
10660  [(set_attr "length" "8")]
10661)
10662
10663;; Note - although unspec_volatile insns USE all hard registers,
10664;; USEs are ignored after reload has completed.  Thus we need
10665;; to add an unspec of the link register to ensure that flow
10666;; does not think that it is unused by the sibcall branch that
10667;; will replace the standard function epilogue.
10668(define_expand "sibcall_epilogue"
10669   [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10670               (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10671   "TARGET_32BIT"
10672   "
10673   arm_expand_epilogue (false);
10674   DONE;
10675   "
10676)
10677
10678(define_insn "*epilogue_insns"
10679  [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10680  "TARGET_THUMB1"
10681  "*
10682    return thumb1_unexpanded_epilogue ();
10683  "
10684  ; Length is the absolute worst case.
10685  [(set_attr "length" "44")
10686   (set_attr "type" "block")
10687   ;; We don't clobber the conditions, but the potential length of this
10688   ;; operation is sufficient to make conditionalizing the sequence
10689   ;; unlikely to be profitable.
10690   (set_attr "conds" "clob")]
10691)
10692
10693(define_expand "eh_epilogue"
10694  [(use (match_operand:SI 0 "register_operand" ""))
10695   (use (match_operand:SI 1 "register_operand" ""))
10696   (use (match_operand:SI 2 "register_operand" ""))]
10697  "TARGET_EITHER"
10698  "
10699  {
10700    cfun->machine->eh_epilogue_sp_ofs = operands[1];
10701    if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10702      {
10703	rtx ra = gen_rtx_REG (Pmode, 2);
10704
10705	emit_move_insn (ra, operands[2]);
10706	operands[2] = ra;
10707      }
10708    /* This is a hack -- we may have crystallized the function type too
10709       early.  */
10710    cfun->machine->func_type = 0;
10711  }"
10712)
10713
10714;; This split is only used during output to reduce the number of patterns
10715;; that need assembler instructions added to them.  We allowed the setting
10716;; of the conditions to be implicit during rtl generation so that
10717;; the conditional compare patterns would work.  However, this conflicts to
10718;; some extent with the conditional data operations, so we have to split them
10719;; up again here.
10720
10721;; ??? Need to audit these splitters for Thumb-2.  Why isn't normal
10722;; conditional execution sufficient?
10723
10724(define_split
10725  [(set (match_operand:SI 0 "s_register_operand" "")
10726	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
10727			  [(match_operand 2 "" "") (match_operand 3 "" "")])
10728			 (match_dup 0)
10729			 (match_operand 4 "" "")))
10730   (clobber (reg:CC CC_REGNUM))]
10731  "TARGET_ARM && reload_completed"
10732  [(set (match_dup 5) (match_dup 6))
10733   (cond_exec (match_dup 7)
10734	      (set (match_dup 0) (match_dup 4)))]
10735  "
10736  {
10737    enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10738					     operands[2], operands[3]);
10739    enum rtx_code rc = GET_CODE (operands[1]);
10740
10741    operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10742    operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10743    if (mode == CCFPmode || mode == CCFPEmode)
10744      rc = reverse_condition_maybe_unordered (rc);
10745    else
10746      rc = reverse_condition (rc);
10747
10748    operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10749  }"
10750)
10751
10752(define_split
10753  [(set (match_operand:SI 0 "s_register_operand" "")
10754	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
10755			  [(match_operand 2 "" "") (match_operand 3 "" "")])
10756			 (match_operand 4 "" "")
10757			 (match_dup 0)))
10758   (clobber (reg:CC CC_REGNUM))]
10759  "TARGET_ARM && reload_completed"
10760  [(set (match_dup 5) (match_dup 6))
10761   (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10762	      (set (match_dup 0) (match_dup 4)))]
10763  "
10764  {
10765    enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10766					     operands[2], operands[3]);
10767
10768    operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10769    operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10770  }"
10771)
10772
10773(define_split
10774  [(set (match_operand:SI 0 "s_register_operand" "")
10775	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
10776			  [(match_operand 2 "" "") (match_operand 3 "" "")])
10777			 (match_operand 4 "" "")
10778			 (match_operand 5 "" "")))
10779   (clobber (reg:CC CC_REGNUM))]
10780  "TARGET_ARM && reload_completed"
10781  [(set (match_dup 6) (match_dup 7))
10782   (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10783	      (set (match_dup 0) (match_dup 4)))
10784   (cond_exec (match_dup 8)
10785	      (set (match_dup 0) (match_dup 5)))]
10786  "
10787  {
10788    enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10789					     operands[2], operands[3]);
10790    enum rtx_code rc = GET_CODE (operands[1]);
10791
10792    operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10793    operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10794    if (mode == CCFPmode || mode == CCFPEmode)
10795      rc = reverse_condition_maybe_unordered (rc);
10796    else
10797      rc = reverse_condition (rc);
10798
10799    operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10800  }"
10801)
10802
10803(define_split
10804  [(set (match_operand:SI 0 "s_register_operand" "")
10805	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
10806			  [(match_operand:SI 2 "s_register_operand" "")
10807			   (match_operand:SI 3 "arm_add_operand" "")])
10808			 (match_operand:SI 4 "arm_rhs_operand" "")
10809			 (not:SI
10810			  (match_operand:SI 5 "s_register_operand" ""))))
10811   (clobber (reg:CC CC_REGNUM))]
10812  "TARGET_ARM && reload_completed"
10813  [(set (match_dup 6) (match_dup 7))
10814   (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10815	      (set (match_dup 0) (match_dup 4)))
10816   (cond_exec (match_dup 8)
10817	      (set (match_dup 0) (not:SI (match_dup 5))))]
10818  "
10819  {
10820    enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10821					     operands[2], operands[3]);
10822    enum rtx_code rc = GET_CODE (operands[1]);
10823
10824    operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10825    operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10826    if (mode == CCFPmode || mode == CCFPEmode)
10827      rc = reverse_condition_maybe_unordered (rc);
10828    else
10829      rc = reverse_condition (rc);
10830
10831    operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10832  }"
10833)
10834
10835(define_insn "*cond_move_not"
10836  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10837	(if_then_else:SI (match_operator 4 "arm_comparison_operator"
10838			  [(match_operand 3 "cc_register" "") (const_int 0)])
10839			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10840			 (not:SI
10841			  (match_operand:SI 2 "s_register_operand" "r,r"))))]
10842  "TARGET_ARM"
10843  "@
10844   mvn%D4\\t%0, %2
10845   mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10846  [(set_attr "conds" "use")
10847   (set_attr "insn" "mvn")
10848   (set_attr "length" "4,8")]
10849)
10850
10851;; The next two patterns occur when an AND operation is followed by an
10852;; scc insn sequence.
10853
10854(define_insn "*sign_extract_onebit"
10855  [(set (match_operand:SI 0 "s_register_operand" "=r")
10856	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10857			 (const_int 1)
10858			 (match_operand:SI 2 "const_int_operand" "n")))
10859    (clobber (reg:CC CC_REGNUM))]
10860  "TARGET_ARM"
10861  "*
10862    operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10863    output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10864    return \"mvnne\\t%0, #0\";
10865  "
10866  [(set_attr "conds" "clob")
10867   (set_attr "length" "8")]
10868)
10869
10870(define_insn "*not_signextract_onebit"
10871  [(set (match_operand:SI 0 "s_register_operand" "=r")
10872	(not:SI
10873	 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10874			  (const_int 1)
10875			  (match_operand:SI 2 "const_int_operand" "n"))))
10876   (clobber (reg:CC CC_REGNUM))]
10877  "TARGET_ARM"
10878  "*
10879    operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10880    output_asm_insn (\"tst\\t%1, %2\", operands);
10881    output_asm_insn (\"mvneq\\t%0, #0\", operands);
10882    return \"movne\\t%0, #0\";
10883  "
10884  [(set_attr "conds" "clob")
10885   (set_attr "length" "12")]
10886)
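;; Illustrative only: a one-bit sign_extract yields 0 or -1, so source of
;; the following shape (bit position 5 is an arbitrary example) can end up
;; matching the two patterns above after combine.
;;
;;   int bit_as_mask (int x)
;;   {
;;     return -((x >> 5) & 1);   /* all-ones if bit 5 is set, else zero */
;;   }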
10887;; ??? The above patterns need auditing for Thumb-2
10888
10889;; Push multiple registers to the stack.  Registers are in parallel (use ...)
10890;; expressions.  For simplicity, the first register is also in the unspec
10891;; part.
10892;; To avoid using a GNU extension, the length attribute is computed
10893;; by the C function arm_attr_length_push_multi.
10894(define_insn "*push_multi"
10895  [(match_parallel 2 "multi_register_push"
10896    [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
10897	  (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10898		      UNSPEC_PUSH_MULT))])]
10899  ""
10900  "*
10901  {
10902    int num_saves = XVECLEN (operands[2], 0);
10903
10904    /* For the StrongARM at least, it is faster to
10905       use STR to store only a single register.
10906       In Thumb mode, always use push, and the assembler will pick
10907       something appropriate.  */
10908    if (num_saves == 1 && TARGET_ARM)
10909      output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
10910    else
10911      {
10912	int i;
10913	char pattern[100];
10914
10915	if (TARGET_ARM)
10916	    strcpy (pattern, \"stm%(fd%)\\t%m0!, {%1\");
10917	else if (TARGET_THUMB2)
10918	    strcpy (pattern, \"push%?\\t{%1\");
10919	else
10920	    strcpy (pattern, \"push\\t{%1\");
10921
10922	for (i = 1; i < num_saves; i++)
10923	  {
10924	    strcat (pattern, \", %|\");
10925	    strcat (pattern,
10926		    reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10927	  }
10928
10929	strcat (pattern, \"}\");
10930	output_asm_insn (pattern, operands);
10931      }
10932
10933    return \"\";
10934  }"
10935  [(set_attr "type" "store4")
10936   (set (attr "length")
10937	(symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
10938)
10939
10940(define_insn "stack_tie"
10941  [(set (mem:BLK (scratch))
10942	(unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10943		     (match_operand:SI 1 "s_register_operand" "rk")]
10944		    UNSPEC_PRLG_STK))]
10945  ""
10946  ""
10947  [(set_attr "length" "0")]
10948)
10949
10950;; Pop (as used in epilogue RTL)
10951;;
10952(define_insn "*load_multiple_with_writeback"
10953  [(match_parallel 0 "load_multiple_operation"
10954    [(set (match_operand:SI 1 "s_register_operand" "+rk")
10955          (plus:SI (match_dup 1)
10956                   (match_operand:SI 2 "const_int_operand" "I")))
10957     (set (match_operand:SI 3 "s_register_operand" "=rk")
10958          (mem:SI (match_dup 1)))
10959        ])]
10960  "TARGET_32BIT && (reload_in_progress || reload_completed)"
10961  "*
10962  {
10963    arm_output_multireg_pop (operands, /*return_pc=*/false,
10964                                       /*cond=*/const_true_rtx,
10965                                       /*reverse=*/false,
10966                                       /*update=*/true);
10967    return \"\";
10968  }
10969  "
10970  [(set_attr "type" "load4")
10971   (set_attr "predicable" "yes")]
10972)
10973
10974;; Pop with return (as used in epilogue RTL)
10975;;
10976;; This instruction is generated when the registers are popped at the end of
10977;; the epilogue.  Here, instead of popping the value into LR and then
10978;; generating a jump to LR, the value is popped directly into PC.  Hence, the
10979;; pattern is combined with (return).
10980(define_insn "*pop_multiple_with_writeback_and_return"
10981  [(match_parallel 0 "pop_multiple_return"
10982    [(return)
10983     (set (match_operand:SI 1 "s_register_operand" "+rk")
10984          (plus:SI (match_dup 1)
10985                   (match_operand:SI 2 "const_int_operand" "I")))
10986     (set (match_operand:SI 3 "s_register_operand" "=rk")
10987          (mem:SI (match_dup 1)))
10988        ])]
10989  "TARGET_32BIT && (reload_in_progress || reload_completed)"
10990  "*
10991  {
10992    arm_output_multireg_pop (operands, /*return_pc=*/true,
10993                                       /*cond=*/const_true_rtx,
10994                                       /*reverse=*/false,
10995                                       /*update=*/true);
10996    return \"\";
10997  }
10998  "
10999  [(set_attr "type" "load4")
11000   (set_attr "predicable" "yes")]
11001)
11002
11003(define_insn "*pop_multiple_with_return"
11004  [(match_parallel 0 "pop_multiple_return"
11005    [(return)
11006     (set (match_operand:SI 2 "s_register_operand" "=rk")
11007          (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11008        ])]
11009  "TARGET_32BIT && (reload_in_progress || reload_completed)"
11010  "*
11011  {
11012    arm_output_multireg_pop (operands, /*return_pc=*/true,
11013                                       /*cond=*/const_true_rtx,
11014                                       /*reverse=*/false,
11015                                       /*update=*/false);
11016    return \"\";
11017  }
11018  "
11019  [(set_attr "type" "load4")
11020   (set_attr "predicable" "yes")]
11021)
11022
11023;; Load into PC and return
11024(define_insn "*ldr_with_return"
11025  [(return)
11026   (set (reg:SI PC_REGNUM)
11027        (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11028  "TARGET_32BIT && (reload_in_progress || reload_completed)"
11029  "ldr%?\t%|pc, [%0], #4"
11030  [(set_attr "type" "load1")
11031   (set_attr "predicable" "yes")]
11032)
11033;; Pop for floating point registers (as used in epilogue RTL)
11034(define_insn "*vfp_pop_multiple_with_writeback"
11035  [(match_parallel 0 "pop_multiple_fp"
11036    [(set (match_operand:SI 1 "s_register_operand" "+rk")
11037          (plus:SI (match_dup 1)
11038                   (match_operand:SI 2 "const_int_operand" "I")))
11039     (set (match_operand:DF 3 "vfp_hard_register_operand" "")
11040          (mem:DF (match_dup 1)))])]
11041  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP"
11042  "*
11043  {
11044    int num_regs = XVECLEN (operands[0], 0);
11045    char pattern[100];
11046    rtx op_list[2];
11047    strcpy (pattern, \"fldmfdd\\t\");
11048    strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11049    strcat (pattern, \"!, {\");
11050    op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11051    strcat (pattern, \"%P0\");
11052    if ((num_regs - 1) > 1)
11053      {
11054        strcat (pattern, \"-%P1\");
11055        op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11056      }
11057
11058    strcat (pattern, \"}\");
11059    output_asm_insn (pattern, op_list);
11060    return \"\";
11061  }
11062  "
11063  [(set_attr "type" "load4")
11064   (set_attr "conds" "unconditional")
11065   (set_attr "predicable" "no")]
11066)
11067
11068;; Special patterns for dealing with the constant pool
11069
11070(define_insn "align_4"
11071  [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11072  "TARGET_EITHER"
11073  "*
11074  assemble_align (32);
11075  return \"\";
11076  "
11077)
11078
11079(define_insn "align_8"
11080  [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11081  "TARGET_EITHER"
11082  "*
11083  assemble_align (64);
11084  return \"\";
11085  "
11086)
11087
11088(define_insn "consttable_end"
11089  [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11090  "TARGET_EITHER"
11091  "*
11092  making_const_table = FALSE;
11093  return \"\";
11094  "
11095)
11096
11097(define_insn "consttable_1"
11098  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11099  "TARGET_EITHER"
11100  "*
11101  making_const_table = TRUE;
11102  assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11103  assemble_zeros (3);
11104  return \"\";
11105  "
11106  [(set_attr "length" "4")]
11107)
11108
11109(define_insn "consttable_2"
11110  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11111  "TARGET_EITHER"
11112  "*
11113  {
11114    rtx x = operands[0];
11115    making_const_table = TRUE;
11116    switch (GET_MODE_CLASS (GET_MODE (x)))
11117      {
11118      case MODE_FLOAT:
11119	arm_emit_fp16_const (x);
11120	break;
11121      default:
11122	assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11123	assemble_zeros (2);
11124	break;
11125      }
11126    return \"\";
11127  }"
11128  [(set_attr "length" "4")]
11129)
11130
11131(define_insn "consttable_4"
11132  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11133  "TARGET_EITHER"
11134  "*
11135  {
11136    rtx x = operands[0];
11137    making_const_table = TRUE;
11138    switch (GET_MODE_CLASS (GET_MODE (x)))
11139      {
11140      case MODE_FLOAT:
11141	{
11142	  REAL_VALUE_TYPE r;
11143	  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
11144	  assemble_real (r, GET_MODE (x), BITS_PER_WORD);
11145	  break;
11146	}
11147      default:
11148	/* XXX: Sometimes gcc does something really dumb and ends up with
11149	   a HIGH in a constant pool entry, usually because it's trying to
11150	   load into a VFP register.  We know this will always be used in
11151	   combination with a LO_SUM which ignores the high bits, so just
11152	   strip off the HIGH.  */
11153	if (GET_CODE (x) == HIGH)
11154	  x = XEXP (x, 0);
11155        assemble_integer (x, 4, BITS_PER_WORD, 1);
11156	mark_symbol_refs_as_used (x);
11157        break;
11158      }
11159    return \"\";
11160  }"
11161  [(set_attr "length" "4")]
11162)
11163
11164(define_insn "consttable_8"
11165  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11166  "TARGET_EITHER"
11167  "*
11168  {
11169    making_const_table = TRUE;
11170    switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11171      {
11172       case MODE_FLOAT:
11173        {
11174          REAL_VALUE_TYPE r;
11175          REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11176          assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11177          break;
11178        }
11179      default:
11180        assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11181        break;
11182      }
11183    return \"\";
11184  }"
11185  [(set_attr "length" "8")]
11186)
11187
11188(define_insn "consttable_16"
11189  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11190  "TARGET_EITHER"
11191  "*
11192  {
11193    making_const_table = TRUE;
11194    switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11195      {
11196       case MODE_FLOAT:
11197        {
11198          REAL_VALUE_TYPE r;
11199          REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11200          assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11201          break;
11202        }
11203      default:
11204        assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11205        break;
11206      }
11207    return \"\";
11208  }"
11209  [(set_attr "length" "16")]
11210)
11211
11212;; Miscellaneous Thumb patterns
11213
11214(define_expand "tablejump"
11215  [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
11216	      (use (label_ref (match_operand 1 "" "")))])]
11217  "TARGET_THUMB1"
11218  "
11219  if (flag_pic)
11220    {
11221      /* Hopefully, CSE will eliminate this copy.  */
11222      rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
11223      rtx reg2 = gen_reg_rtx (SImode);
11224
11225      emit_insn (gen_addsi3 (reg2, operands[0], reg1));
11226      operands[0] = reg2;
11227    }
11228  "
11229)
11230
11231;; NB never uses BX.
11232(define_insn "*thumb1_tablejump"
11233  [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
11234   (use (label_ref (match_operand 1 "" "")))]
11235  "TARGET_THUMB1"
11236  "mov\\t%|pc, %0"
11237  [(set_attr "length" "2")]
11238)
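;; Illustrative only: on Thumb-1 the usual source of a tablejump is a dense
;; switch statement, for which the compiler may build a jump table and
;; dispatch through the register move above.
;;
;;   int classify (int k)
;;   {
;;     switch (k)
;;       {
;;       case 0: return 7;
;;       case 1: return 3;
;;       case 2: return 9;
;;       case 3: return 1;
;;       default: return 0;
;;       }
;;   }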
11239
11240;; V5 instructions.
11241
11242(define_insn "clzsi2"
11243  [(set (match_operand:SI 0 "s_register_operand" "=r")
11244	(clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11245  "TARGET_32BIT && arm_arch5"
11246  "clz%?\\t%0, %1"
11247  [(set_attr "predicable" "yes")
11248   (set_attr "insn" "clz")])
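;; Illustrative only: the standard clzsi2 pattern is normally reached through
;; the __builtin_clz family, e.g.
;;
;;   int leading_zeros (unsigned x)
;;   {
;;     return __builtin_clz (x);   /* undefined for x == 0, like the builtin */
;;   }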
11249
11250(define_insn "rbitsi2"
11251  [(set (match_operand:SI 0 "s_register_operand" "=r")
11252	(unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11253  "TARGET_32BIT && arm_arch_thumb2"
11254  "rbit%?\\t%0, %1"
11255  [(set_attr "predicable" "yes")
11256   (set_attr "insn" "clz")])
11257
11258(define_expand "ctzsi2"
11259 [(set (match_operand:SI           0 "s_register_operand" "")
11260       (ctz:SI (match_operand:SI  1 "s_register_operand" "")))]
11261  "TARGET_32BIT && arm_arch_thumb2"
11262  "
11263   {
11264     rtx tmp = gen_reg_rtx (SImode);
11265     emit_insn (gen_rbitsi2 (tmp, operands[1]));
11266     emit_insn (gen_clzsi2 (operands[0], tmp));
11267   }
11268   DONE;
11269  "
11270)
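;; Illustrative only: bit-reversing the operand moves its least significant
;; set bit to the most significant position, so CLZ of the reversed value is
;; the trailing-zero count; e.g. for x = 0x8, rbit gives 0x10000000 and
;; clz (0x10000000) = 3 = ctz (0x8).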

;; V5E instructions.

(define_insn "prefetch"
  [(prefetch (match_operand:SI 0 "address_operand" "p")
             (match_operand:SI 1 "" "")
             (match_operand:SI 2 "" ""))]
  "TARGET_32BIT && arm_arch5e"
  "pld\\t%a0"
  [(set_attr "type" "load1")]
)
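;; Illustrative note: a __builtin_prefetch (p) is expanded through this
;; pattern and comes out as something like
;;
;;   pld   [r0]
;;
;; the read/write and locality hints (operands 1 and 2) are matched but do
;; not change the instruction chosen here.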

;; General predication pattern

(define_cond_exec
  [(match_operator 0 "arm_comparison_operator"
    [(match_operand 1 "cc_register" "")
     (const_int 0)])]
  "TARGET_32BIT"
  ""
)

(define_insn "force_register_use"
  [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
  ""
  "%@ %0 needed"
  [(set_attr "length" "0")]
)


;; Patterns for exception handling

(define_expand "eh_return"
  [(use (match_operand 0 "general_operand" ""))]
  "TARGET_EITHER"
  "
  {
    if (TARGET_32BIT)
      emit_insn (gen_arm_eh_return (operands[0]));
    else
      emit_insn (gen_thumb_eh_return (operands[0]));
    DONE;
  }"
)

;; We can't expand this before we know where the link register is stored.
(define_insn_and_split "arm_eh_return"
  [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
                    VUNSPEC_EH_RETURN)
   (clobber (match_scratch:SI 1 "=&r"))]
  "TARGET_ARM"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "
  {
    arm_set_return_address (operands[0], operands[1]);
    DONE;
  }"
)

(define_insn_and_split "thumb_eh_return"
  [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
                    VUNSPEC_EH_RETURN)
   (clobber (match_scratch:SI 1 "=&l"))]
  "TARGET_THUMB1"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  "
  {
    thumb_set_return_address (operands[0], operands[1]);
    DONE;
  }"
)


;; TLS support

(define_insn "load_tp_hard"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (unspec:SI [(const_int 0)] UNSPEC_TLS))]
  "TARGET_HARD_TP"
  "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
  [(set_attr "predicable" "yes")]
)

;; Doesn't clobber R1-R3.  Must use r0 for the first operand.
(define_insn "load_tp_soft"
  [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
   (clobber (reg:SI LR_REGNUM))
   (clobber (reg:SI IP_REGNUM))
   (clobber (reg:CC CC_REGNUM))]
  "TARGET_SOFT_TP"
  "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
  [(set_attr "conds" "clob")]
)
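;; Illustrative note on the convention encoded above: the helper call
;; behaves roughly like
;;
;;   void *tp = __aeabi_read_tp ();  /* result in r0; r1-r3 preserved;  */
;;                                   /* lr, ip and the flags clobbered  */
;;
;; which is why only LR_REGNUM, IP_REGNUM and CC_REGNUM are listed as
;; clobbers.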

;; TLS descriptor call.
(define_insn "tlscall"
  [(set (reg:SI R0_REGNUM)
        (unspec:SI [(reg:SI R0_REGNUM)
                    (match_operand:SI 0 "" "X")
                    (match_operand 1 "" "")] UNSPEC_TLS))
   (clobber (reg:SI R1_REGNUM))
   (clobber (reg:SI LR_REGNUM))
   (clobber (reg:SI CC_REGNUM))]
  "TARGET_GNU2_TLS"
  {
    targetm.asm_out.internal_label (asm_out_file, "LPIC",
                                    INTVAL (operands[1]));
    return "bl\\t%c0(tlscall)";
  }
  [(set_attr "conds" "clob")
   (set_attr "length" "4")]
)

;; For thread pointer builtin
(define_expand "get_thread_pointersi"
  [(match_operand:SI 0 "s_register_operand" "=r")]
 ""
 "
 {
   arm_load_tp (operands[0]);
   DONE;
 }")
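;; Illustrative note: a __builtin_thread_pointer () call is routed through
;; this expander and, depending on the target, becomes either the
;; coprocessor read used by load_tp_hard, e.g.
;;
;;   mrc   p15, 0, r0, c13, c0, 3
;;
;; or the __aeabi_read_tp call used by load_tp_soft.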

;;

;; We only care about the lower 16 bits of the constant
;; being inserted into the upper 16 bits of the register.
(define_insn "*arm_movtas_ze"
  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
                   (const_int 16)
                   (const_int 16))
        (match_operand:SI 1 "const_int_operand" ""))]
  "arm_arch_thumb2"
  "movt%?\t%0, %L1"
 [(set_attr "predicable" "yes")
   (set_attr "length" "4")]
)
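;; Illustrative example: inserting 0x1234 into bits 31:16 of r0 while
;; leaving bits 15:0 untouched is emitted as
;;
;;   movt  r0, #0x1234
;;
;; only the low 16 bits of operand 1 matter here (hence the %L1 output
;; modifier), in line with the comment above the pattern.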

(define_insn "*arm_rev"
  [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
        (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
  "arm_arch6"
  "@
   rev\t%0, %1
   rev%?\t%0, %1
   rev%?\t%0, %1"
  [(set_attr "arch" "t1,t2,32")
   (set_attr "length" "2,2,4")]
)

(define_expand "arm_legacy_rev"
  [(set (match_operand:SI 2 "s_register_operand" "")
        (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
                             (const_int 16))
                (match_dup 1)))
   (set (match_dup 2)
        (lshiftrt:SI (match_dup 2)
                     (const_int 8)))
   (set (match_operand:SI 3 "s_register_operand" "")
        (rotatert:SI (match_dup 1)
                     (const_int 8)))
   (set (match_dup 2)
        (and:SI (match_dup 2)
                (const_int -65281)))
   (set (match_operand:SI 0 "s_register_operand" "")
        (xor:SI (match_dup 3)
                (match_dup 2)))]
  "TARGET_32BIT"
  ""
)
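;; Illustrative sketch of the sequence described by the expansion above
;; (register names are only for illustration; actual selection is left to
;; later passes):
;;
;;   eor   r2, r1, r1, ror #16   @ op2 = x ^ ror (x, 16)
;;   mov   r2, r2, lsr #8        @ op2 >>= 8
;;   mov   r3, r1, ror #8        @ op3 = ror (x, 8)
;;   bic   r2, r2, #0xff00       @ op2 &= 0xffff00ff (-65281)
;;   eor   r0, r3, r2            @ result = op3 ^ op2 = bswap (x)
;;
;; i.e. a five-instruction byte reverse for cores without the v6 REV
;; instruction.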

;; Reuse temporaries to keep register pressure down.
(define_expand "thumb_legacy_rev"
  [(set (match_operand:SI 2 "s_register_operand" "")
     (ashift:SI (match_operand:SI 1 "s_register_operand" "")
                (const_int 24)))
   (set (match_operand:SI 3 "s_register_operand" "")
     (lshiftrt:SI (match_dup 1)
                  (const_int 24)))
   (set (match_dup 3)
     (ior:SI (match_dup 3)
             (match_dup 2)))
   (set (match_operand:SI 4 "s_register_operand" "")
     (const_int 16))
   (set (match_operand:SI 5 "s_register_operand" "")
     (rotatert:SI (match_dup 1)
                  (match_dup 4)))
   (set (match_dup 2)
     (ashift:SI (match_dup 5)
                (const_int 24)))
   (set (match_dup 5)
     (lshiftrt:SI (match_dup 5)
                  (const_int 24)))
   (set (match_dup 5)
     (ior:SI (match_dup 5)
             (match_dup 2)))
   (set (match_dup 5)
     (rotatert:SI (match_dup 5)
                  (match_dup 4)))
   (set (match_operand:SI 0 "s_register_operand" "")
     (ior:SI (match_dup 5)
             (match_dup 3)))]
  "TARGET_THUMB"
  ""
)
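;; Illustrative sketch of the computation above in C-like terms (op2, op3
;; and op5 are the temporaries matched by the pattern, x is operand 1):
;;
;;   op3 = (x << 24) | (x >> 24);        /* outer bytes swapped into place */
;;   op5 = ror (x, 16);
;;   op5 = (op5 << 24) | (op5 >> 24);    /* inner bytes, still rotated     */
;;   result = ror (op5, 16) | op3;
;;
;; Only shifts, ORs and rotates by a register (hence the constant 16 loaded
;; into operand 4) are needed, so each step fits a 16-bit Thumb-1
;; instruction, and op2 is reused to hold the intermediate shifted values.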

(define_expand "bswapsi2"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
"TARGET_EITHER && (arm_arch6 || !optimize_size)"
"
    if (!arm_arch6)
      {
        rtx op2 = gen_reg_rtx (SImode);
        rtx op3 = gen_reg_rtx (SImode);

        if (TARGET_THUMB)
          {
            rtx op4 = gen_reg_rtx (SImode);
            rtx op5 = gen_reg_rtx (SImode);

            emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
                                             op2, op3, op4, op5));
          }
        else
          {
            emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
                                           op2, op3));
          }

        DONE;
      }
  "
)
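;; Illustrative summary of the dispatch above: with arm_arch6 the bswap:SI
;; RTL is left in place and matched by *arm_rev as a single
;;
;;   rev   r0, r1
;;
;; while older cores fall back to the arm_legacy_rev or thumb_legacy_rev
;; sequences, which is why the expander is gated on !optimize_size when REV
;; is not available.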

;; bswap16 patterns: use revsh and rev16 instructions for the signed
;; and unsigned variants, respectively. For rev16, expose
;; byte-swapping in the lower 16 bits only.
(define_insn "*arm_revsh"
  [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
        (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
  "arm_arch6"
  "@
  revsh\t%0, %1
  revsh%?\t%0, %1
  revsh%?\t%0, %1"
  [(set_attr "arch" "t1,t2,32")
   (set_attr "length" "2,2,4")]
)

(define_insn "*arm_rev16"
  [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
        (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
  "arm_arch6"
  "@
   rev16\t%0, %1
   rev16%?\t%0, %1
   rev16%?\t%0, %1"
  [(set_attr "arch" "t1,t2,32")
   (set_attr "length" "2,2,4")]
)
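;; Illustrative examples of the two forms (writing a register as bytes
;; b3:b2:b1:b0):
;;
;;   revsh r0, r1   @ r0 = sign_extend (b0:b1)   -- signed bswap16
;;   rev16 r0, r1   @ r0 = b2:b3:b0:b1           -- unsigned bswap16
;;
;; The *arm_rev16 pattern only describes the low halfword (HImode) of the
;; rev16 result, as the comment before these patterns notes.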

(define_expand "bswaphi2"
  [(set (match_operand:HI 0 "s_register_operand" "=r")
        (bswap:HI (match_operand:HI 1 "s_register_operand" "r")))]
"arm_arch6"
""
)

;; Patterns for LDRD/STRD in Thumb2 mode

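;; Illustrative example of what the patterns below recognise: two adjacent
;; word accesses through the same base whose offsets differ by exactly 4,
;; e.g.
;;
;;   ldr   r0, [r2]
;;   ldr   r1, [r2, #4]
;;
;; are combined into
;;
;;   ldrd  r0, r1, [r2]
;;
;; provided operands_ok_ldrd_strd accepts the register/offset combination.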
(define_insn "*thumb2_ldrd"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
                         (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
   (set (match_operand:SI 3 "s_register_operand" "=r")
        (mem:SI (plus:SI (match_dup 1)
                         (match_operand:SI 4 "const_int_operand" ""))))]
  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
     && current_tune->prefer_ldrd_strd
     && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
     && (operands_ok_ldrd_strd (operands[0], operands[3],
                                  operands[1], INTVAL (operands[2]),
                                  false, true))"
  "ldrd%?\t%0, %3, [%1, %2]"
  [(set_attr "type" "load2")
   (set_attr "predicable" "yes")])

(define_insn "*thumb2_ldrd_base"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
   (set (match_operand:SI 2 "s_register_operand" "=r")
        (mem:SI (plus:SI (match_dup 1)
                         (const_int 4))))]
  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
     && current_tune->prefer_ldrd_strd
     && (operands_ok_ldrd_strd (operands[0], operands[2],
                                  operands[1], 0, false, true))"
  "ldrd%?\t%0, %2, [%1]"
  [(set_attr "type" "load2")
   (set_attr "predicable" "yes")])

(define_insn "*thumb2_ldrd_base_neg"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
                         (const_int -4))))
   (set (match_operand:SI 2 "s_register_operand" "=r")
        (mem:SI (match_dup 1)))]
  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
     && current_tune->prefer_ldrd_strd
     && (operands_ok_ldrd_strd (operands[0], operands[2],
                                  operands[1], -4, false, true))"
  "ldrd%?\t%0, %2, [%1, #-4]"
  [(set_attr "type" "load2")
   (set_attr "predicable" "yes")])

(define_insn "*thumb2_strd"
  [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
                         (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
        (match_operand:SI 2 "s_register_operand" "r"))
   (set (mem:SI (plus:SI (match_dup 0)
                         (match_operand:SI 3 "const_int_operand" "")))
        (match_operand:SI 4 "s_register_operand" "r"))]
  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
     && current_tune->prefer_ldrd_strd
     && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
     && (operands_ok_ldrd_strd (operands[2], operands[4],
                                  operands[0], INTVAL (operands[1]),
                                  false, false))"
  "strd%?\t%2, %4, [%0, %1]"
  [(set_attr "type" "store2")
   (set_attr "predicable" "yes")])

(define_insn "*thumb2_strd_base"
  [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
        (match_operand:SI 1 "s_register_operand" "r"))
   (set (mem:SI (plus:SI (match_dup 0)
                         (const_int 4)))
        (match_operand:SI 2 "s_register_operand" "r"))]
  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
     && current_tune->prefer_ldrd_strd
     && (operands_ok_ldrd_strd (operands[1], operands[2],
                                  operands[0], 0, false, false))"
  "strd%?\t%1, %2, [%0]"
  [(set_attr "type" "store2")
   (set_attr "predicable" "yes")])

(define_insn "*thumb2_strd_base_neg"
  [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
                         (const_int -4)))
        (match_operand:SI 1 "s_register_operand" "r"))
   (set (mem:SI (match_dup 0))
        (match_operand:SI 2 "s_register_operand" "r"))]
  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
     && current_tune->prefer_ldrd_strd
     && (operands_ok_ldrd_strd (operands[1], operands[2],
                                  operands[0], -4, false, false))"
  "strd%?\t%1, %2, [%0, #-4]"
  [(set_attr "type" "store2")
   (set_attr "predicable" "yes")])


;; Load the load/store multiple patterns
(include "ldmstm.md")

;; Patterns in ldmstm.md don't cover more than 4 registers.  This pattern
;; covers the large register lists, without explicit writeback, that are
;; generated for the APCS_FRAME epilogue.
(define_insn "*load_multiple"
  [(match_parallel 0 "load_multiple_operation"
    [(set (match_operand:SI 2 "s_register_operand" "=rk")
          (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
        ])]
  "TARGET_32BIT"
  "*
  {
    arm_output_multireg_pop (operands, /*return_pc=*/false,
                                       /*cond=*/const_true_rtx,
                                       /*reverse=*/false,
                                       /*update=*/false);
    return \"\";
  }
  "
  [(set_attr "predicable" "yes")]
)
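;; Illustrative example (hypothetical register list): an APCS_FRAME epilogue
;; that restores more than four callee-saved registers with no base-register
;; writeback, e.g. something along the lines of
;;
;;   ldm   r11, {r4, r5, r6, r7, r8, lr}
;;
;; is matched here instead of by the at-most-4-register patterns in
;; ldmstm.md.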

;; Vector bits common to IWMMXT and Neon
(include "vec-common.md")
;; Load the Intel Wireless Multimedia Extension patterns
(include "iwmmxt.md")
;; Load the VFP co-processor patterns
(include "vfp.md")
;; Thumb-2 patterns
(include "thumb2.md")
;; Neon patterns
(include "neon.md")
;; Synchronization Primitives
(include "sync.md")
;; Fixed-point patterns
(include "arm-fixed.md")