xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/config/arm/arm.md (revision 48fb7bfab72acd4281a53bbee5ccf3f809019e75)
1;;- Machine description for ARM for GNU compiler
2;;  Copyright 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
3;;  2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4;;  Free Software Foundation, Inc.
5;;  Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6;;  and Martin Simmons (@harleqn.co.uk).
7;;  More major hacks by Richard Earnshaw (rearnsha@arm.com).
8
9;; This file is part of GCC.
10
11;; GCC is free software; you can redistribute it and/or modify it
12;; under the terms of the GNU General Public License as published
13;; by the Free Software Foundation; either version 3, or (at your
14;; option) any later version.
15
16;; GCC is distributed in the hope that it will be useful, but WITHOUT
17;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18;; or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
19;; License for more details.
20
21;; You should have received a copy of the GNU General Public License
22;; along with GCC; see the file COPYING3.  If not see
23;; <http://www.gnu.org/licenses/>.
24
25;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
26
27
28;;---------------------------------------------------------------------------
29;; Constants
30
31;; Register numbers
32(define_constants
33  [(R0_REGNUM        0)		; First CORE register
34   (IP_REGNUM	    12)		; Scratch register
35   (SP_REGNUM	    13)		; Stack pointer
36   (LR_REGNUM       14)		; Return address register
37   (PC_REGNUM	    15)		; Program counter
38   (CC_REGNUM       24)		; Condition code pseudo register
39   (LAST_ARM_REGNUM 15)		;
40   (FPA_F0_REGNUM   16)		; FIRST_FPA_REGNUM
41   (FPA_F7_REGNUM   23)		; LAST_FPA_REGNUM
42  ]
43)
44;; 3rd operand to select_dominance_cc_mode
45(define_constants
46  [(DOM_CC_X_AND_Y  0)
47   (DOM_CC_NX_OR_Y  1)
48   (DOM_CC_X_OR_Y   2)
49  ]
50)
51
52;; UNSPEC Usage:
53;; Note: sin and cos are no longer used.
54;; Unspec constants for Neon are defined in neon.md.
55
56(define_constants
57  [(UNSPEC_SIN       0)	; `sin' operation (MODE_FLOAT):
58			;   operand 0 is the result,
59			;   operand 1 the parameter.
60   (UNSPEC_COS	     1)	; `cos' operation (MODE_FLOAT):
61			;   operand 0 is the result,
62			;   operand 1 the parameter.
63   (UNSPEC_PUSH_MULT 2)	; `push multiple' operation:
64			;   operand 0 is the first register,
65			;   subsequent registers are in parallel (use ...)
66			;   expressions.
67   (UNSPEC_PIC_SYM   3) ; A symbol that has been treated properly for pic
68			;   usage, that is, we will add the pic_register
69			;   value to it before trying to dereference it.
70   (UNSPEC_PIC_BASE  4)	; Add PC and all but the last operand together.
71			;   The last operand is the number of a PIC_LABEL
72			;   that points at the containing instruction.
73   (UNSPEC_PRLG_STK  5) ; A special barrier that prevents frame accesses
74			;   being scheduled before the stack adjustment insn.
75   (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
76   			; this unspec is used to prevent the deletion of
77   			; instructions setting registers for EH handling
78   			; and stack frame generation.  Operand 0 is the
79   			; register to "use".
80   (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
81   (UNSPEC_WSHUFH    8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
82   (UNSPEC_WACC      9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
83   (UNSPEC_TMOVMSK  10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
84   (UNSPEC_WSAD     11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
85   (UNSPEC_WSADZ    12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
86   (UNSPEC_WMACS    13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
87   (UNSPEC_WMACU    14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
88   (UNSPEC_WMACSZ   15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
89   (UNSPEC_WMACUZ   16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
90   (UNSPEC_CLRDI    17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
91   (UNSPEC_WMADDS   18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
92   (UNSPEC_WMADDU   19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
93   (UNSPEC_TLS      20) ; A symbol that has been treated properly for TLS usage.
94   (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
95                         ; instruction stream.
96   (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer.  Used to
97			   ; generate correct unwind information.
98   (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
99			  ; correctly for PIC usage.
100   (UNSPEC_GOTSYM_OFF 24) ; The offset of the start of the GOT from a
101			  ; given symbolic address.
102   (UNSPEC_THUMB1_CASESI 25) ; A Thumb1 compressed dispatch-table call.
103   (UNSPEC_RBIT 26)       ; rbit operation.
104  ]
105)
106
107;; UNSPEC_VOLATILE Usage:
108
109(define_constants
110  [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
111			;   insn in the code.
112   (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
113			;   instruction epilogue sequence that isn't expanded
114			;   into normal RTL.  Used for both normal and sibcall
115			;   epilogues.
116   (VUNSPEC_ALIGN    2) ; `align' insn.  Used at the head of a minipool table
117			;   for inlined constants.
118   (VUNSPEC_POOL_END 3) ; `end-of-table'.  Used to mark the end of a minipool
119			;   table.
120   (VUNSPEC_POOL_1   4) ; `pool-entry(1)'.  An entry in the constant pool for
121			;   an 8-bit object.
122   (VUNSPEC_POOL_2   5) ; `pool-entry(2)'.  An entry in the constant pool for
123			;   a 16-bit object.
124   (VUNSPEC_POOL_4   6) ; `pool-entry(4)'.  An entry in the constant pool for
125			;   a 32-bit object.
126   (VUNSPEC_POOL_8   7) ; `pool-entry(8)'.  An entry in the constant pool for
127			;   a 64-bit object.
128   (VUNSPEC_POOL_16  8) ; `pool-entry(16)'.  An entry in the constant pool for
129			;   a 128-bit object.
130   (VUNSPEC_TMRC     9) ; Used by the iWMMXt TMRC instruction.
131   (VUNSPEC_TMCR     10) ; Used by the iWMMXt TMCR instruction.
132   (VUNSPEC_ALIGN8   11) ; 8-byte alignment version of VUNSPEC_ALIGN
133   (VUNSPEC_WCMP_EQ  12) ; Used by the iWMMXt WCMPEQ instructions
134   (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
135   (VUNSPEC_WCMP_GT  14) ; Used by the iWMMXt WCMPGT instructions
136   (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
137			 ; handling.
138  ]
139)
140
141;;---------------------------------------------------------------------------
142;; Attributes
143
144; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
145; generating ARM code.  This is used to control the length of some insn
146; patterns that share the same RTL in both ARM and Thumb code.
147(define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
148
149; IS_STRONGARM is set to 'yes' when compiling for StrongARM; it affects
150; scheduling decisions for the load unit and the multiplier.
151(define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
152
153; IS_XSCALE is set to 'yes' when compiling for XScale.
154(define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
155
156;; Operand number of an input operand that is shifted.  Zero if the
157;; given instruction does not shift one of its input operands.
158(define_attr "shift" "" (const_int 0))
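;; Illustration (operand numbering assumed for the example): an add whose
;; second source is shifted, e.g. "add r0, r1, r2, lsl #2", records the
;; operand number of the shifted register r2, roughly
;;   (set_attr "shift" "2")
;; so the pipeline descriptions can account for the shifter path.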
159
160; Floating Point Unit.  If we only have floating point emulation, then there
161; is no point in scheduling the floating point insns.  (Well, for best
162; performance we should try to group them together).
163(define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
164  (const (symbol_ref "arm_fpu_attr")))
165
166; LENGTH of an instruction (in bytes)
167(define_attr "length" "" (const_int 4))
168
169; POOL_RANGE is how far away from a constant pool entry this insn
170; can be placed.  If the distance is zero, then this insn will never
171; reference the pool.
172; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
173; before its address.
174(define_attr "pool_range" "" (const_int 0))
175(define_attr "neg_pool_range" "" (const_int 0))
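;; As a hedged example: a Thumb PC-relative load such as "ldr rX, [pc, #N]"
;; might carry (set_attr "pool_range" "1020"), meaning its literal must be
;; emitted no more than 1020 bytes after the insn; a nonzero NEG_POOL_RANGE
;; would similarly bound how far before the insn the entry may be placed.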
176
177; An assembler sequence may clobber the condition codes without us knowing.
178; If such an insn references the pool, then we have no way of knowing how,
179; so use the most conservative value for pool_range.
180(define_asm_attributes
181 [(set_attr "conds" "clob")
182  (set_attr "length" "4")
183  (set_attr "pool_range" "250")])
184
185;; The instruction used to implement a particular pattern.  This
186;; information is used by pipeline descriptions to provide accurate
187;; scheduling information.
188
189(define_attr "insn"
190        "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
191        (const_string "other"))
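;; For example, the multiply patterns later in this file mark themselves
;; with (set_attr "insn" "mul") or (set_attr "insn" "mla"), which the
;; per-core pipeline descriptions use when assigning multiplier latencies.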
192
193; The TYPE attribute is used to detect floating point instructions which, if
194; running on a co-processor, can run in parallel with other, basic instructions.
195; If write-buffer scheduling is enabled then it can also be used in the
196; scheduling of writes.
197
198; Classification of each insn
199; Note: vfp.md has different meanings for some of these, and some further
200; types as well.  See that file for details.
201; alu		any alu  instruction that doesn't hit memory or fp
202;		regs or have a shifted source operand
203; alu_shift	any data instruction that doesn't hit memory or fp
204;		regs, but has a source operand shifted by a constant
205; alu_shift_reg	any data instruction that doesn't hit memory or fp
206;		regs, but has a source operand shifted by a register value
207; mult		a multiply instruction
208; block		blockage insn, this blocks all functional units
209; float		a floating point arithmetic operation (subject to expansion)
210; fdivd		DFmode floating point division
211; fdivs		SFmode floating point division
212; fmul		Floating point multiply
213; ffmul		Fast floating point multiply
214; farith	Floating point arithmetic (4 cycle)
215; ffarith	Fast floating point arithmetic (2 cycle)
216; float_em	a floating point arithmetic operation that is normally emulated
217;		even on a machine with an fpa.
218; f_load	a floating point load from memory
219; f_store	a floating point store to memory
220; f_load[sd]	single/double load from memory
221; f_store[sd]	single/double store to memory
222; f_flag	a transfer of co-processor flags to the CPSR
223; f_mem_r	a transfer of a floating point register to a real reg via mem
224; r_mem_f	the reverse of f_mem_r
225; f_2_r		fast transfer float to arm (no memory needed)
226; r_2_f		fast transfer arm to float
227; f_cvt		convert floating<->integral
228; branch	a branch
229; call		a subroutine call
230; load_byte	load byte(s) from memory to arm registers
231; load1		load 1 word from memory to arm registers
232; load2         load 2 words from memory to arm registers
233; load3         load 3 words from memory to arm registers
234; load4         load 4 words from memory to arm registers
235; store		store 1 word to memory from arm registers
236; store2	store 2 words
237; store3	store 3 words
238; store4	store 4 (or more) words
239;  Additions for Cirrus Maverick co-processor:
240; mav_farith	Floating point arithmetic (4 cycle)
241; mav_dmult	Double multiplies (7 cycle)
242;
243
244(define_attr "type"
245	"alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult,fconsts,fconstd,fadds,faddd,ffariths,ffarithd,fcmps,fcmpd,fcpys"
246	(if_then_else
247	 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
248	 (const_string "mult")
249	 (const_string "alu")))
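;; So, for example, a single-word load pattern would say
;; (set_attr "type" "load1"); patterns that set nothing default to "alu",
;; or to "mult" when their "insn" attribute names one of the multiply
;; variants listed above.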
250
251; Load scheduling, set from the arm_ld_sched variable
252; initialized by arm_override_options()
253(define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
254
255;; Classification of NEON instructions for scheduling purposes.
256;; Do not set this attribute and the "type" attribute together in
257;; any one instruction pattern.
258(define_attr "neon_type"
259   "neon_int_1,\
260   neon_int_2,\
261   neon_int_3,\
262   neon_int_4,\
263   neon_int_5,\
264   neon_vqneg_vqabs,\
265   neon_vmov,\
266   neon_vaba,\
267   neon_vsma,\
268   neon_vaba_qqq,\
269   neon_mul_ddd_8_16_qdd_16_8_long_32_16_long,\
270   neon_mul_qqq_8_16_32_ddd_32,\
271   neon_mul_qdd_64_32_long_qqd_16_ddd_32_scalar_64_32_long_scalar,\
272   neon_mla_ddd_8_16_qdd_16_8_long_32_16_long,\
273   neon_mla_qqq_8_16,\
274   neon_mla_ddd_32_qqd_16_ddd_32_scalar_qdd_64_32_long_scalar_qdd_64_32_long,\
275   neon_mla_qqq_32_qqd_32_scalar,\
276   neon_mul_ddd_16_scalar_32_16_long_scalar,\
277   neon_mul_qqd_32_scalar,\
278   neon_mla_ddd_16_scalar_qdd_32_16_long_scalar,\
279   neon_shift_1,\
280   neon_shift_2,\
281   neon_shift_3,\
282   neon_vshl_ddd,\
283   neon_vqshl_vrshl_vqrshl_qqq,\
284   neon_vsra_vrsra,\
285   neon_fp_vadd_ddd_vabs_dd,\
286   neon_fp_vadd_qqq_vabs_qq,\
287   neon_fp_vsum,\
288   neon_fp_vmul_ddd,\
289   neon_fp_vmul_qqd,\
290   neon_fp_vmla_ddd,\
291   neon_fp_vmla_qqq,\
292   neon_fp_vmla_ddd_scalar,\
293   neon_fp_vmla_qqq_scalar,\
294   neon_fp_vrecps_vrsqrts_ddd,\
295   neon_fp_vrecps_vrsqrts_qqq,\
296   neon_bp_simple,\
297   neon_bp_2cycle,\
298   neon_bp_3cycle,\
299   neon_ldr,\
300   neon_str,\
301   neon_vld1_1_2_regs,\
302   neon_vld1_3_4_regs,\
303   neon_vld2_2_regs_vld1_vld2_all_lanes,\
304   neon_vld2_4_regs,\
305   neon_vld3_vld4,\
306   neon_vst1_1_2_regs_vst2_2_regs,\
307   neon_vst1_3_4_regs,\
308   neon_vst2_4_regs_vst3_vst4,\
309   neon_vst3_vst4,\
310   neon_vld1_vld2_lane,\
311   neon_vld3_vld4_lane,\
312   neon_vst1_vst2_lane,\
313   neon_vst3_vst4_lane,\
314   neon_vld3_vld4_all_lanes,\
315   neon_mcr,\
316   neon_mcr_2_mcrr,\
317   neon_mrc,\
318   neon_mrrc,\
319   neon_ldm_2,\
320   neon_stm_2,\
321   none"
322 (const_string "none"))
323
324; condition codes: this one is used by final_prescan_insn to speed up
325; conditionalizing instructions.  It saves having to scan the rtl to see if
326; it uses or alters the condition codes.
327;
328; USE means that the condition codes are used by the insn in the process of
329;   outputting code; this means (at present) that we can't use the insn in
330;   inlined branches.
331;
332; SET means that the purpose of the insn is to set the condition codes in a
333;   well defined manner.
334;
335; CLOB means that the condition codes are altered in an undefined manner, if
336;   they are altered at all.
337;
338; UNCONDITIONAL means the instruction cannot be conditionally executed.
339;
340; NOCOND means that the condition codes are not altered by this insn and
341;   do not affect its output.
342
343(define_attr "conds" "use,set,clob,unconditional,nocond"
344	(if_then_else (eq_attr "type" "call")
345	 (const_string "clob")
346	 (if_then_else (eq_attr "neon_type" "none")
347	  (const_string "nocond")
348	  (const_string "unconditional"))))
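;; For instance, the "*addsi3_compare0" pattern below uses
;; (set_attr "conds" "set"), while the DImode additions that split into an
;; adds/adc pair use (set_attr "conds" "clob"); final_prescan_insn relies
;; on these markings when conditionalizing code.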
349
350; Predicable means that the insn can be conditionally executed based on
351; an automatically added predicate (additional patterns are generated by
352; gen...).  We default to 'no' because no Thumb patterns match this rule
353; and not all ARM patterns do.
354(define_attr "predicable" "no,yes" (const_string "no"))
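;; Sketch: an ARM pattern whose template contains the "%?" modifier, e.g.
;; "add%?\\t%0, %1, %2" together with (set_attr "predicable" "yes"), can be
;; turned into a conditionally executed form such as "addne r0, r1, r2"
;; when the automatically added predicate is applied.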
355
356; Only model the write buffer for ARM6 and ARM7.  Earlier processors don't
357; have one.  Later ones, such as StrongARM, have write-back caches, so don't
358; suffer blockages enough to warrant modelling this (and it can adversely
359; affect the schedule).
360(define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
361
362; WRITE_CONFLICT implies that a read following an unrelated write is likely
363; to stall the processor.  Used with model_wbuf above.
364(define_attr "write_conflict" "no,yes"
365  (if_then_else (eq_attr "type"
366		 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
367		(const_string "yes")
368		(const_string "no")))
369
370; Classify the insns into those that take one cycle and those that take more
371; than one on the main cpu execution unit.
372(define_attr "core_cycles" "single,multi"
373  (if_then_else (eq_attr "type"
374		 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
375		(const_string "single")
376	        (const_string "multi")))
377
378;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
379;; distant label.  Only applicable to Thumb code.
380(define_attr "far_jump" "yes,no" (const_string "no"))
381
382
383;; The number of machine instructions this pattern expands to.
384;; Used for Thumb-2 conditional execution.
385(define_attr "ce_count" "" (const_int 1))
386
387;;---------------------------------------------------------------------------
388;; Mode iterators
389
390; A list of modes that are exactly 64 bits in size.  We use this to expand
391; some splits that are the same for all modes when operating on ARM
392; registers.
393(define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
394
395;; The integer modes up to word size
396(define_mode_iterator QHSI [QI HI SI])
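;; Illustration: a single define_split written over ANY64 is expanded by
;; the machine-description generators into one copy per listed mode
;; (DI, DF, V8QI, ...), with <MODE>-style substitutions filled in for each;
;; QHSI plays the same role for the integer modes up to word size.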
397
398;;---------------------------------------------------------------------------
399;; Predicates
400
401(include "predicates.md")
402(include "constraints.md")
403
404;;---------------------------------------------------------------------------
405;; Pipeline descriptions
406
407;; Processor type.  This is created automatically from arm-cores.def.
408(include "arm-tune.md")
409
410(define_attr "tune_cortexr4" "yes,no"
411  (const (if_then_else
412	  (eq_attr "tune" "cortexr4,cortexr4f")
413	  (const_string "yes")
414	  (const_string "no"))))
415
416;; True if the generic scheduling description should be used.
417
418(define_attr "generic_sched" "yes,no"
419  (const (if_then_else
420          (ior (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexa9")
421	      (eq_attr "tune_cortexr4" "yes"))
422          (const_string "no")
423          (const_string "yes"))))
424
425(define_attr "generic_vfp" "yes,no"
426  (const (if_then_else
427	  (and (eq_attr "fpu" "vfp")
428	       (eq_attr "tune" "!arm1020e,arm1022e,cortexa8,cortexa9")
429	       (eq_attr "tune_cortexr4" "no"))
430	  (const_string "yes")
431	  (const_string "no"))))
432
433(include "arm-generic.md")
434(include "arm926ejs.md")
435(include "arm1020e.md")
436(include "arm1026ejs.md")
437(include "arm1136jfs.md")
438(include "cortex-a8.md")
439(include "cortex-a9.md")
440(include "cortex-r4.md")
441(include "cortex-r4f.md")
442(include "vfp11.md")
443
444
445;;---------------------------------------------------------------------------
446;; Insn patterns
447;;
448;; Addition insns.
449
450;; Note: For DImode insns, there is normally no reason why operands should
451;; not be in the same register; what we don't want is for something being
452;; written to partially overlap something that is an input.
453;; Cirrus 64-bit additions should not be split because we have native
454;; 64-bit addition instructions.
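;; As a rough illustration of the splits below: on a 32-bit core a 64-bit
;; addition becomes an "adds" of the low words followed by an "adc" of the
;; high words, e.g.
;;	adds	r0, r2, r4
;;	adc	r1, r3, r5
;; which is why these patterns clobber the condition codes.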
455
456(define_expand "adddi3"
457 [(parallel
458   [(set (match_operand:DI           0 "s_register_operand" "")
459	  (plus:DI (match_operand:DI 1 "s_register_operand" "")
460	           (match_operand:DI 2 "s_register_operand" "")))
461    (clobber (reg:CC CC_REGNUM))])]
462  "TARGET_EITHER"
463  "
464  if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
465    {
466      if (!cirrus_fp_register (operands[0], DImode))
467        operands[0] = force_reg (DImode, operands[0]);
468      if (!cirrus_fp_register (operands[1], DImode))
469        operands[1] = force_reg (DImode, operands[1]);
470      emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
471      DONE;
472    }
473
474  if (TARGET_THUMB1)
475    {
476      if (GET_CODE (operands[1]) != REG)
477        operands[1] = force_reg (DImode, operands[1]);
478      if (GET_CODE (operands[2]) != REG)
479        operands[2] = force_reg (DImode, operands[2]);
480     }
481  "
482)
483
484(define_insn "*thumb1_adddi3"
485  [(set (match_operand:DI          0 "register_operand" "=l")
486	(plus:DI (match_operand:DI 1 "register_operand" "%0")
487		 (match_operand:DI 2 "register_operand" "l")))
488   (clobber (reg:CC CC_REGNUM))
489  ]
490  "TARGET_THUMB1"
491  "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
492  [(set_attr "length" "4")]
493)
494
495(define_insn_and_split "*arm_adddi3"
496  [(set (match_operand:DI          0 "s_register_operand" "=&r,&r")
497	(plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
498		 (match_operand:DI 2 "s_register_operand" "r,  0")))
499   (clobber (reg:CC CC_REGNUM))]
500  "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
501  "#"
502  "TARGET_32BIT && reload_completed"
503  [(parallel [(set (reg:CC_C CC_REGNUM)
504		   (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
505				 (match_dup 1)))
506	      (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
507   (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
508			       (plus:SI (match_dup 4) (match_dup 5))))]
509  "
510  {
511    operands[3] = gen_highpart (SImode, operands[0]);
512    operands[0] = gen_lowpart (SImode, operands[0]);
513    operands[4] = gen_highpart (SImode, operands[1]);
514    operands[1] = gen_lowpart (SImode, operands[1]);
515    operands[5] = gen_highpart (SImode, operands[2]);
516    operands[2] = gen_lowpart (SImode, operands[2]);
517  }"
518  [(set_attr "conds" "clob")
519   (set_attr "length" "8")]
520)
521
522(define_insn_and_split "*adddi_sesidi_di"
523  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
524	(plus:DI (sign_extend:DI
525		  (match_operand:SI 2 "s_register_operand" "r,r"))
526		 (match_operand:DI 1 "s_register_operand" "0,r")))
527   (clobber (reg:CC CC_REGNUM))]
528  "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
529  "#"
530  "TARGET_32BIT && reload_completed"
531  [(parallel [(set (reg:CC_C CC_REGNUM)
532		   (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
533				 (match_dup 1)))
534	      (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
535   (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
536			       (plus:SI (ashiftrt:SI (match_dup 2)
537						     (const_int 31))
538					(match_dup 4))))]
539  "
540  {
541    operands[3] = gen_highpart (SImode, operands[0]);
542    operands[0] = gen_lowpart (SImode, operands[0]);
543    operands[4] = gen_highpart (SImode, operands[1]);
544    operands[1] = gen_lowpart (SImode, operands[1]);
545    operands[2] = gen_lowpart (SImode, operands[2]);
546  }"
547  [(set_attr "conds" "clob")
548   (set_attr "length" "8")]
549)
550
551(define_insn_and_split "*adddi_zesidi_di"
552  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
553	(plus:DI (zero_extend:DI
554		  (match_operand:SI 2 "s_register_operand" "r,r"))
555		 (match_operand:DI 1 "s_register_operand" "0,r")))
556   (clobber (reg:CC CC_REGNUM))]
557  "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
558  "#"
559  "TARGET_32BIT && reload_completed"
560  [(parallel [(set (reg:CC_C CC_REGNUM)
561		   (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
562				 (match_dup 1)))
563	      (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
564   (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
565			       (plus:SI (match_dup 4) (const_int 0))))]
566  "
567  {
568    operands[3] = gen_highpart (SImode, operands[0]);
569    operands[0] = gen_lowpart (SImode, operands[0]);
570    operands[4] = gen_highpart (SImode, operands[1]);
571    operands[1] = gen_lowpart (SImode, operands[1]);
572    operands[2] = gen_lowpart (SImode, operands[2]);
573  }"
574  [(set_attr "conds" "clob")
575   (set_attr "length" "8")]
576)
577
578(define_expand "addsi3"
579  [(set (match_operand:SI          0 "s_register_operand" "")
580	(plus:SI (match_operand:SI 1 "s_register_operand" "")
581		 (match_operand:SI 2 "reg_or_int_operand" "")))]
582  "TARGET_EITHER"
583  "
584  if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
585    {
586      arm_split_constant (PLUS, SImode, NULL_RTX,
587	                  INTVAL (operands[2]), operands[0], operands[1],
588			  optimize && can_create_pseudo_p ());
589      DONE;
590    }
591  "
592)
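;; For illustration (the exact decomposition is up to arm_split_constant):
;; an addition of a constant that is not a valid immediate, such as
;; r0 = r1 + 0x10001, may be synthesized as
;;	add	r0, r1, #65536
;;	add	r0, r0, #1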
593
594; If there is a scratch available, this will be faster than synthesizing the
595; addition.
596(define_peephole2
597  [(match_scratch:SI 3 "r")
598   (set (match_operand:SI          0 "arm_general_register_operand" "")
599	(plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
600		 (match_operand:SI 2 "const_int_operand"  "")))]
601  "TARGET_32BIT &&
602   !(const_ok_for_arm (INTVAL (operands[2]))
603     || const_ok_for_arm (-INTVAL (operands[2])))
604    && const_ok_for_arm (~INTVAL (operands[2]))"
605  [(set (match_dup 3) (match_dup 2))
606   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
607  ""
608)
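;; Sketch of the peephole above, with a constant chosen for illustration:
;; for r0 = r1 + 0xffff00ff neither the constant nor its negation is a
;; valid immediate, but its complement 0xff00 is, so with a free scratch
;; register the addition becomes
;;	mvn	r3, #0xff00
;;	add	r0, r1, r3
;; rather than a longer synthesized-constant sequence.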
609
610;; The r/r/k alternative is required when reloading the address
611;;  (plus (reg rN) (reg sp)) into (reg rN).  In this case reload will
612;; put the duplicated register first, and not try the commutative version.
613(define_insn_and_split "*arm_addsi3"
614  [(set (match_operand:SI          0 "s_register_operand" "=r, !k, r,r, !k,r")
615	(plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k, r,rk,!k,rk")
616		 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,!k,L, L,?n")))]
617  "TARGET_32BIT"
618  "@
619   add%?\\t%0, %1, %2
620   add%?\\t%0, %1, %2
621   add%?\\t%0, %2, %1
622   sub%?\\t%0, %1, #%n2
623   sub%?\\t%0, %1, #%n2
624   #"
625  "TARGET_32BIT
626   && GET_CODE (operands[2]) == CONST_INT
627   && !(const_ok_for_arm (INTVAL (operands[2]))
628        || const_ok_for_arm (-INTVAL (operands[2])))
629   && (reload_completed || !arm_eliminable_register (operands[1]))"
630  [(clobber (const_int 0))]
631  "
632  arm_split_constant (PLUS, SImode, curr_insn,
633	              INTVAL (operands[2]), operands[0],
634		      operands[1], 0);
635  DONE;
636  "
637  [(set_attr "length" "4,4,4,4,4,16")
638   (set_attr "predicable" "yes")]
639)
640
641;; Register group 'k' is a single register group containing only the stack
642;; register.  Trying to reload it will always fail catastrophically,
643;; so never allow those alternatives to match if reloading is needed.
644
645(define_insn_and_split "*thumb1_addsi3"
646  [(set (match_operand:SI          0 "register_operand" "=l,l,l,*rk,*hk,l,!k,l,l")
647	(plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k,0,l")
648		 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O,Pa,Pb")))]
649  "TARGET_THUMB1"
650  "*
651   static const char * const asms[] =
652   {
653     \"add\\t%0, %0, %2\",
654     \"sub\\t%0, %0, #%n2\",
655     \"add\\t%0, %1, %2\",
656     \"add\\t%0, %0, %2\",
657     \"add\\t%0, %0, %2\",
658     \"add\\t%0, %1, %2\",
659     \"add\\t%0, %1, %2\",
660     \"#\",
661     \"#\"
662   };
663   if ((which_alternative == 2 || which_alternative == 6)
664       && GET_CODE (operands[2]) == CONST_INT
665       && INTVAL (operands[2]) < 0)
666     return \"sub\\t%0, %1, #%n2\";
667   return asms[which_alternative];
668  "
669  "&& reload_completed && CONST_INT_P (operands[2])
670   && operands[1] != stack_pointer_rtx
671   && (INTVAL (operands[2]) > 255 || INTVAL (operands[2]) < -255)"
672  [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
673   (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 3)))]
674  {
675    HOST_WIDE_INT offset = INTVAL (operands[2]);
676    if (offset > 255)
677      offset = 255;
678    else if (offset < -255)
679      offset = -255;
680
681    operands[3] = GEN_INT (offset);
682    operands[2] = GEN_INT (INTVAL (operands[2]) - offset);
683  }
684  [(set_attr "length" "2,2,2,2,2,2,2,4,4")]
685)
686
687;; Reloading and elimination of the frame pointer can
688;; sometimes cause this optimization to be missed.
689(define_peephole2
690  [(set (match_operand:SI 0 "arm_general_register_operand" "")
691	(match_operand:SI 1 "const_int_operand" ""))
692   (set (match_dup 0)
693	(plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
694  "TARGET_THUMB1
695   && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
696   && (INTVAL (operands[1]) & 3) == 0"
697  [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
698  ""
699)
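;; Illustrative effect of the peephole above:
;;	mov	r0, #512
;;	add	r0, r0, sp
;; becomes the single Thumb instruction "add r0, sp, #512"; the offset must
;; be word-aligned and below 1024 for that encoding, hence the condition.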
700
701;; ??? Make Thumb-2 variants which prefer low regs
702(define_insn "*addsi3_compare0"
703  [(set (reg:CC_NOOV CC_REGNUM)
704	(compare:CC_NOOV
705	 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
706		  (match_operand:SI 2 "arm_add_operand"    "rI,L"))
707	 (const_int 0)))
708   (set (match_operand:SI 0 "s_register_operand" "=r,r")
709	(plus:SI (match_dup 1) (match_dup 2)))]
710  "TARGET_32BIT"
711  "@
712   add%.\\t%0, %1, %2
713   sub%.\\t%0, %1, #%n2"
714  [(set_attr "conds" "set")]
715)
716
717(define_insn "*addsi3_compare0_scratch"
718  [(set (reg:CC_NOOV CC_REGNUM)
719	(compare:CC_NOOV
720	 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
721		  (match_operand:SI 1 "arm_add_operand"    "rI,L"))
722	 (const_int 0)))]
723  "TARGET_32BIT"
724  "@
725   cmn%?\\t%0, %1
726   cmp%?\\t%0, #%n1"
727  [(set_attr "conds" "set")]
728)
729
730(define_insn "*compare_negsi_si"
731  [(set (reg:CC_Z CC_REGNUM)
732	(compare:CC_Z
733	 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
734	 (match_operand:SI 1 "s_register_operand" "r")))]
735  "TARGET_32BIT"
736  "cmn%?\\t%1, %0"
737  [(set_attr "conds" "set")]
738)
739
740;; This is the canonicalization of addsi3_compare0_for_combiner when the
741;; addend is a constant.
742(define_insn "*cmpsi2_addneg"
743  [(set (reg:CC CC_REGNUM)
744	(compare:CC
745	 (match_operand:SI 1 "s_register_operand" "r,r")
746	 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
747   (set (match_operand:SI 0 "s_register_operand" "=r,r")
748	(plus:SI (match_dup 1)
749		 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
750  "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
751  "@
752   sub%.\\t%0, %1, %2
753   add%.\\t%0, %1, #%n2"
754  [(set_attr "conds" "set")]
755)
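;; Sketch: combine turns "r0 = r1 - 1" plus a comparison of r1 with 1 into
;; the pattern above, which the first alternative emits as the single
;; instruction "subs r0, r1, #1".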
756
757;; Convert the sequence
758;;  sub  rd, rn, #1
759;;  cmn  rd, #1	(equivalent to cmp rd, #-1)
760;;  bne  dest
761;; into
762;;  subs rd, rn, #1
763;;  bcs  dest	((unsigned)rn >= 1)
764;; similarly for the beq variant using bcc.
765;; This is a common looping idiom (while (n--))
766(define_peephole2
767  [(set (match_operand:SI 0 "arm_general_register_operand" "")
768	(plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
769		 (const_int -1)))
770   (set (match_operand 2 "cc_register" "")
771	(compare (match_dup 0) (const_int -1)))
772   (set (pc)
773	(if_then_else (match_operator 3 "equality_operator"
774		       [(match_dup 2) (const_int 0)])
775		      (match_operand 4 "" "")
776		      (match_operand 5 "" "")))]
777  "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
778  [(parallel[
779    (set (match_dup 2)
780	 (compare:CC
781	  (match_dup 1) (const_int 1)))
782    (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
783   (set (pc)
784	(if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
785		      (match_dup 4)
786		      (match_dup 5)))]
787  "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
788   operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
789				  ? GEU : LTU),
790				 VOIDmode,
791				 operands[2], const0_rtx);"
792)
793
794;; The next four insns work because they compare the result with one of
795;; the operands, and we know that the use of the condition code is
796;; either GEU or LTU, so we can use the carry flag from the addition
797;; instead of doing the compare a second time.
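;; Sketch: "adds r0, r1, r2" sets the carry flag exactly when the sum wraps,
;; i.e. when the result is unsigned-less-than either operand, so a GEU/LTU
;; test of the result against %1 or %2 can be read straight from that flag
;; instead of repeating the comparison with a cmp.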
798(define_insn "*addsi3_compare_op1"
799  [(set (reg:CC_C CC_REGNUM)
800	(compare:CC_C
801	 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
802		  (match_operand:SI 2 "arm_add_operand" "rI,L"))
803	 (match_dup 1)))
804   (set (match_operand:SI 0 "s_register_operand" "=r,r")
805	(plus:SI (match_dup 1) (match_dup 2)))]
806  "TARGET_32BIT"
807  "@
808   add%.\\t%0, %1, %2
809   sub%.\\t%0, %1, #%n2"
810  [(set_attr "conds" "set")]
811)
812
813(define_insn "*addsi3_compare_op2"
814  [(set (reg:CC_C CC_REGNUM)
815	(compare:CC_C
816	 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
817		  (match_operand:SI 2 "arm_add_operand" "rI,L"))
818	 (match_dup 2)))
819   (set (match_operand:SI 0 "s_register_operand" "=r,r")
820	(plus:SI (match_dup 1) (match_dup 2)))]
821  "TARGET_32BIT"
822  "@
823   add%.\\t%0, %1, %2
824   sub%.\\t%0, %1, #%n2"
825  [(set_attr "conds" "set")]
826)
827
828(define_insn "*compare_addsi2_op0"
829  [(set (reg:CC_C CC_REGNUM)
830	(compare:CC_C
831	 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
832		  (match_operand:SI 1 "arm_add_operand" "rI,L"))
833	 (match_dup 0)))]
834  "TARGET_32BIT"
835  "@
836   cmn%?\\t%0, %1
837   cmp%?\\t%0, #%n1"
838  [(set_attr "conds" "set")]
839)
840
841(define_insn "*compare_addsi2_op1"
842  [(set (reg:CC_C CC_REGNUM)
843	(compare:CC_C
844	 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
845		  (match_operand:SI 1 "arm_add_operand" "rI,L"))
846	 (match_dup 1)))]
847  "TARGET_32BIT"
848  "@
849   cmn%?\\t%0, %1
850   cmp%?\\t%0, #%n1"
851  [(set_attr "conds" "set")]
852)
853
854(define_insn "*addsi3_carryin"
855  [(set (match_operand:SI 0 "s_register_operand" "=r")
856	(plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
857		 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
858			  (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
859  "TARGET_32BIT"
860  "adc%?\\t%0, %1, %2"
861  [(set_attr "conds" "use")]
862)
863
864(define_insn "*addsi3_carryin_shift"
865  [(set (match_operand:SI 0 "s_register_operand" "=r")
866	(plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
867		 (plus:SI
868		   (match_operator:SI 2 "shift_operator"
869		      [(match_operand:SI 3 "s_register_operand" "r")
870		       (match_operand:SI 4 "reg_or_int_operand" "rM")])
871		    (match_operand:SI 1 "s_register_operand" "r"))))]
872  "TARGET_32BIT"
873  "adc%?\\t%0, %1, %3%S2"
874  [(set_attr "conds" "use")
875   (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
876		      (const_string "alu_shift")
877		      (const_string "alu_shift_reg")))]
878)
879
880(define_insn "*addsi3_carryin_alt1"
881  [(set (match_operand:SI 0 "s_register_operand" "=r")
882	(plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
883			  (match_operand:SI 2 "arm_rhs_operand" "rI"))
884		 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
885  "TARGET_32BIT"
886  "adc%?\\t%0, %1, %2"
887  [(set_attr "conds" "use")]
888)
889
890(define_insn "*addsi3_carryin_alt2"
891  [(set (match_operand:SI 0 "s_register_operand" "=r")
892	(plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
893			  (match_operand:SI 1 "s_register_operand" "r"))
894		 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
895  "TARGET_32BIT"
896  "adc%?\\t%0, %1, %2"
897  [(set_attr "conds" "use")]
898)
899
900(define_insn "*addsi3_carryin_alt3"
901  [(set (match_operand:SI 0 "s_register_operand" "=r")
902	(plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
903			  (match_operand:SI 2 "arm_rhs_operand" "rI"))
904		 (match_operand:SI 1 "s_register_operand" "r")))]
905  "TARGET_32BIT"
906  "adc%?\\t%0, %1, %2"
907  [(set_attr "conds" "use")]
908)
909
910(define_expand "incscc"
911  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
912        (plus:SI (match_operator:SI 2 "arm_comparison_operator"
913                    [(match_operand:CC 3 "cc_register" "") (const_int 0)])
914                 (match_operand:SI 1 "s_register_operand" "0,?r")))]
915  "TARGET_32BIT"
916  ""
917)
918
919(define_insn "*arm_incscc"
920  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
921        (plus:SI (match_operator:SI 2 "arm_comparison_operator"
922                    [(match_operand:CC 3 "cc_register" "") (const_int 0)])
923                 (match_operand:SI 1 "s_register_operand" "0,?r")))]
924  "TARGET_ARM"
925  "@
926  add%d2\\t%0, %1, #1
927  mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
928  [(set_attr "conds" "use")
929   (set_attr "length" "4,8")]
930)
931
932; Transform ((x << y) - 1) to ~(~(x - 1) << y), where x is a constant.
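; For example, with x = 4: (4 << y) - 1 == ~(~3 << y); for y = 2 both sides
; are 15.  The split below therefore loads ~(x - 1) into the scratch and
; forms the result with a single MVN of the shifted scratch value.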
933(define_split
934  [(set (match_operand:SI 0 "s_register_operand" "")
935	(plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
936			    (match_operand:SI 2 "s_register_operand" ""))
937		 (const_int -1)))
938   (clobber (match_operand:SI 3 "s_register_operand" ""))]
939  "TARGET_32BIT"
940  [(set (match_dup 3) (match_dup 1))
941   (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
942  "
943  operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
944")
945
946(define_expand "addsf3"
947  [(set (match_operand:SF          0 "s_register_operand" "")
948	(plus:SF (match_operand:SF 1 "s_register_operand" "")
949		 (match_operand:SF 2 "arm_float_add_operand" "")))]
950  "TARGET_32BIT && TARGET_HARD_FLOAT"
951  "
952  if (TARGET_MAVERICK
953      && !cirrus_fp_register (operands[2], SFmode))
954    operands[2] = force_reg (SFmode, operands[2]);
955")
956
957(define_expand "adddf3"
958  [(set (match_operand:DF          0 "s_register_operand" "")
959	(plus:DF (match_operand:DF 1 "s_register_operand" "")
960		 (match_operand:DF 2 "arm_float_add_operand" "")))]
961  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
962  "
963  if (TARGET_MAVERICK
964      && !cirrus_fp_register (operands[2], DFmode))
965    operands[2] = force_reg (DFmode, operands[2]);
966")
967
968(define_expand "subdi3"
969 [(parallel
970   [(set (match_operand:DI            0 "s_register_operand" "")
971	  (minus:DI (match_operand:DI 1 "s_register_operand" "")
972	            (match_operand:DI 2 "s_register_operand" "")))
973    (clobber (reg:CC CC_REGNUM))])]
974  "TARGET_EITHER"
975  "
976  if (TARGET_HARD_FLOAT && TARGET_MAVERICK
977      && TARGET_32BIT
978      && cirrus_fp_register (operands[0], DImode)
979      && cirrus_fp_register (operands[1], DImode))
980    {
981      emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
982      DONE;
983    }
984
985  if (TARGET_THUMB1)
986    {
987      if (GET_CODE (operands[1]) != REG)
988        operands[1] = force_reg (DImode, operands[1]);
989      if (GET_CODE (operands[2]) != REG)
990        operands[2] = force_reg (DImode, operands[2]);
991     }
992  "
993)
994
995(define_insn "*arm_subdi3"
996  [(set (match_operand:DI           0 "s_register_operand" "=&r,&r,&r")
997	(minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
998		  (match_operand:DI 2 "s_register_operand" "r,0,0")))
999   (clobber (reg:CC CC_REGNUM))]
1000  "TARGET_32BIT"
1001  "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
1002  [(set_attr "conds" "clob")
1003   (set_attr "length" "8")]
1004)
1005
1006(define_insn "*thumb_subdi3"
1007  [(set (match_operand:DI           0 "register_operand" "=l")
1008	(minus:DI (match_operand:DI 1 "register_operand"  "0")
1009		  (match_operand:DI 2 "register_operand"  "l")))
1010   (clobber (reg:CC CC_REGNUM))]
1011  "TARGET_THUMB1"
1012  "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
1013  [(set_attr "length" "4")]
1014)
1015
1016(define_insn "*subdi_di_zesidi"
1017  [(set (match_operand:DI           0 "s_register_operand" "=&r,&r")
1018	(minus:DI (match_operand:DI 1 "s_register_operand"  "0,r")
1019		  (zero_extend:DI
1020		   (match_operand:SI 2 "s_register_operand"  "r,r"))))
1021   (clobber (reg:CC CC_REGNUM))]
1022  "TARGET_32BIT"
1023  "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
1024  [(set_attr "conds" "clob")
1025   (set_attr "length" "8")]
1026)
1027
1028(define_insn "*subdi_di_sesidi"
1029  [(set (match_operand:DI            0 "s_register_operand" "=&r,&r")
1030	(minus:DI (match_operand:DI  1 "s_register_operand"  "0,r")
1031		  (sign_extend:DI
1032		   (match_operand:SI 2 "s_register_operand"  "r,r"))))
1033   (clobber (reg:CC CC_REGNUM))]
1034  "TARGET_32BIT"
1035  "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
1036  [(set_attr "conds" "clob")
1037   (set_attr "length" "8")]
1038)
1039
1040(define_insn "*subdi_zesidi_di"
1041  [(set (match_operand:DI            0 "s_register_operand" "=&r,&r")
1042	(minus:DI (zero_extend:DI
1043		   (match_operand:SI 2 "s_register_operand"  "r,r"))
1044		  (match_operand:DI  1 "s_register_operand" "0,r")))
1045   (clobber (reg:CC CC_REGNUM))]
1046  "TARGET_ARM"
1047  "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
1048  [(set_attr "conds" "clob")
1049   (set_attr "length" "8")]
1050)
1051
1052(define_insn "*subdi_sesidi_di"
1053  [(set (match_operand:DI            0 "s_register_operand" "=&r,&r")
1054	(minus:DI (sign_extend:DI
1055		   (match_operand:SI 2 "s_register_operand"   "r,r"))
1056		  (match_operand:DI  1 "s_register_operand"  "0,r")))
1057   (clobber (reg:CC CC_REGNUM))]
1058  "TARGET_ARM"
1059  "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
1060  [(set_attr "conds" "clob")
1061   (set_attr "length" "8")]
1062)
1063
1064(define_insn "*subdi_zesidi_zesidi"
1065  [(set (match_operand:DI            0 "s_register_operand" "=r")
1066	(minus:DI (zero_extend:DI
1067		   (match_operand:SI 1 "s_register_operand"  "r"))
1068		  (zero_extend:DI
1069		   (match_operand:SI 2 "s_register_operand"  "r"))))
1070   (clobber (reg:CC CC_REGNUM))]
1071  "TARGET_32BIT"
1072  "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
1073  [(set_attr "conds" "clob")
1074   (set_attr "length" "8")]
1075)
1076
1077(define_expand "subsi3"
1078  [(set (match_operand:SI           0 "s_register_operand" "")
1079	(minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
1080		  (match_operand:SI 2 "s_register_operand" "")))]
1081  "TARGET_EITHER"
1082  "
1083  if (GET_CODE (operands[1]) == CONST_INT)
1084    {
1085      if (TARGET_32BIT)
1086        {
1087          arm_split_constant (MINUS, SImode, NULL_RTX,
1088	                      INTVAL (operands[1]), operands[0],
1089	  		      operands[2], optimize && can_create_pseudo_p ());
1090          DONE;
1091	}
1092      else /* TARGET_THUMB1 */
1093        operands[1] = force_reg (SImode, operands[1]);
1094    }
1095  "
1096)
1097
1098(define_insn "*thumb1_subsi3_insn"
1099  [(set (match_operand:SI           0 "register_operand" "=l")
1100	(minus:SI (match_operand:SI 1 "register_operand" "l")
1101		  (match_operand:SI 2 "register_operand" "l")))]
1102  "TARGET_THUMB1"
1103  "sub\\t%0, %1, %2"
1104  [(set_attr "length" "2")]
1105)
1106
1107; ??? Check Thumb-2 split length
1108(define_insn_and_split "*arm_subsi3_insn"
1109  [(set (match_operand:SI           0 "s_register_operand" "=r,rk,r")
1110	(minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1111		  (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1112  "TARGET_32BIT"
1113  "@
1114   rsb%?\\t%0, %2, %1
1115   sub%?\\t%0, %1, %2
1116   #"
1117  "TARGET_32BIT
1118   && GET_CODE (operands[1]) == CONST_INT
1119   && !const_ok_for_arm (INTVAL (operands[1]))"
1120  [(clobber (const_int 0))]
1121  "
1122  arm_split_constant (MINUS, SImode, curr_insn,
1123                      INTVAL (operands[1]), operands[0], operands[2], 0);
1124  DONE;
1125  "
1126  [(set_attr "length" "4,4,16")
1127   (set_attr "predicable" "yes")]
1128)
1129
1130(define_peephole2
1131  [(match_scratch:SI 3 "r")
1132   (set (match_operand:SI 0 "arm_general_register_operand" "")
1133	(minus:SI (match_operand:SI 1 "const_int_operand" "")
1134		  (match_operand:SI 2 "arm_general_register_operand" "")))]
1135  "TARGET_32BIT
1136   && !const_ok_for_arm (INTVAL (operands[1]))
1137   && const_ok_for_arm (~INTVAL (operands[1]))"
1138  [(set (match_dup 3) (match_dup 1))
1139   (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1140  ""
1141)
1142
1143(define_insn "*subsi3_compare0"
1144  [(set (reg:CC_NOOV CC_REGNUM)
1145	(compare:CC_NOOV
1146	 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1147		   (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1148	 (const_int 0)))
1149   (set (match_operand:SI 0 "s_register_operand" "=r,r")
1150	(minus:SI (match_dup 1) (match_dup 2)))]
1151  "TARGET_32BIT"
1152  "@
1153   sub%.\\t%0, %1, %2
1154   rsb%.\\t%0, %2, %1"
1155  [(set_attr "conds" "set")]
1156)
1157
1158(define_expand "decscc"
1159  [(set (match_operand:SI            0 "s_register_operand" "=r,r")
1160        (minus:SI (match_operand:SI  1 "s_register_operand" "0,?r")
1161		  (match_operator:SI 2 "arm_comparison_operator"
1162                   [(match_operand   3 "cc_register" "") (const_int 0)])))]
1163  "TARGET_32BIT"
1164  ""
1165)
1166
1167(define_insn "*arm_decscc"
1168  [(set (match_operand:SI            0 "s_register_operand" "=r,r")
1169        (minus:SI (match_operand:SI  1 "s_register_operand" "0,?r")
1170		  (match_operator:SI 2 "arm_comparison_operator"
1171                   [(match_operand   3 "cc_register" "") (const_int 0)])))]
1172  "TARGET_ARM"
1173  "@
1174   sub%d2\\t%0, %1, #1
1175   mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1176  [(set_attr "conds" "use")
1177   (set_attr "length" "*,8")]
1178)
1179
1180(define_expand "subsf3"
1181  [(set (match_operand:SF           0 "s_register_operand" "")
1182	(minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1183		  (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1184  "TARGET_32BIT && TARGET_HARD_FLOAT"
1185  "
1186  if (TARGET_MAVERICK)
1187    {
1188      if (!cirrus_fp_register (operands[1], SFmode))
1189        operands[1] = force_reg (SFmode, operands[1]);
1190      if (!cirrus_fp_register (operands[2], SFmode))
1191        operands[2] = force_reg (SFmode, operands[2]);
1192    }
1193")
1194
1195(define_expand "subdf3"
1196  [(set (match_operand:DF           0 "s_register_operand" "")
1197	(minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1198		  (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1199  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1200  "
1201  if (TARGET_MAVERICK)
1202    {
1203       if (!cirrus_fp_register (operands[1], DFmode))
1204         operands[1] = force_reg (DFmode, operands[1]);
1205       if (!cirrus_fp_register (operands[2], DFmode))
1206         operands[2] = force_reg (DFmode, operands[2]);
1207    }
1208")
1209
1210
1211;; Multiplication insns
1212
1213(define_expand "mulsi3"
1214  [(set (match_operand:SI          0 "s_register_operand" "")
1215	(mult:SI (match_operand:SI 2 "s_register_operand" "")
1216		 (match_operand:SI 1 "s_register_operand" "")))]
1217  "TARGET_EITHER"
1218  ""
1219)
1220
1221;; Use `&' and then `0' to prevent operands 0 and 1 from being the same.
1222(define_insn "*arm_mulsi3"
1223  [(set (match_operand:SI          0 "s_register_operand" "=&r,&r")
1224	(mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1225		 (match_operand:SI 1 "s_register_operand" "%0,r")))]
1226  "TARGET_32BIT && !arm_arch6"
1227  "mul%?\\t%0, %2, %1"
1228  [(set_attr "insn" "mul")
1229   (set_attr "predicable" "yes")]
1230)
1231
1232(define_insn "*arm_mulsi3_v6"
1233  [(set (match_operand:SI          0 "s_register_operand" "=r")
1234	(mult:SI (match_operand:SI 1 "s_register_operand" "r")
1235		 (match_operand:SI 2 "s_register_operand" "r")))]
1236  "TARGET_32BIT && arm_arch6"
1237  "mul%?\\t%0, %1, %2"
1238  [(set_attr "insn" "mul")
1239   (set_attr "predicable" "yes")]
1240)
1241
1242; Unfortunately with the Thumb the '&'/'0' trick can fail when operands
1243; 1 and 2 are the same, because reload will make operand 0 match
1244; operand 1 without realizing that this conflicts with operand 2.  We fix
1245; this by adding another alternative to match this case, and then `reload'
1246; it ourselves.  This alternative must come first.
1247(define_insn "*thumb_mulsi3"
1248  [(set (match_operand:SI          0 "register_operand" "=&l,&l,&l")
1249	(mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1250		 (match_operand:SI 2 "register_operand" "l,l,l")))]
1251  "TARGET_THUMB1 && !arm_arch6"
1252  "*
1253  if (which_alternative < 2)
1254    return \"mov\\t%0, %1\;mul\\t%0, %2\";
1255  else
1256    return \"mul\\t%0, %2\";
1257  "
1258  [(set_attr "length" "4,4,2")
1259   (set_attr "insn" "mul")]
1260)
1261
1262(define_insn "*thumb_mulsi3_v6"
1263  [(set (match_operand:SI          0 "register_operand" "=l,l,l")
1264	(mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1265		 (match_operand:SI 2 "register_operand" "l,0,0")))]
1266  "TARGET_THUMB1 && arm_arch6"
1267  "@
1268   mul\\t%0, %2
1269   mul\\t%0, %1
1270   mul\\t%0, %1"
1271  [(set_attr "length" "2")
1272   (set_attr "insn" "mul")]
1273)
1274
1275(define_insn "*mulsi3_compare0"
1276  [(set (reg:CC_NOOV CC_REGNUM)
1277	(compare:CC_NOOV (mult:SI
1278			  (match_operand:SI 2 "s_register_operand" "r,r")
1279			  (match_operand:SI 1 "s_register_operand" "%0,r"))
1280			 (const_int 0)))
1281   (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1282	(mult:SI (match_dup 2) (match_dup 1)))]
1283  "TARGET_ARM && !arm_arch6"
1284  "mul%.\\t%0, %2, %1"
1285  [(set_attr "conds" "set")
1286   (set_attr "insn" "muls")]
1287)
1288
1289(define_insn "*mulsi3_compare0_v6"
1290  [(set (reg:CC_NOOV CC_REGNUM)
1291	(compare:CC_NOOV (mult:SI
1292			  (match_operand:SI 2 "s_register_operand" "r")
1293			  (match_operand:SI 1 "s_register_operand" "r"))
1294			 (const_int 0)))
1295   (set (match_operand:SI 0 "s_register_operand" "=r")
1296	(mult:SI (match_dup 2) (match_dup 1)))]
1297  "TARGET_ARM && arm_arch6 && optimize_size"
1298  "mul%.\\t%0, %2, %1"
1299  [(set_attr "conds" "set")
1300   (set_attr "insn" "muls")]
1301)
1302
1303(define_insn "*mulsi_compare0_scratch"
1304  [(set (reg:CC_NOOV CC_REGNUM)
1305	(compare:CC_NOOV (mult:SI
1306			  (match_operand:SI 2 "s_register_operand" "r,r")
1307			  (match_operand:SI 1 "s_register_operand" "%0,r"))
1308			 (const_int 0)))
1309   (clobber (match_scratch:SI 0 "=&r,&r"))]
1310  "TARGET_ARM && !arm_arch6"
1311  "mul%.\\t%0, %2, %1"
1312  [(set_attr "conds" "set")
1313   (set_attr "insn" "muls")]
1314)
1315
1316(define_insn "*mulsi_compare0_scratch_v6"
1317  [(set (reg:CC_NOOV CC_REGNUM)
1318	(compare:CC_NOOV (mult:SI
1319			  (match_operand:SI 2 "s_register_operand" "r")
1320			  (match_operand:SI 1 "s_register_operand" "r"))
1321			 (const_int 0)))
1322   (clobber (match_scratch:SI 0 "=r"))]
1323  "TARGET_ARM && arm_arch6 && optimize_size"
1324  "mul%.\\t%0, %2, %1"
1325  [(set_attr "conds" "set")
1326   (set_attr "insn" "muls")]
1327)
1328
1329;; Unnamed templates to match MLA instruction.
1330
1331(define_insn "*mulsi3addsi"
1332  [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1333	(plus:SI
1334	  (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1335		   (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1336	  (match_operand:SI 3 "s_register_operand" "r,r,0,0")))]
1337  "TARGET_32BIT && !arm_arch6"
1338  "mla%?\\t%0, %2, %1, %3"
1339  [(set_attr "insn" "mla")
1340   (set_attr "predicable" "yes")]
1341)
1342
1343(define_insn "*mulsi3addsi_v6"
1344  [(set (match_operand:SI 0 "s_register_operand" "=r")
1345	(plus:SI
1346	  (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1347		   (match_operand:SI 1 "s_register_operand" "r"))
1348	  (match_operand:SI 3 "s_register_operand" "r")))]
1349  "TARGET_32BIT && arm_arch6"
1350  "mla%?\\t%0, %2, %1, %3"
1351  [(set_attr "insn" "mla")
1352   (set_attr "predicable" "yes")]
1353)
1354
1355(define_insn "*mulsi3addsi_compare0"
1356  [(set (reg:CC_NOOV CC_REGNUM)
1357	(compare:CC_NOOV
1358	 (plus:SI (mult:SI
1359		   (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1360		   (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1361		  (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
1362	 (const_int 0)))
1363   (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1364	(plus:SI (mult:SI (match_dup 2) (match_dup 1))
1365		 (match_dup 3)))]
1366  "TARGET_ARM && arm_arch6"
1367  "mla%.\\t%0, %2, %1, %3"
1368  [(set_attr "conds" "set")
1369   (set_attr "insn" "mlas")]
1370)
1371
1372(define_insn "*mulsi3addsi_compare0_v6"
1373  [(set (reg:CC_NOOV CC_REGNUM)
1374	(compare:CC_NOOV
1375	 (plus:SI (mult:SI
1376		   (match_operand:SI 2 "s_register_operand" "r")
1377		   (match_operand:SI 1 "s_register_operand" "r"))
1378		  (match_operand:SI 3 "s_register_operand" "r"))
1379	 (const_int 0)))
1380   (set (match_operand:SI 0 "s_register_operand" "=r")
1381	(plus:SI (mult:SI (match_dup 2) (match_dup 1))
1382		 (match_dup 3)))]
1383  "TARGET_ARM && arm_arch6 && optimize_size"
1384  "mla%.\\t%0, %2, %1, %3"
1385  [(set_attr "conds" "set")
1386   (set_attr "insn" "mlas")]
1387)
1388
1389(define_insn "*mulsi3addsi_compare0_scratch"
1390  [(set (reg:CC_NOOV CC_REGNUM)
1391	(compare:CC_NOOV
1392	 (plus:SI (mult:SI
1393		   (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1394		   (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
1395		  (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1396	 (const_int 0)))
1397   (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1398  "TARGET_ARM && !arm_arch6"
1399  "mla%.\\t%0, %2, %1, %3"
1400  [(set_attr "conds" "set")
1401   (set_attr "insn" "mlas")]
1402)
1403
1404(define_insn "*mulsi3addsi_compare0_scratch_v6"
1405  [(set (reg:CC_NOOV CC_REGNUM)
1406	(compare:CC_NOOV
1407	 (plus:SI (mult:SI
1408		   (match_operand:SI 2 "s_register_operand" "r")
1409		   (match_operand:SI 1 "s_register_operand" "r"))
1410		  (match_operand:SI 3 "s_register_operand" "r"))
1411	 (const_int 0)))
1412   (clobber (match_scratch:SI 0 "=r"))]
1413  "TARGET_ARM && arm_arch6 && optimize_size"
1414  "mla%.\\t%0, %2, %1, %3"
1415  [(set_attr "conds" "set")
1416   (set_attr "insn" "mlas")]
1417)
1418
1419(define_insn "*mulsi3subsi"
1420  [(set (match_operand:SI 0 "s_register_operand" "=r")
1421	(minus:SI
1422	  (match_operand:SI 3 "s_register_operand" "r")
1423	  (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1424		   (match_operand:SI 1 "s_register_operand" "r"))))]
1425  "TARGET_32BIT && arm_arch_thumb2"
1426  "mls%?\\t%0, %2, %1, %3"
1427  [(set_attr "insn" "mla")
1428   (set_attr "predicable" "yes")]
1429)
1430
1431;; Unnamed template to match long long multiply-accumulate (smlal)
1432
1433(define_insn "*mulsidi3adddi"
1434  [(set (match_operand:DI 0 "s_register_operand" "=&r")
1435	(plus:DI
1436	 (mult:DI
1437	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1438	  (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1439	 (match_operand:DI 1 "s_register_operand" "0")))]
1440  "TARGET_32BIT && arm_arch3m && !arm_arch6"
1441  "smlal%?\\t%Q0, %R0, %3, %2"
1442  [(set_attr "insn" "smlal")
1443   (set_attr "predicable" "yes")]
1444)
1445
1446(define_insn "*mulsidi3adddi_v6"
1447  [(set (match_operand:DI 0 "s_register_operand" "=r")
1448	(plus:DI
1449	 (mult:DI
1450	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1451	  (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1452	 (match_operand:DI 1 "s_register_operand" "0")))]
1453  "TARGET_32BIT && arm_arch6"
1454  "smlal%?\\t%Q0, %R0, %3, %2"
1455  [(set_attr "insn" "smlal")
1456   (set_attr "predicable" "yes")]
1457)
1458
1459;; 32x32->64 widening multiply.
1460;; As with mulsi3, the only difference between the v3-5 and v6+
1461;; versions of these patterns is the requirement that the output not
1462;; overlap the inputs, but that still means we have to have a named
1463;; expander and two different starred insns.
1464
1465(define_expand "mulsidi3"
1466  [(set (match_operand:DI 0 "s_register_operand" "")
1467	(mult:DI
1468	 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1469	 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1470  "TARGET_32BIT && arm_arch3m"
1471  ""
1472)
1473
1474(define_insn "*mulsidi3_nov6"
1475  [(set (match_operand:DI 0 "s_register_operand" "=&r")
1476	(mult:DI
1477	 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1478	 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1479  "TARGET_32BIT && arm_arch3m && !arm_arch6"
1480  "smull%?\\t%Q0, %R0, %1, %2"
1481  [(set_attr "insn" "smull")
1482   (set_attr "predicable" "yes")]
1483)
1484
1485(define_insn "*mulsidi3_v6"
1486  [(set (match_operand:DI 0 "s_register_operand" "=r")
1487	(mult:DI
1488	 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1489	 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1490  "TARGET_32BIT && arm_arch6"
1491  "smull%?\\t%Q0, %R0, %1, %2"
1492  [(set_attr "insn" "smull")
1493   (set_attr "predicable" "yes")]
1494)
1495
1496(define_expand "umulsidi3"
1497  [(set (match_operand:DI 0 "s_register_operand" "")
1498	(mult:DI
1499	 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1500	 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1501  "TARGET_32BIT && arm_arch3m"
1502  ""
1503)
1504
1505(define_insn "*umulsidi3_nov6"
1506  [(set (match_operand:DI 0 "s_register_operand" "=&r")
1507	(mult:DI
1508	 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1509	 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1510  "TARGET_32BIT && arm_arch3m && !arm_arch6"
1511  "umull%?\\t%Q0, %R0, %1, %2"
1512  [(set_attr "insn" "umull")
1513   (set_attr "predicable" "yes")]
1514)
1515
1516(define_insn "*umulsidi3_v6"
1517  [(set (match_operand:DI 0 "s_register_operand" "=r")
1518	(mult:DI
1519	 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1520	 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1521  "TARGET_32BIT && arm_arch6"
1522  "umull%?\\t%Q0, %R0, %1, %2"
1523  [(set_attr "insn" "umull")
1524   (set_attr "predicable" "yes")]
1525)
1526
1527;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
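;;
;; Illustration (hedged, as for smlal above; the function name is invented):
;;
;;   unsigned long long
;;   umac (unsigned long long acc, unsigned int a, unsigned int b)
;;   {
;;     return acc + (unsigned long long) a * b;   /* candidate for umlal */
;;   }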
1528
1529(define_insn "*umulsidi3adddi"
1530  [(set (match_operand:DI 0 "s_register_operand" "=&r")
1531	(plus:DI
1532	 (mult:DI
1533	  (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1534	  (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1535	 (match_operand:DI 1 "s_register_operand" "0")))]
1536  "TARGET_32BIT && arm_arch3m && !arm_arch6"
1537  "umlal%?\\t%Q0, %R0, %3, %2"
1538  [(set_attr "insn" "umlal")
1539   (set_attr "predicable" "yes")]
1540)
1541
1542(define_insn "*umulsidi3adddi_v6"
1543  [(set (match_operand:DI 0 "s_register_operand" "=r")
1544	(plus:DI
1545	 (mult:DI
1546	  (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1547	  (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1548	 (match_operand:DI 1 "s_register_operand" "0")))]
1549  "TARGET_32BIT && arm_arch6"
1550  "umlal%?\\t%Q0, %R0, %3, %2"
1551  [(set_attr "insn" "umlal")
1552   (set_attr "predicable" "yes")]
1553)
1554
1555(define_expand "smulsi3_highpart"
1556  [(parallel
1557    [(set (match_operand:SI 0 "s_register_operand" "")
1558	  (truncate:SI
1559	   (lshiftrt:DI
1560	    (mult:DI
1561	     (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1562	     (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1563	    (const_int 32))))
1564     (clobber (match_scratch:SI 3 ""))])]
1565  "TARGET_32BIT && arm_arch3m"
1566  ""
1567)
1568
1569(define_insn "*smulsi3_highpart_nov6"
1570  [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1571	(truncate:SI
1572	 (lshiftrt:DI
1573	  (mult:DI
1574	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1575	   (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1576	  (const_int 32))))
1577   (clobber (match_scratch:SI 3 "=&r,&r"))]
1578  "TARGET_32BIT && arm_arch3m && !arm_arch6"
1579  "smull%?\\t%3, %0, %2, %1"
1580  [(set_attr "insn" "smull")
1581   (set_attr "predicable" "yes")]
1582)
1583
1584(define_insn "*smulsi3_highpart_v6"
1585  [(set (match_operand:SI 0 "s_register_operand" "=r")
1586	(truncate:SI
1587	 (lshiftrt:DI
1588	  (mult:DI
1589	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1590	   (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1591	  (const_int 32))))
1592   (clobber (match_scratch:SI 3 "=r"))]
1593  "TARGET_32BIT && arm_arch6"
1594  "smull%?\\t%3, %0, %2, %1"
1595  [(set_attr "insn" "smull")
1596   (set_attr "predicable" "yes")]
1597)
1598
1599(define_expand "umulsi3_highpart"
1600  [(parallel
1601    [(set (match_operand:SI 0 "s_register_operand" "")
1602	  (truncate:SI
1603	   (lshiftrt:DI
1604	    (mult:DI
1605	     (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
	     (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1607	    (const_int 32))))
1608     (clobber (match_scratch:SI 3 ""))])]
1609  "TARGET_32BIT && arm_arch3m"
1610  ""
1611)
1612
1613(define_insn "*umulsi3_highpart_nov6"
1614  [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1615	(truncate:SI
1616	 (lshiftrt:DI
1617	  (mult:DI
1618	   (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r"))
1619	   (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1620	  (const_int 32))))
1621   (clobber (match_scratch:SI 3 "=&r,&r"))]
1622  "TARGET_32BIT && arm_arch3m && !arm_arch6"
1623  "umull%?\\t%3, %0, %2, %1"
1624  [(set_attr "insn" "umull")
1625   (set_attr "predicable" "yes")]
1626)
1627
1628(define_insn "*umulsi3_highpart_v6"
1629  [(set (match_operand:SI 0 "s_register_operand" "=r")
1630	(truncate:SI
1631	 (lshiftrt:DI
1632	  (mult:DI
1633	   (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1634	   (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1635	  (const_int 32))))
1636   (clobber (match_scratch:SI 3 "=r"))]
1637  "TARGET_32BIT && arm_arch6"
1638  "umull%?\\t%3, %0, %2, %1"
1639  [(set_attr "insn" "umull")
1640   (set_attr "predicable" "yes")]
1641)
1642
1643(define_insn "mulhisi3"
1644  [(set (match_operand:SI 0 "s_register_operand" "=r")
1645	(mult:SI (sign_extend:SI
1646		  (match_operand:HI 1 "s_register_operand" "%r"))
1647		 (sign_extend:SI
1648		  (match_operand:HI 2 "s_register_operand" "r"))))]
1649  "TARGET_DSP_MULTIPLY"
1650  "smulbb%?\\t%0, %1, %2"
1651  [(set_attr "insn" "smulxy")
1652   (set_attr "predicable" "yes")]
1653)
1654
1655(define_insn "*mulhisi3tb"
1656  [(set (match_operand:SI 0 "s_register_operand" "=r")
1657	(mult:SI (ashiftrt:SI
1658		  (match_operand:SI 1 "s_register_operand" "r")
1659		  (const_int 16))
1660		 (sign_extend:SI
1661		  (match_operand:HI 2 "s_register_operand" "r"))))]
1662  "TARGET_DSP_MULTIPLY"
1663  "smultb%?\\t%0, %1, %2"
1664  [(set_attr "insn" "smulxy")
1665   (set_attr "predicable" "yes")]
1666)
1667
1668(define_insn "*mulhisi3bt"
1669  [(set (match_operand:SI 0 "s_register_operand" "=r")
1670	(mult:SI (sign_extend:SI
1671		  (match_operand:HI 1 "s_register_operand" "r"))
1672		 (ashiftrt:SI
1673		  (match_operand:SI 2 "s_register_operand" "r")
1674		  (const_int 16))))]
1675  "TARGET_DSP_MULTIPLY"
1676  "smulbt%?\\t%0, %1, %2"
1677  [(set_attr "insn" "smulxy")
1678   (set_attr "predicable" "yes")]
1679)
1680
1681(define_insn "*mulhisi3tt"
1682  [(set (match_operand:SI 0 "s_register_operand" "=r")
1683	(mult:SI (ashiftrt:SI
1684		  (match_operand:SI 1 "s_register_operand" "r")
1685		  (const_int 16))
1686		 (ashiftrt:SI
1687		  (match_operand:SI 2 "s_register_operand" "r")
1688		  (const_int 16))))]
1689  "TARGET_DSP_MULTIPLY"
1690  "smultt%?\\t%0, %1, %2"
1691  [(set_attr "insn" "smulxy")
1692   (set_attr "predicable" "yes")]
1693)
1694
1695(define_insn "*mulhisi3addsi"
1696  [(set (match_operand:SI 0 "s_register_operand" "=r")
1697	(plus:SI (match_operand:SI 1 "s_register_operand" "r")
1698		 (mult:SI (sign_extend:SI
1699			   (match_operand:HI 2 "s_register_operand" "%r"))
1700			  (sign_extend:SI
1701			   (match_operand:HI 3 "s_register_operand" "r")))))]
1702  "TARGET_DSP_MULTIPLY"
1703  "smlabb%?\\t%0, %2, %3, %1"
1704  [(set_attr "insn" "smlaxy")
1705   (set_attr "predicable" "yes")]
1706)
1707
1708(define_insn "*mulhidi3adddi"
1709  [(set (match_operand:DI 0 "s_register_operand" "=r")
1710	(plus:DI
1711	  (match_operand:DI 1 "s_register_operand" "0")
1712	  (mult:DI (sign_extend:DI
1713	 	    (match_operand:HI 2 "s_register_operand" "%r"))
1714		   (sign_extend:DI
1715		    (match_operand:HI 3 "s_register_operand" "r")))))]
1716  "TARGET_DSP_MULTIPLY"
1717  "smlalbb%?\\t%Q0, %R0, %2, %3"
1718  [(set_attr "insn" "smlalxy")
1719   (set_attr "predicable" "yes")])
1720
1721(define_expand "mulsf3"
1722  [(set (match_operand:SF          0 "s_register_operand" "")
1723	(mult:SF (match_operand:SF 1 "s_register_operand" "")
1724		 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1725  "TARGET_32BIT && TARGET_HARD_FLOAT"
1726  "
1727  if (TARGET_MAVERICK
1728      && !cirrus_fp_register (operands[2], SFmode))
1729    operands[2] = force_reg (SFmode, operands[2]);
1730")
1731
1732(define_expand "muldf3"
1733  [(set (match_operand:DF          0 "s_register_operand" "")
1734	(mult:DF (match_operand:DF 1 "s_register_operand" "")
1735		 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1736  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1737  "
1738  if (TARGET_MAVERICK
1739      && !cirrus_fp_register (operands[2], DFmode))
1740    operands[2] = force_reg (DFmode, operands[2]);
1741")
1742
1743;; Division insns
1744
1745(define_expand "divsf3"
1746  [(set (match_operand:SF 0 "s_register_operand" "")
1747	(div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1748		(match_operand:SF 2 "arm_float_rhs_operand" "")))]
1749  "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1750  "")
1751
1752(define_expand "divdf3"
1753  [(set (match_operand:DF 0 "s_register_operand" "")
1754	(div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1755		(match_operand:DF 2 "arm_float_rhs_operand" "")))]
1756  "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
1757  "")
1758
1759;; Modulo insns
1760
1761(define_expand "modsf3"
1762  [(set (match_operand:SF 0 "s_register_operand" "")
1763	(mod:SF (match_operand:SF 1 "s_register_operand" "")
1764		(match_operand:SF 2 "arm_float_rhs_operand" "")))]
1765  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1766  "")
1767
1768(define_expand "moddf3"
1769  [(set (match_operand:DF 0 "s_register_operand" "")
1770	(mod:DF (match_operand:DF 1 "s_register_operand" "")
1771		(match_operand:DF 2 "arm_float_rhs_operand" "")))]
1772  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1773  "")
1774
;; Boolean and, ior, xor insns
1776
1777;; Split up double word logical operations
1778
1779;; Split up simple DImode logical operations.  Simply perform the logical
1780;; operation on the upper and lower halves of the registers.
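;;
;; For example (an illustrative sketch; the function name is invented), a
;; DImode AND such as
;;
;;   unsigned long long
;;   and64 (unsigned long long a, unsigned long long b)
;;   {
;;     return a & b;
;;   }
;;
;; becomes two SImode ANDs, one on the low words and one on the high words.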
1781(define_split
1782  [(set (match_operand:DI 0 "s_register_operand" "")
1783	(match_operator:DI 6 "logical_binary_operator"
1784	  [(match_operand:DI 1 "s_register_operand" "")
1785	   (match_operand:DI 2 "s_register_operand" "")]))]
1786  "TARGET_32BIT && reload_completed
1787   && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1788  [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1789   (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1790  "
1791  {
1792    operands[3] = gen_highpart (SImode, operands[0]);
1793    operands[0] = gen_lowpart (SImode, operands[0]);
1794    operands[4] = gen_highpart (SImode, operands[1]);
1795    operands[1] = gen_lowpart (SImode, operands[1]);
1796    operands[5] = gen_highpart (SImode, operands[2]);
1797    operands[2] = gen_lowpart (SImode, operands[2]);
1798  }"
1799)
1800
1801(define_split
1802  [(set (match_operand:DI 0 "s_register_operand" "")
1803	(match_operator:DI 6 "logical_binary_operator"
1804	  [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1805	   (match_operand:DI 1 "s_register_operand" "")]))]
1806  "TARGET_32BIT && reload_completed"
1807  [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1808   (set (match_dup 3) (match_op_dup:SI 6
1809			[(ashiftrt:SI (match_dup 2) (const_int 31))
1810			 (match_dup 4)]))]
1811  "
1812  {
1813    operands[3] = gen_highpart (SImode, operands[0]);
1814    operands[0] = gen_lowpart (SImode, operands[0]);
1815    operands[4] = gen_highpart (SImode, operands[1]);
1816    operands[1] = gen_lowpart (SImode, operands[1]);
1817    operands[5] = gen_highpart (SImode, operands[2]);
1818    operands[2] = gen_lowpart (SImode, operands[2]);
1819  }"
1820)
1821
1822;; The zero extend of operand 2 means we can just copy the high part of
1823;; operand1 into operand0.
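;;
;; E.g. (illustrative sketch only; the function name is invented):
;;
;;   unsigned long long
;;   or64_32 (unsigned long long a, unsigned int b)
;;   {
;;     return a | b;   /* b is zero-extended to 64 bits */
;;   }
;;
;; needs an orr for the low word, while the high word of the result is just
;; the high word of a.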
1824(define_split
1825  [(set (match_operand:DI 0 "s_register_operand" "")
1826	(ior:DI
1827	  (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1828	  (match_operand:DI 1 "s_register_operand" "")))]
1829  "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1830  [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1831   (set (match_dup 3) (match_dup 4))]
1832  "
1833  {
1834    operands[4] = gen_highpart (SImode, operands[1]);
1835    operands[3] = gen_highpart (SImode, operands[0]);
1836    operands[0] = gen_lowpart (SImode, operands[0]);
1837    operands[1] = gen_lowpart (SImode, operands[1]);
1838  }"
1839)
1840
1841;; The zero extend of operand 2 means we can just copy the high part of
1842;; operand1 into operand0.
1843(define_split
1844  [(set (match_operand:DI 0 "s_register_operand" "")
1845	(xor:DI
1846	  (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1847	  (match_operand:DI 1 "s_register_operand" "")))]
1848  "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1849  [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1850   (set (match_dup 3) (match_dup 4))]
1851  "
1852  {
1853    operands[4] = gen_highpart (SImode, operands[1]);
1854    operands[3] = gen_highpart (SImode, operands[0]);
1855    operands[0] = gen_lowpart (SImode, operands[0]);
1856    operands[1] = gen_lowpart (SImode, operands[1]);
1857  }"
1858)
1859
1860(define_insn "anddi3"
1861  [(set (match_operand:DI         0 "s_register_operand" "=&r,&r")
1862	(and:DI (match_operand:DI 1 "s_register_operand"  "%0,r")
1863		(match_operand:DI 2 "s_register_operand"   "r,r")))]
1864  "TARGET_32BIT && ! TARGET_IWMMXT"
1865  "#"
1866  [(set_attr "length" "8")]
1867)
1868
1869(define_insn_and_split "*anddi_zesidi_di"
1870  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1871	(and:DI (zero_extend:DI
1872		 (match_operand:SI 2 "s_register_operand" "r,r"))
1873		(match_operand:DI 1 "s_register_operand" "0,r")))]
1874  "TARGET_32BIT"
1875  "#"
1876  "TARGET_32BIT && reload_completed"
1877  ; The zero extend of operand 2 clears the high word of the output
1878  ; operand.
1879  [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1880   (set (match_dup 3) (const_int 0))]
1881  "
1882  {
1883    operands[3] = gen_highpart (SImode, operands[0]);
1884    operands[0] = gen_lowpart (SImode, operands[0]);
1885    operands[1] = gen_lowpart (SImode, operands[1]);
1886  }"
1887  [(set_attr "length" "8")]
1888)
1889
1890(define_insn "*anddi_sesdi_di"
1891  [(set (match_operand:DI          0 "s_register_operand" "=&r,&r")
1892	(and:DI (sign_extend:DI
1893		 (match_operand:SI 2 "s_register_operand" "r,r"))
1894		(match_operand:DI  1 "s_register_operand" "0,r")))]
1895  "TARGET_32BIT"
1896  "#"
1897  [(set_attr "length" "8")]
1898)
1899
1900(define_expand "andsi3"
1901  [(set (match_operand:SI         0 "s_register_operand" "")
1902	(and:SI (match_operand:SI 1 "s_register_operand" "")
1903		(match_operand:SI 2 "reg_or_int_operand" "")))]
1904  "TARGET_EITHER"
1905  "
1906  if (TARGET_32BIT)
1907    {
1908      if (GET_CODE (operands[2]) == CONST_INT)
1909        {
1910          arm_split_constant (AND, SImode, NULL_RTX,
1911	                      INTVAL (operands[2]), operands[0],
1912			      operands[1], optimize && can_create_pseudo_p ());
1913
1914          DONE;
1915        }
1916    }
1917  else /* TARGET_THUMB1 */
1918    {
1919      if (GET_CODE (operands[2]) != CONST_INT)
1920        {
1921          rtx tmp = force_reg (SImode, operands[2]);
1922	  if (rtx_equal_p (operands[0], operands[1]))
1923	    operands[2] = tmp;
1924	  else
1925	    {
1926              operands[2] = operands[1];
1927              operands[1] = tmp;
1928	    }
1929        }
1930      else
1931        {
1932          int i;
1933
1934          if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1935  	    {
1936	      operands[2] = force_reg (SImode,
1937				       GEN_INT (~INTVAL (operands[2])));
1938
1939	      emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1940
1941	      DONE;
1942	    }
1943
1944          for (i = 9; i <= 31; i++)
1945	    {
1946	      if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1947	        {
1948	          emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1949			 	        const0_rtx));
1950	          DONE;
1951	        }
1952	      else if ((((HOST_WIDE_INT) 1) << i) - 1
1953		       == ~INTVAL (operands[2]))
1954	        {
1955	          rtx shift = GEN_INT (i);
1956	          rtx reg = gen_reg_rtx (SImode);
1957
1958	          emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1959	          emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1960
1961	          DONE;
1962	        }
1963	    }
1964
1965          operands[2] = force_reg (SImode, operands[2]);
1966        }
1967    }
1968  "
1969)
1970
1971; ??? Check split length for Thumb-2
1972(define_insn_and_split "*arm_andsi3_insn"
1973  [(set (match_operand:SI         0 "s_register_operand" "=r,r,r")
1974	(and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1975		(match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1976  "TARGET_32BIT"
1977  "@
1978   and%?\\t%0, %1, %2
1979   bic%?\\t%0, %1, #%B2
1980   #"
1981  "TARGET_32BIT
1982   && GET_CODE (operands[2]) == CONST_INT
1983   && !(const_ok_for_arm (INTVAL (operands[2]))
1984	|| const_ok_for_arm (~INTVAL (operands[2])))"
1985  [(clobber (const_int 0))]
1986  "
  arm_split_constant (AND, SImode, curr_insn,
1988	               INTVAL (operands[2]), operands[0], operands[1], 0);
1989  DONE;
1990  "
1991  [(set_attr "length" "4,4,16")
1992   (set_attr "predicable" "yes")]
1993)
1994
1995(define_insn "*thumb1_andsi3_insn"
1996  [(set (match_operand:SI         0 "register_operand" "=l")
1997	(and:SI (match_operand:SI 1 "register_operand" "%0")
1998		(match_operand:SI 2 "register_operand" "l")))]
1999  "TARGET_THUMB1"
2000  "and\\t%0, %0, %2"
2001  [(set_attr "length" "2")]
2002)
2003
2004(define_insn "*andsi3_compare0"
2005  [(set (reg:CC_NOOV CC_REGNUM)
2006	(compare:CC_NOOV
2007	 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
2008		 (match_operand:SI 2 "arm_not_operand" "rI,K"))
2009	 (const_int 0)))
2010   (set (match_operand:SI          0 "s_register_operand" "=r,r")
2011	(and:SI (match_dup 1) (match_dup 2)))]
2012  "TARGET_32BIT"
2013  "@
2014   and%.\\t%0, %1, %2
2015   bic%.\\t%0, %1, #%B2"
2016  [(set_attr "conds" "set")]
2017)
2018
2019(define_insn "*andsi3_compare0_scratch"
2020  [(set (reg:CC_NOOV CC_REGNUM)
2021	(compare:CC_NOOV
2022	 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
2023		 (match_operand:SI 1 "arm_not_operand" "rI,K"))
2024	 (const_int 0)))
2025   (clobber (match_scratch:SI 2 "=X,r"))]
2026  "TARGET_32BIT"
2027  "@
2028   tst%?\\t%0, %1
2029   bic%.\\t%2, %0, #%B1"
2030  [(set_attr "conds" "set")]
2031)
2032
2033(define_insn "*zeroextractsi_compare0_scratch"
2034  [(set (reg:CC_NOOV CC_REGNUM)
2035	(compare:CC_NOOV (zero_extract:SI
2036			  (match_operand:SI 0 "s_register_operand" "r")
2037		 	  (match_operand 1 "const_int_operand" "n")
2038			  (match_operand 2 "const_int_operand" "n"))
2039			 (const_int 0)))]
2040  "TARGET_32BIT
2041  && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2042      && INTVAL (operands[1]) > 0
2043      && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2044      && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2045  "*
2046  operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2047			 << INTVAL (operands[2]));
2048  output_asm_insn (\"tst%?\\t%0, %1\", operands);
2049  return \"\";
2050  "
2051  [(set_attr "conds" "set")]
2052)
2053
2054(define_insn_and_split "*ne_zeroextractsi"
2055  [(set (match_operand:SI 0 "s_register_operand" "=r")
2056	(ne:SI (zero_extract:SI
2057		(match_operand:SI 1 "s_register_operand" "r")
2058		(match_operand:SI 2 "const_int_operand" "n")
2059		(match_operand:SI 3 "const_int_operand" "n"))
2060	       (const_int 0)))
2061   (clobber (reg:CC CC_REGNUM))]
2062  "TARGET_32BIT
2063   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2064       && INTVAL (operands[2]) > 0
2065       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2066       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2067  "#"
2068  "TARGET_32BIT
2069   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2070       && INTVAL (operands[2]) > 0
2071       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2072       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2073  [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2074		   (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2075				    (const_int 0)))
2076	      (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2077   (set (match_dup 0)
2078	(if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2079			 (match_dup 0) (const_int 1)))]
2080  "
2081  operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2082			 << INTVAL (operands[3]));
2083  "
2084  [(set_attr "conds" "clob")
2085   (set (attr "length")
2086	(if_then_else (eq_attr "is_thumb" "yes")
2087		      (const_int 12)
2088		      (const_int 8)))]
2089)
2090
2091(define_insn_and_split "*ne_zeroextractsi_shifted"
2092  [(set (match_operand:SI 0 "s_register_operand" "=r")
2093	(ne:SI (zero_extract:SI
2094		(match_operand:SI 1 "s_register_operand" "r")
2095		(match_operand:SI 2 "const_int_operand" "n")
2096		(const_int 0))
2097	       (const_int 0)))
2098   (clobber (reg:CC CC_REGNUM))]
2099  "TARGET_ARM"
2100  "#"
2101  "TARGET_ARM"
2102  [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2103		   (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2104				    (const_int 0)))
2105	      (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2106   (set (match_dup 0)
2107	(if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2108			 (match_dup 0) (const_int 1)))]
2109  "
2110  operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2111  "
2112  [(set_attr "conds" "clob")
2113   (set_attr "length" "8")]
2114)
2115
2116(define_insn_and_split "*ite_ne_zeroextractsi"
2117  [(set (match_operand:SI 0 "s_register_operand" "=r")
2118	(if_then_else:SI (ne (zero_extract:SI
2119			      (match_operand:SI 1 "s_register_operand" "r")
2120			      (match_operand:SI 2 "const_int_operand" "n")
2121			      (match_operand:SI 3 "const_int_operand" "n"))
2122			     (const_int 0))
2123			 (match_operand:SI 4 "arm_not_operand" "rIK")
2124			 (const_int 0)))
2125   (clobber (reg:CC CC_REGNUM))]
2126  "TARGET_ARM
2127   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2128       && INTVAL (operands[2]) > 0
2129       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2130       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2131   && !reg_overlap_mentioned_p (operands[0], operands[4])"
2132  "#"
2133  "TARGET_ARM
2134   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2135       && INTVAL (operands[2]) > 0
2136       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2137       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2138   && !reg_overlap_mentioned_p (operands[0], operands[4])"
2139  [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2140		   (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2141				    (const_int 0)))
2142	      (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2143   (set (match_dup 0)
2144	(if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2145			 (match_dup 0) (match_dup 4)))]
2146  "
2147  operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2148			 << INTVAL (operands[3]));
2149  "
2150  [(set_attr "conds" "clob")
2151   (set_attr "length" "8")]
2152)
2153
2154(define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2155  [(set (match_operand:SI 0 "s_register_operand" "=r")
2156	(if_then_else:SI (ne (zero_extract:SI
2157			      (match_operand:SI 1 "s_register_operand" "r")
2158			      (match_operand:SI 2 "const_int_operand" "n")
2159			      (const_int 0))
2160			     (const_int 0))
2161			 (match_operand:SI 3 "arm_not_operand" "rIK")
2162			 (const_int 0)))
2163   (clobber (reg:CC CC_REGNUM))]
2164  "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2165  "#"
2166  "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2167  [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2168		   (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2169				    (const_int 0)))
2170	      (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2171   (set (match_dup 0)
2172	(if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2173			 (match_dup 0) (match_dup 3)))]
2174  "
2175  operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2176  "
2177  [(set_attr "conds" "clob")
2178   (set_attr "length" "8")]
2179)
2180
2181(define_split
2182  [(set (match_operand:SI 0 "s_register_operand" "")
2183	(zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2184			 (match_operand:SI 2 "const_int_operand" "")
2185			 (match_operand:SI 3 "const_int_operand" "")))
2186   (clobber (match_operand:SI 4 "s_register_operand" ""))]
2187  "TARGET_THUMB1"
2188  [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2189   (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2190  "{
2191     HOST_WIDE_INT temp = INTVAL (operands[2]);
2192
2193     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2194     operands[3] = GEN_INT (32 - temp);
2195   }"
2196)
2197
;; ??? Use Thumb-2 bitfield insert/extract instructions.
2199(define_split
2200  [(set (match_operand:SI 0 "s_register_operand" "")
2201	(match_operator:SI 1 "shiftable_operator"
2202	 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2203			   (match_operand:SI 3 "const_int_operand" "")
2204			   (match_operand:SI 4 "const_int_operand" ""))
2205	  (match_operand:SI 5 "s_register_operand" "")]))
2206   (clobber (match_operand:SI 6 "s_register_operand" ""))]
2207  "TARGET_ARM"
2208  [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2209   (set (match_dup 0)
2210	(match_op_dup 1
2211	 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2212	  (match_dup 5)]))]
2213  "{
2214     HOST_WIDE_INT temp = INTVAL (operands[3]);
2215
2216     operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2217     operands[4] = GEN_INT (32 - temp);
2218   }"
2219)
2220
2221(define_split
2222  [(set (match_operand:SI 0 "s_register_operand" "")
2223	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2224			 (match_operand:SI 2 "const_int_operand" "")
2225			 (match_operand:SI 3 "const_int_operand" "")))]
2226  "TARGET_THUMB1"
2227  [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2228   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2229  "{
2230     HOST_WIDE_INT temp = INTVAL (operands[2]);
2231
2232     operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2233     operands[3] = GEN_INT (32 - temp);
2234   }"
2235)
2236
2237(define_split
2238  [(set (match_operand:SI 0 "s_register_operand" "")
2239	(match_operator:SI 1 "shiftable_operator"
2240	 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2241			   (match_operand:SI 3 "const_int_operand" "")
2242			   (match_operand:SI 4 "const_int_operand" ""))
2243	  (match_operand:SI 5 "s_register_operand" "")]))
2244   (clobber (match_operand:SI 6 "s_register_operand" ""))]
2245  "TARGET_ARM"
2246  [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2247   (set (match_dup 0)
2248	(match_op_dup 1
2249	 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2250	  (match_dup 5)]))]
2251  "{
2252     HOST_WIDE_INT temp = INTVAL (operands[3]);
2253
2254     operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2255     operands[4] = GEN_INT (32 - temp);
2256   }"
2257)
2258
2259;;; ??? This pattern is bogus.  If operand3 has bits outside the range
2260;;; represented by the bitfield, then this will produce incorrect results.
2261;;; Somewhere, the value needs to be truncated.  On targets like the m68k,
2262;;; which have a real bit-field insert instruction, the truncation happens
;;; in the bit-field insert instruction itself.  Since ARM does not have a
2264;;; bit-field insert instruction, we would have to emit code here to truncate
2265;;; the value before we insert.  This loses some of the advantage of having
;;; this insv pattern, so it needs to be reevaluated.
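;;;
;;; As an illustration of the concern (a hedged example; the struct and
;;; function names are invented, not taken from the sources), for a C
;;; bit-field store such as
;;;
;;;   struct s { unsigned int f : 4; unsigned int rest : 28; };
;;;   void set_f (struct s *p, unsigned int v) { p->f = v; }
;;;
;;; only the low 4 bits of v may end up in the field, i.e. the value must
;;; effectively be truncated to (v & 0xf) before insertion.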
2267
2268(define_expand "insv"
2269  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2270                         (match_operand:SI 1 "general_operand" "")
2271                         (match_operand:SI 2 "general_operand" ""))
2272        (match_operand:SI 3 "reg_or_int_operand" ""))]
2273  "TARGET_ARM || arm_arch_thumb2"
2274  "
2275  {
2276    int start_bit = INTVAL (operands[2]);
2277    int width = INTVAL (operands[1]);
2278    HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2279    rtx target, subtarget;
2280
2281    if (arm_arch_thumb2)
2282      {
2283	bool use_bfi = TRUE;
2284
2285	if (GET_CODE (operands[3]) == CONST_INT)
2286	  {
2287	    HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2288
2289	    if (val == 0)
2290	      {
2291		emit_insn (gen_insv_zero (operands[0], operands[1],
2292					  operands[2]));
2293		DONE;
2294	      }
2295
2296	    /* See if the set can be done with a single orr instruction.  */
2297	    if (val == mask && const_ok_for_arm (val << start_bit))
2298	      use_bfi = FALSE;
2299	  }
2300
2301	if (use_bfi)
2302	  {
2303	    if (GET_CODE (operands[3]) != REG)
2304	      operands[3] = force_reg (SImode, operands[3]);
2305
2306	    emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2307				    operands[3]));
2308	    DONE;
2309	  }
2310      }
2311
2312    target = copy_rtx (operands[0]);
2313    /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2314       subreg as the final target.  */
2315    if (GET_CODE (target) == SUBREG)
2316      {
2317	subtarget = gen_reg_rtx (SImode);
2318	if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2319	    < GET_MODE_SIZE (SImode))
2320	  target = SUBREG_REG (target);
2321      }
2322    else
2323      subtarget = target;
2324
2325    if (GET_CODE (operands[3]) == CONST_INT)
2326      {
2327	/* Since we are inserting a known constant, we may be able to
2328	   reduce the number of bits that we have to clear so that
2329	   the mask becomes simple.  */
2330	/* ??? This code does not check to see if the new mask is actually
2331	   simpler.  It may not be.  */
2332	rtx op1 = gen_reg_rtx (SImode);
2333	/* ??? Truncate operand3 to fit in the bitfield.  See comment before
2334	   start of this pattern.  */
2335	HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2336	HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2337
2338	emit_insn (gen_andsi3 (op1, operands[0],
2339			       gen_int_mode (~mask2, SImode)));
2340	emit_insn (gen_iorsi3 (subtarget, op1,
2341			       gen_int_mode (op3_value << start_bit, SImode)));
2342      }
2343    else if (start_bit == 0
2344	     && !(const_ok_for_arm (mask)
2345		  || const_ok_for_arm (~mask)))
2346      {
	/* A trick: since we are setting the bottom bits in the word,
	   we can shift operand[3] up, shift operand[0] down, OR them together
2349	   and rotate the result back again.  This takes 3 insns, and
2350	   the third might be mergeable into another op.  */
2351	/* The shift up copes with the possibility that operand[3] is
2352           wider than the bitfield.  */
2353	rtx op0 = gen_reg_rtx (SImode);
2354	rtx op1 = gen_reg_rtx (SImode);
2355
2356	emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2357	emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2358	emit_insn (gen_iorsi3  (op1, op1, op0));
2359	emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2360      }
2361    else if ((width + start_bit == 32)
2362	     && !(const_ok_for_arm (mask)
2363		  || const_ok_for_arm (~mask)))
2364      {
2365	/* Similar trick, but slightly less efficient.  */
2366
2367	rtx op0 = gen_reg_rtx (SImode);
2368	rtx op1 = gen_reg_rtx (SImode);
2369
2370	emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2371	emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2372	emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2373	emit_insn (gen_iorsi3 (subtarget, op1, op0));
2374      }
2375    else
2376      {
2377	rtx op0 = gen_int_mode (mask, SImode);
2378	rtx op1 = gen_reg_rtx (SImode);
2379	rtx op2 = gen_reg_rtx (SImode);
2380
2381	if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2382	  {
2383	    rtx tmp = gen_reg_rtx (SImode);
2384
2385	    emit_insn (gen_movsi (tmp, op0));
2386	    op0 = tmp;
2387	  }
2388
2389	/* Mask out any bits in operand[3] that are not needed.  */
	emit_insn (gen_andsi3 (op1, operands[3], op0));
2391
2392	if (GET_CODE (op0) == CONST_INT
2393	    && (const_ok_for_arm (mask << start_bit)
2394		|| const_ok_for_arm (~(mask << start_bit))))
2395	  {
2396	    op0 = gen_int_mode (~(mask << start_bit), SImode);
2397	    emit_insn (gen_andsi3 (op2, operands[0], op0));
2398	  }
2399	else
2400	  {
2401	    if (GET_CODE (op0) == CONST_INT)
2402	      {
2403		rtx tmp = gen_reg_rtx (SImode);
2404
2405		emit_insn (gen_movsi (tmp, op0));
2406		op0 = tmp;
2407	      }
2408
2409	    if (start_bit != 0)
2410	      emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2411
2412	    emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2413	  }
2414
2415	if (start_bit != 0)
2416          emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2417
2418	emit_insn (gen_iorsi3 (subtarget, op1, op2));
2419      }
2420
2421    if (subtarget != target)
2422      {
2423	/* If TARGET is still a SUBREG, then it must be wider than a word,
2424	   so we must be careful only to set the subword we were asked to.  */
2425	if (GET_CODE (target) == SUBREG)
2426	  emit_move_insn (target, subtarget);
2427	else
2428	  emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2429      }
2430
2431    DONE;
2432  }"
2433)
2434
2435(define_insn "insv_zero"
2436  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2437                         (match_operand:SI 1 "const_int_operand" "M")
2438                         (match_operand:SI 2 "const_int_operand" "M"))
2439        (const_int 0))]
2440  "arm_arch_thumb2"
2441  "bfc%?\t%0, %2, %1"
2442  [(set_attr "length" "4")
2443   (set_attr "predicable" "yes")]
2444)
2445
2446(define_insn "insv_t2"
2447  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2448                         (match_operand:SI 1 "const_int_operand" "M")
2449                         (match_operand:SI 2 "const_int_operand" "M"))
2450        (match_operand:SI 3 "s_register_operand" "r"))]
2451  "arm_arch_thumb2"
2452  "bfi%?\t%0, %3, %2, %1"
2453  [(set_attr "length" "4")
2454   (set_attr "predicable" "yes")]
2455)
2456
; Constants for operand 2 will never be given to these patterns.
2458(define_insn_and_split "*anddi_notdi_di"
2459  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2460	(and:DI (not:DI (match_operand:DI 1 "s_register_operand" "0,r"))
2461		(match_operand:DI 2 "s_register_operand" "r,0")))]
2462  "TARGET_32BIT"
2463  "#"
2464  "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2465  [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2466   (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2467  "
2468  {
2469    operands[3] = gen_highpart (SImode, operands[0]);
2470    operands[0] = gen_lowpart (SImode, operands[0]);
2471    operands[4] = gen_highpart (SImode, operands[1]);
2472    operands[1] = gen_lowpart (SImode, operands[1]);
2473    operands[5] = gen_highpart (SImode, operands[2]);
2474    operands[2] = gen_lowpart (SImode, operands[2]);
2475  }"
2476  [(set_attr "length" "8")
2477   (set_attr "predicable" "yes")]
2478)
2479
2480(define_insn_and_split "*anddi_notzesidi_di"
2481  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2482	(and:DI (not:DI (zero_extend:DI
2483			 (match_operand:SI 2 "s_register_operand" "r,r")))
2484		(match_operand:DI 1 "s_register_operand" "0,?r")))]
2485  "TARGET_32BIT"
2486  "@
2487   bic%?\\t%Q0, %Q1, %2
2488   #"
2489  ; (not (zero_extend ...)) allows us to just copy the high word from
2490  ; operand1 to operand0.
2491  "TARGET_32BIT
2492   && reload_completed
2493   && operands[0] != operands[1]"
2494  [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2495   (set (match_dup 3) (match_dup 4))]
2496  "
2497  {
2498    operands[3] = gen_highpart (SImode, operands[0]);
2499    operands[0] = gen_lowpart (SImode, operands[0]);
2500    operands[4] = gen_highpart (SImode, operands[1]);
2501    operands[1] = gen_lowpart (SImode, operands[1]);
2502  }"
2503  [(set_attr "length" "4,8")
2504   (set_attr "predicable" "yes")]
2505)
2506
2507(define_insn_and_split "*anddi_notsesidi_di"
2508  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2509	(and:DI (not:DI (sign_extend:DI
2510			 (match_operand:SI 2 "s_register_operand" "r,r")))
2511		(match_operand:DI 1 "s_register_operand" "0,r")))]
2512  "TARGET_32BIT"
2513  "#"
2514  "TARGET_32BIT && reload_completed"
2515  [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2516   (set (match_dup 3) (and:SI (not:SI
2517				(ashiftrt:SI (match_dup 2) (const_int 31)))
2518			       (match_dup 4)))]
2519  "
2520  {
2521    operands[3] = gen_highpart (SImode, operands[0]);
2522    operands[0] = gen_lowpart (SImode, operands[0]);
2523    operands[4] = gen_highpart (SImode, operands[1]);
2524    operands[1] = gen_lowpart (SImode, operands[1]);
2525  }"
2526  [(set_attr "length" "8")
2527   (set_attr "predicable" "yes")]
2528)
2529
2530(define_insn "andsi_notsi_si"
2531  [(set (match_operand:SI 0 "s_register_operand" "=r")
2532	(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2533		(match_operand:SI 1 "s_register_operand" "r")))]
2534  "TARGET_32BIT"
2535  "bic%?\\t%0, %1, %2"
2536  [(set_attr "predicable" "yes")]
2537)
2538
2539(define_insn "bicsi3"
2540  [(set (match_operand:SI                 0 "register_operand" "=l")
2541	(and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2542		(match_operand:SI         2 "register_operand" "0")))]
2543  "TARGET_THUMB1"
2544  "bic\\t%0, %0, %1"
2545  [(set_attr "length" "2")]
2546)
2547
2548(define_insn "andsi_not_shiftsi_si"
2549  [(set (match_operand:SI 0 "s_register_operand" "=r")
2550	(and:SI (not:SI (match_operator:SI 4 "shift_operator"
2551			 [(match_operand:SI 2 "s_register_operand" "r")
2552			  (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2553		(match_operand:SI 1 "s_register_operand" "r")))]
2554  "TARGET_ARM"
2555  "bic%?\\t%0, %1, %2%S4"
2556  [(set_attr "predicable" "yes")
2557   (set_attr "shift" "2")
2558   (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2559		      (const_string "alu_shift")
2560		      (const_string "alu_shift_reg")))]
2561)
2562
2563(define_insn "*andsi_notsi_si_compare0"
2564  [(set (reg:CC_NOOV CC_REGNUM)
2565	(compare:CC_NOOV
2566	 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2567		 (match_operand:SI 1 "s_register_operand" "r"))
2568	 (const_int 0)))
2569   (set (match_operand:SI 0 "s_register_operand" "=r")
2570	(and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2571  "TARGET_32BIT"
2572  "bic%.\\t%0, %1, %2"
2573  [(set_attr "conds" "set")]
2574)
2575
2576(define_insn "*andsi_notsi_si_compare0_scratch"
2577  [(set (reg:CC_NOOV CC_REGNUM)
2578	(compare:CC_NOOV
2579	 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2580		 (match_operand:SI 1 "s_register_operand" "r"))
2581	 (const_int 0)))
2582   (clobber (match_scratch:SI 0 "=r"))]
2583  "TARGET_32BIT"
2584  "bic%.\\t%0, %1, %2"
2585  [(set_attr "conds" "set")]
2586)
2587
2588(define_insn "iordi3"
2589  [(set (match_operand:DI         0 "s_register_operand" "=&r,&r")
2590	(ior:DI (match_operand:DI 1 "s_register_operand"  "%0,r")
2591		(match_operand:DI 2 "s_register_operand"   "r,r")))]
2592  "TARGET_32BIT && ! TARGET_IWMMXT"
2593  "#"
2594  [(set_attr "length" "8")
2595   (set_attr "predicable" "yes")]
2596)
2597
2598(define_insn "*iordi_zesidi_di"
2599  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2600	(ior:DI (zero_extend:DI
2601		 (match_operand:SI 2 "s_register_operand" "r,r"))
2602		(match_operand:DI 1 "s_register_operand" "0,?r")))]
2603  "TARGET_32BIT"
2604  "@
2605   orr%?\\t%Q0, %Q1, %2
2606   #"
2607  [(set_attr "length" "4,8")
2608   (set_attr "predicable" "yes")]
2609)
2610
2611(define_insn "*iordi_sesidi_di"
2612  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2613	(ior:DI (sign_extend:DI
2614		 (match_operand:SI 2 "s_register_operand" "r,r"))
2615		(match_operand:DI 1 "s_register_operand" "0,r")))]
2616  "TARGET_32BIT"
2617  "#"
2618  [(set_attr "length" "8")
2619   (set_attr "predicable" "yes")]
2620)
2621
2622(define_expand "iorsi3"
2623  [(set (match_operand:SI         0 "s_register_operand" "")
2624	(ior:SI (match_operand:SI 1 "s_register_operand" "")
2625		(match_operand:SI 2 "reg_or_int_operand" "")))]
2626  "TARGET_EITHER"
2627  "
2628  if (GET_CODE (operands[2]) == CONST_INT)
2629    {
2630      if (TARGET_32BIT)
2631        {
2632          arm_split_constant (IOR, SImode, NULL_RTX,
2633	                      INTVAL (operands[2]), operands[0], operands[1],
2634			      optimize && can_create_pseudo_p ());
2635          DONE;
2636	}
2637      else /* TARGET_THUMB1 */
2638        {
2639          rtx tmp = force_reg (SImode, operands[2]);
2640	  if (rtx_equal_p (operands[0], operands[1]))
2641	    operands[2] = tmp;
2642	  else
2643	    {
2644              operands[2] = operands[1];
2645              operands[1] = tmp;
2646	    }
2647        }
2648    }
2649  "
2650)
2651
2652(define_insn_and_split "*arm_iorsi3"
2653  [(set (match_operand:SI         0 "s_register_operand" "=r,r")
2654	(ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2655		(match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2656  "TARGET_ARM"
2657  "@
2658   orr%?\\t%0, %1, %2
2659   #"
2660  "TARGET_ARM
2661   && GET_CODE (operands[2]) == CONST_INT
2662   && !const_ok_for_arm (INTVAL (operands[2]))"
2663  [(clobber (const_int 0))]
2664  "
2665  arm_split_constant (IOR, SImode, curr_insn,
2666                      INTVAL (operands[2]), operands[0], operands[1], 0);
2667  DONE;
2668  "
2669  [(set_attr "length" "4,16")
2670   (set_attr "predicable" "yes")]
2671)
2672
2673(define_insn "*thumb1_iorsi3"
2674  [(set (match_operand:SI         0 "register_operand" "=l")
2675	(ior:SI (match_operand:SI 1 "register_operand" "%0")
2676		(match_operand:SI 2 "register_operand" "l")))]
2677  "TARGET_THUMB1"
2678  "orr\\t%0, %0, %2"
2679  [(set_attr "length" "2")]
2680)
2681
2682(define_peephole2
2683  [(match_scratch:SI 3 "r")
2684   (set (match_operand:SI 0 "arm_general_register_operand" "")
2685	(ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2686		(match_operand:SI 2 "const_int_operand" "")))]
2687  "TARGET_ARM
2688   && !const_ok_for_arm (INTVAL (operands[2]))
2689   && const_ok_for_arm (~INTVAL (operands[2]))"
2690  [(set (match_dup 3) (match_dup 2))
2691   (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2692  ""
2693)
2694
2695(define_insn "*iorsi3_compare0"
2696  [(set (reg:CC_NOOV CC_REGNUM)
2697	(compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2698				 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2699			 (const_int 0)))
2700   (set (match_operand:SI 0 "s_register_operand" "=r")
2701	(ior:SI (match_dup 1) (match_dup 2)))]
2702  "TARGET_32BIT"
2703  "orr%.\\t%0, %1, %2"
2704  [(set_attr "conds" "set")]
2705)
2706
2707(define_insn "*iorsi3_compare0_scratch"
2708  [(set (reg:CC_NOOV CC_REGNUM)
2709	(compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2710				 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2711			 (const_int 0)))
2712   (clobber (match_scratch:SI 0 "=r"))]
2713  "TARGET_32BIT"
2714  "orr%.\\t%0, %1, %2"
2715  [(set_attr "conds" "set")]
2716)
2717
2718(define_insn "xordi3"
2719  [(set (match_operand:DI         0 "s_register_operand" "=&r,&r")
2720	(xor:DI (match_operand:DI 1 "s_register_operand"  "%0,r")
2721		(match_operand:DI 2 "s_register_operand"   "r,r")))]
2722  "TARGET_32BIT && !TARGET_IWMMXT"
2723  "#"
2724  [(set_attr "length" "8")
2725   (set_attr "predicable" "yes")]
2726)
2727
2728(define_insn "*xordi_zesidi_di"
2729  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2730	(xor:DI (zero_extend:DI
2731		 (match_operand:SI 2 "s_register_operand" "r,r"))
2732		(match_operand:DI 1 "s_register_operand" "0,?r")))]
2733  "TARGET_32BIT"
2734  "@
2735   eor%?\\t%Q0, %Q1, %2
2736   #"
2737  [(set_attr "length" "4,8")
2738   (set_attr "predicable" "yes")]
2739)
2740
2741(define_insn "*xordi_sesidi_di"
2742  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2743	(xor:DI (sign_extend:DI
2744		 (match_operand:SI 2 "s_register_operand" "r,r"))
2745		(match_operand:DI 1 "s_register_operand" "0,r")))]
2746  "TARGET_32BIT"
2747  "#"
2748  [(set_attr "length" "8")
2749   (set_attr "predicable" "yes")]
2750)
2751
2752(define_expand "xorsi3"
2753  [(set (match_operand:SI         0 "s_register_operand" "")
2754	(xor:SI (match_operand:SI 1 "s_register_operand" "")
2755		(match_operand:SI 2 "reg_or_int_operand" "")))]
2756  "TARGET_EITHER"
2757  "if (GET_CODE (operands[2]) == CONST_INT)
2758    {
2759      if (TARGET_32BIT)
2760        {
2761          arm_split_constant (XOR, SImode, NULL_RTX,
2762	                      INTVAL (operands[2]), operands[0], operands[1],
2763			      optimize && can_create_pseudo_p ());
2764          DONE;
2765	}
2766      else /* TARGET_THUMB1 */
2767        {
2768          rtx tmp = force_reg (SImode, operands[2]);
2769	  if (rtx_equal_p (operands[0], operands[1]))
2770	    operands[2] = tmp;
2771	  else
2772	    {
2773              operands[2] = operands[1];
2774              operands[1] = tmp;
2775	    }
2776        }
2777    }"
2778)
2779
2780(define_insn "*arm_xorsi3"
2781  [(set (match_operand:SI         0 "s_register_operand" "=r")
2782	(xor:SI (match_operand:SI 1 "s_register_operand" "r")
2783		(match_operand:SI 2 "arm_rhs_operand" "rI")))]
2784  "TARGET_32BIT"
2785  "eor%?\\t%0, %1, %2"
2786  [(set_attr "predicable" "yes")]
2787)
2788
2789(define_insn "*thumb1_xorsi3"
2790  [(set (match_operand:SI         0 "register_operand" "=l")
2791	(xor:SI (match_operand:SI 1 "register_operand" "%0")
2792		(match_operand:SI 2 "register_operand" "l")))]
2793  "TARGET_THUMB1"
2794  "eor\\t%0, %0, %2"
2795  [(set_attr "length" "2")]
2796)
2797
2798(define_insn "*xorsi3_compare0"
2799  [(set (reg:CC_NOOV CC_REGNUM)
2800	(compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2801				 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2802			 (const_int 0)))
2803   (set (match_operand:SI 0 "s_register_operand" "=r")
2804	(xor:SI (match_dup 1) (match_dup 2)))]
2805  "TARGET_32BIT"
2806  "eor%.\\t%0, %1, %2"
2807  [(set_attr "conds" "set")]
2808)
2809
2810(define_insn "*xorsi3_compare0_scratch"
2811  [(set (reg:CC_NOOV CC_REGNUM)
2812	(compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2813				 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2814			 (const_int 0)))]
2815  "TARGET_32BIT"
2816  "teq%?\\t%0, %1"
2817  [(set_attr "conds" "set")]
2818)
2819
; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C)
; followed by (NOT D), we can sometimes merge the final NOT into one of the
; following insns.
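;
; Spelling out the identity (a restatement of the comment above):
;   (~A & ~B) | C  =  ~(A | B) | C        (by De Morgan)
;                  =  ~((A | B) & ~C)     (by De Morgan again)
; so computing D = (A | B) & ~C and then inverting D yields the original value.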
2823
2824(define_split
2825  [(set (match_operand:SI 0 "s_register_operand" "")
2826	(ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2827			(not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2828		(match_operand:SI 3 "arm_rhs_operand" "")))
2829   (clobber (match_operand:SI 4 "s_register_operand" ""))]
2830  "TARGET_32BIT"
2831  [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2832			      (not:SI (match_dup 3))))
2833   (set (match_dup 0) (not:SI (match_dup 4)))]
2834  ""
2835)
2836
2837(define_insn "*andsi_iorsi3_notsi"
2838  [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2839	(and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
2840			(match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2841		(not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2842  "TARGET_32BIT"
2843  "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2844  [(set_attr "length" "8")
2845   (set_attr "ce_count" "2")
2846   (set_attr "predicable" "yes")]
2847)
2848
2849; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2850; insns are available?
2851(define_split
2852  [(set (match_operand:SI 0 "s_register_operand" "")
2853	(match_operator:SI 1 "logical_binary_operator"
2854	 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2855			   (match_operand:SI 3 "const_int_operand" "")
2856			   (match_operand:SI 4 "const_int_operand" ""))
2857	  (match_operator:SI 9 "logical_binary_operator"
2858	   [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2859			 (match_operand:SI 6 "const_int_operand" ""))
2860	    (match_operand:SI 7 "s_register_operand" "")])]))
2861   (clobber (match_operand:SI 8 "s_register_operand" ""))]
2862  "TARGET_32BIT
2863   && GET_CODE (operands[1]) == GET_CODE (operands[9])
2864   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2865  [(set (match_dup 8)
2866	(match_op_dup 1
2867	 [(ashift:SI (match_dup 2) (match_dup 4))
2868	  (match_dup 5)]))
2869   (set (match_dup 0)
2870	(match_op_dup 1
2871	 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2872	  (match_dup 7)]))]
2873  "
2874  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2875")
2876
2877(define_split
2878  [(set (match_operand:SI 0 "s_register_operand" "")
2879	(match_operator:SI 1 "logical_binary_operator"
2880	 [(match_operator:SI 9 "logical_binary_operator"
2881	   [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2882			 (match_operand:SI 6 "const_int_operand" ""))
2883	    (match_operand:SI 7 "s_register_operand" "")])
2884	  (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2885			   (match_operand:SI 3 "const_int_operand" "")
2886			   (match_operand:SI 4 "const_int_operand" ""))]))
2887   (clobber (match_operand:SI 8 "s_register_operand" ""))]
2888  "TARGET_32BIT
2889   && GET_CODE (operands[1]) == GET_CODE (operands[9])
2890   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2891  [(set (match_dup 8)
2892	(match_op_dup 1
2893	 [(ashift:SI (match_dup 2) (match_dup 4))
2894	  (match_dup 5)]))
2895   (set (match_dup 0)
2896	(match_op_dup 1
2897	 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2898	  (match_dup 7)]))]
2899  "
2900  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2901")
2902
2903(define_split
2904  [(set (match_operand:SI 0 "s_register_operand" "")
2905	(match_operator:SI 1 "logical_binary_operator"
2906	 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2907			   (match_operand:SI 3 "const_int_operand" "")
2908			   (match_operand:SI 4 "const_int_operand" ""))
2909	  (match_operator:SI 9 "logical_binary_operator"
2910	   [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2911			 (match_operand:SI 6 "const_int_operand" ""))
2912	    (match_operand:SI 7 "s_register_operand" "")])]))
2913   (clobber (match_operand:SI 8 "s_register_operand" ""))]
2914  "TARGET_32BIT
2915   && GET_CODE (operands[1]) == GET_CODE (operands[9])
2916   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2917  [(set (match_dup 8)
2918	(match_op_dup 1
2919	 [(ashift:SI (match_dup 2) (match_dup 4))
2920	  (match_dup 5)]))
2921   (set (match_dup 0)
2922	(match_op_dup 1
2923	 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2924	  (match_dup 7)]))]
2925  "
2926  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2927")
2928
2929(define_split
2930  [(set (match_operand:SI 0 "s_register_operand" "")
2931	(match_operator:SI 1 "logical_binary_operator"
2932	 [(match_operator:SI 9 "logical_binary_operator"
2933	   [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2934			 (match_operand:SI 6 "const_int_operand" ""))
2935	    (match_operand:SI 7 "s_register_operand" "")])
2936	  (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2937			   (match_operand:SI 3 "const_int_operand" "")
2938			   (match_operand:SI 4 "const_int_operand" ""))]))
2939   (clobber (match_operand:SI 8 "s_register_operand" ""))]
2940  "TARGET_32BIT
2941   && GET_CODE (operands[1]) == GET_CODE (operands[9])
2942   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2943  [(set (match_dup 8)
2944	(match_op_dup 1
2945	 [(ashift:SI (match_dup 2) (match_dup 4))
2946	  (match_dup 5)]))
2947   (set (match_dup 0)
2948	(match_op_dup 1
2949	 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2950	  (match_dup 7)]))]
2951  "
2952  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2953")
2954
2955
2956;; Minimum and maximum insns
2957
2958(define_expand "smaxsi3"
2959  [(parallel [
2960    (set (match_operand:SI 0 "s_register_operand" "")
2961	 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2962		  (match_operand:SI 2 "arm_rhs_operand" "")))
2963    (clobber (reg:CC CC_REGNUM))])]
2964  "TARGET_32BIT"
2965  "
2966  if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2967    {
2968      /* No need for a clobber of the condition code register here.  */
2969      emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2970			      gen_rtx_SMAX (SImode, operands[1],
2971					    operands[2])));
2972      DONE;
2973    }
2974")
2975
2976(define_insn "*smax_0"
2977  [(set (match_operand:SI 0 "s_register_operand" "=r")
2978	(smax:SI (match_operand:SI 1 "s_register_operand" "r")
2979		 (const_int 0)))]
2980  "TARGET_32BIT"
2981  "bic%?\\t%0, %1, %1, asr #31"
2982  [(set_attr "predicable" "yes")]
2983)
2984
2985(define_insn "*smax_m1"
2986  [(set (match_operand:SI 0 "s_register_operand" "=r")
2987	(smax:SI (match_operand:SI 1 "s_register_operand" "r")
2988		 (const_int -1)))]
2989  "TARGET_32BIT"
2990  "orr%?\\t%0, %1, %1, asr #31"
2991  [(set_attr "predicable" "yes")]
2992)
2993
2994(define_insn "*arm_smax_insn"
2995  [(set (match_operand:SI          0 "s_register_operand" "=r,r")
2996	(smax:SI (match_operand:SI 1 "s_register_operand"  "%0,?r")
2997		 (match_operand:SI 2 "arm_rhs_operand"    "rI,rI")))
2998   (clobber (reg:CC CC_REGNUM))]
2999  "TARGET_ARM"
3000  "@
3001   cmp\\t%1, %2\;movlt\\t%0, %2
3002   cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3003  [(set_attr "conds" "clob")
3004   (set_attr "length" "8,12")]
3005)
3006
3007(define_expand "sminsi3"
3008  [(parallel [
3009    (set (match_operand:SI 0 "s_register_operand" "")
3010	 (smin:SI (match_operand:SI 1 "s_register_operand" "")
3011		  (match_operand:SI 2 "arm_rhs_operand" "")))
3012    (clobber (reg:CC CC_REGNUM))])]
3013  "TARGET_32BIT"
3014  "
3015  if (operands[2] == const0_rtx)
3016    {
3017      /* No need for a clobber of the condition code register here.  */
3018      emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3019			      gen_rtx_SMIN (SImode, operands[1],
3020					    operands[2])));
3021      DONE;
3022    }
3023")
3024
3025(define_insn "*smin_0"
3026  [(set (match_operand:SI 0 "s_register_operand" "=r")
3027	(smin:SI (match_operand:SI 1 "s_register_operand" "r")
3028		 (const_int 0)))]
3029  "TARGET_32BIT"
3030  "and%?\\t%0, %1, %1, asr #31"
3031  [(set_attr "predicable" "yes")]
3032)
3033
3034(define_insn "*arm_smin_insn"
3035  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3036	(smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3037		 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3038   (clobber (reg:CC CC_REGNUM))]
3039  "TARGET_ARM"
3040  "@
3041   cmp\\t%1, %2\;movge\\t%0, %2
3042   cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3043  [(set_attr "conds" "clob")
3044   (set_attr "length" "8,12")]
3045)
3046
3047(define_expand "umaxsi3"
3048  [(parallel [
3049    (set (match_operand:SI 0 "s_register_operand" "")
3050	 (umax:SI (match_operand:SI 1 "s_register_operand" "")
3051		  (match_operand:SI 2 "arm_rhs_operand" "")))
3052    (clobber (reg:CC CC_REGNUM))])]
3053  "TARGET_32BIT"
3054  ""
3055)
3056
3057(define_insn "*arm_umaxsi3"
3058  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3059	(umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3060		 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3061   (clobber (reg:CC CC_REGNUM))]
3062  "TARGET_ARM"
3063  "@
3064   cmp\\t%1, %2\;movcc\\t%0, %2
3065   cmp\\t%1, %2\;movcs\\t%0, %1
3066   cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3067  [(set_attr "conds" "clob")
3068   (set_attr "length" "8,8,12")]
3069)
3070
3071(define_expand "uminsi3"
3072  [(parallel [
3073    (set (match_operand:SI 0 "s_register_operand" "")
3074	 (umin:SI (match_operand:SI 1 "s_register_operand" "")
3075		  (match_operand:SI 2 "arm_rhs_operand" "")))
3076    (clobber (reg:CC CC_REGNUM))])]
3077  "TARGET_32BIT"
3078  ""
3079)
3080
3081(define_insn "*arm_uminsi3"
3082  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3083	(umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3084		 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3085   (clobber (reg:CC CC_REGNUM))]
3086  "TARGET_ARM"
3087  "@
3088   cmp\\t%1, %2\;movcs\\t%0, %2
3089   cmp\\t%1, %2\;movcc\\t%0, %1
3090   cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3091  [(set_attr "conds" "clob")
3092   (set_attr "length" "8,8,12")]
3093)
3094
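;; In the pattern below both stores are conditional on the compare, so in
;; Thumb-2 state they must be covered by an "ite" block; that extra
;; instruction accounts for the longer Thumb-2 length.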
3095(define_insn "*store_minmaxsi"
3096  [(set (match_operand:SI 0 "memory_operand" "=m")
3097	(match_operator:SI 3 "minmax_operator"
3098	 [(match_operand:SI 1 "s_register_operand" "r")
3099	  (match_operand:SI 2 "s_register_operand" "r")]))
3100   (clobber (reg:CC CC_REGNUM))]
3101  "TARGET_32BIT"
3102  "*
3103  operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3104				operands[1], operands[2]);
3105  output_asm_insn (\"cmp\\t%1, %2\", operands);
3106  if (TARGET_THUMB2)
3107    output_asm_insn (\"ite\t%d3\", operands);
3108  output_asm_insn (\"str%d3\\t%1, %0\", operands);
3109  output_asm_insn (\"str%D3\\t%2, %0\", operands);
3110  return \"\";
3111  "
3112  [(set_attr "conds" "clob")
3113   (set (attr "length")
3114	(if_then_else (eq_attr "is_thumb" "yes")
3115		      (const_int 14)
3116		      (const_int 12)))
3117   (set_attr "type" "store1")]
3118)
3119
3120; Reject the frame pointer in operand[1], since reloading this after
3121; it has been eliminated can cause carnage.
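; need_else is false only for the first alternative (operand 0 tied to
; operand 1) when operand 3 is zero and the operator is PLUS, IOR or XOR;
; in that case the "else" arm would merely combine operand 1 with zero,
; leaving the destination unchanged, so one conditional instruction is enough.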
3122(define_insn "*minmax_arithsi"
3123  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3124	(match_operator:SI 4 "shiftable_operator"
3125	 [(match_operator:SI 5 "minmax_operator"
3126	   [(match_operand:SI 2 "s_register_operand" "r,r")
3127	    (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3128	  (match_operand:SI 1 "s_register_operand" "0,?r")]))
3129   (clobber (reg:CC CC_REGNUM))]
3130  "TARGET_32BIT && !arm_eliminable_register (operands[1])"
3131  "*
3132  {
3133    enum rtx_code code = GET_CODE (operands[4]);
3134    bool need_else;
3135
3136    if (which_alternative != 0 || operands[3] != const0_rtx
3137        || (code != PLUS && code != IOR && code != XOR))
3138      need_else = true;
3139    else
3140      need_else = false;
3141
3142    operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3143				  operands[2], operands[3]);
3144    output_asm_insn (\"cmp\\t%2, %3\", operands);
3145    if (TARGET_THUMB2)
3146      {
3147	if (need_else)
3148	  output_asm_insn (\"ite\\t%d5\", operands);
3149	else
3150	  output_asm_insn (\"it\\t%d5\", operands);
3151      }
3152    output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3153    if (need_else)
3154      output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3155    return \"\";
3156  }"
3157  [(set_attr "conds" "clob")
3158   (set (attr "length")
3159	(if_then_else (eq_attr "is_thumb" "yes")
3160		      (const_int 14)
3161		      (const_int 12)))]
3162)
3163
3164
3165;; Shift and rotation insns
3166
3167(define_expand "ashldi3"
3168  [(set (match_operand:DI            0 "s_register_operand" "")
3169        (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3170                   (match_operand:SI 2 "reg_or_int_operand" "")))]
3171  "TARGET_32BIT"
3172  "
3173  if (GET_CODE (operands[2]) == CONST_INT)
3174    {
3175      if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3176        {
3177          emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3178          DONE;
3179        }
3180        /* Ideally we would not FAIL here if we knew that operands[1] will
3181           end up living in an iWMMXt register anyway; otherwise it is
3182           cheaper to generate the alternative code than to move values
3183           to iWMMXt registers and back.  */
3184        FAIL;
3185    }
3186  else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3187    FAIL;
3188  "
3189)
3190
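;; The 1-bit DImode shifts below move the bit that crosses the word boundary
;; through the carry flag: for the left shift, MOVS shifts the low word and
;; ADC doubles the high word while adding in the carried-out bit; the
;; right-shift variants further down shift the high word first and use RRX
;; to rotate the carry into bit 31 of the low word.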
3191(define_insn "arm_ashldi3_1bit"
3192  [(set (match_operand:DI            0 "s_register_operand" "=r,&r")
3193        (ashift:DI (match_operand:DI 1 "s_register_operand" "0,r")
3194                   (const_int 1)))
3195   (clobber (reg:CC CC_REGNUM))]
3196  "TARGET_32BIT"
3197  "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3198  [(set_attr "conds" "clob")
3199   (set_attr "length" "8")]
3200)
3201
3202(define_expand "ashlsi3"
3203  [(set (match_operand:SI            0 "s_register_operand" "")
3204	(ashift:SI (match_operand:SI 1 "s_register_operand" "")
3205		   (match_operand:SI 2 "arm_rhs_operand" "")))]
3206  "TARGET_EITHER"
3207  "
3208  if (GET_CODE (operands[2]) == CONST_INT
3209      && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3210    {
3211      emit_insn (gen_movsi (operands[0], const0_rtx));
3212      DONE;
3213    }
3214  "
3215)
3216
3217(define_insn "*thumb1_ashlsi3"
3218  [(set (match_operand:SI            0 "register_operand" "=l,l")
3219	(ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3220		   (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3221  "TARGET_THUMB1"
3222  "lsl\\t%0, %1, %2"
3223  [(set_attr "length" "2")]
3224)
3225
3226(define_expand "ashrdi3"
3227  [(set (match_operand:DI              0 "s_register_operand" "")
3228        (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3229                     (match_operand:SI 2 "reg_or_int_operand" "")))]
3230  "TARGET_32BIT"
3231  "
3232  if (GET_CODE (operands[2]) == CONST_INT)
3233    {
3234      if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3235        {
3236          emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3237          DONE;
3238        }
3239        /* Ideally we would not FAIL here if we knew that operands[1] will
3240           end up living in an iWMMXt register anyway; otherwise it is
3241           cheaper to generate the alternative code than to move values
3242           to iWMMXt registers and back.  */
3243        FAIL;
3244    }
3245  else if (!TARGET_REALLY_IWMMXT)
3246    FAIL;
3247  "
3248)
3249
3250(define_insn "arm_ashrdi3_1bit"
3251  [(set (match_operand:DI              0 "s_register_operand" "=r,&r")
3252        (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3253                     (const_int 1)))
3254   (clobber (reg:CC CC_REGNUM))]
3255  "TARGET_32BIT"
3256  "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3257  [(set_attr "conds" "clob")
3258   (set_attr "length" "8")]
3259)
3260
3261(define_expand "ashrsi3"
3262  [(set (match_operand:SI              0 "s_register_operand" "")
3263	(ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3264		     (match_operand:SI 2 "arm_rhs_operand" "")))]
3265  "TARGET_EITHER"
3266  "
3267  if (GET_CODE (operands[2]) == CONST_INT
3268      && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3269    operands[2] = GEN_INT (31);
3270  "
3271)
3272
3273(define_insn "*thumb1_ashrsi3"
3274  [(set (match_operand:SI              0 "register_operand" "=l,l")
3275	(ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3276		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3277  "TARGET_THUMB1"
3278  "asr\\t%0, %1, %2"
3279  [(set_attr "length" "2")]
3280)
3281
3282(define_expand "lshrdi3"
3283  [(set (match_operand:DI              0 "s_register_operand" "")
3284        (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3285                     (match_operand:SI 2 "reg_or_int_operand" "")))]
3286  "TARGET_32BIT"
3287  "
3288  if (GET_CODE (operands[2]) == CONST_INT)
3289    {
3290      if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3291        {
3292          emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3293          DONE;
3294        }
3295        /* Ideally we would not FAIL here if we knew that operands[1] will
3296           end up living in an iWMMXt register anyway; otherwise it is
3297           cheaper to generate the alternative code than to move values
3298           to iWMMXt registers and back.  */
3299        FAIL;
3300    }
3301  else if (!TARGET_REALLY_IWMMXT)
3302    FAIL;
3303  "
3304)
3305
3306(define_insn "arm_lshrdi3_1bit"
3307  [(set (match_operand:DI              0 "s_register_operand" "=r,&r")
3308        (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "0,r")
3309                     (const_int 1)))
3310   (clobber (reg:CC CC_REGNUM))]
3311  "TARGET_32BIT"
3312  "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3313  [(set_attr "conds" "clob")
3314   (set_attr "length" "8")]
3315)
3316
3317(define_expand "lshrsi3"
3318  [(set (match_operand:SI              0 "s_register_operand" "")
3319	(lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3320		     (match_operand:SI 2 "arm_rhs_operand" "")))]
3321  "TARGET_EITHER"
3322  "
3323  if (GET_CODE (operands[2]) == CONST_INT
3324      && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3325    {
3326      emit_insn (gen_movsi (operands[0], const0_rtx));
3327      DONE;
3328    }
3329  "
3330)
3331
3332(define_insn "*thumb1_lshrsi3"
3333  [(set (match_operand:SI              0 "register_operand" "=l,l")
3334	(lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3335		     (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3336  "TARGET_THUMB1"
3337  "lsr\\t%0, %1, %2"
3338  [(set_attr "length" "2")]
3339)
3340
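;; There is no rotate-left instruction, so rotlsi3 is implemented as a rotate
;; right by (32 - n) mod 32; for example, a rotate left by 8 becomes a "ror"
;; by 24, and a register count is first subtracted from 32.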
3341(define_expand "rotlsi3"
3342  [(set (match_operand:SI              0 "s_register_operand" "")
3343	(rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3344		     (match_operand:SI 2 "reg_or_int_operand" "")))]
3345  "TARGET_32BIT"
3346  "
3347  if (GET_CODE (operands[2]) == CONST_INT)
3348    operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3349  else
3350    {
3351      rtx reg = gen_reg_rtx (SImode);
3352      emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3353      operands[2] = reg;
3354    }
3355  "
3356)
3357
3358(define_expand "rotrsi3"
3359  [(set (match_operand:SI              0 "s_register_operand" "")
3360	(rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3361		     (match_operand:SI 2 "arm_rhs_operand" "")))]
3362  "TARGET_EITHER"
3363  "
3364  if (TARGET_32BIT)
3365    {
3366      if (GET_CODE (operands[2]) == CONST_INT
3367          && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3368        operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3369    }
3370  else /* TARGET_THUMB1 */
3371    {
3372      if (GET_CODE (operands[2]) == CONST_INT)
3373        operands[2] = force_reg (SImode, operands[2]);
3374    }
3375  "
3376)
3377
3378(define_insn "*thumb1_rotrsi3"
3379  [(set (match_operand:SI              0 "register_operand" "=l")
3380	(rotatert:SI (match_operand:SI 1 "register_operand" "0")
3381		     (match_operand:SI 2 "register_operand" "l")))]
3382  "TARGET_THUMB1"
3383  "ror\\t%0, %0, %2"
3384  [(set_attr "length" "2")]
3385)
3386
3387(define_insn "*arm_shiftsi3"
3388  [(set (match_operand:SI   0 "s_register_operand" "=r")
3389	(match_operator:SI  3 "shift_operator"
3390	 [(match_operand:SI 1 "s_register_operand"  "r")
3391	  (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3392  "TARGET_32BIT"
3393  "* return arm_output_shift(operands, 0);"
3394  [(set_attr "predicable" "yes")
3395   (set_attr "shift" "1")
3396   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3397		      (const_string "alu_shift")
3398		      (const_string "alu_shift_reg")))]
3399)
3400
3401(define_insn "*shiftsi3_compare0"
3402  [(set (reg:CC_NOOV CC_REGNUM)
3403	(compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3404			  [(match_operand:SI 1 "s_register_operand" "r")
3405			   (match_operand:SI 2 "arm_rhs_operand" "rM")])
3406			 (const_int 0)))
3407   (set (match_operand:SI 0 "s_register_operand" "=r")
3408	(match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3409  "TARGET_32BIT"
3410  "* return arm_output_shift(operands, 1);"
3411  [(set_attr "conds" "set")
3412   (set_attr "shift" "1")
3413   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3414		      (const_string "alu_shift")
3415		      (const_string "alu_shift_reg")))]
3416)
3417
3418(define_insn "*shiftsi3_compare0_scratch"
3419  [(set (reg:CC_NOOV CC_REGNUM)
3420	(compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3421			  [(match_operand:SI 1 "s_register_operand" "r")
3422			   (match_operand:SI 2 "arm_rhs_operand" "rM")])
3423			 (const_int 0)))
3424   (clobber (match_scratch:SI 0 "=r"))]
3425  "TARGET_32BIT"
3426  "* return arm_output_shift(operands, 1);"
3427  [(set_attr "conds" "set")
3428   (set_attr "shift" "1")]
3429)
3430
3431(define_insn "*arm_notsi_shiftsi"
3432  [(set (match_operand:SI 0 "s_register_operand" "=r")
3433	(not:SI (match_operator:SI 3 "shift_operator"
3434		 [(match_operand:SI 1 "s_register_operand" "r")
3435		  (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3436  "TARGET_ARM"
3437  "mvn%?\\t%0, %1%S3"
3438  [(set_attr "predicable" "yes")
3439   (set_attr "shift" "1")
3440   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3441		      (const_string "alu_shift")
3442		      (const_string "alu_shift_reg")))]
3443)
3444
3445(define_insn "*arm_notsi_shiftsi_compare0"
3446  [(set (reg:CC_NOOV CC_REGNUM)
3447	(compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3448			  [(match_operand:SI 1 "s_register_operand" "r")
3449			   (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3450			 (const_int 0)))
3451   (set (match_operand:SI 0 "s_register_operand" "=r")
3452	(not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3453  "TARGET_ARM"
3454  "mvn%.\\t%0, %1%S3"
3455  [(set_attr "conds" "set")
3456   (set_attr "shift" "1")
3457   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3458		      (const_string "alu_shift")
3459		      (const_string "alu_shift_reg")))]
3460)
3461
3462(define_insn "*arm_not_shiftsi_compare0_scratch"
3463  [(set (reg:CC_NOOV CC_REGNUM)
3464	(compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3465			  [(match_operand:SI 1 "s_register_operand" "r")
3466			   (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3467			 (const_int 0)))
3468   (clobber (match_scratch:SI 0 "=r"))]
3469  "TARGET_ARM"
3470  "mvn%.\\t%0, %1%S3"
3471  [(set_attr "conds" "set")
3472   (set_attr "shift" "1")
3473   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3474		      (const_string "alu_shift")
3475		      (const_string "alu_shift_reg")))]
3476)
3477
3478;; We don't really have extzv, but defining this using shifts helps
3479;; to reduce register pressure later on.
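;; As a rough worked example: extracting a 5-bit field that starts at bit 3
;; becomes a left shift by 32 - 5 - 3 = 24 followed by a logical right shift
;; by 32 - 5 = 27, matching the lshift/rshift values computed below.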
3480
3481(define_expand "extzv"
3482  [(set (match_dup 4)
3483	(ashift:SI (match_operand:SI   1 "register_operand" "")
3484		   (match_operand:SI   2 "const_int_operand" "")))
3485   (set (match_operand:SI              0 "register_operand" "")
3486	(lshiftrt:SI (match_dup 4)
3487		     (match_operand:SI 3 "const_int_operand" "")))]
3488  "TARGET_THUMB1 || arm_arch_thumb2"
3489  "
3490  {
3491    HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3492    HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3493
3494    if (arm_arch_thumb2)
3495      {
3496	emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3497				 operands[3]));
3498	DONE;
3499      }
3500
3501    operands[3] = GEN_INT (rshift);
3502
3503    if (lshift == 0)
3504      {
3505        emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3506        DONE;
3507      }
3508
3509    operands[2] = GEN_INT (lshift);
3510    operands[4] = gen_reg_rtx (SImode);
3511  }"
3512)
3513
3514(define_insn "extv"
3515  [(set (match_operand:SI 0 "s_register_operand" "=r")
3516	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3517                         (match_operand:SI 2 "const_int_operand" "M")
3518                         (match_operand:SI 3 "const_int_operand" "M")))]
3519  "arm_arch_thumb2"
3520  "sbfx%?\t%0, %1, %3, %2"
3521  [(set_attr "length" "4")
3522   (set_attr "predicable" "yes")]
3523)
3524
3525(define_insn "extzv_t2"
3526  [(set (match_operand:SI 0 "s_register_operand" "=r")
3527	(zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3528                         (match_operand:SI 2 "const_int_operand" "M")
3529                         (match_operand:SI 3 "const_int_operand" "M")))]
3530  "arm_arch_thumb2"
3531  "ubfx%?\t%0, %1, %3, %2"
3532  [(set_attr "length" "4")
3533   (set_attr "predicable" "yes")]
3534)
3535
3536
3537;; Unary arithmetic insns
3538
3539(define_expand "negdi2"
3540 [(parallel
3541   [(set (match_operand:DI          0 "s_register_operand" "")
3542	  (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3543    (clobber (reg:CC CC_REGNUM))])]
3544  "TARGET_EITHER"
3545  "
3546  if (TARGET_THUMB1)
3547    {
3548      if (GET_CODE (operands[1]) != REG)
3549        operands[1] = force_reg (DImode, operands[1]);
3550     }
3551  "
3552)
3553
3554;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3555;; The first alternative allows the common case of a *full* overlap.
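;; The sequence negates the value a word at a time: RSBS computes 0 - low,
;; setting the carry flag from the borrow, and RSC then computes
;; 0 - high - !C for the upper word.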
3556(define_insn "*arm_negdi2"
3557  [(set (match_operand:DI         0 "s_register_operand" "=r,&r")
3558	(neg:DI (match_operand:DI 1 "s_register_operand"  "0,r")))
3559   (clobber (reg:CC CC_REGNUM))]
3560  "TARGET_ARM"
3561  "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3562  [(set_attr "conds" "clob")
3563   (set_attr "length" "8")]
3564)
3565
3566(define_insn "*thumb1_negdi2"
3567  [(set (match_operand:DI         0 "register_operand" "=&l")
3568	(neg:DI (match_operand:DI 1 "register_operand"   "l")))
3569   (clobber (reg:CC CC_REGNUM))]
3570  "TARGET_THUMB1"
3571  "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3572  [(set_attr "length" "6")]
3573)
3574
3575(define_expand "negsi2"
3576  [(set (match_operand:SI         0 "s_register_operand" "")
3577	(neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3578  "TARGET_EITHER"
3579  ""
3580)
3581
3582(define_insn "*arm_negsi2"
3583  [(set (match_operand:SI         0 "s_register_operand" "=r")
3584	(neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3585  "TARGET_32BIT"
3586  "rsb%?\\t%0, %1, #0"
3587  [(set_attr "predicable" "yes")]
3588)
3589
3590(define_insn "*thumb1_negsi2"
3591  [(set (match_operand:SI         0 "register_operand" "=l")
3592	(neg:SI (match_operand:SI 1 "register_operand" "l")))]
3593  "TARGET_THUMB1"
3594  "neg\\t%0, %1"
3595  [(set_attr "length" "2")]
3596)
3597
3598(define_expand "negsf2"
3599  [(set (match_operand:SF         0 "s_register_operand" "")
3600	(neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3601  "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3602  ""
3603)
3604
3605(define_expand "negdf2"
3606  [(set (match_operand:DF         0 "s_register_operand" "")
3607	(neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3608  "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3609  "")
3610
3611;; abssi2 doesn't really clobber the condition codes if a different register
3612;; is being set.  To keep things simple, assume during rtl manipulations that
3613;; it does, but tell the final scan operator the truth.  Similarly for
3614;; (neg (abs...))
3615
3616(define_expand "abssi2"
3617  [(parallel
3618    [(set (match_operand:SI         0 "s_register_operand" "")
3619	  (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3620     (clobber (match_dup 2))])]
3621  "TARGET_EITHER"
3622  "
3623  if (TARGET_THUMB1)
3624    operands[2] = gen_rtx_SCRATCH (SImode);
3625  else
3626    operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3627")
3628
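;; The second alternative below uses the usual sign-mask identity with
;; S = X >> 31: abs (X) = (X ^ S) - S.  The Thumb-1 splitter further down
;; uses the equivalent (X + S) ^ S form, and the neg-abs patterns below
;; negate via S - (X ^ S) (ARM) and (S - X) ^ S (Thumb-1).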
3629(define_insn "*arm_abssi2"
3630  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3631	(abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3632   (clobber (reg:CC CC_REGNUM))]
3633  "TARGET_ARM"
3634  "@
3635   cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3636   eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3637  [(set_attr "conds" "clob,*")
3638   (set_attr "shift" "1")
3639   ;; predicable can't be set based on the variant, so left as no
3640   (set_attr "length" "8")]
3641)
3642
3643(define_insn_and_split "*thumb1_abssi2"
3644  [(set (match_operand:SI 0 "s_register_operand" "=l")
3645	(abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3646   (clobber (match_scratch:SI 2 "=&l"))]
3647  "TARGET_THUMB1"
3648  "#"
3649  "TARGET_THUMB1 && reload_completed"
3650  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3651   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3652   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3653  ""
3654  [(set_attr "length" "6")]
3655)
3656
3657(define_insn "*arm_neg_abssi2"
3658  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3659	(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3660   (clobber (reg:CC CC_REGNUM))]
3661  "TARGET_ARM"
3662  "@
3663   cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3664   eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3665  [(set_attr "conds" "clob,*")
3666   (set_attr "shift" "1")
3667   ;; predicable can't be set based on the variant, so left as no
3668   (set_attr "length" "8")]
3669)
3670
3671(define_insn_and_split "*thumb1_neg_abssi2"
3672  [(set (match_operand:SI 0 "s_register_operand" "=l")
3673	(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3674   (clobber (match_scratch:SI 2 "=&l"))]
3675  "TARGET_THUMB1"
3676  "#"
3677  "TARGET_THUMB1 && reload_completed"
3678  [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3679   (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3680   (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3681  ""
3682  [(set_attr "length" "6")]
3683)
3684
3685(define_expand "abssf2"
3686  [(set (match_operand:SF         0 "s_register_operand" "")
3687	(abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3688  "TARGET_32BIT && TARGET_HARD_FLOAT"
3689  "")
3690
3691(define_expand "absdf2"
3692  [(set (match_operand:DF         0 "s_register_operand" "")
3693	(abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3694  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3695  "")
3696
3697(define_expand "sqrtsf2"
3698  [(set (match_operand:SF 0 "s_register_operand" "")
3699	(sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3700  "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3701  "")
3702
3703(define_expand "sqrtdf2"
3704  [(set (match_operand:DF 0 "s_register_operand" "")
3705	(sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3706  "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
3707  "")
3708
3709(define_insn_and_split "one_cmpldi2"
3710  [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3711	(not:DI (match_operand:DI 1 "s_register_operand" "0,r")))]
3712  "TARGET_32BIT"
3713  "#"
3714  "TARGET_32BIT && reload_completed"
3715  [(set (match_dup 0) (not:SI (match_dup 1)))
3716   (set (match_dup 2) (not:SI (match_dup 3)))]
3717  "
3718  {
3719    operands[2] = gen_highpart (SImode, operands[0]);
3720    operands[0] = gen_lowpart (SImode, operands[0]);
3721    operands[3] = gen_highpart (SImode, operands[1]);
3722    operands[1] = gen_lowpart (SImode, operands[1]);
3723  }"
3724  [(set_attr "length" "8")
3725   (set_attr "predicable" "yes")]
3726)
3727
3728(define_expand "one_cmplsi2"
3729  [(set (match_operand:SI         0 "s_register_operand" "")
3730	(not:SI (match_operand:SI 1 "s_register_operand" "")))]
3731  "TARGET_EITHER"
3732  ""
3733)
3734
3735(define_insn "*arm_one_cmplsi2"
3736  [(set (match_operand:SI         0 "s_register_operand" "=r")
3737	(not:SI (match_operand:SI 1 "s_register_operand"  "r")))]
3738  "TARGET_32BIT"
3739  "mvn%?\\t%0, %1"
3740  [(set_attr "predicable" "yes")]
3741)
3742
3743(define_insn "*thumb1_one_cmplsi2"
3744  [(set (match_operand:SI         0 "register_operand" "=l")
3745	(not:SI (match_operand:SI 1 "register_operand"  "l")))]
3746  "TARGET_THUMB1"
3747  "mvn\\t%0, %1"
3748  [(set_attr "length" "2")]
3749)
3750
3751(define_insn "*notsi_compare0"
3752  [(set (reg:CC_NOOV CC_REGNUM)
3753	(compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3754			 (const_int 0)))
3755   (set (match_operand:SI 0 "s_register_operand" "=r")
3756	(not:SI (match_dup 1)))]
3757  "TARGET_32BIT"
3758  "mvn%.\\t%0, %1"
3759  [(set_attr "conds" "set")]
3760)
3761
3762(define_insn "*notsi_compare0_scratch"
3763  [(set (reg:CC_NOOV CC_REGNUM)
3764	(compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3765			 (const_int 0)))
3766   (clobber (match_scratch:SI 0 "=r"))]
3767  "TARGET_32BIT"
3768  "mvn%.\\t%0, %1"
3769  [(set_attr "conds" "set")]
3770)
3771
3772;; Fixed <--> Floating conversion insns
3773
3774(define_expand "floatsihf2"
3775  [(set (match_operand:HF           0 "general_operand" "")
3776	(float:HF (match_operand:SI 1 "general_operand" "")))]
3777  "TARGET_EITHER"
3778  "
3779  {
3780    rtx op1 = gen_reg_rtx (SFmode);
3781    expand_float (op1, operands[1], 0);
3782    op1 = convert_to_mode (HFmode, op1, 0);
3783    emit_move_insn (operands[0], op1);
3784    DONE;
3785  }"
3786)
3787
3788(define_expand "floatdihf2"
3789  [(set (match_operand:HF           0 "general_operand" "")
3790	(float:HF (match_operand:DI 1 "general_operand" "")))]
3791  "TARGET_EITHER"
3792  "
3793  {
3794    rtx op1 = gen_reg_rtx (SFmode);
3795    expand_float (op1, operands[1], 0);
3796    op1 = convert_to_mode (HFmode, op1, 0);
3797    emit_move_insn (operands[0], op1);
3798    DONE;
3799  }"
3800)
3801
3802(define_expand "floatsisf2"
3803  [(set (match_operand:SF           0 "s_register_operand" "")
3804	(float:SF (match_operand:SI 1 "s_register_operand" "")))]
3805  "TARGET_32BIT && TARGET_HARD_FLOAT"
3806  "
3807  if (TARGET_MAVERICK)
3808    {
3809      emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3810      DONE;
3811    }
3812")
3813
3814(define_expand "floatsidf2"
3815  [(set (match_operand:DF           0 "s_register_operand" "")
3816	(float:DF (match_operand:SI 1 "s_register_operand" "")))]
3817  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3818  "
3819  if (TARGET_MAVERICK)
3820    {
3821      emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3822      DONE;
3823    }
3824")
3825
3826(define_expand "fix_trunchfsi2"
3827  [(set (match_operand:SI         0 "general_operand" "")
3828	(fix:SI (fix:HF (match_operand:HF 1 "general_operand"  ""))))]
3829  "TARGET_EITHER"
3830  "
3831  {
3832    rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3833    expand_fix (operands[0], op1, 0);
3834    DONE;
3835  }"
3836)
3837
3838(define_expand "fix_trunchfdi2"
3839  [(set (match_operand:DI         0 "general_operand" "")
3840	(fix:DI (fix:HF (match_operand:HF 1 "general_operand"  ""))))]
3841  "TARGET_EITHER"
3842  "
3843  {
3844    rtx op1 = convert_to_mode (SFmode, operands[1], 0);
3845    expand_fix (operands[0], op1, 0);
3846    DONE;
3847  }"
3848)
3849
3850(define_expand "fix_truncsfsi2"
3851  [(set (match_operand:SI         0 "s_register_operand" "")
3852	(fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"  ""))))]
3853  "TARGET_32BIT && TARGET_HARD_FLOAT"
3854  "
3855  if (TARGET_MAVERICK)
3856    {
3857      if (!cirrus_fp_register (operands[0], SImode))
3858        operands[0] = force_reg (SImode, operands[0]);
3859      if (!cirrus_fp_register (operands[1], SFmode))
3860        operands[1] = force_reg (SFmode, operands[1]);
3861      emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3862      DONE;
3863    }
3864")
3865
3866(define_expand "fix_truncdfsi2"
3867  [(set (match_operand:SI         0 "s_register_operand" "")
3868	(fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"  ""))))]
3869  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3870  "
3871  if (TARGET_MAVERICK)
3872    {
3873      if (!cirrus_fp_register (operands[1], DFmode))
3874        operands[1] = force_reg (DFmode, operands[1]);
3875      emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3876      DONE;
3877    }
3878")
3879
3880;; Truncation insns
3881
3882(define_expand "truncdfsf2"
3883  [(set (match_operand:SF  0 "s_register_operand" "")
3884	(float_truncate:SF
3885 	 (match_operand:DF 1 "s_register_operand" "")))]
3886  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
3887  ""
3888)
3889
3890;; DFmode -> HFmode conversions have to go through SFmode.
3891(define_expand "truncdfhf2"
3892  [(set (match_operand:HF  0 "general_operand" "")
3893	(float_truncate:HF
3894 	 (match_operand:DF 1 "general_operand" "")))]
3895  "TARGET_EITHER"
3896  "
3897  {
3898    rtx op1;
3899    op1 = convert_to_mode (SFmode, operands[1], 0);
3900    op1 = convert_to_mode (HFmode, op1, 0);
3901    emit_move_insn (operands[0], op1);
3902    DONE;
3903  }"
3904)
3905
3906;; Zero and sign extension instructions.
3907
3908(define_expand "zero_extendsidi2"
3909  [(set (match_operand:DI 0 "s_register_operand" "")
3910        (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3911  "TARGET_32BIT"
3912  ""
3913)
3914
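;; %Q0 below is the register holding the least significant word of the DImode
;; destination, so the first move is skipped when operand 1 is already that
;; register and only the high word needs clearing.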
3915(define_insn "*arm_zero_extendsidi2"
3916  [(set (match_operand:DI 0 "s_register_operand" "=r")
3917        (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3918  "TARGET_ARM"
3919  "*
3920    if (REGNO (operands[1])
3921        != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3922      output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3923    return \"mov%?\\t%R0, #0\";
3924  "
3925  [(set_attr "length" "8")
3926   (set_attr "predicable" "yes")]
3927)
3928
3929(define_expand "zero_extendqidi2"
3930  [(set (match_operand:DI                 0 "s_register_operand"  "")
3931	(zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3932  "TARGET_32BIT"
3933  ""
3934)
3935
3936(define_insn "*arm_zero_extendqidi2"
3937  [(set (match_operand:DI                 0 "s_register_operand"  "=r,r")
3938	(zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3939  "TARGET_ARM"
3940  "@
3941   and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3942   ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3943  [(set_attr "length" "8")
3944   (set_attr "predicable" "yes")
3945   (set_attr "type" "*,load_byte")
3946   (set_attr "pool_range" "*,4092")
3947   (set_attr "neg_pool_range" "*,4084")]
3948)
3949
3950(define_expand "extendsidi2"
3951  [(set (match_operand:DI 0 "s_register_operand" "")
3952        (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3953  "TARGET_32BIT"
3954  ""
3955)
3956
3957(define_insn "*arm_extendsidi2"
3958  [(set (match_operand:DI 0 "s_register_operand" "=r")
3959        (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3960  "TARGET_ARM"
3961  "*
3962    if (REGNO (operands[1])
3963        != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3964      output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3965    return \"mov%?\\t%R0, %Q0, asr #31\";
3966  "
3967  [(set_attr "length" "8")
3968   (set_attr "shift" "1")
3969   (set_attr "predicable" "yes")]
3970)
3971
3972(define_expand "zero_extendhisi2"
3973  [(set (match_dup 2)
3974	(ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3975		   (const_int 16)))
3976   (set (match_operand:SI 0 "s_register_operand" "")
3977	(lshiftrt:SI (match_dup 2) (const_int 16)))]
3978  "TARGET_EITHER"
3979  "
3980  {
3981    if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3982      {
3983	emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3984				gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3985	DONE;
3986      }
3987
3988    if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3989      {
3990	emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3991	DONE;
3992      }
3993
3994    if (!s_register_operand (operands[1], HImode))
3995      operands[1] = copy_to_mode_reg (HImode, operands[1]);
3996
3997    if (arm_arch6)
3998      {
3999	emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4000				gen_rtx_ZERO_EXTEND (SImode, operands[1])));
4001	DONE;
4002      }
4003
4004    operands[1] = gen_lowpart (SImode, operands[1]);
4005    operands[2] = gen_reg_rtx (SImode);
4006  }"
4007)
4008
4009(define_insn "*thumb1_zero_extendhisi2"
4010  [(set (match_operand:SI 0 "register_operand" "=l")
4011	(zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4012  "TARGET_THUMB1 && !arm_arch6"
4013  "*
4014  rtx mem = XEXP (operands[1], 0);
4015
4016  if (GET_CODE (mem) == CONST)
4017    mem = XEXP (mem, 0);
4018
4019  if (GET_CODE (mem) == LABEL_REF)
4020    return \"ldr\\t%0, %1\";
4021
4022  if (GET_CODE (mem) == PLUS)
4023    {
4024      rtx a = XEXP (mem, 0);
4025      rtx b = XEXP (mem, 1);
4026
4027      /* This can happen due to bugs in reload.  */
4028      if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4029        {
4030          rtx ops[2];
4031          ops[0] = operands[0];
4032          ops[1] = a;
4033
4034          output_asm_insn (\"mov	%0, %1\", ops);
4035
4036          XEXP (mem, 0) = operands[0];
4037       }
4038
4039      else if (   GET_CODE (a) == LABEL_REF
4040	       && GET_CODE (b) == CONST_INT)
4041        return \"ldr\\t%0, %1\";
4042    }
4043
4044  return \"ldrh\\t%0, %1\";
4045  "
4046  [(set_attr "length" "4")
4047   (set_attr "type" "load_byte")
4048   (set_attr "pool_range" "60")]
4049)
4050
4051(define_insn "*thumb1_zero_extendhisi2_v6"
4052  [(set (match_operand:SI 0 "register_operand" "=l,l")
4053	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
4054  "TARGET_THUMB1 && arm_arch6"
4055  "*
4056  rtx mem;
4057
4058  if (which_alternative == 0)
4059    return \"uxth\\t%0, %1\";
4060
4061  mem = XEXP (operands[1], 0);
4062
4063  if (GET_CODE (mem) == CONST)
4064    mem = XEXP (mem, 0);
4065
4066  if (GET_CODE (mem) == LABEL_REF)
4067    return \"ldr\\t%0, %1\";
4068
4069  if (GET_CODE (mem) == PLUS)
4070    {
4071      rtx a = XEXP (mem, 0);
4072      rtx b = XEXP (mem, 1);
4073
4074      /* This can happen due to bugs in reload.  */
4075      if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
4076        {
4077          rtx ops[2];
4078          ops[0] = operands[0];
4079          ops[1] = a;
4080
4081          output_asm_insn (\"mov	%0, %1\", ops);
4082
4083          XEXP (mem, 0) = operands[0];
4084       }
4085
4086      else if (   GET_CODE (a) == LABEL_REF
4087	       && GET_CODE (b) == CONST_INT)
4088        return \"ldr\\t%0, %1\";
4089    }
4090
4091  return \"ldrh\\t%0, %1\";
4092  "
4093  [(set_attr "length" "2,4")
4094   (set_attr "type" "alu_shift,load_byte")
4095   (set_attr "pool_range" "*,60")]
4096)
4097
4098(define_insn "*arm_zero_extendhisi2"
4099  [(set (match_operand:SI 0 "s_register_operand" "=r")
4100	(zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4101  "TARGET_ARM && arm_arch4 && !arm_arch6"
4102  "ldr%(h%)\\t%0, %1"
4103  [(set_attr "type" "load_byte")
4104   (set_attr "predicable" "yes")
4105   (set_attr "pool_range" "256")
4106   (set_attr "neg_pool_range" "244")]
4107)
4108
4109(define_insn "*arm_zero_extendhisi2_v6"
4110  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4111	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4112  "TARGET_ARM && arm_arch6"
4113  "@
4114   uxth%?\\t%0, %1
4115   ldr%(h%)\\t%0, %1"
4116  [(set_attr "type" "alu_shift,load_byte")
4117   (set_attr "predicable" "yes")
4118   (set_attr "pool_range" "*,256")
4119   (set_attr "neg_pool_range" "*,244")]
4120)
4121
4122(define_insn "*arm_zero_extendhisi2addsi"
4123  [(set (match_operand:SI 0 "s_register_operand" "=r")
4124	(plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4125		 (match_operand:SI 2 "s_register_operand" "r")))]
4126  "TARGET_INT_SIMD"
4127  "uxtah%?\\t%0, %2, %1"
4128  [(set_attr "type" "alu_shift")
4129   (set_attr "predicable" "yes")]
4130)
4131
4132(define_expand "zero_extendqisi2"
4133  [(set (match_operand:SI 0 "s_register_operand" "")
4134	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
4135  "TARGET_EITHER"
4136  "
4137  if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
4138    {
4139      if (TARGET_ARM)
4140        {
4141          emit_insn (gen_andsi3 (operands[0],
4142				 gen_lowpart (SImode, operands[1]),
4143			         GEN_INT (255)));
4144        }
4145      else /* TARGET_THUMB */
4146        {
4147          rtx temp = gen_reg_rtx (SImode);
4148	  rtx ops[3];
4149
4150          operands[1] = copy_to_mode_reg (QImode, operands[1]);
4151          operands[1] = gen_lowpart (SImode, operands[1]);
4152
4153	  ops[0] = temp;
4154	  ops[1] = operands[1];
4155	  ops[2] = GEN_INT (24);
4156
4157	  emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4158				  gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
4159
4160          ops[0] = operands[0];
4161	  ops[1] = temp;
4162	  ops[2] = GEN_INT (24);
4163
4164	  emit_insn (gen_rtx_SET (VOIDmode, ops[0],
4165				  gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
4166	}
4167      DONE;
4168    }
4169  "
4170)
4171
4172(define_insn "*thumb1_zero_extendqisi2"
4173  [(set (match_operand:SI 0 "register_operand" "=l")
4174	(zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4175  "TARGET_THUMB1 && !arm_arch6"
4176  "ldrb\\t%0, %1"
4177  [(set_attr "length" "2")
4178   (set_attr "type" "load_byte")
4179   (set_attr "pool_range" "32")]
4180)
4181
4182(define_insn "*thumb1_zero_extendqisi2_v6"
4183  [(set (match_operand:SI 0 "register_operand" "=l,l")
4184	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
4185  "TARGET_THUMB1 && arm_arch6"
4186  "@
4187   uxtb\\t%0, %1
4188   ldrb\\t%0, %1"
4189  [(set_attr "length" "2,2")
4190   (set_attr "type" "alu_shift,load_byte")
4191   (set_attr "pool_range" "*,32")]
4192)
4193
4194(define_insn "*arm_zero_extendqisi2"
4195  [(set (match_operand:SI 0 "s_register_operand" "=r")
4196	(zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
4197  "TARGET_ARM && !arm_arch6"
4198  "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4199  [(set_attr "type" "load_byte")
4200   (set_attr "predicable" "yes")
4201   (set_attr "pool_range" "4096")
4202   (set_attr "neg_pool_range" "4084")]
4203)
4204
4205(define_insn "*arm_zero_extendqisi2_v6"
4206  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4207	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
4208  "TARGET_ARM && arm_arch6"
4209  "@
4210   uxtb%(%)\\t%0, %1
4211   ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
4212  [(set_attr "type" "alu_shift,load_byte")
4213   (set_attr "predicable" "yes")
4214   (set_attr "pool_range" "*,4096")
4215   (set_attr "neg_pool_range" "*,4084")]
4216)
4217
4218(define_insn "*arm_zero_extendqisi2addsi"
4219  [(set (match_operand:SI 0 "s_register_operand" "=r")
4220	(plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4221		 (match_operand:SI 2 "s_register_operand" "r")))]
4222  "TARGET_INT_SIMD"
4223  "uxtab%?\\t%0, %2, %1"
4224  [(set_attr "predicable" "yes")
4225   (set_attr "insn" "xtab")
4226   (set_attr "type" "alu_shift")]
4227)
4228
4229(define_split
4230  [(set (match_operand:SI 0 "s_register_operand" "")
4231	(zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4232   (clobber (match_operand:SI 2 "s_register_operand" ""))]
4233  "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4234  [(set (match_dup 2) (match_dup 1))
4235   (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4236  ""
4237)
4238
4239(define_split
4240  [(set (match_operand:SI 0 "s_register_operand" "")
4241	(zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4242   (clobber (match_operand:SI 2 "s_register_operand" ""))]
4243  "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4244  [(set (match_dup 2) (match_dup 1))
4245   (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4246  ""
4247)
4248
4249(define_code_iterator ior_xor [ior xor])
4250
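;; In the split below the AND mask must be exactly the narrow-mode bits that
;; the shifted value can occupy, so the masking becomes redundant once the
;; result is zero-extended; the split therefore performs the IOR/XOR at full
;; width and finishes with a single zero_extend of the low part.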
4251(define_split
4252  [(set (match_operand:SI 0 "s_register_operand" "")
4253	(ior_xor:SI (and:SI (ashift:SI
4254			     (match_operand:SI 1 "s_register_operand" "")
4255			     (match_operand:SI 2 "const_int_operand" ""))
4256			    (match_operand:SI 3 "const_int_operand" ""))
4257		    (zero_extend:SI
4258		     (match_operator 5 "subreg_lowpart_operator"
4259		      [(match_operand:SI 4 "s_register_operand" "")]))))]
4260  "TARGET_32BIT
4261   && ((unsigned HOST_WIDE_INT) INTVAL (operands[3])
4262       == (GET_MODE_MASK (GET_MODE (operands[5]))
4263           & (GET_MODE_MASK (GET_MODE (operands[5]))
4264	      << (INTVAL (operands[2])))))"
4265  [(set (match_dup 0) (ior_xor:SI (ashift:SI (match_dup 1) (match_dup 2))
4266				  (match_dup 4)))
4267   (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
4268  "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
4269)
4270
4271(define_insn "*compareqi_eq0"
4272  [(set (reg:CC_Z CC_REGNUM)
4273	(compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4274			 (const_int 0)))]
4275  "TARGET_32BIT"
4276  "tst\\t%0, #255"
4277  [(set_attr "conds" "set")]
4278)
4279
4280(define_expand "extendhisi2"
4281  [(set (match_dup 2)
4282	(ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4283		   (const_int 16)))
4284   (set (match_operand:SI 0 "s_register_operand" "")
4285	(ashiftrt:SI (match_dup 2)
4286		     (const_int 16)))]
4287  "TARGET_EITHER"
4288  "
4289  {
4290    if (GET_CODE (operands[1]) == MEM)
4291      {
4292	if (TARGET_THUMB1)
4293	  {
4294	    emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4295	    DONE;
4296          }
4297	else if (arm_arch4)
4298	  {
4299	    emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4300		       gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4301	    DONE;
4302	  }
4303      }
4304
4305    if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4306      {
4307        emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4308        DONE;
4309      }
4310
4311    if (!s_register_operand (operands[1], HImode))
4312      operands[1] = copy_to_mode_reg (HImode, operands[1]);
4313
4314    if (arm_arch6)
4315      {
4316	if (TARGET_THUMB1)
4317	  emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4318	else
4319	  emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4320		     gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4321
4322	DONE;
4323      }
4324
4325    operands[1] = gen_lowpart (SImode, operands[1]);
4326    operands[2] = gen_reg_rtx (SImode);
4327  }"
4328)
4329
4330(define_insn "thumb1_extendhisi2"
4331  [(set (match_operand:SI 0 "register_operand" "=l")
4332	(sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4333   (clobber (match_scratch:SI 2 "=&l"))]
4334  "TARGET_THUMB1 && !arm_arch6"
4335  "*
4336  {
4337    rtx ops[4];
4338    rtx mem = XEXP (operands[1], 0);
4339
4340    /* This code used to try to use 'V', and fix the address only if it was
4341       offsettable, but this fails for e.g. REG+48 because 48 is outside the
4342       range of QImode offsets, and offsettable_address_p does a QImode
4343       address check.  */
4344
4345    if (GET_CODE (mem) == CONST)
4346      mem = XEXP (mem, 0);
4347
4348    if (GET_CODE (mem) == LABEL_REF)
4349      return \"ldr\\t%0, %1\";
4350
4351    if (GET_CODE (mem) == PLUS)
4352      {
4353        rtx a = XEXP (mem, 0);
4354        rtx b = XEXP (mem, 1);
4355
4356        if (GET_CODE (a) == LABEL_REF
4357	    && GET_CODE (b) == CONST_INT)
4358          return \"ldr\\t%0, %1\";
4359
4360        if (GET_CODE (b) == REG)
4361          return \"ldrsh\\t%0, %1\";
4362
4363        ops[1] = a;
4364        ops[2] = b;
4365      }
4366    else
4367      {
4368        ops[1] = mem;
4369        ops[2] = const0_rtx;
4370      }
4371
4372    gcc_assert (GET_CODE (ops[1]) == REG);
4373
4374    ops[0] = operands[0];
4375    ops[3] = operands[2];
4376    output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4377    return \"\";
4378  }"
4379  [(set_attr "length" "4")
4380   (set_attr "type" "load_byte")
4381   (set_attr "pool_range" "1020")]
4382)
4383
4384;; We used to have an early-clobber on the scratch register here.
4385;; However, there's a bug somewhere in reload which means that this
4386;; can be partially ignored during spill allocation if the memory
4387;; address also needs reloading; this causes us to die later on when
4388;; we try to verify the operands.  Fortunately, we don't really need
4389;; the early-clobber: we can always use operand 0 if operand 2
4390;; overlaps the address.
4391(define_insn "*thumb1_extendhisi2_insn_v6"
4392  [(set (match_operand:SI 0 "register_operand" "=l,l")
4393	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4394   (clobber (match_scratch:SI 2 "=X,l"))]
4395  "TARGET_THUMB1 && arm_arch6"
4396  "*
4397  {
4398    rtx ops[4];
4399    rtx mem;
4400
4401    if (which_alternative == 0)
4402      return \"sxth\\t%0, %1\";
4403
4404    mem = XEXP (operands[1], 0);
4405
4406    /* This code used to try to use 'V', and fix the address only if it was
4407       offsettable, but this fails for e.g. REG+48 because 48 is outside the
4408       range of QImode offsets, and offsettable_address_p does a QImode
4409       address check.  */
4410
4411    if (GET_CODE (mem) == CONST)
4412      mem = XEXP (mem, 0);
4413
4414    if (GET_CODE (mem) == LABEL_REF)
4415      return \"ldr\\t%0, %1\";
4416
4417    if (GET_CODE (mem) == PLUS)
4418      {
4419        rtx a = XEXP (mem, 0);
4420        rtx b = XEXP (mem, 1);
4421
4422        if (GET_CODE (a) == LABEL_REF
4423	    && GET_CODE (b) == CONST_INT)
4424          return \"ldr\\t%0, %1\";
4425
4426        if (GET_CODE (b) == REG)
4427          return \"ldrsh\\t%0, %1\";
4428
4429        ops[1] = a;
4430        ops[2] = b;
4431      }
4432    else
4433      {
4434        ops[1] = mem;
4435        ops[2] = const0_rtx;
4436      }
4437
4438    gcc_assert (GET_CODE (ops[1]) == REG);
4439
4440    ops[0] = operands[0];
4441    if (reg_mentioned_p (operands[2], ops[1]))
4442      ops[3] = ops[0];
4443    else
4444      ops[3] = operands[2];
4445    output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4446    return \"\";
4447  }"
4448  [(set_attr "length" "2,4")
4449   (set_attr "type" "alu_shift,load_byte")
4450   (set_attr "pool_range" "*,1020")]
4451)
4452
4453;; This pattern will only be used when ldrsh is not available.
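;; In outline: the two bytes are loaded zero-extended, the most significant
;; byte is shifted up to bits 24..31 and then shifted arithmetically right by
;; 16 so that it sits sign-extended in bits 8..31, and ORing in the least
;; significant byte reassembles the sign-extended halfword; the
;; BYTES_BIG_ENDIAN test simply selects which loaded byte is which.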
4454(define_expand "extendhisi2_mem"
4455  [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4456   (set (match_dup 3)
4457	(zero_extend:SI (match_dup 7)))
4458   (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4459   (set (match_operand:SI 0 "" "")
4460	(ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4461  "TARGET_ARM"
4462  "
4463  {
4464    rtx mem1, mem2;
4465    rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4466
4467    mem1 = change_address (operands[1], QImode, addr);
4468    mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4469    operands[0] = gen_lowpart (SImode, operands[0]);
4470    operands[1] = mem1;
4471    operands[2] = gen_reg_rtx (SImode);
4472    operands[3] = gen_reg_rtx (SImode);
4473    operands[6] = gen_reg_rtx (SImode);
4474    operands[7] = mem2;
4475
4476    if (BYTES_BIG_ENDIAN)
4477      {
4478	operands[4] = operands[2];
4479	operands[5] = operands[3];
4480      }
4481    else
4482      {
4483	operands[4] = operands[3];
4484	operands[5] = operands[2];
4485      }
4486  }"
4487)
4488
4489(define_insn "*arm_extendhisi2"
4490  [(set (match_operand:SI 0 "s_register_operand" "=r")
4491	(sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4492  "TARGET_ARM && arm_arch4 && !arm_arch6"
4493  "ldr%(sh%)\\t%0, %1"
4494  [(set_attr "type" "load_byte")
4495   (set_attr "predicable" "yes")
4496   (set_attr "pool_range" "256")
4497   (set_attr "neg_pool_range" "244")]
4498)
4499
4500;; ??? Check Thumb-2 pool range
4501(define_insn "*arm_extendhisi2_v6"
4502  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4503	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4504  "TARGET_32BIT && arm_arch6"
4505  "@
4506   sxth%?\\t%0, %1
4507   ldr%(sh%)\\t%0, %1"
4508  [(set_attr "type" "alu_shift,load_byte")
4509   (set_attr "predicable" "yes")
4510   (set_attr "pool_range" "*,256")
4511   (set_attr "neg_pool_range" "*,244")]
4512)
4513
4514(define_insn "*arm_extendhisi2addsi"
4515  [(set (match_operand:SI 0 "s_register_operand" "=r")
4516	(plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4517		 (match_operand:SI 2 "s_register_operand" "r")))]
4518  "TARGET_INT_SIMD"
4519  "sxtah%?\\t%0, %2, %1"
4520)
4521
4522(define_expand "extendqihi2"
4523  [(set (match_dup 2)
4524	(ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4525		   (const_int 24)))
4526   (set (match_operand:HI 0 "s_register_operand" "")
4527	(ashiftrt:SI (match_dup 2)
4528		     (const_int 24)))]
4529  "TARGET_ARM"
4530  "
4531  {
4532    if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4533      {
4534	emit_insn (gen_rtx_SET (VOIDmode,
4535				operands[0],
4536				gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4537	DONE;
4538      }
4539    if (!s_register_operand (operands[1], QImode))
4540      operands[1] = copy_to_mode_reg (QImode, operands[1]);
4541    operands[0] = gen_lowpart (SImode, operands[0]);
4542    operands[1] = gen_lowpart (SImode, operands[1]);
4543    operands[2] = gen_reg_rtx (SImode);
4544  }"
4545)
4546
4547(define_insn "*arm_extendqihi_insn"
4548  [(set (match_operand:HI 0 "s_register_operand" "=r")
4549	(sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4550  "TARGET_ARM && arm_arch4"
4551  "ldr%(sb%)\\t%0, %1"
4552  [(set_attr "type" "load_byte")
4553   (set_attr "predicable" "yes")
4554   (set_attr "pool_range" "256")
4555   (set_attr "neg_pool_range" "244")]
4556)
4557
4558(define_expand "extendqisi2"
4559  [(set (match_dup 2)
4560	(ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "")
4561		   (const_int 24)))
4562   (set (match_operand:SI 0 "s_register_operand" "")
4563	(ashiftrt:SI (match_dup 2)
4564		     (const_int 24)))]
4565  "TARGET_EITHER"
4566  "
4567  {
4568    if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4569      {
4570        emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4571			        gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4572        DONE;
4573      }
4574
4575    if (!s_register_operand (operands[1], QImode))
4576      operands[1] = copy_to_mode_reg (QImode, operands[1]);
4577
4578    if (arm_arch6)
4579      {
4580        emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4581			        gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4582        DONE;
4583      }
4584
4585    operands[1] = gen_lowpart (SImode, operands[1]);
4586    operands[2] = gen_reg_rtx (SImode);
4587  }"
4588)
4589
4590(define_insn "*arm_extendqisi"
4591  [(set (match_operand:SI 0 "s_register_operand" "=r")
4592	(sign_extend:SI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
4593  "TARGET_ARM && arm_arch4 && !arm_arch6"
4594  "ldr%(sb%)\\t%0, %1"
4595  [(set_attr "type" "load_byte")
4596   (set_attr "predicable" "yes")
4597   (set_attr "pool_range" "256")
4598   (set_attr "neg_pool_range" "244")]
4599)
4600
4601(define_insn "*arm_extendqisi_v6"
4602  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4603	(sign_extend:SI
4604	 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
4605  "TARGET_ARM && arm_arch6"
4606  "@
4607   sxtb%?\\t%0, %1
4608   ldr%(sb%)\\t%0, %1"
4609  [(set_attr "type" "alu_shift,load_byte")
4610   (set_attr "predicable" "yes")
4611   (set_attr "pool_range" "*,256")
4612   (set_attr "neg_pool_range" "*,244")]
4613)
4614
4615(define_insn "*arm_extendqisi2addsi"
4616  [(set (match_operand:SI 0 "s_register_operand" "=r")
4617	(plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4618		 (match_operand:SI 2 "s_register_operand" "r")))]
4619  "TARGET_INT_SIMD"
4620  "sxtab%?\\t%0, %2, %1"
4621  [(set_attr "type" "alu_shift")
4622   (set_attr "insn" "xtab")
4623   (set_attr "predicable" "yes")]
4624)
4625
4626(define_insn "*thumb1_extendqisi2"
4627  [(set (match_operand:SI 0 "register_operand" "=l,l")
4628	(sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4629  "TARGET_THUMB1 && !arm_arch6"
4630  "*
4631  {
4632    rtx ops[3];
4633    rtx mem = XEXP (operands[1], 0);
4634
4635    if (GET_CODE (mem) == CONST)
4636      mem = XEXP (mem, 0);
4637
4638    if (GET_CODE (mem) == LABEL_REF)
4639      return \"ldr\\t%0, %1\";
4640
4641    if (GET_CODE (mem) == PLUS
4642        && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4643      return \"ldr\\t%0, %1\";
4644
4645    if (which_alternative == 0)
4646      return \"ldrsb\\t%0, %1\";
4647
4648    ops[0] = operands[0];
4649
4650    if (GET_CODE (mem) == PLUS)
4651      {
4652        rtx a = XEXP (mem, 0);
4653	rtx b = XEXP (mem, 1);
4654
4655        ops[1] = a;
4656        ops[2] = b;
4657
4658        if (GET_CODE (a) == REG)
4659	  {
4660	    if (GET_CODE (b) == REG)
4661              output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4662            else if (REGNO (a) == REGNO (ops[0]))
4663	      {
4664                output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4665		output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4666		output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4667	      }
4668	    else
4669              output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4670	  }
4671	else
4672          {
4673	    gcc_assert (GET_CODE (b) == REG);
4674            if (REGNO (b) == REGNO (ops[0]))
4675	      {
4676                output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4677		output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4678		output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4679	      }
4680	    else
4681              output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4682          }
4683      }
4684    else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4685      {
4686        output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4687	output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4688	output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4689      }
4690    else
4691      {
4692        ops[1] = mem;
4693        ops[2] = const0_rtx;
4694
4695        output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4696      }
4697    return \"\";
4698  }"
4699  [(set_attr "length" "2,6")
4700   (set_attr "type" "load_byte,load_byte")
4701   (set_attr "pool_range" "32,32")]
4702)
4703
4704(define_insn "*thumb1_extendqisi2_v6"
4705  [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4706	(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4707  "TARGET_THUMB1 && arm_arch6"
4708  "*
4709  {
4710    rtx ops[3];
4711    rtx mem;
4712
4713    if (which_alternative == 0)
4714      return \"sxtb\\t%0, %1\";
4715
4716    mem = XEXP (operands[1], 0);
4717
4718    if (GET_CODE (mem) == CONST)
4719      mem = XEXP (mem, 0);
4720
4721    if (GET_CODE (mem) == LABEL_REF)
4722      return \"ldr\\t%0, %1\";
4723
4724    if (GET_CODE (mem) == PLUS
4725        && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4726      return \"ldr\\t%0, %1\";
4727
4728    if (which_alternative == 1)
4729      return \"ldrsb\\t%0, %1\";
4730
4731    ops[0] = operands[0];
4732
4733    if (GET_CODE (mem) == PLUS)
4734      {
4735        rtx a = XEXP (mem, 0);
4736	rtx b = XEXP (mem, 1);
4737
4738        ops[1] = a;
4739        ops[2] = b;
4740
4741        if (GET_CODE (a) == REG)
4742	  {
4743	    if (GET_CODE (b) == REG)
4744              output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4745            else if (REGNO (a) == REGNO (ops[0]))
4746	      {
4747                output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4748		output_asm_insn (\"sxtb\\t%0, %0\", ops);
4749	      }
4750	    else
4751              output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4752	  }
4753	else
4754          {
4755	    gcc_assert (GET_CODE (b) == REG);
4756            if (REGNO (b) == REGNO (ops[0]))
4757	      {
4758                output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4759		output_asm_insn (\"sxtb\\t%0, %0\", ops);
4760	      }
4761	    else
4762              output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4763          }
4764      }
4765    else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4766      {
4767        output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4768	output_asm_insn (\"sxtb\\t%0, %0\", ops);
4769      }
4770    else
4771      {
4772        ops[1] = mem;
4773        ops[2] = const0_rtx;
4774
4775        output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4776      }
4777    return \"\";
4778  }"
4779  [(set_attr "length" "2,2,4")
4780   (set_attr "type" "alu_shift,load_byte,load_byte")
4781   (set_attr "pool_range" "*,32,32")]
4782)
4783
4784(define_expand "extendsfdf2"
4785  [(set (match_operand:DF                  0 "s_register_operand" "")
4786	(float_extend:DF (match_operand:SF 1 "s_register_operand"  "")))]
4787  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4788  ""
4789)
4790
4791/* HFmode -> DFmode conversions have to go through SFmode.  */
4792(define_expand "extendhfdf2"
4793  [(set (match_operand:DF                  0 "general_operand" "")
4794	(float_extend:DF (match_operand:HF 1 "general_operand"  "")))]
4795  "TARGET_EITHER"
4796  "
4797  {
4798    rtx op1;
4799    op1 = convert_to_mode (SFmode, operands[1], 0);
4800    op1 = convert_to_mode (DFmode, op1, 0);
4801    emit_insn (gen_movdf (operands[0], op1));
4802    DONE;
4803  }"
4804)
4805
4806;; Move insns (including loads and stores)
4807
4808;; XXX Just some ideas about movti.
4809;; I don't think these are a good idea on the ARM; there just aren't enough
4810;; registers.
4811;;(define_expand "loadti"
4812;;  [(set (match_operand:TI 0 "s_register_operand" "")
4813;;	(mem:TI (match_operand:SI 1 "address_operand" "")))]
4814;;  "" "")
4815
4816;;(define_expand "storeti"
4817;;  [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4818;;	(match_operand:TI 1 "s_register_operand" ""))]
4819;;  "" "")
4820
4821;;(define_expand "movti"
4822;;  [(set (match_operand:TI 0 "general_operand" "")
4823;;	(match_operand:TI 1 "general_operand" ""))]
4824;;  ""
4825;;  "
4826;;{
4827;;  rtx insn;
4828;;
4829;;  if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4830;;    operands[1] = copy_to_reg (operands[1]);
4831;;  if (GET_CODE (operands[0]) == MEM)
4832;;    insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4833;;  else if (GET_CODE (operands[1]) == MEM)
4834;;    insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4835;;  else
4836;;    FAIL;
4837;;
4838;;  emit_insn (insn);
4839;;  DONE;
4840;;}")
4841
4842;; Recognize garbage generated above.
4843
4844;;(define_insn ""
4845;;  [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4846;;	(match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4847;;  ""
4848;;  "*
4849;;  {
4850;;    register mem = (which_alternative < 3);
4851;;    register const char *template;
4852;;
4853;;    operands[mem] = XEXP (operands[mem], 0);
4854;;    switch (which_alternative)
4855;;      {
4856;;      case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4857;;      case 1: template = \"ldmia\\t%1!, %M0\"; break;
4858;;      case 2: template = \"ldmia\\t%1, %M0\"; break;
4859;;      case 3: template = \"stmdb\\t%0!, %M1\"; break;
4860;;      case 4: template = \"stmia\\t%0!, %M1\"; break;
4861;;      case 5: template = \"stmia\\t%0, %M1\"; break;
4862;;      }
4863;;    output_asm_insn (template, operands);
4864;;    return \"\";
4865;;  }")
4866
4867(define_expand "movdi"
4868  [(set (match_operand:DI 0 "general_operand" "")
4869	(match_operand:DI 1 "general_operand" ""))]
4870  "TARGET_EITHER"
4871  "
4872  if (can_create_pseudo_p ())
4873    {
4874      if (GET_CODE (operands[0]) != REG)
4875	operands[1] = force_reg (DImode, operands[1]);
4876    }
4877  "
4878)
4879
4880(define_insn "*arm_movdi"
4881  [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4882	(match_operand:DI 1 "di_operand"              "rDa,Db,Dc,mi,r"))]
4883  "TARGET_ARM
4884   && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4885   && !TARGET_IWMMXT
4886   && (   register_operand (operands[0], DImode)
4887       || register_operand (operands[1], DImode))"
4888  "*
4889  switch (which_alternative)
4890    {
4891    case 0:
4892    case 1:
4893    case 2:
4894      return \"#\";
4895    default:
4896      return output_move_double (operands);
4897    }
4898  "
4899  [(set_attr "length" "8,12,16,8,8")
4900   (set_attr "type" "*,*,*,load2,store2")
4901   (set_attr "pool_range" "*,*,*,1020,*")
4902   (set_attr "neg_pool_range" "*,*,*,1008,*")]
4903)
4904
4905(define_split
4906  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4907	(match_operand:ANY64 1 "const_double_operand" ""))]
4908  "TARGET_32BIT
4909   && reload_completed
4910   && (arm_const_double_inline_cost (operands[1])
4911       <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4912  [(const_int 0)]
4913  "
4914  arm_split_constant (SET, SImode, curr_insn,
4915		      INTVAL (gen_lowpart (SImode, operands[1])),
4916		      gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4917  arm_split_constant (SET, SImode, curr_insn,
4918		      INTVAL (gen_highpart_mode (SImode,
4919						 GET_MODE (operands[0]),
4920						 operands[1])),
4921		      gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4922  DONE;
4923  "
4924)
4925
4926; If optimizing for size, or if we have load delay slots, then
4927; we want to split the constant into two separate operations.
4928; In both cases this may split a trivial part into a single data op
4929; leaving a single complex constant to load.  We can also get longer
4930; offsets in a LDR which means we get better chances of sharing the pool
4931; entries.  Finally, we can normally do a better job of scheduling
4932; LDR instructions than we can with LDM.
4933; This pattern will only match if the one above did not.
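; For example (illustrative only; register and pool-label names are made up),
; a DImode constant such as 0x1234567800000001 may be split into
;       mov     rLO, #1                 @ trivial half as a single data op
;       ldr     rHI, .LCn               @ complex half from the literal pool
; where rLO/rHI are the low and high words of the destination register pair.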
4934(define_split
4935  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4936	(match_operand:ANY64 1 "const_double_operand" ""))]
4937  "TARGET_ARM && reload_completed
4938   && arm_const_double_by_parts (operands[1])"
4939  [(set (match_dup 0) (match_dup 1))
4940   (set (match_dup 2) (match_dup 3))]
4941  "
4942  operands[2] = gen_highpart (SImode, operands[0]);
4943  operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4944				   operands[1]);
4945  operands[0] = gen_lowpart (SImode, operands[0]);
4946  operands[1] = gen_lowpart (SImode, operands[1]);
4947  "
4948)
4949
4950(define_split
4951  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4952	(match_operand:ANY64 1 "arm_general_register_operand" ""))]
4953  "TARGET_EITHER && reload_completed"
4954  [(set (match_dup 0) (match_dup 1))
4955   (set (match_dup 2) (match_dup 3))]
4956  "
4957  operands[2] = gen_highpart (SImode, operands[0]);
4958  operands[3] = gen_highpart (SImode, operands[1]);
4959  operands[0] = gen_lowpart (SImode, operands[0]);
4960  operands[1] = gen_lowpart (SImode, operands[1]);
4961
4962  /* Handle a partial overlap.  */
4963  if (rtx_equal_p (operands[0], operands[3]))
4964    {
4965      rtx tmp0 = operands[0];
4966      rtx tmp1 = operands[1];
4967
4968      operands[0] = operands[2];
4969      operands[1] = operands[3];
4970      operands[2] = tmp0;
4971      operands[3] = tmp1;
4972    }
4973  "
4974)
4975
4976;; We can't actually do base+index doubleword loads if the index and
4977;; destination overlap.  Split here so that we at least have a chance to
4978;; schedule.
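;; For example (a sketch; register numbers are illustrative), with the
;; destination in {r4, r5}, base r4 and index r6, the load is split into
;;       add     r4, r4, r6
;; followed by an ordinary doubleword load from [r4] into {r4, r5}, i.e.
;; the address is formed in the low half of the destination first.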
4979(define_split
4980  [(set (match_operand:DI 0 "s_register_operand" "")
4981	(mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4982			 (match_operand:SI 2 "s_register_operand" ""))))]
4983  "TARGET_LDRD
4984  && reg_overlap_mentioned_p (operands[0], operands[1])
4985  && reg_overlap_mentioned_p (operands[0], operands[2])"
4986  [(set (match_dup 4)
4987	(plus:SI (match_dup 1)
4988		 (match_dup 2)))
4989   (set (match_dup 0)
4990	(mem:DI (match_dup 4)))]
4991  "
4992  operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4993  "
4994)
4995
4996;;; ??? This should have alternatives for constants.
4997;;; ??? This was originally identical to the movdf_insn pattern.
4998;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4999;;; thumb_reorg with a memory reference.
5000(define_insn "*thumb1_movdi_insn"
5001  [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
5002	(match_operand:DI 1 "general_operand"      "l, I,J,>,l,mi,l,*r"))]
5003  "TARGET_THUMB1
5004   && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
5005   && (   register_operand (operands[0], DImode)
5006       || register_operand (operands[1], DImode))"
5007  "*
5008  {
5009  switch (which_alternative)
5010    {
5011    default:
5012    case 0:
5013      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5014	return \"add\\t%0,  %1,  #0\;add\\t%H0, %H1, #0\";
5015      return   \"add\\t%H0, %H1, #0\;add\\t%0,  %1,  #0\";
5016    case 1:
5017      return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
5018    case 2:
5019      operands[1] = GEN_INT (- INTVAL (operands[1]));
5020      return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
5021    case 3:
5022      return \"ldmia\\t%1, {%0, %H0}\";
5023    case 4:
5024      return \"stmia\\t%0, {%1, %H1}\";
5025    case 5:
5026      return thumb_load_double_from_address (operands);
5027    case 6:
5028      operands[2] = gen_rtx_MEM (SImode,
5029			     plus_constant (XEXP (operands[0], 0), 4));
5030      output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5031      return \"\";
5032    case 7:
5033      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5034	return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5035      return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5036    }
5037  }"
5038  [(set_attr "length" "4,4,6,2,2,6,4,4")
5039   (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
5040   (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
5041)
5042
5043(define_expand "movsi"
5044  [(set (match_operand:SI 0 "general_operand" "")
5045        (match_operand:SI 1 "general_operand" ""))]
5046  "TARGET_EITHER"
5047  "
5048  {
5049  rtx base, offset, tmp;
5050
5051  if (TARGET_32BIT)
5052    {
5053      /* Everything except mem = const or mem = mem can be done easily.  */
5054      if (GET_CODE (operands[0]) == MEM)
5055        operands[1] = force_reg (SImode, operands[1]);
5056      if (arm_general_register_operand (operands[0], SImode)
5057	  && GET_CODE (operands[1]) == CONST_INT
5058          && !(const_ok_for_arm (INTVAL (operands[1]))
5059               || const_ok_for_arm (~INTVAL (operands[1]))))
5060        {
5061           arm_split_constant (SET, SImode, NULL_RTX,
5062	                       INTVAL (operands[1]), operands[0], NULL_RTX,
5063			       optimize && can_create_pseudo_p ());
5064          DONE;
5065        }
5066
5067      if (TARGET_USE_MOVT && !target_word_relocations
5068	  && GET_CODE (operands[1]) == SYMBOL_REF
5069	  && !flag_pic && !arm_tls_referenced_p (operands[1]))
5070	{
5071	  arm_emit_movpair (operands[0], operands[1]);
5072	  DONE;
5073	}
5074    }
5075  else /* TARGET_THUMB1...  */
5076    {
5077      if (can_create_pseudo_p ())
5078        {
5079          if (GET_CODE (operands[0]) != REG)
5080	    operands[1] = force_reg (SImode, operands[1]);
5081        }
5082    }
5083
5084  if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
5085    {
5086      split_const (operands[1], &base, &offset);
5087      if (GET_CODE (base) == SYMBOL_REF
5088	  && !offset_within_block_p (base, INTVAL (offset)))
5089	{
5090	  tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5091	  emit_move_insn (tmp, base);
5092	  emit_insn (gen_addsi3 (operands[0], tmp, offset));
5093	  DONE;
5094	}
5095    }
5096
5097  /* Recognize the case where operands[1] is a reference to thread-local
5098     data and load its address to a register.  */
5099  if (arm_tls_referenced_p (operands[1]))
5100    {
5101      rtx tmp = operands[1];
5102      rtx addend = NULL;
5103
5104      if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
5105        {
5106          addend = XEXP (XEXP (tmp, 0), 1);
5107          tmp = XEXP (XEXP (tmp, 0), 0);
5108        }
5109
5110      gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
5111      gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
5112
5113      tmp = legitimize_tls_address (tmp,
5114				    !can_create_pseudo_p () ? operands[0] : 0);
5115      if (addend)
5116        {
5117          tmp = gen_rtx_PLUS (SImode, tmp, addend);
5118          tmp = force_operand (tmp, operands[0]);
5119        }
5120      operands[1] = tmp;
5121    }
5122  else if (flag_pic
5123	   && (CONSTANT_P (operands[1])
5124	       || symbol_mentioned_p (operands[1])
5125	       || label_mentioned_p (operands[1])))
5126      operands[1] = legitimize_pic_address (operands[1], SImode,
5127					    (!can_create_pseudo_p ()
5128					     ? operands[0]
5129					     : 0));
5130  }
5131  "
5132)
5133
5134;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5135;; LO_SUM adds in the high bits.  Fortunately these are opaque operations
5136;; so this does not matter.
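;; For example, arm_emit_movpair expands a symbolic SImode move roughly as
;;       movw    rD, #:lower16:sym       @ the "HIGH" part
;;       movt    rD, #:upper16:sym       @ the "LO_SUM" part (insn below)
;; (illustrative; rD and sym stand for the destination and the symbol).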
5137(define_insn "*arm_movt"
5138  [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
5139	(lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0")
5140		   (match_operand:SI 2 "general_operand"      "i")))]
5141  "TARGET_32BIT"
5142  "movt%?\t%0, #:upper16:%c2"
5143  [(set_attr "predicable" "yes")
5144   (set_attr "length" "4")]
5145)
5146
5147(define_insn "*arm_movsi_insn"
5148  [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5149	(match_operand:SI 1 "general_operand"      "rk, I,K,j,mi,rk"))]
5150  "TARGET_ARM && ! TARGET_IWMMXT
5151   && !(TARGET_HARD_FLOAT && TARGET_VFP)
5152   && (   register_operand (operands[0], SImode)
5153       || register_operand (operands[1], SImode))"
5154  "@
5155   mov%?\\t%0, %1
5156   mov%?\\t%0, %1
5157   mvn%?\\t%0, #%B1
5158   movw%?\\t%0, %1
5159   ldr%?\\t%0, %1
5160   str%?\\t%1, %0"
5161  [(set_attr "type" "*,*,*,*,load1,store1")
5162   (set_attr "predicable" "yes")
5163   (set_attr "pool_range" "*,*,*,*,4096,*")
5164   (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
5165)
5166
5167(define_split
5168  [(set (match_operand:SI 0 "arm_general_register_operand" "")
5169	(match_operand:SI 1 "const_int_operand" ""))]
5170  "TARGET_32BIT
5171  && (!(const_ok_for_arm (INTVAL (operands[1]))
5172        || const_ok_for_arm (~INTVAL (operands[1]))))"
5173  [(clobber (const_int 0))]
5174  "
5175  arm_split_constant (SET, SImode, NULL_RTX,
5176                      INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5177  DONE;
5178  "
5179)
5180
5181(define_insn "*thumb1_movsi_insn"
5182  [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
5183	(match_operand:SI 1 "general_operand"      "l, I,J,K,>,l,mi,l,*lhk"))]
5184  "TARGET_THUMB1
5185   && (   register_operand (operands[0], SImode)
5186       || register_operand (operands[1], SImode))"
5187  "@
5188   mov	%0, %1
5189   mov	%0, %1
5190   #
5191   #
5192   ldmia\\t%1, {%0}
5193   stmia\\t%0, {%1}
5194   ldr\\t%0, %1
5195   str\\t%1, %0
5196   mov\\t%0, %1"
5197  [(set_attr "length" "2,2,4,4,2,2,2,2,2")
5198   (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
5199   (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
5200)
5201
5202(define_split
5203  [(set (match_operand:SI 0 "register_operand" "")
5204	(match_operand:SI 1 "const_int_operand" ""))]
5205  "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
5206  [(set (match_dup 0) (match_dup 1))
5207   (set (match_dup 0) (neg:SI (match_dup 0)))]
5208  "operands[1] = GEN_INT (- INTVAL (operands[1]));"
5209)
5210
5211(define_split
5212  [(set (match_operand:SI 0 "register_operand" "")
5213	(match_operand:SI 1 "const_int_operand" ""))]
5214  "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
5215  [(set (match_dup 0) (match_dup 1))
5216   (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
5217  "
5218  {
5219    unsigned HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffffffffu;
5220    unsigned HOST_WIDE_INT mask = 0xff;
5221    int i;
5222
5223    for (i = 0; i < 25; i++)
5224      if ((val & (mask << i)) == val)
5225        break;
5226
5227    /* Shouldn't happen, but we don't want to split if the shift is zero.  */
5228    if (i == 0)
5229      FAIL;
5230
5231    operands[1] = GEN_INT (val >> i);
5232    operands[2] = GEN_INT (i);
5233  }"
5234)
5235
5236;; When generating pic, we need to load the symbol offset into a register.
5237;; So that the optimizer does not confuse this with a normal symbol load
5238;; we use an unspec.  The offset will be loaded from a constant pool entry,
5239;; since that is the only type of relocation we can use.
5240
5241;; The rather odd constraints on the following are to force reload to leave
5242;; the insn alone, and to force the minipool generation pass to then move
5243;; the GOT symbol to memory.
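;; A typical sequence is therefore (a sketch; the label and relocation
;; shown are illustrative):
;;       ldr     r3, .LCn                @ pic_load_addr_32bit
;;       ...
;; .LCn: .word   foo(GOT)
;; with the loaded offset subsequently combined with the PIC base register.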
5244
5245(define_insn "pic_load_addr_32bit"
5246  [(set (match_operand:SI 0 "s_register_operand" "=r")
5247	(unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5248  "TARGET_32BIT && flag_pic"
5249  "ldr%?\\t%0, %1"
5250  [(set_attr "type" "load1")
5251   (set_attr "pool_range" "4096")
5252   (set (attr "neg_pool_range")
5253	(if_then_else (eq_attr "is_thumb" "no")
5254		      (const_int 4084)
5255		      (const_int 0)))]
5256)
5257
5258(define_insn "pic_load_addr_thumb1"
5259  [(set (match_operand:SI 0 "s_register_operand" "=l")
5260	(unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
5261  "TARGET_THUMB1 && flag_pic"
5262  "ldr\\t%0, %1"
5263  [(set_attr "type" "load1")
5264   (set (attr "pool_range") (const_int 1024))]
5265)
5266
5267(define_insn "pic_add_dot_plus_four"
5268  [(set (match_operand:SI 0 "register_operand" "=r")
5269	(unspec:SI [(match_operand:SI 1 "register_operand" "0")
5270		    (const_int 4)
5271		    (match_operand 2 "" "")]
5272		   UNSPEC_PIC_BASE))]
5273  "TARGET_THUMB"
5274  "*
5275  (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5276				     INTVAL (operands[2]));
5277  return \"add\\t%0, %|pc\";
5278  "
5279  [(set_attr "length" "2")]
5280)
5281
5282(define_insn "pic_add_dot_plus_eight"
5283  [(set (match_operand:SI 0 "register_operand" "=r")
5284	(unspec:SI [(match_operand:SI 1 "register_operand" "r")
5285		    (const_int 8)
5286		    (match_operand 2 "" "")]
5287		   UNSPEC_PIC_BASE))]
5288  "TARGET_ARM"
5289  "*
5290    (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5291				       INTVAL (operands[2]));
5292    return \"add%?\\t%0, %|pc, %1\";
5293  "
5294  [(set_attr "predicable" "yes")]
5295)
5296
5297(define_insn "tls_load_dot_plus_eight"
5298  [(set (match_operand:SI 0 "register_operand" "=r")
5299	(mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
5300			    (const_int 8)
5301			    (match_operand 2 "" "")]
5302			   UNSPEC_PIC_BASE)))]
5303  "TARGET_ARM"
5304  "*
5305    (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5306				       INTVAL (operands[2]));
5307    return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5308  "
5309  [(set_attr "predicable" "yes")]
5310)
5311
5312;; PIC references to local variables can generate pic_add_dot_plus_eight
5313;; followed by a load.  These sequences can be crunched down to
5314;; tls_load_dot_plus_eight by a peephole.
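;; That is (illustrative; rT, rO and rD are placeholder registers):
;;       add     rT, pc, rO              @ pic_add_dot_plus_eight
;;       ldr     rD, [rT]
;; becomes, when rT is dead after the load,
;;       ldr     rD, [pc, rO]            @ tls_load_dot_plus_eight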
5315
5316(define_peephole2
5317  [(set (match_operand:SI 0 "register_operand" "")
5318	(unspec:SI [(match_operand:SI 3 "register_operand" "")
5319		    (const_int 8)
5320		    (match_operand 1 "" "")]
5321		   UNSPEC_PIC_BASE))
5322   (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5323  "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5324  [(set (match_dup 2)
5325	(mem:SI (unspec:SI [(match_dup 3)
5326			    (const_int 8)
5327			    (match_dup 1)]
5328			   UNSPEC_PIC_BASE)))]
5329  ""
5330)
5331
5332(define_insn "pic_offset_arm"
5333  [(set (match_operand:SI 0 "register_operand" "=r")
5334	(mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5335			 (unspec:SI [(match_operand:SI 2 "" "X")]
5336				    UNSPEC_PIC_OFFSET))))]
5337  "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5338  "ldr%?\\t%0, [%1,%2]"
5339  [(set_attr "type" "load1")]
5340)
5341
5342(define_expand "builtin_setjmp_receiver"
5343  [(label_ref (match_operand 0 "" ""))]
5344  "flag_pic"
5345  "
5346{
5347  /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5348     register.  */
5349  if (arm_pic_register != INVALID_REGNUM)
5350    arm_load_pic_register (1UL << 3);
5351  DONE;
5352}")
5353
5354;; If copying one reg to another we can set the condition codes according to
5355;; its value.  Such a move is common after a return from subroutine and the
5356;; result is being tested against zero.
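;; For example (a sketch), instead of
;;       mov     r0, r4
;;       cmp     r0, #0
;; the second alternative below emits the single instruction
;;       subs    r0, r4, #0
;; which performs the copy and sets the condition codes in one go.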
5357
5358(define_insn "*movsi_compare0"
5359  [(set (reg:CC CC_REGNUM)
5360	(compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5361		    (const_int 0)))
5362   (set (match_operand:SI 0 "s_register_operand" "=r,r")
5363	(match_dup 1))]
5364  "TARGET_32BIT"
5365  "@
5366   cmp%?\\t%0, #0
5367   sub%.\\t%0, %1, #0"
5368  [(set_attr "conds" "set")]
5369)
5370
5371;; Subroutine to store a half word from a register into memory.
5372;; Operand 0 is the source register (HImode).
5373;; Operand 1 is the destination memory location.
5374
5375;; In both this routine and the next, we must be careful not to spill
5376;; a memory address of reg+large_const into a separate PLUS insn, since this
5377;; can generate unrecognizable rtl.
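;; For example (roughly; r3 stands for the fresh temporary the expander
;; creates), storing the HImode value in r1 at [r2] on a little-endian
;; target expands to
;;       strb    r1, [r2, #0]            @ store the low byte
;;       mov     r3, r1, asr #8          @ extract the high byte
;;       strb    r3, [r2, #1]            @ store the high byte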
5378
5379(define_expand "storehi"
5380  [;; store the low byte
5381   (set (match_operand 1 "" "") (match_dup 3))
5382   ;; extract the high byte
5383   (set (match_dup 2)
5384	(ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5385   ;; store the high byte
5386   (set (match_dup 4) (match_dup 5))]
5387  "TARGET_ARM"
5388  "
5389  {
5390    rtx op1 = operands[1];
5391    rtx addr = XEXP (op1, 0);
5392    enum rtx_code code = GET_CODE (addr);
5393
5394    if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5395	|| code == MINUS)
5396      op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5397
5398    operands[4] = adjust_address (op1, QImode, 1);
5399    operands[1] = adjust_address (operands[1], QImode, 0);
5400    operands[3] = gen_lowpart (QImode, operands[0]);
5401    operands[0] = gen_lowpart (SImode, operands[0]);
5402    operands[2] = gen_reg_rtx (SImode);
5403    operands[5] = gen_lowpart (QImode, operands[2]);
5404  }"
5405)
5406
5407(define_expand "storehi_bigend"
5408  [(set (match_dup 4) (match_dup 3))
5409   (set (match_dup 2)
5410	(ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5411   (set (match_operand 1 "" "")	(match_dup 5))]
5412  "TARGET_ARM"
5413  "
5414  {
5415    rtx op1 = operands[1];
5416    rtx addr = XEXP (op1, 0);
5417    enum rtx_code code = GET_CODE (addr);
5418
5419    if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5420	|| code == MINUS)
5421      op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5422
5423    operands[4] = adjust_address (op1, QImode, 1);
5424    operands[1] = adjust_address (operands[1], QImode, 0);
5425    operands[3] = gen_lowpart (QImode, operands[0]);
5426    operands[0] = gen_lowpart (SImode, operands[0]);
5427    operands[2] = gen_reg_rtx (SImode);
5428    operands[5] = gen_lowpart (QImode, operands[2]);
5429  }"
5430)
5431
5432;; Subroutine to store a half word integer constant into memory.
5433(define_expand "storeinthi"
5434  [(set (match_operand 0 "" "")
5435	(match_operand 1 "" ""))
5436   (set (match_dup 3) (match_dup 2))]
5437  "TARGET_ARM"
5438  "
5439  {
5440    HOST_WIDE_INT value = INTVAL (operands[1]);
5441    rtx addr = XEXP (operands[0], 0);
5442    rtx op0 = operands[0];
5443    enum rtx_code code = GET_CODE (addr);
5444
5445    if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5446	|| code == MINUS)
5447      op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5448
5449    operands[1] = gen_reg_rtx (SImode);
5450    if (BYTES_BIG_ENDIAN)
5451      {
5452	emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5453	if ((value & 255) == ((value >> 8) & 255))
5454	  operands[2] = operands[1];
5455	else
5456	  {
5457	    operands[2] = gen_reg_rtx (SImode);
5458	    emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5459	  }
5460      }
5461    else
5462      {
5463	emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5464	if ((value & 255) == ((value >> 8) & 255))
5465	  operands[2] = operands[1];
5466	else
5467	  {
5468	    operands[2] = gen_reg_rtx (SImode);
5469	    emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5470	  }
5471      }
5472
5473    operands[3] = adjust_address (op0, QImode, 1);
5474    operands[0] = adjust_address (operands[0], QImode, 0);
5475    operands[2] = gen_lowpart (QImode, operands[2]);
5476    operands[1] = gen_lowpart (QImode, operands[1]);
5477  }"
5478)
5479
5480(define_expand "storehi_single_op"
5481  [(set (match_operand:HI 0 "memory_operand" "")
5482	(match_operand:HI 1 "general_operand" ""))]
5483  "TARGET_32BIT && arm_arch4"
5484  "
5485  if (!s_register_operand (operands[1], HImode))
5486    operands[1] = copy_to_mode_reg (HImode, operands[1]);
5487  "
5488)
5489
5490(define_expand "movhi"
5491  [(set (match_operand:HI 0 "general_operand" "")
5492	(match_operand:HI 1 "general_operand" ""))]
5493  "TARGET_EITHER"
5494  "
5495  if (TARGET_ARM)
5496    {
5497      if (can_create_pseudo_p ())
5498        {
5499          if (GET_CODE (operands[0]) == MEM)
5500	    {
5501	      if (arm_arch4)
5502	        {
5503	          emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5504	          DONE;
5505	        }
5506	      if (GET_CODE (operands[1]) == CONST_INT)
5507	        emit_insn (gen_storeinthi (operands[0], operands[1]));
5508	      else
5509	        {
5510	          if (GET_CODE (operands[1]) == MEM)
5511		    operands[1] = force_reg (HImode, operands[1]);
5512	          if (BYTES_BIG_ENDIAN)
5513		    emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5514	          else
5515		   emit_insn (gen_storehi (operands[1], operands[0]));
5516	        }
5517	      DONE;
5518	    }
5519          /* Sign extend a constant, and keep it in an SImode reg.  */
5520          else if (GET_CODE (operands[1]) == CONST_INT)
5521	    {
5522	      rtx reg = gen_reg_rtx (SImode);
5523	      HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5524
5525	      /* If the constant is already valid, leave it alone.  */
5526	      if (!const_ok_for_arm (val))
5527	        {
5528	          /* If setting all the top bits will make the constant
5529		     loadable in a single instruction, then set them.
5530		     Otherwise, sign extend the number.  */
5531
5532	          if (const_ok_for_arm (~(val | ~0xffff)))
5533		    val |= ~0xffff;
5534	          else if (val & 0x8000)
5535		    val |= ~0xffff;
5536	        }
5537
5538	      emit_insn (gen_movsi (reg, GEN_INT (val)));
5539	      operands[1] = gen_lowpart (HImode, reg);
5540	    }
5541	  else if (arm_arch4 && optimize && can_create_pseudo_p ()
5542		   && GET_CODE (operands[1]) == MEM)
5543	    {
5544	      rtx reg = gen_reg_rtx (SImode);
5545
5546	      emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5547	      operands[1] = gen_lowpart (HImode, reg);
5548	    }
5549          else if (!arm_arch4)
5550	    {
5551	      if (GET_CODE (operands[1]) == MEM)
5552	        {
5553		  rtx base;
5554		  rtx offset = const0_rtx;
5555		  rtx reg = gen_reg_rtx (SImode);
5556
5557		  if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5558		       || (GET_CODE (base) == PLUS
5559			   && (GET_CODE (offset = XEXP (base, 1))
5560			       == CONST_INT)
5561                           && ((INTVAL(offset) & 1) != 1)
5562			   && GET_CODE (base = XEXP (base, 0)) == REG))
5563		      && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5564		    {
5565		      rtx new_rtx;
5566
5567		      new_rtx = widen_memory_access (operands[1], SImode,
5568						     ((INTVAL (offset) & ~3)
5569						      - INTVAL (offset)));
5570		      emit_insn (gen_movsi (reg, new_rtx));
5571		      if (((INTVAL (offset) & 2) != 0)
5572			  ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5573			{
5574			  rtx reg2 = gen_reg_rtx (SImode);
5575
5576			  emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5577			  reg = reg2;
5578			}
5579		    }
5580		  else
5581		    emit_insn (gen_movhi_bytes (reg, operands[1]));
5582
5583		  operands[1] = gen_lowpart (HImode, reg);
5584	       }
5585	   }
5586        }
5587      /* Handle loading a large integer during reload.  */
5588      else if (GET_CODE (operands[1]) == CONST_INT
5589	       && !const_ok_for_arm (INTVAL (operands[1]))
5590	       && !const_ok_for_arm (~INTVAL (operands[1])))
5591        {
5592          /* Writing a constant to memory needs a scratch, which should
5593	     be handled with SECONDARY_RELOADs.  */
5594          gcc_assert (GET_CODE (operands[0]) == REG);
5595
5596          operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5597          emit_insn (gen_movsi (operands[0], operands[1]));
5598          DONE;
5599       }
5600    }
5601  else if (TARGET_THUMB2)
5602    {
5603      /* Thumb-2 can do everything except mem=mem and mem=const easily.  */
5604      if (can_create_pseudo_p ())
5605	{
5606	  if (GET_CODE (operands[0]) != REG)
5607	    operands[1] = force_reg (HImode, operands[1]);
5608          /* Zero extend a constant, and keep it in an SImode reg.  */
5609          else if (GET_CODE (operands[1]) == CONST_INT)
5610	    {
5611	      rtx reg = gen_reg_rtx (SImode);
5612	      HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5613
5614	      emit_insn (gen_movsi (reg, GEN_INT (val)));
5615	      operands[1] = gen_lowpart (HImode, reg);
5616	    }
5617	}
5618    }
5619  else /* TARGET_THUMB1 */
5620    {
5621      if (can_create_pseudo_p ())
5622        {
5623	  if (GET_CODE (operands[1]) == CONST_INT)
5624	    {
5625	      rtx reg = gen_reg_rtx (SImode);
5626
5627	      emit_insn (gen_movsi (reg, operands[1]));
5628	      operands[1] = gen_lowpart (HImode, reg);
5629	    }
5630
5631          /* ??? We shouldn't really get invalid addresses here, but this can
5632	     happen if we are passed a SP (never OK for HImode/QImode) or
5633	     virtual register (also rejected as illegitimate for HImode/QImode)
5634	     relative address.  */
5635          /* ??? This should perhaps be fixed elsewhere, for instance, in
5636	     fixup_stack_1, by checking for other kinds of invalid addresses,
5637	     e.g. a bare reference to a virtual register.  This may confuse the
5638	     alpha though, which must handle this case differently.  */
5639          if (GET_CODE (operands[0]) == MEM
5640	      && !memory_address_p (GET_MODE (operands[0]),
5641				    XEXP (operands[0], 0)))
5642	    operands[0]
5643	      = replace_equiv_address (operands[0],
5644				       copy_to_reg (XEXP (operands[0], 0)));
5645
5646          if (GET_CODE (operands[1]) == MEM
5647	      && !memory_address_p (GET_MODE (operands[1]),
5648				    XEXP (operands[1], 0)))
5649	    operands[1]
5650	      = replace_equiv_address (operands[1],
5651				       copy_to_reg (XEXP (operands[1], 0)));
5652
5653	  if (GET_CODE (operands[1]) == MEM && optimize > 0)
5654	    {
5655	      rtx reg = gen_reg_rtx (SImode);
5656
5657	      emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5658	      operands[1] = gen_lowpart (HImode, reg);
5659	    }
5660
5661          if (GET_CODE (operands[0]) == MEM)
5662	    operands[1] = force_reg (HImode, operands[1]);
5663        }
5664      else if (GET_CODE (operands[1]) == CONST_INT
5665	        && !satisfies_constraint_I (operands[1]))
5666        {
5667	  /* Handle loading a large integer during reload.  */
5668
5669          /* Writing a constant to memory needs a scratch, which should
5670	     be handled with SECONDARY_RELOADs.  */
5671          gcc_assert (GET_CODE (operands[0]) == REG);
5672
5673          operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5674          emit_insn (gen_movsi (operands[0], operands[1]));
5675          DONE;
5676        }
5677    }
5678  "
5679)
5680
5681(define_insn "*thumb1_movhi_insn"
5682  [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5683	(match_operand:HI 1 "general_operand"       "l,m,l,*h,*r,I"))]
5684  "TARGET_THUMB1
5685   && (   register_operand (operands[0], HImode)
5686       || register_operand (operands[1], HImode))"
5687  "*
5688  switch (which_alternative)
5689    {
5690    case 0: return \"add	%0, %1, #0\";
5691    case 2: return \"strh	%1, %0\";
5692    case 3: return \"mov	%0, %1\";
5693    case 4: return \"mov	%0, %1\";
5694    case 5: return \"mov	%0, %1\";
5695    default: gcc_unreachable ();
5696    case 1:
5697      /* The stack pointer can end up being taken as an index register.
5698          Catch this case here and deal with it.  */
5699      if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5700	  && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5701	  && REGNO    (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5702        {
5703	  rtx ops[2];
5704          ops[0] = operands[0];
5705          ops[1] = XEXP (XEXP (operands[1], 0), 0);
5706
5707          output_asm_insn (\"mov	%0, %1\", ops);
5708
5709          XEXP (XEXP (operands[1], 0), 0) = operands[0];
5710
5711	}
5712      return \"ldrh	%0, %1\";
5713    }"
5714  [(set_attr "length" "2,4,2,2,2,2")
5715   (set_attr "type" "*,load1,store1,*,*,*")]
5716)
5717
5718
5719(define_expand "movhi_bytes"
5720  [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5721   (set (match_dup 3)
5722	(zero_extend:SI (match_dup 6)))
5723   (set (match_operand:SI 0 "" "")
5724	 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5725  "TARGET_ARM"
5726  "
5727  {
5728    rtx mem1, mem2;
5729    rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5730
5731    mem1 = change_address (operands[1], QImode, addr);
5732    mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5733    operands[0] = gen_lowpart (SImode, operands[0]);
5734    operands[1] = mem1;
5735    operands[2] = gen_reg_rtx (SImode);
5736    operands[3] = gen_reg_rtx (SImode);
5737    operands[6] = mem2;
5738
5739    if (BYTES_BIG_ENDIAN)
5740      {
5741	operands[4] = operands[2];
5742	operands[5] = operands[3];
5743      }
5744    else
5745      {
5746	operands[4] = operands[3];
5747	operands[5] = operands[2];
5748      }
5749  }"
5750)
5751
5752(define_expand "movhi_bigend"
5753  [(set (match_dup 2)
5754	(rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5755		   (const_int 16)))
5756   (set (match_dup 3)
5757	(ashiftrt:SI (match_dup 2) (const_int 16)))
5758   (set (match_operand:HI 0 "s_register_operand" "")
5759	(match_dup 4))]
5760  "TARGET_ARM"
5761  "
5762  operands[2] = gen_reg_rtx (SImode);
5763  operands[3] = gen_reg_rtx (SImode);
5764  operands[4] = gen_lowpart (HImode, operands[3]);
5765  "
5766)
5767
5768;; Pattern to recognize the insn generated by the default case above.
5769(define_insn "*movhi_insn_arch4"
5770  [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5771	(match_operand:HI 1 "general_operand"      "rI,K,r,m"))]
5772  "TARGET_ARM
5773   && arm_arch4
5774   && (GET_CODE (operands[1]) != CONST_INT
5775       || const_ok_for_arm (INTVAL (operands[1]))
5776       || const_ok_for_arm (~INTVAL (operands[1])))"
5777  "@
5778   mov%?\\t%0, %1\\t%@ movhi
5779   mvn%?\\t%0, #%B1\\t%@ movhi
5780   str%(h%)\\t%1, %0\\t%@ movhi
5781   ldr%(h%)\\t%0, %1\\t%@ movhi"
5782  [(set_attr "type" "*,*,store1,load1")
5783   (set_attr "predicable" "yes")
5784   (set_attr "pool_range" "*,*,*,256")
5785   (set_attr "neg_pool_range" "*,*,*,244")]
5786)
5787
5788(define_insn "*movhi_bytes"
5789  [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5790	(match_operand:HI 1 "arm_rhs_operand"  "rI,K"))]
5791  "TARGET_ARM"
5792  "@
5793   mov%?\\t%0, %1\\t%@ movhi
5794   mvn%?\\t%0, #%B1\\t%@ movhi"
5795  [(set_attr "predicable" "yes")]
5796)
5797
5798(define_expand "thumb_movhi_clobber"
5799  [(set (match_operand:HI     0 "memory_operand"   "")
5800	(match_operand:HI     1 "register_operand" ""))
5801   (clobber (match_operand:DI 2 "register_operand" ""))]
5802  "TARGET_THUMB1"
5803  "
5804  if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5805      && REGNO (operands[1]) <= LAST_LO_REGNUM)
5806    {
5807      emit_insn (gen_movhi (operands[0], operands[1]));
5808      DONE;
5809    }
5810  /* XXX Fixme, need to handle other cases here as well.  */
5811  gcc_unreachable ();
5812  "
5813)
5814
5815;; We use a DImode scratch because we may occasionally need an additional
5816;; temporary if the address isn't offsettable -- push_reload doesn't seem
5817;; to take any notice of the "o" constraint on the reload_memory_operand operand.
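;; For example (a sketch), storing an HImode value to a base+index address
;; on a pre-ARMv4 target may need both halves of the DImode scratch: one
;; SImode half to form the address and the other to hold the shifted high
;; byte between the two byte stores.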
5818(define_expand "reload_outhi"
5819  [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5820	      (match_operand:HI 1 "s_register_operand"        "r")
5821	      (match_operand:DI 2 "s_register_operand"        "=&l")])]
5822  "TARGET_EITHER"
5823  "if (TARGET_ARM)
5824     arm_reload_out_hi (operands);
5825   else
5826     thumb_reload_out_hi (operands);
5827  DONE;
5828  "
5829)
5830
5831(define_expand "reload_inhi"
5832  [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5833	      (match_operand:HI 1 "arm_reload_memory_operand" "o")
5834	      (match_operand:DI 2 "s_register_operand" "=&r")])]
5835  "TARGET_EITHER"
5836  "
5837  if (TARGET_ARM)
5838    arm_reload_in_hi (operands);
5839  else
5840    thumb_reload_out_hi (operands);
5841  DONE;
5842")
5843
5844(define_expand "movqi"
5845  [(set (match_operand:QI 0 "general_operand" "")
5846        (match_operand:QI 1 "general_operand" ""))]
5847  "TARGET_EITHER"
5848  "
5849  /* Everything except mem = const or mem = mem can be done easily */
5850
5851  if (can_create_pseudo_p ())
5852    {
5853      if (GET_CODE (operands[1]) == CONST_INT)
5854	{
5855	  rtx reg = gen_reg_rtx (SImode);
5856
5857	  /* For thumb we want an unsigned immediate, then we are more likely
5858	     to be able to use a movs insn.  */
5859	  if (TARGET_THUMB)
5860	    operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
5861
5862	  emit_insn (gen_movsi (reg, operands[1]));
5863	  operands[1] = gen_lowpart (QImode, reg);
5864	}
5865
5866      if (TARGET_THUMB)
5867	{
5868          /* ??? We shouldn't really get invalid addresses here, but this can
5869	     happen if we are passed a SP (never OK for HImode/QImode) or
5870	     virtual register (also rejected as illegitimate for HImode/QImode)
5871	     relative address.  */
5872          /* ??? This should perhaps be fixed elsewhere, for instance, in
5873	     fixup_stack_1, by checking for other kinds of invalid addresses,
5874	     e.g. a bare reference to a virtual register.  This may confuse the
5875	     alpha though, which must handle this case differently.  */
5876          if (GET_CODE (operands[0]) == MEM
5877	      && !memory_address_p (GET_MODE (operands[0]),
5878		  		     XEXP (operands[0], 0)))
5879	    operands[0]
5880	      = replace_equiv_address (operands[0],
5881				       copy_to_reg (XEXP (operands[0], 0)));
5882          if (GET_CODE (operands[1]) == MEM
5883	      && !memory_address_p (GET_MODE (operands[1]),
5884				    XEXP (operands[1], 0)))
5885	     operands[1]
5886	       = replace_equiv_address (operands[1],
5887					copy_to_reg (XEXP (operands[1], 0)));
5888	}
5889
5890      if (GET_CODE (operands[1]) == MEM && optimize > 0)
5891	{
5892	  rtx reg = gen_reg_rtx (SImode);
5893
5894	  emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5895	  operands[1] = gen_lowpart (QImode, reg);
5896	}
5897
5898      if (GET_CODE (operands[0]) == MEM)
5899	operands[1] = force_reg (QImode, operands[1]);
5900    }
5901  else if (TARGET_THUMB
5902	   && GET_CODE (operands[1]) == CONST_INT
5903	   && !satisfies_constraint_I (operands[1]))
5904    {
5905      /* Handle loading a large integer during reload.  */
5906
5907      /* Writing a constant to memory needs a scratch, which should
5908	 be handled with SECONDARY_RELOADs.  */
5909      gcc_assert (GET_CODE (operands[0]) == REG);
5910
5911      operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5912      emit_insn (gen_movsi (operands[0], operands[1]));
5913      DONE;
5914    }
5915  "
5916)
5917
5918
5919(define_insn "*arm_movqi_insn"
5920  [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5921	(match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5922  "TARGET_32BIT
5923   && (   register_operand (operands[0], QImode)
5924       || register_operand (operands[1], QImode))"
5925  "@
5926   mov%?\\t%0, %1
5927   mvn%?\\t%0, #%B1
5928   ldr%(b%)\\t%0, %1
5929   str%(b%)\\t%1, %0"
5930  [(set_attr "type" "*,*,load1,store1")
5931   (set_attr "predicable" "yes")]
5932)
5933
5934(define_insn "*thumb1_movqi_insn"
5935  [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5936	(match_operand:QI 1 "general_operand"      "l, m,l,*h,*r,I"))]
5937  "TARGET_THUMB1
5938   && (   register_operand (operands[0], QImode)
5939       || register_operand (operands[1], QImode))"
5940  "@
5941   add\\t%0, %1, #0
5942   ldrb\\t%0, %1
5943   strb\\t%1, %0
5944   mov\\t%0, %1
5945   mov\\t%0, %1
5946   mov\\t%0, %1"
5947  [(set_attr "length" "2")
5948   (set_attr "type" "*,load1,store1,*,*,*")
5949   (set_attr "pool_range" "*,32,*,*,*,*")]
5950)
5951
5952;; HFmode moves
5953(define_expand "movhf"
5954  [(set (match_operand:HF 0 "general_operand" "")
5955	(match_operand:HF 1 "general_operand" ""))]
5956  "TARGET_EITHER"
5957  "
5958  if (TARGET_32BIT)
5959    {
5960      if (GET_CODE (operands[0]) == MEM)
5961        operands[1] = force_reg (HFmode, operands[1]);
5962    }
5963  else /* TARGET_THUMB1 */
5964    {
5965      if (can_create_pseudo_p ())
5966        {
5967           if (GET_CODE (operands[0]) != REG)
5968	     operands[1] = force_reg (HFmode, operands[1]);
5969        }
5970    }
5971  "
5972)
5973
5974(define_insn "*arm32_movhf"
5975  [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
5976	(match_operand:HF 1 "general_operand"	   " m,r,r,F"))]
5977  "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_FP16)
5978   && (	  s_register_operand (operands[0], HFmode)
5979       || s_register_operand (operands[1], HFmode))"
5980  "*
5981  switch (which_alternative)
5982    {
5983    case 0:	/* ARM register from memory */
5984      return \"ldr%(h%)\\t%0, %1\\t%@ __fp16\";
5985    case 1:	/* memory from ARM register */
5986      return \"str%(h%)\\t%1, %0\\t%@ __fp16\";
5987    case 2:	/* ARM register from ARM register */
5988      return \"mov%?\\t%0, %1\\t%@ __fp16\";
5989    case 3:	/* ARM register from constant */
5990      {
5991	REAL_VALUE_TYPE r;
5992	long bits;
5993	rtx ops[4];
5994
5995	REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
5996	bits = real_to_target (NULL, &r, HFmode);
5997	ops[0] = operands[0];
5998	ops[1] = GEN_INT (bits);
5999	ops[2] = GEN_INT (bits & 0xff00);
6000	ops[3] = GEN_INT (bits & 0x00ff);
6001
6002	if (arm_arch_thumb2)
6003	  output_asm_insn (\"movw%?\\t%0, %1\", ops);
6004	else
6005	  output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6006	return \"\";
6007       }
6008    default:
6009      gcc_unreachable ();
6010    }
6011  "
6012  [(set_attr "conds" "unconditional")
6013   (set_attr "type" "load1,store1,*,*")
6014   (set_attr "length" "4,4,4,8")
6015   (set_attr "predicable" "yes")
6016   ]
6017)
6018
6019(define_insn "*thumb1_movhf"
6020  [(set (match_operand:HF     0 "nonimmediate_operand" "=l,l,m,*r,*h")
6021	(match_operand:HF     1 "general_operand"      "l,mF,l,*h,*r"))]
6022  "TARGET_THUMB1
6023   && (	  s_register_operand (operands[0], HFmode)
6024       || s_register_operand (operands[1], HFmode))"
6025  "*
6026  switch (which_alternative)
6027    {
6028    case 1:
6029      {
6030	rtx addr;
6031	gcc_assert (GET_CODE(operands[1]) == MEM);
6032	addr = XEXP (operands[1], 0);
6033	if (GET_CODE (addr) == LABEL_REF
6034	    || (GET_CODE (addr) == CONST
6035		&& GET_CODE (XEXP (addr, 0)) == PLUS
6036		&& GET_CODE (XEXP (XEXP (addr, 0), 0)) == LABEL_REF
6037		&& GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT))
6038	  {
6039	    /* Constant pool entry.  */
6040	    return \"ldr\\t%0, %1\";
6041	  }
6042	return \"ldrh\\t%0, %1\";
6043      }
6044    case 2: return \"strh\\t%1, %0\";
6045    default: return \"mov\\t%0, %1\";
6046    }
6047  "
6048  [(set_attr "length" "2")
6049   (set_attr "type" "*,load1,store1,*,*")
6050   (set_attr "pool_range" "*,1020,*,*,*")]
6051)
6052
6053(define_expand "movsf"
6054  [(set (match_operand:SF 0 "general_operand" "")
6055	(match_operand:SF 1 "general_operand" ""))]
6056  "TARGET_EITHER"
6057  "
6058  if (TARGET_32BIT)
6059    {
6060      if (GET_CODE (operands[0]) == MEM)
6061        operands[1] = force_reg (SFmode, operands[1]);
6062    }
6063  else /* TARGET_THUMB1 */
6064    {
6065      if (can_create_pseudo_p ())
6066        {
6067           if (GET_CODE (operands[0]) != REG)
6068	     operands[1] = force_reg (SFmode, operands[1]);
6069        }
6070    }
6071  "
6072)
6073
6074;; When moving a floating-point constant into a core register, transform the
6075;; move into the equivalent SImode operation.
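;; For example (illustrative), after reload
;;       (set (reg:SF r0) (const_double 0.0))
;; becomes
;;       (set (reg:SI r0) (const_int 0))
;; i.e. a plain "mov r0, #0" rather than a floating-point constant load.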
6076(define_split
6077  [(set (match_operand:SF 0 "arm_general_register_operand" "")
6078	(match_operand:SF 1 "immediate_operand" ""))]
6079  "TARGET_EITHER
6080   && reload_completed
6081   && GET_CODE (operands[1]) == CONST_DOUBLE"
6082  [(set (match_dup 2) (match_dup 3))]
6083  "
6084  operands[2] = gen_lowpart (SImode, operands[0]);
6085  operands[3] = gen_lowpart (SImode, operands[1]);
6086  if (operands[2] == 0 || operands[3] == 0)
6087    FAIL;
6088  "
6089)
6090
6091(define_insn "*arm_movsf_soft_insn"
6092  [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6093	(match_operand:SF 1 "general_operand"  "r,mE,r"))]
6094  "TARGET_ARM
6095   && TARGET_SOFT_FLOAT
6096   && (GET_CODE (operands[0]) != MEM
6097       || register_operand (operands[1], SFmode))"
6098  "@
6099   mov%?\\t%0, %1
6100   ldr%?\\t%0, %1\\t%@ float
6101   str%?\\t%1, %0\\t%@ float"
6102  [(set_attr "length" "4,4,4")
6103   (set_attr "predicable" "yes")
6104   (set_attr "type" "*,load1,store1")
6105   (set_attr "pool_range" "*,4096,*")
6106   (set_attr "neg_pool_range" "*,4084,*")]
6107)
6108
6109;;; ??? This should have alternatives for constants.
6110(define_insn "*thumb1_movsf_insn"
6111  [(set (match_operand:SF     0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
6112	(match_operand:SF     1 "general_operand"      "l, >,l,mF,l,*h,*r"))]
6113  "TARGET_THUMB1
6114   && (   register_operand (operands[0], SFmode)
6115       || register_operand (operands[1], SFmode))"
6116  "@
6117   add\\t%0, %1, #0
6118   ldmia\\t%1, {%0}
6119   stmia\\t%0, {%1}
6120   ldr\\t%0, %1
6121   str\\t%1, %0
6122   mov\\t%0, %1
6123   mov\\t%0, %1"
6124  [(set_attr "length" "2")
6125   (set_attr "type" "*,load1,store1,load1,store1,*,*")
6126   (set_attr "pool_range" "*,*,*,1020,*,*,*")]
6127)
6128
6129(define_expand "movdf"
6130  [(set (match_operand:DF 0 "general_operand" "")
6131	(match_operand:DF 1 "general_operand" ""))]
6132  "TARGET_EITHER"
6133  "
6134  if (TARGET_32BIT)
6135    {
6136      if (GET_CODE (operands[0]) == MEM)
6137        operands[1] = force_reg (DFmode, operands[1]);
6138    }
6139  else /* TARGET_THUMB */
6140    {
6141      if (can_create_pseudo_p ())
6142        {
6143          if (GET_CODE (operands[0]) != REG)
6144	    operands[1] = force_reg (DFmode, operands[1]);
6145        }
6146    }
6147  "
6148)
6149
6150;; Reloading a DFmode value held in integer registers to memory can require a
6151;; scratch register.
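;; For example (a sketch; register names are illustrative), spilling the
;; DFmode value in {r0, r1} to a reg+reg (or otherwise awkward) address
;; first forms the address in the scratch and then stores through it:
;;       add     r2, rBASE, rIDX
;;       stm     r2, {r0, r1}            @ or two separate str instructions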
6152(define_expand "reload_outdf"
6153  [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6154   (match_operand:DF 1 "s_register_operand" "r")
6155   (match_operand:SI 2 "s_register_operand" "=&r")]
6156  "TARGET_32BIT"
6157  "
6158  {
6159    enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6160
6161    if (code == REG)
6162      operands[2] = XEXP (operands[0], 0);
6163    else if (code == POST_INC || code == PRE_DEC)
6164      {
6165	operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6166	operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6167	emit_insn (gen_movdi (operands[0], operands[1]));
6168	DONE;
6169      }
6170    else if (code == PRE_INC)
6171      {
6172	rtx reg = XEXP (XEXP (operands[0], 0), 0);
6173
6174	emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6175	operands[2] = reg;
6176      }
6177    else if (code == POST_DEC)
6178      operands[2] = XEXP (XEXP (operands[0], 0), 0);
6179    else
6180      emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6181			     XEXP (XEXP (operands[0], 0), 1)));
6182
6183    emit_insn (gen_rtx_SET (VOIDmode,
6184			    replace_equiv_address (operands[0], operands[2]),
6185			    operands[1]));
6186
6187    if (code == POST_DEC)
6188      emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
6189
6190    DONE;
6191  }"
6192)
6193
6194(define_insn "*movdf_soft_insn"
6195  [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6196	(match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6197  "TARGET_ARM && TARGET_SOFT_FLOAT
6198   && (   register_operand (operands[0], DFmode)
6199       || register_operand (operands[1], DFmode))"
6200  "*
6201  switch (which_alternative)
6202    {
6203    case 0:
6204    case 1:
6205    case 2:
6206      return \"#\";
6207    default:
6208      return output_move_double (operands);
6209    }
6210  "
6211  [(set_attr "length" "8,12,16,8,8")
6212   (set_attr "type" "*,*,*,load2,store2")
6213   (set_attr "pool_range" "1020")
6214   (set_attr "neg_pool_range" "1008")]
6215)
6216
6217;;; ??? This should have alternatives for constants.
6218;;; ??? This was originally identical to the movdi_insn pattern.
6219;;; ??? The 'F' constraint looks funny, but it should always be replaced by
6220;;; thumb_reorg with a memory reference.
6221(define_insn "*thumb_movdf_insn"
6222  [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
6223	(match_operand:DF 1 "general_operand"      "l, >,l,mF,l,*r"))]
6224  "TARGET_THUMB1
6225   && (   register_operand (operands[0], DFmode)
6226       || register_operand (operands[1], DFmode))"
6227  "*
6228  switch (which_alternative)
6229    {
6230    default:
6231    case 0:
6232      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6233	return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
6234      return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
6235    case 1:
6236      return \"ldmia\\t%1, {%0, %H0}\";
6237    case 2:
6238      return \"stmia\\t%0, {%1, %H1}\";
6239    case 3:
6240      return thumb_load_double_from_address (operands);
6241    case 4:
6242      operands[2] = gen_rtx_MEM (SImode,
6243				 plus_constant (XEXP (operands[0], 0), 4));
6244      output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
6245      return \"\";
6246    case 5:
6247      if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
6248	return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
6249      return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
6250    }
6251  "
6252  [(set_attr "length" "4,2,2,6,4,4")
6253   (set_attr "type" "*,load2,store2,load2,store2,*")
6254   (set_attr "pool_range" "*,*,*,1020,*,*")]
6255)
6256
6257(define_expand "movxf"
6258  [(set (match_operand:XF 0 "general_operand" "")
6259	(match_operand:XF 1 "general_operand" ""))]
6260  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
6261  "
6262  if (GET_CODE (operands[0]) == MEM)
6263    operands[1] = force_reg (XFmode, operands[1]);
6264  "
6265)
6266
6267
6268
6269;; load- and store-multiple insns
6270;; The ARM can load/store any set of registers, provided that they are in
6271;; ascending order; but that is beyond GCC, so stick with what it knows.
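;; For example, the patterns below match block transfers such as
;;       ldmia   r1!, {r4, r5, r6, r7}
;; which loads four consecutive words starting at [r1] and advances r1 by 16.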
6272
6273(define_expand "load_multiple"
6274  [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6275                          (match_operand:SI 1 "" ""))
6276                     (use (match_operand:SI 2 "" ""))])]
6277  "TARGET_32BIT"
6278{
6279  HOST_WIDE_INT offset = 0;
6280
6281  /* Support only the core (integer) registers.  */
6282  if (GET_CODE (operands[2]) != CONST_INT
6283      || INTVAL (operands[2]) > 14
6284      || INTVAL (operands[2]) < 2
6285      || GET_CODE (operands[1]) != MEM
6286      || GET_CODE (operands[0]) != REG
6287      || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
6288      || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6289    FAIL;
6290
6291  operands[3]
6292    = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
6293			     force_reg (SImode, XEXP (operands[1], 0)),
6294			     TRUE, FALSE, operands[1], &offset);
6295})
6296
6297;; Load multiple with write-back
6298
6299(define_insn "*ldmsi_postinc4"
6300  [(match_parallel 0 "load_multiple_operation"
6301    [(set (match_operand:SI 1 "s_register_operand" "=r")
6302	  (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6303		   (const_int 16)))
6304     (set (match_operand:SI 3 "arm_hard_register_operand" "")
6305	  (mem:SI (match_dup 2)))
6306     (set (match_operand:SI 4 "arm_hard_register_operand" "")
6307	  (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6308     (set (match_operand:SI 5 "arm_hard_register_operand" "")
6309	  (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6310     (set (match_operand:SI 6 "arm_hard_register_operand" "")
6311	  (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6312  "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6313  "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6314  [(set_attr "type" "load4")
6315   (set_attr "predicable" "yes")]
6316)
6317
6318(define_insn "*ldmsi_postinc4_thumb1"
6319  [(match_parallel 0 "load_multiple_operation"
6320    [(set (match_operand:SI 1 "s_register_operand" "=l")
6321	  (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6322		   (const_int 16)))
6323     (set (match_operand:SI 3 "arm_hard_register_operand" "")
6324	  (mem:SI (match_dup 2)))
6325     (set (match_operand:SI 4 "arm_hard_register_operand" "")
6326	  (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6327     (set (match_operand:SI 5 "arm_hard_register_operand" "")
6328	  (mem:SI (plus:SI (match_dup 2) (const_int 8))))
6329     (set (match_operand:SI 6 "arm_hard_register_operand" "")
6330	  (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
6331  "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6332  "ldmia\\t%1!, {%3, %4, %5, %6}"
6333  [(set_attr "type" "load4")]
6334)
6335
6336(define_insn "*ldmsi_postinc3"
6337  [(match_parallel 0 "load_multiple_operation"
6338    [(set (match_operand:SI 1 "s_register_operand" "=r")
6339	  (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6340		   (const_int 12)))
6341     (set (match_operand:SI 3 "arm_hard_register_operand" "")
6342	  (mem:SI (match_dup 2)))
6343     (set (match_operand:SI 4 "arm_hard_register_operand" "")
6344	  (mem:SI (plus:SI (match_dup 2) (const_int 4))))
6345     (set (match_operand:SI 5 "arm_hard_register_operand" "")
6346	  (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
6347  "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6348  "ldm%(ia%)\\t%1!, {%3, %4, %5}"
6349  [(set_attr "type" "load3")
6350   (set_attr "predicable" "yes")]
6351)
6352
6353(define_insn "*ldmsi_postinc2"
6354  [(match_parallel 0 "load_multiple_operation"
6355    [(set (match_operand:SI 1 "s_register_operand" "=r")
6356	  (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6357		   (const_int 8)))
6358     (set (match_operand:SI 3 "arm_hard_register_operand" "")
6359	  (mem:SI (match_dup 2)))
6360     (set (match_operand:SI 4 "arm_hard_register_operand" "")
6361	  (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
6362  "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6363  "ldm%(ia%)\\t%1!, {%3, %4}"
6364  [(set_attr "type" "load2")
6365   (set_attr "predicable" "yes")]
6366)
6367
6368;; Ordinary load multiple
6369
6370(define_insn "*ldmsi4"
6371  [(match_parallel 0 "load_multiple_operation"
6372    [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6373	  (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6374     (set (match_operand:SI 3 "arm_hard_register_operand" "")
6375	  (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6376     (set (match_operand:SI 4 "arm_hard_register_operand" "")
6377	  (mem:SI (plus:SI (match_dup 1) (const_int 8))))
6378     (set (match_operand:SI 5 "arm_hard_register_operand" "")
6379	  (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
6380  "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6381  "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6382  [(set_attr "type" "load4")
6383   (set_attr "predicable" "yes")]
6384)
6385
6386(define_insn "*ldmsi3"
6387  [(match_parallel 0 "load_multiple_operation"
6388    [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6389	  (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6390     (set (match_operand:SI 3 "arm_hard_register_operand" "")
6391	  (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6392     (set (match_operand:SI 4 "arm_hard_register_operand" "")
6393	  (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6394  "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6395  "ldm%(ia%)\\t%1, {%2, %3, %4}"
6396  [(set_attr "type" "load3")
6397   (set_attr "predicable" "yes")]
6398)
6399
6400(define_insn "*ldmsi2"
6401  [(match_parallel 0 "load_multiple_operation"
6402    [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6403	  (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6404     (set (match_operand:SI 3 "arm_hard_register_operand" "")
6405	  (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6406  "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6407  "ldm%(ia%)\\t%1, {%2, %3}"
6408  [(set_attr "type" "load2")
6409   (set_attr "predicable" "yes")]
6410)
6411
6412(define_expand "store_multiple"
6413  [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6414                          (match_operand:SI 1 "" ""))
6415                     (use (match_operand:SI 2 "" ""))])]
6416  "TARGET_32BIT"
6417{
6418  HOST_WIDE_INT offset = 0;
6419
6420  /* Support only fixed-point (core) registers.  */
6421  if (GET_CODE (operands[2]) != CONST_INT
6422      || INTVAL (operands[2]) > 14
6423      || INTVAL (operands[2]) < 2
6424      || GET_CODE (operands[1]) != REG
6425      || GET_CODE (operands[0]) != MEM
6426      || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6427      || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6428    FAIL;
6429
6430  operands[3]
6431    = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6432			      force_reg (SImode, XEXP (operands[0], 0)),
6433			      TRUE, FALSE, operands[0], &offset);
6434})
6435
6436;; Store multiple with write-back
6437
6438(define_insn "*stmsi_postinc4"
6439  [(match_parallel 0 "store_multiple_operation"
6440    [(set (match_operand:SI 1 "s_register_operand" "=r")
6441	  (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6442		   (const_int 16)))
6443     (set (mem:SI (match_dup 2))
6444	  (match_operand:SI 3 "arm_hard_register_operand" ""))
6445     (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6446	  (match_operand:SI 4 "arm_hard_register_operand" ""))
6447     (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6448	  (match_operand:SI 5 "arm_hard_register_operand" ""))
6449     (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6450	  (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6451  "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6452  "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6453  [(set_attr "predicable" "yes")
6454   (set_attr "type" "store4")]
6455)
6456
6457(define_insn "*stmsi_postinc4_thumb1"
6458  [(match_parallel 0 "store_multiple_operation"
6459    [(set (match_operand:SI 1 "s_register_operand" "=l")
6460	  (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6461		   (const_int 16)))
6462     (set (mem:SI (match_dup 2))
6463	  (match_operand:SI 3 "arm_hard_register_operand" ""))
6464     (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6465	  (match_operand:SI 4 "arm_hard_register_operand" ""))
6466     (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6467	  (match_operand:SI 5 "arm_hard_register_operand" ""))
6468     (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6469	  (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6470  "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6471  "stmia\\t%1!, {%3, %4, %5, %6}"
6472  [(set_attr "type" "store4")]
6473)
6474
6475(define_insn "*stmsi_postinc3"
6476  [(match_parallel 0 "store_multiple_operation"
6477    [(set (match_operand:SI 1 "s_register_operand" "=r")
6478	  (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6479		   (const_int 12)))
6480     (set (mem:SI (match_dup 2))
6481	  (match_operand:SI 3 "arm_hard_register_operand" ""))
6482     (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6483	  (match_operand:SI 4 "arm_hard_register_operand" ""))
6484     (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6485	  (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6486  "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6487  "stm%(ia%)\\t%1!, {%3, %4, %5}"
6488  [(set_attr "predicable" "yes")
6489   (set_attr "type" "store3")]
6490)
6491
6492(define_insn "*stmsi_postinc2"
6493  [(match_parallel 0 "store_multiple_operation"
6494    [(set (match_operand:SI 1 "s_register_operand" "=r")
6495	  (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6496		   (const_int 8)))
6497     (set (mem:SI (match_dup 2))
6498	  (match_operand:SI 3 "arm_hard_register_operand" ""))
6499     (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6500	  (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6501  "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6502  "stm%(ia%)\\t%1!, {%3, %4}"
6503  [(set_attr "predicable" "yes")
6504   (set_attr "type" "store2")]
6505)
6506
6507;; Ordinary store multiple
6508
6509(define_insn "*stmsi4"
6510  [(match_parallel 0 "store_multiple_operation"
6511    [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6512	  (match_operand:SI 2 "arm_hard_register_operand" ""))
6513     (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6514	  (match_operand:SI 3 "arm_hard_register_operand" ""))
6515     (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6516	  (match_operand:SI 4 "arm_hard_register_operand" ""))
6517     (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6518	  (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6519  "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6520  "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6521  [(set_attr "predicable" "yes")
6522   (set_attr "type" "store4")]
6523)
6524
6525(define_insn "*stmsi3"
6526  [(match_parallel 0 "store_multiple_operation"
6527    [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6528	  (match_operand:SI 2 "arm_hard_register_operand" ""))
6529     (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6530	  (match_operand:SI 3 "arm_hard_register_operand" ""))
6531     (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6532	  (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6533  "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6534  "stm%(ia%)\\t%1, {%2, %3, %4}"
6535  [(set_attr "predicable" "yes")
6536   (set_attr "type" "store3")]
6537)
6538
6539(define_insn "*stmsi2"
6540  [(match_parallel 0 "store_multiple_operation"
6541    [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6542	  (match_operand:SI 2 "arm_hard_register_operand" ""))
6543     (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6544	  (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6545  "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6546  "stm%(ia%)\\t%1, {%2, %3}"
6547  [(set_attr "predicable" "yes")
6548   (set_attr "type" "store2")]
6549)
6550
6551;; Move a block of memory if it is word aligned and MORE than 2 words long.
6552;; We could let this apply for blocks of less than this, but it clobbers so
6553;; many registers that there is then probably a better way.
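;;
;; As an illustration only (the register choice is hypothetical, not what the
;; expander necessarily picks), a word-aligned twelve-byte copy on Thumb-1 can
;; be handled by "movmem12b" below, which moves three words at a time through
;; three low scratch registers and advances both pointers by 12, roughly:
;;
;;	ldmia	r1!, {r4, r5, r6}
;;	stmia	r0!, {r4, r5, r6}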
6554
6555(define_expand "movmemqi"
6556  [(match_operand:BLK 0 "general_operand" "")
6557   (match_operand:BLK 1 "general_operand" "")
6558   (match_operand:SI 2 "const_int_operand" "")
6559   (match_operand:SI 3 "const_int_operand" "")]
6560  "TARGET_EITHER"
6561  "
6562  if (TARGET_32BIT)
6563    {
6564      if (arm_gen_movmemqi (operands))
6565        DONE;
6566      FAIL;
6567    }
6568  else /* TARGET_THUMB1 */
6569    {
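      /* Only word-aligned copies (alignment operand of 4) of at most
	 48 bytes are expanded inline here; anything else FAILs and is
	 left to the generic block-move code.  */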
6570      if (   INTVAL (operands[3]) != 4
6571          || INTVAL (operands[2]) > 48)
6572        FAIL;
6573
6574      thumb_expand_movmemqi (operands);
6575      DONE;
6576    }
6577  "
6578)
6579
6580;; Thumb block-move insns
6581
6582(define_insn "movmem12b"
6583  [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6584	(mem:SI (match_operand:SI 3 "register_operand" "1")))
6585   (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6586	(mem:SI (plus:SI (match_dup 3) (const_int 4))))
6587   (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6588	(mem:SI (plus:SI (match_dup 3) (const_int 8))))
6589   (set (match_operand:SI 0 "register_operand" "=l")
6590	(plus:SI (match_dup 2) (const_int 12)))
6591   (set (match_operand:SI 1 "register_operand" "=l")
6592	(plus:SI (match_dup 3) (const_int 12)))
6593   (clobber (match_scratch:SI 4 "=&l"))
6594   (clobber (match_scratch:SI 5 "=&l"))
6595   (clobber (match_scratch:SI 6 "=&l"))]
6596  "TARGET_THUMB1"
6597  "* return thumb_output_move_mem_multiple (3, operands);"
6598  [(set_attr "length" "4")
6599   ; This isn't entirely accurate...  It loads as well, but in terms of
6600   ; scheduling the following insn it is better to consider it as a store
6601   (set_attr "type" "store3")]
6602)
6603
6604(define_insn "movmem8b"
6605  [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6606	(mem:SI (match_operand:SI 3 "register_operand" "1")))
6607   (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6608	(mem:SI (plus:SI (match_dup 3) (const_int 4))))
6609   (set (match_operand:SI 0 "register_operand" "=l")
6610	(plus:SI (match_dup 2) (const_int 8)))
6611   (set (match_operand:SI 1 "register_operand" "=l")
6612	(plus:SI (match_dup 3) (const_int 8)))
6613   (clobber (match_scratch:SI 4 "=&l"))
6614   (clobber (match_scratch:SI 5 "=&l"))]
6615  "TARGET_THUMB1"
6616  "* return thumb_output_move_mem_multiple (2, operands);"
6617  [(set_attr "length" "4")
6618   ; This isn't entirely accurate...  It loads as well, but in terms of
6619   ; scheduling the following insn it is better to consider it as a store
6620   (set_attr "type" "store2")]
6621)
6622
6623
6624
6625;; Compare & branch insns
6626;; The range calculations are derived as follows:
6627;; For forward branches, the address calculation returns the address of
6628;; the next instruction.  This is 2 beyond the branch instruction.
6629;; For backward branches, the address calculation returns the address of
6630;; the first instruction in this pattern (cmp).  This is 2 before the branch
6631;; instruction for the shortest sequence, and 4 before the branch instruction
6632;; if we have to jump around an unconditional branch.
6633;; To the basic branch range the PC offset must be added (this is +4).
6634;; So for forward branches we have
6635;;   (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6636;; And for backward branches we have
6637;;   (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6638;;
6639;; For a 'b'       pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6640;; For a 'b<cond>' pos_range = 254,  neg_range = -256  giving (-250 ->256).
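;;
;; As an illustration, when the label is out of range for a short conditional
;; branch the insns below invert the condition and branch around either an
;; unconditional 'b' (the 6-byte case) or, beyond that range, a 'bl' far jump
;; (the 8-byte case).  A sketch of the 8-byte form of *cbranchsi4_insn
;; (register and label names arbitrary):
;;
;;	cmp	r0, r1
;;	bne	.LCBnn		@ inverted condition
;;	bl	.Ltarget	@far jump
;; .LCBnn:
;;
;; The length attribute on each pattern selects between the 4-, 6- and 8-byte
;; forms (or two bytes more when an extra move or store is needed) using the
;; (minus (match_dup n) (pc)) ranges given above.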
6641
6642(define_expand "cbranchsi4"
6643  [(set (pc) (if_then_else
6644	      (match_operator 0 "arm_comparison_operator"
6645	       [(match_operand:SI 1 "s_register_operand" "")
6646	        (match_operand:SI 2 "nonmemory_operand" "")])
6647	      (label_ref (match_operand 3 "" ""))
6648	      (pc)))]
6649  "TARGET_THUMB1 || TARGET_32BIT"
6650  "
6651  if (!TARGET_THUMB1)
6652    {
6653      if (!arm_add_operand (operands[2], SImode))
6654	operands[2] = force_reg (SImode, operands[2]);
6655      emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6656				      operands[3]));
6657      DONE;
6658    }
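  /* On Thumb-1, a constant that is only in range when negated is handled
     by adding its negation into a scratch register (cbranchsi4_scratch);
     any other operand that is not a valid Thumb-1 comparison operand is
     forced into a register.  */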
6659  if (thumb1_cmpneg_operand (operands[2], SImode))
6660    {
6661      emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6662					      operands[3], operands[0]));
6663      DONE;
6664    }
6665  if (!thumb1_cmp_operand (operands[2], SImode))
6666    operands[2] = force_reg (SImode, operands[2]);
6667  ")
6668
6669(define_expand "cbranchsf4"
6670  [(set (pc) (if_then_else
6671	      (match_operator 0 "arm_comparison_operator"
6672	       [(match_operand:SF 1 "s_register_operand" "")
6673	        (match_operand:SF 2 "arm_float_compare_operand" "")])
6674	      (label_ref (match_operand 3 "" ""))
6675	      (pc)))]
6676  "TARGET_32BIT && TARGET_HARD_FLOAT"
6677  "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6678				   operands[3])); DONE;"
6679)
6680
6681(define_expand "cbranchdf4"
6682  [(set (pc) (if_then_else
6683	      (match_operator 0 "arm_comparison_operator"
6684	       [(match_operand:DF 1 "s_register_operand" "")
6685	        (match_operand:DF 2 "arm_float_compare_operand" "")])
6686	      (label_ref (match_operand 3 "" ""))
6687	      (pc)))]
6688  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6689  "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6690				   operands[3])); DONE;"
6691)
6692
6693;; This uses the Cirrus DI compare instruction
6694(define_expand "cbranchdi4"
6695  [(set (pc) (if_then_else
6696	      (match_operator 0 "arm_comparison_operator"
6697	       [(match_operand:DI 1 "cirrus_fp_register" "")
6698	        (match_operand:DI 2 "cirrus_fp_register" "")])
6699	      (label_ref (match_operand 3 "" ""))
6700	      (pc)))]
6701  "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
6702  "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
6703				   operands[3])); DONE;"
6704)
6705
6706(define_insn "*cbranchsi4_insn"
6707  [(set (pc) (if_then_else
6708	      (match_operator 0 "arm_comparison_operator"
6709	       [(match_operand:SI 1 "s_register_operand" "l,*h")
6710	        (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6711	      (label_ref (match_operand 3 "" ""))
6712	      (pc)))]
6713  "TARGET_THUMB1"
6714  "*
6715  output_asm_insn (\"cmp\\t%1, %2\", operands);
6716
6717  switch (get_attr_length (insn))
6718    {
6719    case 4:  return \"b%d0\\t%l3\";
6720    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6721    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6722    }
6723  "
6724  [(set (attr "far_jump")
6725        (if_then_else
6726	    (eq_attr "length" "8")
6727	    (const_string "yes")
6728            (const_string "no")))
6729   (set (attr "length")
6730        (if_then_else
6731	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6732	         (le (minus (match_dup 3) (pc)) (const_int 256)))
6733	    (const_int 4)
6734	    (if_then_else
6735	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6736		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
6737		(const_int 6)
6738		(const_int 8))))]
6739)
6740
6741(define_insn "cbranchsi4_scratch"
6742  [(set (pc) (if_then_else
6743	      (match_operator 4 "arm_comparison_operator"
6744	       [(match_operand:SI 1 "s_register_operand" "l,0")
6745	        (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6746	      (label_ref (match_operand 3 "" ""))
6747	      (pc)))
6748   (clobber (match_scratch:SI 0 "=l,l"))]
6749  "TARGET_THUMB1"
6750  "*
6751  output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6752
6753  switch (get_attr_length (insn))
6754    {
6755    case 4:  return \"b%d4\\t%l3\";
6756    case 6:  return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6757    default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6758    }
6759  "
6760  [(set (attr "far_jump")
6761        (if_then_else
6762	    (eq_attr "length" "8")
6763	    (const_string "yes")
6764            (const_string "no")))
6765   (set (attr "length")
6766        (if_then_else
6767	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6768	         (le (minus (match_dup 3) (pc)) (const_int 256)))
6769	    (const_int 4)
6770	    (if_then_else
6771	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6772		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
6773		(const_int 6)
6774		(const_int 8))))]
6775)
6776
6777(define_insn "*movsi_cbranchsi4"
6778  [(set (pc)
6779	(if_then_else
6780	 (match_operator 3 "arm_comparison_operator"
6781	  [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6782	   (const_int 0)])
6783	 (label_ref (match_operand 2 "" ""))
6784	 (pc)))
6785   (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6786	(match_dup 1))]
6787  "TARGET_THUMB1"
6788  "*{
6789  if (which_alternative == 0)
6790    output_asm_insn (\"cmp\t%0, #0\", operands);
6791  else if (which_alternative == 1)
6792    output_asm_insn (\"sub\t%0, %1, #0\", operands);
6793  else
6794    {
6795      output_asm_insn (\"cmp\t%1, #0\", operands);
6796      if (which_alternative == 2)
6797	output_asm_insn (\"mov\t%0, %1\", operands);
6798      else
6799	output_asm_insn (\"str\t%1, %0\", operands);
6800    }
6801  switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6802    {
6803    case 4:  return \"b%d3\\t%l2\";
6804    case 6:  return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6805    default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6806    }
6807  }"
6808  [(set (attr "far_jump")
6809        (if_then_else
6810	    (ior (and (gt (symbol_ref ("which_alternative"))
6811	                  (const_int 1))
6812		      (eq_attr "length" "8"))
6813		 (eq_attr "length" "10"))
6814	    (const_string "yes")
6815            (const_string "no")))
6816   (set (attr "length")
6817     (if_then_else
6818       (le (symbol_ref ("which_alternative"))
6819		       (const_int 1))
6820       (if_then_else
6821	 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6822	      (le (minus (match_dup 2) (pc)) (const_int 256)))
6823	 (const_int 4)
6824	 (if_then_else
6825	   (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6826		(le (minus (match_dup 2) (pc)) (const_int 2048)))
6827	   (const_int 6)
6828	   (const_int 8)))
6829       (if_then_else
6830	 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6831	      (le (minus (match_dup 2) (pc)) (const_int 256)))
6832	 (const_int 6)
6833	 (if_then_else
6834	   (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6835		(le (minus (match_dup 2) (pc)) (const_int 2048)))
6836	   (const_int 8)
6837	   (const_int 10)))))]
6838)
6839
6840(define_peephole2
6841  [(set (match_operand:SI 0 "low_register_operand" "")
6842	(match_operand:SI 1 "low_register_operand" ""))
6843   (set (pc)
6844	(if_then_else (match_operator 2 "arm_comparison_operator"
6845		       [(match_dup 1) (const_int 0)])
6846		      (label_ref (match_operand 3 "" ""))
6847		      (pc)))]
6848  "TARGET_THUMB1"
6849  [(parallel
6850    [(set (pc)
6851	(if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
6852		      (label_ref (match_dup 3))
6853		      (pc)))
6854     (set (match_dup 0) (match_dup 1))])]
6855  ""
6856)
6857
6858;; Sigh!  This variant shouldn't be needed, but combine often fails to
6859;; merge cases like this because the op1 is a hard register in
6860;; CLASS_LIKELY_SPILLED_P.
6861(define_peephole2
6862  [(set (match_operand:SI 0 "low_register_operand" "")
6863	(match_operand:SI 1 "low_register_operand" ""))
6864   (set (pc)
6865	(if_then_else (match_operator 2 "arm_comparison_operator"
6866		       [(match_dup 0) (const_int 0)])
6867		      (label_ref (match_operand 3 "" ""))
6868		      (pc)))]
6869  "TARGET_THUMB1"
6870  [(parallel
6871    [(set (pc)
6872	(if_then_else (match_op_dup 2 [(match_dup 1) (const_int 0)])
6873		      (label_ref (match_dup 3))
6874		      (pc)))
6875     (set (match_dup 0) (match_dup 1))])]
6876  ""
6877)
6878
6879(define_insn "*negated_cbranchsi4"
6880  [(set (pc)
6881	(if_then_else
6882	 (match_operator 0 "equality_operator"
6883	  [(match_operand:SI 1 "s_register_operand" "l")
6884	   (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6885	 (label_ref (match_operand 3 "" ""))
6886	 (pc)))]
6887  "TARGET_THUMB1"
6888  "*
6889  output_asm_insn (\"cmn\\t%1, %2\", operands);
6890  switch (get_attr_length (insn))
6891    {
6892    case 4:  return \"b%d0\\t%l3\";
6893    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6894    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6895    }
6896  "
6897  [(set (attr "far_jump")
6898        (if_then_else
6899	    (eq_attr "length" "8")
6900	    (const_string "yes")
6901            (const_string "no")))
6902   (set (attr "length")
6903        (if_then_else
6904	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6905	         (le (minus (match_dup 3) (pc)) (const_int 256)))
6906	    (const_int 4)
6907	    (if_then_else
6908	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6909		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
6910		(const_int 6)
6911		(const_int 8))))]
6912)
6913
6914(define_insn "*tbit_cbranch"
6915  [(set (pc)
6916	(if_then_else
6917	 (match_operator 0 "equality_operator"
6918	  [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6919			    (const_int 1)
6920			    (match_operand:SI 2 "const_int_operand" "i"))
6921	   (const_int 0)])
6922	 (label_ref (match_operand 3 "" ""))
6923	 (pc)))
6924   (clobber (match_scratch:SI 4 "=l"))]
6925  "TARGET_THUMB1"
6926  "*
6927  {
6928  rtx op[3];
6929  op[0] = operands[4];
6930  op[1] = operands[1];
6931  op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6932
6933  output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6934  switch (get_attr_length (insn))
6935    {
6936    case 4:  return \"b%d0\\t%l3\";
6937    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6938    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6939    }
6940  }"
6941  [(set (attr "far_jump")
6942        (if_then_else
6943	    (eq_attr "length" "8")
6944	    (const_string "yes")
6945            (const_string "no")))
6946   (set (attr "length")
6947        (if_then_else
6948	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6949	         (le (minus (match_dup 3) (pc)) (const_int 256)))
6950	    (const_int 4)
6951	    (if_then_else
6952	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6953		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
6954		(const_int 6)
6955		(const_int 8))))]
6956)
6957
6958(define_insn "*tlobits_cbranch"
6959  [(set (pc)
6960	(if_then_else
6961	 (match_operator 0 "equality_operator"
6962	  [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6963			    (match_operand:SI 2 "const_int_operand" "i")
6964			    (const_int 0))
6965	   (const_int 0)])
6966	 (label_ref (match_operand 3 "" ""))
6967	 (pc)))
6968   (clobber (match_scratch:SI 4 "=l"))]
6969  "TARGET_THUMB1"
6970  "*
6971  {
6972  rtx op[3];
6973  op[0] = operands[4];
6974  op[1] = operands[1];
6975  op[2] = GEN_INT (32 - INTVAL (operands[2]));
6976
6977  output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6978  switch (get_attr_length (insn))
6979    {
6980    case 4:  return \"b%d0\\t%l3\";
6981    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6982    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6983    }
6984  }"
6985  [(set (attr "far_jump")
6986        (if_then_else
6987	    (eq_attr "length" "8")
6988	    (const_string "yes")
6989            (const_string "no")))
6990   (set (attr "length")
6991        (if_then_else
6992	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6993	         (le (minus (match_dup 3) (pc)) (const_int 256)))
6994	    (const_int 4)
6995	    (if_then_else
6996	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6997		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
6998		(const_int 6)
6999		(const_int 8))))]
7000)
7001
7002(define_insn "*tstsi3_cbranch"
7003  [(set (pc)
7004	(if_then_else
7005	 (match_operator 3 "equality_operator"
7006	  [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
7007		   (match_operand:SI 1 "s_register_operand" "l"))
7008	   (const_int 0)])
7009	 (label_ref (match_operand 2 "" ""))
7010	 (pc)))]
7011  "TARGET_THUMB1"
7012  "*
7013  {
7014  output_asm_insn (\"tst\\t%0, %1\", operands);
7015  switch (get_attr_length (insn))
7016    {
7017    case 4:  return \"b%d3\\t%l2\";
7018    case 6:  return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
7019    default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
7020    }
7021  }"
7022  [(set (attr "far_jump")
7023        (if_then_else
7024	    (eq_attr "length" "8")
7025	    (const_string "yes")
7026            (const_string "no")))
7027   (set (attr "length")
7028        (if_then_else
7029	    (and (ge (minus (match_dup 2) (pc)) (const_int -250))
7030	         (le (minus (match_dup 2) (pc)) (const_int 256)))
7031	    (const_int 4)
7032	    (if_then_else
7033	        (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
7034		     (le (minus (match_dup 2) (pc)) (const_int 2048)))
7035		(const_int 6)
7036		(const_int 8))))]
7037)
7038
7039(define_insn "*andsi3_cbranch"
7040  [(set (pc)
7041	(if_then_else
7042	 (match_operator 5 "equality_operator"
7043	  [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7044		   (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7045	   (const_int 0)])
7046	 (label_ref (match_operand 4 "" ""))
7047	 (pc)))
7048   (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7049	(and:SI (match_dup 2) (match_dup 3)))
7050   (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7051  "TARGET_THUMB1"
7052  "*
7053  {
7054  if (which_alternative == 0)
7055    output_asm_insn (\"and\\t%0, %3\", operands);
7056  else if (which_alternative == 1)
7057    {
7058      output_asm_insn (\"and\\t%1, %3\", operands);
7059      output_asm_insn (\"mov\\t%0, %1\", operands);
7060    }
7061  else
7062    {
7063      output_asm_insn (\"and\\t%1, %3\", operands);
7064      output_asm_insn (\"str\\t%1, %0\", operands);
7065    }
7066
7067  switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7068    {
7069    case 4:  return \"b%d5\\t%l4\";
7070    case 6:  return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7071    default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7072    }
7073  }"
7074  [(set (attr "far_jump")
7075        (if_then_else
7076	    (ior (and (eq (symbol_ref ("which_alternative"))
7077	                  (const_int 0))
7078		      (eq_attr "length" "8"))
7079		 (eq_attr "length" "10"))
7080	    (const_string "yes")
7081            (const_string "no")))
7082   (set (attr "length")
7083     (if_then_else
7084       (eq (symbol_ref ("which_alternative"))
7085		       (const_int 0))
7086       (if_then_else
7087	 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7088	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7089	 (const_int 4)
7090	 (if_then_else
7091	   (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7092		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7093	   (const_int 6)
7094	   (const_int 8)))
7095       (if_then_else
7096	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7097	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7098	 (const_int 6)
7099	 (if_then_else
7100	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7101		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7102	   (const_int 8)
7103	   (const_int 10)))))]
7104)
7105
7106(define_insn "*orrsi3_cbranch_scratch"
7107  [(set (pc)
7108	(if_then_else
7109	 (match_operator 4 "equality_operator"
7110	  [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
7111		   (match_operand:SI 2 "s_register_operand" "l"))
7112	   (const_int 0)])
7113	 (label_ref (match_operand 3 "" ""))
7114	 (pc)))
7115   (clobber (match_scratch:SI 0 "=l"))]
7116  "TARGET_THUMB1"
7117  "*
7118  {
7119  output_asm_insn (\"orr\\t%0, %2\", operands);
7120  switch (get_attr_length (insn))
7121    {
7122    case 4:  return \"b%d4\\t%l3\";
7123    case 6:  return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7124    default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7125    }
7126  }"
7127  [(set (attr "far_jump")
7128        (if_then_else
7129	    (eq_attr "length" "8")
7130	    (const_string "yes")
7131            (const_string "no")))
7132   (set (attr "length")
7133        (if_then_else
7134	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7135	         (le (minus (match_dup 3) (pc)) (const_int 256)))
7136	    (const_int 4)
7137	    (if_then_else
7138	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7139		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
7140		(const_int 6)
7141		(const_int 8))))]
7142)
7143
7144(define_insn "*orrsi3_cbranch"
7145  [(set (pc)
7146	(if_then_else
7147	 (match_operator 5 "equality_operator"
7148	  [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7149		   (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7150	   (const_int 0)])
7151	 (label_ref (match_operand 4 "" ""))
7152	 (pc)))
7153   (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7154	(ior:SI (match_dup 2) (match_dup 3)))
7155   (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7156  "TARGET_THUMB1"
7157  "*
7158  {
7159  if (which_alternative == 0)
7160    output_asm_insn (\"orr\\t%0, %3\", operands);
7161  else if (which_alternative == 1)
7162    {
7163      output_asm_insn (\"orr\\t%1, %3\", operands);
7164      output_asm_insn (\"mov\\t%0, %1\", operands);
7165    }
7166  else
7167    {
7168      output_asm_insn (\"orr\\t%1, %3\", operands);
7169      output_asm_insn (\"str\\t%1, %0\", operands);
7170    }
7171
7172  switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7173    {
7174    case 4:  return \"b%d5\\t%l4\";
7175    case 6:  return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7176    default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7177    }
7178  }"
7179  [(set (attr "far_jump")
7180        (if_then_else
7181	    (ior (and (eq (symbol_ref ("which_alternative"))
7182	                  (const_int 0))
7183		      (eq_attr "length" "8"))
7184		 (eq_attr "length" "10"))
7185	    (const_string "yes")
7186            (const_string "no")))
7187   (set (attr "length")
7188     (if_then_else
7189       (eq (symbol_ref ("which_alternative"))
7190		       (const_int 0))
7191       (if_then_else
7192	 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7193	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7194	 (const_int 4)
7195	 (if_then_else
7196	   (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7197		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7198	   (const_int 6)
7199	   (const_int 8)))
7200       (if_then_else
7201	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7202	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7203	 (const_int 6)
7204	 (if_then_else
7205	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7206		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7207	   (const_int 8)
7208	   (const_int 10)))))]
7209)
7210
7211(define_insn "*xorsi3_cbranch_scratch"
7212  [(set (pc)
7213	(if_then_else
7214	 (match_operator 4 "equality_operator"
7215	  [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
7216		   (match_operand:SI 2 "s_register_operand" "l"))
7217	   (const_int 0)])
7218	 (label_ref (match_operand 3 "" ""))
7219	 (pc)))
7220   (clobber (match_scratch:SI 0 "=l"))]
7221  "TARGET_THUMB1"
7222  "*
7223  {
7224  output_asm_insn (\"eor\\t%0, %2\", operands);
7225  switch (get_attr_length (insn))
7226    {
7227    case 4:  return \"b%d4\\t%l3\";
7228    case 6:  return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7229    default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7230    }
7231  }"
7232  [(set (attr "far_jump")
7233        (if_then_else
7234	    (eq_attr "length" "8")
7235	    (const_string "yes")
7236            (const_string "no")))
7237   (set (attr "length")
7238        (if_then_else
7239	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7240	         (le (minus (match_dup 3) (pc)) (const_int 256)))
7241	    (const_int 4)
7242	    (if_then_else
7243	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7244		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
7245		(const_int 6)
7246		(const_int 8))))]
7247)
7248
7249(define_insn "*xorsi3_cbranch"
7250  [(set (pc)
7251	(if_then_else
7252	 (match_operator 5 "equality_operator"
7253	  [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
7254		   (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7255	   (const_int 0)])
7256	 (label_ref (match_operand 4 "" ""))
7257	 (pc)))
7258   (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7259	(xor:SI (match_dup 2) (match_dup 3)))
7260   (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7261  "TARGET_THUMB1"
7262  "*
7263  {
7264  if (which_alternative == 0)
7265    output_asm_insn (\"eor\\t%0, %3\", operands);
7266  else if (which_alternative == 1)
7267    {
7268      output_asm_insn (\"eor\\t%1, %3\", operands);
7269      output_asm_insn (\"mov\\t%0, %1\", operands);
7270    }
7271  else
7272    {
7273      output_asm_insn (\"eor\\t%1, %3\", operands);
7274      output_asm_insn (\"str\\t%1, %0\", operands);
7275    }
7276
7277  switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7278    {
7279    case 4:  return \"b%d5\\t%l4\";
7280    case 6:  return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7281    default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7282    }
7283  }"
7284  [(set (attr "far_jump")
7285        (if_then_else
7286	    (ior (and (eq (symbol_ref ("which_alternative"))
7287	                  (const_int 0))
7288		      (eq_attr "length" "8"))
7289		 (eq_attr "length" "10"))
7290	    (const_string "yes")
7291            (const_string "no")))
7292   (set (attr "length")
7293     (if_then_else
7294       (eq (symbol_ref ("which_alternative"))
7295		       (const_int 0))
7296       (if_then_else
7297	 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7298	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7299	 (const_int 4)
7300	 (if_then_else
7301	   (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7302		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7303	   (const_int 6)
7304	   (const_int 8)))
7305       (if_then_else
7306	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7307	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7308	 (const_int 6)
7309	 (if_then_else
7310	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7311		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7312	   (const_int 8)
7313	   (const_int 10)))))]
7314)
7315
7316(define_insn "*bicsi3_cbranch_scratch"
7317  [(set (pc)
7318	(if_then_else
7319	 (match_operator 4 "equality_operator"
7320	  [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
7321		   (match_operand:SI 1 "s_register_operand" "0"))
7322	   (const_int 0)])
7323	 (label_ref (match_operand 3 "" ""))
7324	 (pc)))
7325   (clobber (match_scratch:SI 0 "=l"))]
7326  "TARGET_THUMB1"
7327  "*
7328  {
7329  output_asm_insn (\"bic\\t%0, %2\", operands);
7330  switch (get_attr_length (insn))
7331    {
7332    case 4:  return \"b%d4\\t%l3\";
7333    case 6:  return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7334    default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7335    }
7336  }"
7337  [(set (attr "far_jump")
7338        (if_then_else
7339	    (eq_attr "length" "8")
7340	    (const_string "yes")
7341            (const_string "no")))
7342   (set (attr "length")
7343        (if_then_else
7344	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7345	         (le (minus (match_dup 3) (pc)) (const_int 256)))
7346	    (const_int 4)
7347	    (if_then_else
7348	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7349		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
7350		(const_int 6)
7351		(const_int 8))))]
7352)
7353
7354(define_insn "*bicsi3_cbranch"
7355  [(set (pc)
7356	(if_then_else
7357	 (match_operator 5 "equality_operator"
7358	  [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
7359		   (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
7360	   (const_int 0)])
7361	 (label_ref (match_operand 4 "" ""))
7362	 (pc)))
7363   (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
7364	(and:SI (not:SI (match_dup 3)) (match_dup 2)))
7365   (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
7366  "TARGET_THUMB1"
7367  "*
7368  {
7369  if (which_alternative == 0)
7370    output_asm_insn (\"bic\\t%0, %3\", operands);
7371  else if (which_alternative <= 2)
7372    {
7373      output_asm_insn (\"bic\\t%1, %3\", operands);
7374      /* It's ok if OP0 is a lo-reg, even though the mov will set the
7375	 conditions again, since we're only testing for equality.  */
7376      output_asm_insn (\"mov\\t%0, %1\", operands);
7377    }
7378  else
7379    {
7380      output_asm_insn (\"bic\\t%1, %3\", operands);
7381      output_asm_insn (\"str\\t%1, %0\", operands);
7382    }
7383
7384  switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7385    {
7386    case 4:  return \"b%d5\\t%l4\";
7387    case 6:  return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7388    default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7389    }
7390  }"
7391  [(set (attr "far_jump")
7392        (if_then_else
7393	    (ior (and (eq (symbol_ref ("which_alternative"))
7394	                  (const_int 0))
7395		      (eq_attr "length" "8"))
7396		 (eq_attr "length" "10"))
7397	    (const_string "yes")
7398            (const_string "no")))
7399   (set (attr "length")
7400     (if_then_else
7401       (eq (symbol_ref ("which_alternative"))
7402		       (const_int 0))
7403       (if_then_else
7404	 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7405	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7406	 (const_int 4)
7407	 (if_then_else
7408	   (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7409		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7410	   (const_int 6)
7411	   (const_int 8)))
7412       (if_then_else
7413	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7414	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7415	 (const_int 6)
7416	 (if_then_else
7417	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7418		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7419	   (const_int 8)
7420	   (const_int 10)))))]
7421)
7422
7423(define_insn "*cbranchne_decr1"
7424  [(set (pc)
7425	(if_then_else (match_operator 3 "equality_operator"
7426		       [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
7427		        (const_int 0)])
7428		      (label_ref (match_operand 4 "" ""))
7429		      (pc)))
7430   (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7431	(plus:SI (match_dup 2) (const_int -1)))
7432   (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7433  "TARGET_THUMB1"
7434  "*
7435   {
7436     rtx cond[2];
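     /* The sub ..., #1 emitted below sets the flags as for an unsigned
	comparison of operands[2] with 1, so rewrite the NE/EQ test as
	GEU/LTU against constant 1 for the final branch.  */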
7437     cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
7438				? GEU : LTU),
7439			       VOIDmode, operands[2], const1_rtx);
7440     cond[1] = operands[4];
7441
7442     if (which_alternative == 0)
7443       output_asm_insn (\"sub\\t%0, %2, #1\", operands);
7444     else if (which_alternative == 1)
7445       {
7446	 /* We must provide an alternative for a hi reg because reload
7447	    cannot handle output reloads on a jump instruction, but we
7448	    can't subtract into that.  Fortunately a mov from lo to hi
7449	    does not clobber the condition codes.  */
7450	 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7451	 output_asm_insn (\"mov\\t%0, %1\", operands);
7452       }
7453     else
7454       {
7455	 /* Similarly, but the target is memory.  */
7456	 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
7457	 output_asm_insn (\"str\\t%1, %0\", operands);
7458       }
7459
7460     switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
7461       {
7462	 case 4:
7463	   output_asm_insn (\"b%d0\\t%l1\", cond);
7464	   return \"\";
7465	 case 6:
7466	   output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7467	   return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7468	 default:
7469	   output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7470	   return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7471       }
7472   }
7473  "
7474  [(set (attr "far_jump")
7475        (if_then_else
7476	    (ior (and (eq (symbol_ref ("which_alternative"))
7477	                  (const_int 0))
7478		      (eq_attr "length" "8"))
7479		 (eq_attr "length" "10"))
7480	    (const_string "yes")
7481            (const_string "no")))
7482   (set_attr_alternative "length"
7483      [
7484       ;; Alternative 0
7485       (if_then_else
7486	 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7487	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7488	 (const_int 4)
7489	 (if_then_else
7490	   (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7491		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7492	   (const_int 6)
7493	   (const_int 8)))
7494       ;; Alternative 1
7495       (if_then_else
7496	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7497	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7498	 (const_int 6)
7499	 (if_then_else
7500	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7501		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7502	   (const_int 8)
7503	   (const_int 10)))
7504       ;; Alternative 2
7505       (if_then_else
7506	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7507	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7508	 (const_int 6)
7509	 (if_then_else
7510	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7511		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7512	   (const_int 8)
7513	   (const_int 10)))
7514       ;; Alternative 3
7515       (if_then_else
7516	 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7517	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7518	 (const_int 6)
7519	 (if_then_else
7520	   (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7521		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7522	   (const_int 8)
7523	   (const_int 10)))])]
7524)
7525
7526(define_insn "*addsi3_cbranch"
7527  [(set (pc)
7528	(if_then_else
7529	 (match_operator 4 "arm_comparison_operator"
7530	  [(plus:SI
7531	    (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
7532	    (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
7533	   (const_int 0)])
7534	 (label_ref (match_operand 5 "" ""))
7535	 (pc)))
7536   (set
7537    (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7538    (plus:SI (match_dup 2) (match_dup 3)))
7539   (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
7540  "TARGET_THUMB1
7541   && (GET_CODE (operands[4]) == EQ
7542       || GET_CODE (operands[4]) == NE
7543       || GET_CODE (operands[4]) == GE
7544       || GET_CODE (operands[4]) == LT)"
7545  "*
7546   {
7547     rtx cond[3];
7548
7549
7550     cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
7551     cond[1] = operands[2];
7552     cond[2] = operands[3];
7553
7554     if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7555       output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7556     else
7557       output_asm_insn (\"add\\t%0, %1, %2\", cond);
7558
7559     if (which_alternative >= 3
7560	 && which_alternative < 4)
7561       output_asm_insn (\"mov\\t%0, %1\", operands);
7562     else if (which_alternative >= 4)
7563       output_asm_insn (\"str\\t%1, %0\", operands);
7564
7565     switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7566       {
7567	 case 4:
7568	   return \"b%d4\\t%l5\";
7569	 case 6:
7570	   return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7571	 default:
7572	   return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7573       }
7574   }
7575  "
7576  [(set (attr "far_jump")
7577        (if_then_else
7578	    (ior (and (lt (symbol_ref ("which_alternative"))
7579	                  (const_int 3))
7580		      (eq_attr "length" "8"))
7581		 (eq_attr "length" "10"))
7582	    (const_string "yes")
7583            (const_string "no")))
7584   (set (attr "length")
7585     (if_then_else
7586       (lt (symbol_ref ("which_alternative"))
7587		       (const_int 3))
7588       (if_then_else
7589	 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7590	      (le (minus (match_dup 5) (pc)) (const_int 256)))
7591	 (const_int 4)
7592	 (if_then_else
7593	   (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7594		(le (minus (match_dup 5) (pc)) (const_int 2048)))
7595	   (const_int 6)
7596	   (const_int 8)))
7597       (if_then_else
7598	 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7599	      (le (minus (match_dup 5) (pc)) (const_int 256)))
7600	 (const_int 6)
7601	 (if_then_else
7602	   (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7603		(le (minus (match_dup 5) (pc)) (const_int 2048)))
7604	   (const_int 8)
7605	   (const_int 10)))))]
7606)
7607
7608(define_insn "*addsi3_cbranch_scratch"
7609  [(set (pc)
7610	(if_then_else
7611	 (match_operator 3 "arm_comparison_operator"
7612	  [(plus:SI
7613	    (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7614	    (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7615	   (const_int 0)])
7616	 (label_ref (match_operand 4 "" ""))
7617	 (pc)))
7618   (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7619  "TARGET_THUMB1
7620   && (GET_CODE (operands[3]) == EQ
7621       || GET_CODE (operands[3]) == NE
7622       || GET_CODE (operands[3]) == GE
7623       || GET_CODE (operands[3]) == LT)"
7624  "*
7625   {
7626     switch (which_alternative)
7627       {
7628       case 0:
7629	 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7630	 break;
7631       case 1:
7632	 output_asm_insn (\"cmn\t%1, %2\", operands);
7633	 break;
7634       case 2:
7635	 if (INTVAL (operands[2]) < 0)
7636	   output_asm_insn (\"sub\t%0, %1, %2\", operands);
7637	 else
7638	   output_asm_insn (\"add\t%0, %1, %2\", operands);
7639	 break;
7640       case 3:
7641	 if (INTVAL (operands[2]) < 0)
7642	   output_asm_insn (\"sub\t%0, %0, %2\", operands);
7643	 else
7644	   output_asm_insn (\"add\t%0, %0, %2\", operands);
7645	 break;
7646       }
7647
7648     switch (get_attr_length (insn))
7649       {
7650	 case 4:
7651	   return \"b%d3\\t%l4\";
7652	 case 6:
7653	   return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7654	 default:
7655	   return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7656       }
7657   }
7658  "
7659  [(set (attr "far_jump")
7660        (if_then_else
7661	    (eq_attr "length" "8")
7662	    (const_string "yes")
7663            (const_string "no")))
7664   (set (attr "length")
7665       (if_then_else
7666	 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7667	      (le (minus (match_dup 4) (pc)) (const_int 256)))
7668	 (const_int 4)
7669	 (if_then_else
7670	   (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7671		(le (minus (match_dup 4) (pc)) (const_int 2048)))
7672	   (const_int 6)
7673	   (const_int 8))))]
7674)
7675
7676(define_insn "*subsi3_cbranch"
7677  [(set (pc)
7678	(if_then_else
7679	 (match_operator 4 "arm_comparison_operator"
7680	  [(minus:SI
7681	    (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7682	    (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7683	   (const_int 0)])
7684	 (label_ref (match_operand 5 "" ""))
7685	 (pc)))
7686   (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7687	(minus:SI (match_dup 2) (match_dup 3)))
7688   (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7689  "TARGET_THUMB1
7690   && (GET_CODE (operands[4]) == EQ
7691       || GET_CODE (operands[4]) == NE
7692       || GET_CODE (operands[4]) == GE
7693       || GET_CODE (operands[4]) == LT)"
7694  "*
7695   {
7696     if (which_alternative == 0)
7697       output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7698     else if (which_alternative == 1)
7699       {
7700	 /* We must provide an alternative for a hi reg because reload
7701	    cannot handle output reloads on a jump instruction, but we
7702	    can't subtract into that.  Fortunately a mov from lo to hi
7703	    does not clobber the condition codes.  */
7704	 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7705	 output_asm_insn (\"mov\\t%0, %1\", operands);
7706       }
7707     else
7708       {
7709	 /* Similarly, but the target is memory.  */
7710	 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7711	 output_asm_insn (\"str\\t%1, %0\", operands);
7712       }
7713
7714     switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7715       {
7716	 case 4:
7717	   return \"b%d4\\t%l5\";
7718	 case 6:
7719	   return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7720	 default:
7721	   return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7722       }
7723   }
7724  "
7725  [(set (attr "far_jump")
7726        (if_then_else
7727	    (ior (and (eq (symbol_ref ("which_alternative"))
7728	                  (const_int 0))
7729		      (eq_attr "length" "8"))
7730		 (eq_attr "length" "10"))
7731	    (const_string "yes")
7732            (const_string "no")))
7733   (set (attr "length")
7734     (if_then_else
7735       (eq (symbol_ref ("which_alternative"))
7736		       (const_int 0))
7737       (if_then_else
7738	 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7739	      (le (minus (match_dup 5) (pc)) (const_int 256)))
7740	 (const_int 4)
7741	 (if_then_else
7742	   (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7743		(le (minus (match_dup 5) (pc)) (const_int 2048)))
7744	   (const_int 6)
7745	   (const_int 8)))
7746       (if_then_else
7747	 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7748	      (le (minus (match_dup 5) (pc)) (const_int 256)))
7749	 (const_int 6)
7750	 (if_then_else
7751	   (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7752		(le (minus (match_dup 5) (pc)) (const_int 2048)))
7753	   (const_int 8)
7754	   (const_int 10)))))]
7755)
7756
7757(define_insn "*subsi3_cbranch_scratch"
7758  [(set (pc)
7759	(if_then_else
7760	 (match_operator 0 "arm_comparison_operator"
7761	  [(minus:SI (match_operand:SI 1 "register_operand" "l")
7762		     (match_operand:SI 2 "nonmemory_operand" "l"))
7763	   (const_int 0)])
7764	 (label_ref (match_operand 3 "" ""))
7765	 (pc)))]
7766  "TARGET_THUMB1
7767   && (GET_CODE (operands[0]) == EQ
7768       || GET_CODE (operands[0]) == NE
7769       || GET_CODE (operands[0]) == GE
7770       || GET_CODE (operands[0]) == LT)"
7771  "*
7772  output_asm_insn (\"cmp\\t%1, %2\", operands);
7773  switch (get_attr_length (insn))
7774    {
7775    case 4:  return \"b%d0\\t%l3\";
7776    case 6:  return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7777    default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7778    }
7779  "
7780  [(set (attr "far_jump")
7781        (if_then_else
7782	    (eq_attr "length" "8")
7783	    (const_string "yes")
7784            (const_string "no")))
7785   (set (attr "length")
7786        (if_then_else
7787	    (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7788	         (le (minus (match_dup 3) (pc)) (const_int 256)))
7789	    (const_int 4)
7790	    (if_then_else
7791	        (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7792		     (le (minus (match_dup 3) (pc)) (const_int 2048)))
7793		(const_int 6)
7794		(const_int 8))))]
7795)
7796
7797;; Comparison and test insns
7798
7799(define_insn "*arm_cmpsi_insn"
7800  [(set (reg:CC CC_REGNUM)
7801	(compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7802		    (match_operand:SI 1 "arm_add_operand"    "rI,L")))]
7803  "TARGET_32BIT"
7804  "@
7805   cmp%?\\t%0, %1
7806   cmn%?\\t%0, #%n1"
7807  [(set_attr "conds" "set")]
7808)
7809
7810(define_insn "*arm_cmpsi_shiftsi"
7811  [(set (reg:CC CC_REGNUM)
7812	(compare:CC (match_operand:SI   0 "s_register_operand" "r")
7813		    (match_operator:SI  3 "shift_operator"
7814		     [(match_operand:SI 1 "s_register_operand" "r")
7815		      (match_operand:SI 2 "arm_rhs_operand"    "rM")])))]
7816  "TARGET_ARM"
7817  "cmp%?\\t%0, %1%S3"
7818  [(set_attr "conds" "set")
7819   (set_attr "shift" "1")
7820   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7821		      (const_string "alu_shift")
7822		      (const_string "alu_shift_reg")))]
7823)
7824
7825(define_insn "*arm_cmpsi_shiftsi_swp"
7826  [(set (reg:CC_SWP CC_REGNUM)
7827	(compare:CC_SWP (match_operator:SI 3 "shift_operator"
7828			 [(match_operand:SI 1 "s_register_operand" "r")
7829			  (match_operand:SI 2 "reg_or_int_operand" "rM")])
7830			(match_operand:SI 0 "s_register_operand" "r")))]
7831  "TARGET_ARM"
7832  "cmp%?\\t%0, %1%S3"
7833  [(set_attr "conds" "set")
7834   (set_attr "shift" "1")
7835   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7836		      (const_string "alu_shift")
7837		      (const_string "alu_shift_reg")))]
7838)
7839
7840(define_insn "*arm_cmpsi_negshiftsi_si"
7841  [(set (reg:CC_Z CC_REGNUM)
7842	(compare:CC_Z
7843	 (neg:SI (match_operator:SI 1 "shift_operator"
7844		    [(match_operand:SI 2 "s_register_operand" "r")
7845		     (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7846	 (match_operand:SI 0 "s_register_operand" "r")))]
7847  "TARGET_ARM"
7848  "cmn%?\\t%0, %2%S1"
7849  [(set_attr "conds" "set")
7850   (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7851				    (const_string "alu_shift")
7852				    (const_string "alu_shift_reg")))]
7853)
7854
7855;; Cirrus SF compare instruction
7856(define_insn "*cirrus_cmpsf"
7857  [(set (reg:CCFP CC_REGNUM)
7858	(compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7859		      (match_operand:SF 1 "cirrus_fp_register" "v")))]
7860  "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7861  "cfcmps%?\\tr15, %V0, %V1"
7862  [(set_attr "type"   "mav_farith")
7863   (set_attr "cirrus" "compare")]
7864)
7865
7866;; Cirrus DF compare instruction
7867(define_insn "*cirrus_cmpdf"
7868  [(set (reg:CCFP CC_REGNUM)
7869	(compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7870		      (match_operand:DF 1 "cirrus_fp_register" "v")))]
7871  "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7872  "cfcmpd%?\\tr15, %V0, %V1"
7873  [(set_attr "type"   "mav_farith")
7874   (set_attr "cirrus" "compare")]
7875)
7876
7877(define_insn "*cirrus_cmpdi"
7878  [(set (reg:CC CC_REGNUM)
7879	(compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7880		    (match_operand:DI 1 "cirrus_fp_register" "v")))]
7881  "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7882  "cfcmp64%?\\tr15, %V0, %V1"
7883  [(set_attr "type"   "mav_farith")
7884   (set_attr "cirrus" "compare")]
7885)
7886
7887; This insn allows redundant compares to be removed by cse; nothing should
7888; ever appear in the output file, since (set (reg x) (reg x)) is a no-op that
7889; is deleted later on. The match_dup will match the mode here, so that
7890; mode changes of the condition codes aren't lost by this even though we don't
7891; specify what they are.
7892
7893(define_insn "*deleted_compare"
7894  [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7895  "TARGET_32BIT"
7896  "\\t%@ deleted compare"
7897  [(set_attr "conds" "set")
7898   (set_attr "length" "0")]
7899)
7900
7901
7902;; Conditional branch insns
7903
7904(define_expand "cbranch_cc"
7905  [(set (pc)
7906	(if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7907					    (match_operand 2 "" "")])
7908		      (label_ref (match_operand 3 "" ""))
7909		      (pc)))]
7910  "TARGET_32BIT"
7911  "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7912				      operands[1], operands[2]);
7913   operands[2] = const0_rtx;"
7914)
7915
7916;;
7917;; Patterns to match conditional branch insns.
7918;;
7919
7920(define_insn "*arm_cond_branch"
7921  [(set (pc)
7922	(if_then_else (match_operator 1 "arm_comparison_operator"
7923		       [(match_operand 2 "cc_register" "") (const_int 0)])
7924		      (label_ref (match_operand 0 "" ""))
7925		      (pc)))]
7926  "TARGET_32BIT"
7927  "*
7928  if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7929    {
7930      arm_ccfsm_state += 2;
7931      return \"\";
7932    }
7933  return \"b%d1\\t%l0\";
7934  "
7935  [(set_attr "conds" "use")
7936   (set_attr "type" "branch")]
7937)
7938
7939(define_insn "*arm_cond_branch_reversed"
7940  [(set (pc)
7941	(if_then_else (match_operator 1 "arm_comparison_operator"
7942		       [(match_operand 2 "cc_register" "") (const_int 0)])
7943		      (pc)
7944		      (label_ref (match_operand 0 "" ""))))]
7945  "TARGET_32BIT"
7946  "*
7947  if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7948    {
7949      arm_ccfsm_state += 2;
7950      return \"\";
7951    }
7952  return \"b%D1\\t%l0\";
7953  "
7954  [(set_attr "conds" "use")
7955   (set_attr "type" "branch")]
7956)
7957
7958
7959
7960; scc insns
7961
7962(define_expand "cstore_cc"
7963  [(set (match_operand:SI 0 "s_register_operand" "")
7964	(match_operator:SI 1 "" [(match_operand 2 "" "")
7965				 (match_operand 3 "" "")]))]
7966  "TARGET_32BIT"
7967  "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7968				      operands[2], operands[3]);
7969   operands[3] = const0_rtx;"
7970)
7971
7972(define_insn "*mov_scc"
7973  [(set (match_operand:SI 0 "s_register_operand" "=r")
7974	(match_operator:SI 1 "arm_comparison_operator"
7975	 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7976  "TARGET_ARM"
7977  "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7978  [(set_attr "conds" "use")
7979   (set_attr "length" "8")]
7980)
7981
7982(define_insn "*mov_negscc"
7983  [(set (match_operand:SI 0 "s_register_operand" "=r")
7984	(neg:SI (match_operator:SI 1 "arm_comparison_operator"
7985		 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7986  "TARGET_ARM"
7987  "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7988  [(set_attr "conds" "use")
7989   (set_attr "length" "8")]
7990)
7991
7992(define_insn "*mov_notscc"
7993  [(set (match_operand:SI 0 "s_register_operand" "=r")
7994	(not:SI (match_operator:SI 1 "arm_comparison_operator"
7995		 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7996  "TARGET_ARM"
7997  "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7998  [(set_attr "conds" "use")
7999   (set_attr "length" "8")]
8000)
8001
8002(define_expand "cstoresi4"
8003  [(set (match_operand:SI 0 "s_register_operand" "")
8004	(match_operator:SI 1 "arm_comparison_operator"
8005	 [(match_operand:SI 2 "s_register_operand" "")
8006	  (match_operand:SI 3 "reg_or_int_operand" "")]))]
8007  "TARGET_32BIT || TARGET_THUMB1"
8008  "{
8009  rtx op3, scratch, scratch2;
8010
8011  if (!TARGET_THUMB1)
8012    {
8013      if (!arm_add_operand (operands[3], SImode))
8014	operands[3] = force_reg (SImode, operands[3]);
8015      emit_insn (gen_cstore_cc (operands[0], operands[1],
8016				operands[2], operands[3]));
8017      DONE;
8018    }
8019
8020  if (operands[3] == const0_rtx)
8021    {
8022      switch (GET_CODE (operands[1]))
8023	{
8024	case EQ:
8025	  emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8026	  break;
8027
8028	case NE:
8029	  emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8030	  break;
8031
8032	case LE:
8033          scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8034				  NULL_RTX, 0, OPTAB_WIDEN);
8035          scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8036				  NULL_RTX, 0, OPTAB_WIDEN);
8037          expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8038			operands[0], 1, OPTAB_WIDEN);
8039	  break;
8040
8041        case GE:
8042          scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8043				 NULL_RTX, 1);
8044          expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8045			operands[0], 1, OPTAB_WIDEN);
8046          break;
8047
8048        case GT:
8049          scratch = expand_binop (SImode, ashr_optab, operands[2],
8050				  GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8051          scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8052				  NULL_RTX, 0, OPTAB_WIDEN);
8053          expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8054			0, OPTAB_WIDEN);
8055          break;
8056
8057	/* LT is handled by generic code.  No need for unsigned with 0.  */
8058	default:
8059	  FAIL;
8060	}
8061      DONE;
8062    }
8063
8064  switch (GET_CODE (operands[1]))
8065    {
8066    case EQ:
8067      scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8068			      NULL_RTX, 0, OPTAB_WIDEN);
8069      emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8070      break;
8071
8072    case NE:
8073      scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8074			      NULL_RTX, 0, OPTAB_WIDEN);
8075      emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8076      break;
8077
8078    case LE:
8079      op3 = force_reg (SImode, operands[3]);
8080
8081      scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8082			      NULL_RTX, 1, OPTAB_WIDEN);
8083      scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8084			      NULL_RTX, 0, OPTAB_WIDEN);
8085      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8086					  op3, operands[2]));
8087      break;
8088
8089    case GE:
8090      op3 = operands[3];
8091      if (!thumb1_cmp_operand (op3, SImode))
8092        op3 = force_reg (SImode, op3);
8093      scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8094			      NULL_RTX, 0, OPTAB_WIDEN);
8095      scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8096			       NULL_RTX, 1, OPTAB_WIDEN);
8097      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8098					  operands[2], op3));
8099      break;
8100
8101    case LEU:
8102      op3 = force_reg (SImode, operands[3]);
8103      scratch = force_reg (SImode, const0_rtx);
8104      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8105					  op3, operands[2]));
8106      break;
8107
8108    case GEU:
8109      op3 = operands[3];
8110      if (!thumb1_cmp_operand (op3, SImode))
8111        op3 = force_reg (SImode, op3);
8112      scratch = force_reg (SImode, const0_rtx);
8113      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8114					  operands[2], op3));
8115      break;
8116
8117    case LTU:
8118      op3 = operands[3];
8119      if (!thumb1_cmp_operand (op3, SImode))
8120        op3 = force_reg (SImode, op3);
8121      scratch = gen_reg_rtx (SImode);
8122      emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8123      break;
8124
8125    case GTU:
8126      op3 = force_reg (SImode, operands[3]);
8127      scratch = gen_reg_rtx (SImode);
8128      emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8129      break;
8130
8131    /* No good sequences for GT, LT.  */
8132    default:
8133      FAIL;
8134    }
8135  DONE;
8136}")
8137
8138(define_expand "cstoresf4"
8139  [(set (match_operand:SI 0 "s_register_operand" "")
8140	(match_operator:SI 1 "arm_comparison_operator"
8141	 [(match_operand:SF 2 "s_register_operand" "")
8142	  (match_operand:SF 3 "arm_float_compare_operand" "")]))]
8143  "TARGET_32BIT && TARGET_HARD_FLOAT"
8144  "emit_insn (gen_cstore_cc (operands[0], operands[1],
8145			     operands[2], operands[3])); DONE;"
8146)
8147
8148(define_expand "cstoredf4"
8149  [(set (match_operand:SI 0 "s_register_operand" "")
8150	(match_operator:SI 1 "arm_comparison_operator"
8151	 [(match_operand:DF 2 "s_register_operand" "")
8152	  (match_operand:DF 3 "arm_float_compare_operand" "")]))]
8153  "TARGET_32BIT && TARGET_HARD_FLOAT"
8154  "emit_insn (gen_cstore_cc (operands[0], operands[1],
8155			     operands[2], operands[3])); DONE;"
8156)
8157
8158;; This uses the Cirrus DI compare instruction.
8159(define_expand "cstoredi4"
8160  [(set (match_operand:SI 0 "s_register_operand" "")
8161	(match_operator:SI 1 "arm_comparison_operator"
8162	 [(match_operand:DI 2 "cirrus_fp_register" "")
8163	  (match_operand:DI 3 "cirrus_fp_register" "")]))]
8164  "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
8165  "emit_insn (gen_cstore_cc (operands[0], operands[1],
8166			     operands[2], operands[3])); DONE;"
8167)
8168
8169
8170(define_expand "cstoresi_eq0_thumb1"
8171  [(parallel
8172    [(set (match_operand:SI 0 "s_register_operand" "")
8173	  (eq:SI (match_operand:SI 1 "s_register_operand" "")
8174		 (const_int 0)))
8175     (clobber (match_dup:SI 2))])]
8176  "TARGET_THUMB1"
8177  "operands[2] = gen_reg_rtx (SImode);"
8178)
8179
8180(define_expand "cstoresi_ne0_thumb1"
8181  [(parallel
8182    [(set (match_operand:SI 0 "s_register_operand" "")
8183	  (ne:SI (match_operand:SI 1 "s_register_operand" "")
8184		 (const_int 0)))
8185     (clobber (match_dup:SI 2))])]
8186  "TARGET_THUMB1"
8187  "operands[2] = gen_reg_rtx (SImode);"
8188)
8189
8190(define_insn "*cstoresi_eq0_thumb1_insn"
8191  [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8192	(eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8193	       (const_int 0)))
8194   (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8195  "TARGET_THUMB1"
8196  "@
8197   neg\\t%0, %1\;adc\\t%0, %0, %1
8198   neg\\t%2, %1\;adc\\t%0, %1, %2"
8199  [(set_attr "length" "4")]
8200)
8201
8202(define_insn "*cstoresi_ne0_thumb1_insn"
8203  [(set (match_operand:SI 0 "s_register_operand" "=l")
8204	(ne:SI (match_operand:SI 1 "s_register_operand" "0")
8205	       (const_int 0)))
8206   (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8207  "TARGET_THUMB1"
8208  "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8209  [(set_attr "length" "4")]
8210)
8211
8212;; Used as part of the expansion of the Thumb-1 LTU and GTU sequences.
8213(define_insn "cstoresi_nltu_thumb1"
8214  [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8215        (neg:SI (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8216			(match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8217  "TARGET_THUMB1"
8218  "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8219  [(set_attr "length" "4")]
8220)
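
;; A brief note on how the sequence above yields -(LTU): after "cmp %1, %2"
;; the carry flag is clear exactly when the subtraction borrowed, i.e. when
;; %1 < %2 (unsigned), and "sbc %0, %0, %0" then computes %0 - %0 - !C,
;; which is -1 when the comparison was LTU and 0 otherwise.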
8221
8222(define_insn_and_split "cstoresi_ltu_thumb1"
8223  [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8224        (ltu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8225		(match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")))]
8226  "TARGET_THUMB1"
8227  "#"
8228  "TARGET_THUMB1"
8229  [(set (match_dup 3)
8230	(neg:SI (ltu:SI (match_dup 1) (match_dup 2))))
8231   (set (match_dup 0) (neg:SI (match_dup 3)))]
8232  "operands[3] = gen_reg_rtx (SImode);"
8233  [(set_attr "length" "4")]
8234)
8235
8236;; Used as part of the expansion of the Thumb-1 LE, GE, LEU and GEU sequences.
8237(define_insn "thumb1_addsi3_addgeu"
8238  [(set (match_operand:SI 0 "s_register_operand" "=l")
8239        (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8240			  (match_operand:SI 2 "s_register_operand" "l"))
8241		 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8242			 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8243  "TARGET_THUMB1"
8244  "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8245  [(set_attr "length" "4")]
8246)
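
;; How the pattern above works: "cmp %3, %4" leaves the carry flag set
;; exactly when %3 >= %4 (unsigned), and "adc %0, %1, %2" computes
;; %1 + %2 + C, matching the RTL (plus (plus %1 %2) (geu %3 %4)).  For
;; example, the LEU case of cstoresi4 passes zero for both addends, so the
;; result is simply the 0/1 value of (%4 <= %3) unsigned.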
8247
8248
8249;; Conditional move insns
8250
8251(define_expand "movsicc"
8252  [(set (match_operand:SI 0 "s_register_operand" "")
8253	(if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8254			 (match_operand:SI 2 "arm_not_operand" "")
8255			 (match_operand:SI 3 "arm_not_operand" "")))]
8256  "TARGET_32BIT"
8257  "
8258  {
8259    enum rtx_code code = GET_CODE (operands[1]);
8260    rtx ccreg;
8261
8262    if (code == UNEQ || code == LTGT)
8263      FAIL;
8264
8265    ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8266				 XEXP (operands[1], 1));
8267    operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8268  }"
8269)
8270
8271(define_expand "movsfcc"
8272  [(set (match_operand:SF 0 "s_register_operand" "")
8273	(if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8274			 (match_operand:SF 2 "s_register_operand" "")
8275			 (match_operand:SF 3 "nonmemory_operand" "")))]
8276  "TARGET_32BIT && TARGET_HARD_FLOAT"
8277  "
8278  {
8279    enum rtx_code code = GET_CODE (operands[1]);
8280    rtx ccreg;
8281
8282    if (code == UNEQ || code == LTGT)
8283      FAIL;
8284
8285    /* When compiling for SOFT_FLOAT, ensure both arms are in registers;
8286       otherwise, ensure operand 3 is a valid FP add operand.  */
8287    if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8288        || (!arm_float_add_operand (operands[3], SFmode)))
8289      operands[3] = force_reg (SFmode, operands[3]);
8290
8291    ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8292				 XEXP (operands[1], 1));
8293    operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8294  }"
8295)
8296
8297(define_expand "movdfcc"
8298  [(set (match_operand:DF 0 "s_register_operand" "")
8299	(if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8300			 (match_operand:DF 2 "s_register_operand" "")
8301			 (match_operand:DF 3 "arm_float_add_operand" "")))]
8302  "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP_DOUBLE)"
8303  "
8304  {
8305    enum rtx_code code = GET_CODE (operands[1]);
8306    rtx ccreg;
8307
8308    if (code == UNEQ || code == LTGT)
8309      FAIL;
8310
8311    ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8312				 XEXP (operands[1], 1));
8313    operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8314  }"
8315)
8316
8317(define_insn "*movsicc_insn"
8318  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8319	(if_then_else:SI
8320	 (match_operator 3 "arm_comparison_operator"
8321	  [(match_operand 4 "cc_register" "") (const_int 0)])
8322	 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8323	 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8324  "TARGET_ARM"
8325  "@
8326   mov%D3\\t%0, %2
8327   mvn%D3\\t%0, #%B2
8328   mov%d3\\t%0, %1
8329   mvn%d3\\t%0, #%B1
8330   mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8331   mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8332   mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8333   mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8334  [(set_attr "length" "4,4,4,4,8,8,8,8")
8335   (set_attr "conds" "use")]
8336)
8337
8338(define_insn "*movsfcc_soft_insn"
8339  [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8340	(if_then_else:SF (match_operator 3 "arm_comparison_operator"
8341			  [(match_operand 4 "cc_register" "") (const_int 0)])
8342			 (match_operand:SF 1 "s_register_operand" "0,r")
8343			 (match_operand:SF 2 "s_register_operand" "r,0")))]
8344  "TARGET_ARM && TARGET_SOFT_FLOAT"
8345  "@
8346   mov%D3\\t%0, %2
8347   mov%d3\\t%0, %1"
8348  [(set_attr "conds" "use")]
8349)
8350
8351
8352;; Jump and linkage insns
8353
8354(define_expand "jump"
8355  [(set (pc)
8356	(label_ref (match_operand 0 "" "")))]
8357  "TARGET_EITHER"
8358  ""
8359)
8360
8361(define_insn "*arm_jump"
8362  [(set (pc)
8363	(label_ref (match_operand 0 "" "")))]
8364  "TARGET_32BIT"
8365  "*
8366  {
8367    if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8368      {
8369        arm_ccfsm_state += 2;
8370        return \"\";
8371      }
8372    return \"b%?\\t%l0\";
8373  }
8374  "
8375  [(set_attr "predicable" "yes")]
8376)
8377
8378(define_insn "*thumb_jump"
8379  [(set (pc)
8380	(label_ref (match_operand 0 "" "")))]
8381  "TARGET_THUMB1"
8382  "*
8383  if (get_attr_length (insn) == 2)
8384    return \"b\\t%l0\";
8385  return \"bl\\t%l0\\t%@ far jump\";
8386  "
8387  [(set (attr "far_jump")
8388        (if_then_else
8389	    (eq_attr "length" "4")
8390	    (const_string "yes")
8391	    (const_string "no")))
8392   (set (attr "length")
8393        (if_then_else
8394	    (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8395		 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8396  	    (const_int 2)
8397	    (const_int 4)))]
8398)
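
;; Roughly speaking, the length calculation above reflects the reach of the
;; Thumb-1 unconditional branch, about +/-2KB from the instruction: targets
;; inside that window use the 2-byte "b", while anything further away falls
;; back to the 4-byte "bl" form and is marked as a far_jump, since "bl"
;; clobbers LR.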
8399
8400(define_expand "call"
8401  [(parallel [(call (match_operand 0 "memory_operand" "")
8402	            (match_operand 1 "general_operand" ""))
8403	      (use (match_operand 2 "" ""))
8404	      (clobber (reg:SI LR_REGNUM))])]
8405  "TARGET_EITHER"
8406  "
8407  {
8408    rtx callee, pat;
8409
8410    /* In an untyped call, we can get NULL for operand 2.  */
8411    if (operands[2] == NULL_RTX)
8412      operands[2] = const0_rtx;
8413
8414    /* Decide if we should generate indirect calls by loading the
8415       32-bit address of the callee into a register before performing the
8416       branch and link.  */
8417    callee = XEXP (operands[0], 0);
8418    if (GET_CODE (callee) == SYMBOL_REF
8419	? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8420	: !REG_P (callee))
8421      XEXP (operands[0], 0) = force_reg (Pmode, callee);
8422
8423    pat = gen_call_internal (operands[0], operands[1], operands[2]);
8424    arm_emit_call_insn (pat, XEXP (operands[0], 0));
8425    DONE;
8426  }"
8427)
8428
8429(define_expand "call_internal"
8430  [(parallel [(call (match_operand 0 "memory_operand" "")
8431	            (match_operand 1 "general_operand" ""))
8432	      (use (match_operand 2 "" ""))
8433	      (clobber (reg:SI LR_REGNUM))])])
8434
8435(define_insn "*call_reg_armv5"
8436  [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8437         (match_operand 1 "" ""))
8438   (use (match_operand 2 "" ""))
8439   (clobber (reg:SI LR_REGNUM))]
8440  "TARGET_ARM && arm_arch5"
8441  "blx%?\\t%0"
8442  [(set_attr "type" "call")]
8443)
8444
8445(define_insn "*call_reg_arm"
8446  [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8447         (match_operand 1 "" ""))
8448   (use (match_operand 2 "" ""))
8449   (clobber (reg:SI LR_REGNUM))]
8450  "TARGET_ARM && !arm_arch5"
8451  "*
8452  return output_call (operands);
8453  "
8454  ;; The length is the worst case; normally only two instructions are needed.
8455  [(set_attr "length" "12")
8456   (set_attr "type" "call")]
8457)
8458
8459
8460;; Note: not used for armv5+ because the sequence used (ldr pc, ...) is not
8461;; considered a function call by the branch predictor of some cores (PR40887).
8462;; Falls back to blx rN (*call_reg_armv5).
8463
8464(define_insn "*call_mem"
8465  [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8466	 (match_operand 1 "" ""))
8467   (use (match_operand 2 "" ""))
8468   (clobber (reg:SI LR_REGNUM))]
8469  "TARGET_ARM && !arm_arch5"
8470  "*
8471  return output_call_mem (operands);
8472  "
8473  [(set_attr "length" "12")
8474   (set_attr "type" "call")]
8475)
8476
8477(define_insn "*call_reg_thumb1_v5"
8478  [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8479	 (match_operand 1 "" ""))
8480   (use (match_operand 2 "" ""))
8481   (clobber (reg:SI LR_REGNUM))]
8482  "TARGET_THUMB1 && arm_arch5"
8483  "blx\\t%0"
8484  [(set_attr "length" "2")
8485   (set_attr "type" "call")]
8486)
8487
8488(define_insn "*call_reg_thumb1"
8489  [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8490	 (match_operand 1 "" ""))
8491   (use (match_operand 2 "" ""))
8492   (clobber (reg:SI LR_REGNUM))]
8493  "TARGET_THUMB1 && !arm_arch5"
8494  "*
8495  {
8496    if (!TARGET_CALLER_INTERWORKING)
8497      return thumb_call_via_reg (operands[0]);
8498    else if (operands[1] == const0_rtx)
8499      return \"bl\\t%__interwork_call_via_%0\";
8500    else if (frame_pointer_needed)
8501      return \"bl\\t%__interwork_r7_call_via_%0\";
8502    else
8503      return \"bl\\t%__interwork_r11_call_via_%0\";
8504  }"
8505  [(set_attr "type" "call")]
8506)
8507
8508(define_expand "call_value"
8509  [(parallel [(set (match_operand       0 "" "")
8510	           (call (match_operand 1 "memory_operand" "")
8511		         (match_operand 2 "general_operand" "")))
8512	      (use (match_operand 3 "" ""))
8513	      (clobber (reg:SI LR_REGNUM))])]
8514  "TARGET_EITHER"
8515  "
8516  {
8517    rtx pat, callee;
8518
8519    /* In an untyped call, we can get NULL for operand 3.  */
8520    if (operands[3] == 0)
8521      operands[3] = const0_rtx;
8522
8523    /* Decide if we should generate indirect calls by loading the
8524       32-bit address of the callee into a register before performing the
8525       branch and link.  */
8526    callee = XEXP (operands[1], 0);
8527    if (GET_CODE (callee) == SYMBOL_REF
8528	? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8529	: !REG_P (callee))
8530      XEXP (operands[1], 0) = force_reg (Pmode, callee);
8531
8532    pat = gen_call_value_internal (operands[0], operands[1],
8533				   operands[2], operands[3]);
8534    arm_emit_call_insn (pat, XEXP (operands[1], 0));
8535    DONE;
8536  }"
8537)
8538
8539(define_expand "call_value_internal"
8540  [(parallel [(set (match_operand       0 "" "")
8541	           (call (match_operand 1 "memory_operand" "")
8542		         (match_operand 2 "general_operand" "")))
8543	      (use (match_operand 3 "" ""))
8544	      (clobber (reg:SI LR_REGNUM))])])
8545
8546(define_insn "*call_value_reg_armv5"
8547  [(set (match_operand 0 "" "")
8548        (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8549	      (match_operand 2 "" "")))
8550   (use (match_operand 3 "" ""))
8551   (clobber (reg:SI LR_REGNUM))]
8552  "TARGET_ARM && arm_arch5"
8553  "blx%?\\t%1"
8554  [(set_attr "type" "call")]
8555)
8556
8557(define_insn "*call_value_reg_arm"
8558  [(set (match_operand 0 "" "")
8559        (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8560	      (match_operand 2 "" "")))
8561   (use (match_operand 3 "" ""))
8562   (clobber (reg:SI LR_REGNUM))]
8563  "TARGET_ARM && !arm_arch5"
8564  "*
8565  return output_call (&operands[1]);
8566  "
8567  [(set_attr "length" "12")
8568   (set_attr "type" "call")]
8569)
8570
8571;; Note: see *call_mem
8572
8573(define_insn "*call_value_mem"
8574  [(set (match_operand 0 "" "")
8575	(call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8576	      (match_operand 2 "" "")))
8577   (use (match_operand 3 "" ""))
8578   (clobber (reg:SI LR_REGNUM))]
8579  "TARGET_ARM && !arm_arch5 && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8580  "*
8581  return output_call_mem (&operands[1]);
8582  "
8583  [(set_attr "length" "12")
8584   (set_attr "type" "call")]
8585)
8586
8587(define_insn "*call_value_reg_thumb1_v5"
8588  [(set (match_operand 0 "" "")
8589	(call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8590	      (match_operand 2 "" "")))
8591   (use (match_operand 3 "" ""))
8592   (clobber (reg:SI LR_REGNUM))]
8593  "TARGET_THUMB1 && arm_arch5"
8594  "blx\\t%1"
8595  [(set_attr "length" "2")
8596   (set_attr "type" "call")]
8597)
8598
8599(define_insn "*call_value_reg_thumb1"
8600  [(set (match_operand 0 "" "")
8601	(call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8602	      (match_operand 2 "" "")))
8603   (use (match_operand 3 "" ""))
8604   (clobber (reg:SI LR_REGNUM))]
8605  "TARGET_THUMB1 && !arm_arch5"
8606  "*
8607  {
8608    if (!TARGET_CALLER_INTERWORKING)
8609      return thumb_call_via_reg (operands[1]);
8610    else if (operands[2] == const0_rtx)
8611      return \"bl\\t%__interwork_call_via_%1\";
8612    else if (frame_pointer_needed)
8613      return \"bl\\t%__interwork_r7_call_via_%1\";
8614    else
8615      return \"bl\\t%__interwork_r11_call_via_%1\";
8616  }"
8617  [(set_attr "type" "call")]
8618)
8619
8620;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8621;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
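;; For illustration: with %a the symbol is printed as a bare address, so a
;; call to a function foo comes out as "bl foo" (or "bl foo(PLT)" when PLT
;; relocations are in use) rather than with an immediate '#' prefix.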
8622
8623(define_insn "*call_symbol"
8624  [(call (mem:SI (match_operand:SI 0 "" ""))
8625	 (match_operand 1 "" ""))
8626   (use (match_operand 2 "" ""))
8627   (clobber (reg:SI LR_REGNUM))]
8628  "TARGET_ARM
8629   && (GET_CODE (operands[0]) == SYMBOL_REF)
8630   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8631  "*
8632  {
8633    return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8634  }"
8635  [(set_attr "type" "call")]
8636)
8637
8638(define_insn "*call_value_symbol"
8639  [(set (match_operand 0 "" "")
8640	(call (mem:SI (match_operand:SI 1 "" ""))
8641	(match_operand:SI 2 "" "")))
8642   (use (match_operand 3 "" ""))
8643   (clobber (reg:SI LR_REGNUM))]
8644  "TARGET_ARM
8645   && (GET_CODE (operands[1]) == SYMBOL_REF)
8646   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8647  "*
8648  {
8649    return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8650  }"
8651  [(set_attr "type" "call")]
8652)
8653
8654(define_insn "*call_insn"
8655  [(call (mem:SI (match_operand:SI 0 "" ""))
8656	 (match_operand:SI 1 "" ""))
8657   (use (match_operand 2 "" ""))
8658   (clobber (reg:SI LR_REGNUM))]
8659  "TARGET_THUMB
8660   && GET_CODE (operands[0]) == SYMBOL_REF
8661   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8662  "bl\\t%a0"
8663  [(set_attr "length" "4")
8664   (set_attr "type" "call")]
8665)
8666
8667(define_insn "*call_value_insn"
8668  [(set (match_operand 0 "" "")
8669	(call (mem:SI (match_operand 1 "" ""))
8670	      (match_operand 2 "" "")))
8671   (use (match_operand 3 "" ""))
8672   (clobber (reg:SI LR_REGNUM))]
8673  "TARGET_THUMB
8674   && GET_CODE (operands[1]) == SYMBOL_REF
8675   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8676  "bl\\t%a1"
8677  [(set_attr "length" "4")
8678   (set_attr "type" "call")]
8679)
8680
8681;; We may also be able to do sibcalls for Thumb, but it's much harder...
8682(define_expand "sibcall"
8683  [(parallel [(call (match_operand 0 "memory_operand" "")
8684		    (match_operand 1 "general_operand" ""))
8685	      (return)
8686	      (use (match_operand 2 "" ""))])]
8687  "TARGET_ARM"
8688  "
8689  {
8690    if (operands[2] == NULL_RTX)
8691      operands[2] = const0_rtx;
8692  }"
8693)
8694
8695(define_expand "sibcall_value"
8696  [(parallel [(set (match_operand 0 "" "")
8697		   (call (match_operand 1 "memory_operand" "")
8698			 (match_operand 2 "general_operand" "")))
8699	      (return)
8700	      (use (match_operand 3 "" ""))])]
8701  "TARGET_ARM"
8702  "
8703  {
8704    if (operands[3] == NULL_RTX)
8705      operands[3] = const0_rtx;
8706  }"
8707)
8708
8709(define_insn "*sibcall_insn"
8710 [(call (mem:SI (match_operand:SI 0 "" "X"))
8711	(match_operand 1 "" ""))
8712  (return)
8713  (use (match_operand 2 "" ""))]
8714  "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8715  "*
8716  return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8717  "
8718  [(set_attr "type" "call")]
8719)
8720
8721(define_insn "*sibcall_value_insn"
8722 [(set (match_operand 0 "" "")
8723       (call (mem:SI (match_operand:SI 1 "" "X"))
8724	     (match_operand 2 "" "")))
8725  (return)
8726  (use (match_operand 3 "" ""))]
8727  "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8728  "*
8729  return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8730  "
8731  [(set_attr "type" "call")]
8732)
8733
8734;; Often the return insn will be the same as loading from memory, so set the type attribute to load1.
8735(define_insn "return"
8736  [(return)]
8737  "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8738  "*
8739  {
8740    if (arm_ccfsm_state == 2)
8741      {
8742        arm_ccfsm_state += 2;
8743        return \"\";
8744      }
8745    return output_return_instruction (const_true_rtx, TRUE, FALSE);
8746  }"
8747  [(set_attr "type" "load1")
8748   (set_attr "length" "12")
8749   (set_attr "predicable" "yes")]
8750)
8751
8752(define_insn "*cond_return"
8753  [(set (pc)
8754        (if_then_else (match_operator 0 "arm_comparison_operator"
8755		       [(match_operand 1 "cc_register" "") (const_int 0)])
8756                      (return)
8757                      (pc)))]
8758  "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8759  "*
8760  {
8761    if (arm_ccfsm_state == 2)
8762      {
8763        arm_ccfsm_state += 2;
8764        return \"\";
8765      }
8766    return output_return_instruction (operands[0], TRUE, FALSE);
8767  }"
8768  [(set_attr "conds" "use")
8769   (set_attr "length" "12")
8770   (set_attr "type" "load1")]
8771)
8772
8773(define_insn "*cond_return_inverted"
8774  [(set (pc)
8775        (if_then_else (match_operator 0 "arm_comparison_operator"
8776		       [(match_operand 1 "cc_register" "") (const_int 0)])
8777                      (pc)
8778		      (return)))]
8779  "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8780  "*
8781  {
8782    if (arm_ccfsm_state == 2)
8783      {
8784        arm_ccfsm_state += 2;
8785        return \"\";
8786      }
8787    return output_return_instruction (operands[0], TRUE, TRUE);
8788  }"
8789  [(set_attr "conds" "use")
8790   (set_attr "length" "12")
8791   (set_attr "type" "load1")]
8792)
8793
8794;; Generate a sequence of instructions to determine if the processor is
8795;; in 26-bit or 32-bit mode, and return the appropriate return address
8796;; mask.
8797
8798(define_expand "return_addr_mask"
8799  [(set (match_dup 1)
8800      (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8801		       (const_int 0)))
8802   (set (match_operand:SI 0 "s_register_operand" "")
8803      (if_then_else:SI (eq (match_dup 1) (const_int 0))
8804		       (const_int -1)
8805		       (const_int 67108860)))] ; 0x03fffffc
8806  "TARGET_ARM"
8807  "
8808  operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8809  ")
8810
8811(define_insn "*check_arch2"
8812  [(set (match_operand:CC_NOOV 0 "cc_register" "")
8813      (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8814		       (const_int 0)))]
8815  "TARGET_ARM"
8816  "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8817  [(set_attr "length" "8")
8818   (set_attr "conds" "set")]
8819)
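
;; A rough sketch of why the sequence above works: "teq r0, r0" sets the Z
;; flag, and on the old 26-bit configurations reading R15 includes the PSR
;; flag bits in one operand position but not the other, so "teq pc, pc"
;; compares unequal values and clears Z; with a 32-bit PC both reads are
;; identical and Z stays set.  The masks chosen by return_addr_mask follow
;; from this: -1 keeps every bit of a 32-bit return address, while
;; 0x03fffffc keeps only the word-aligned address field of a 26-bit PC
;; (bits 0-1 and 26-31 hold the mode and flag bits there).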
8820
8821;; Call subroutine returning any type.
8822
8823(define_expand "untyped_call"
8824  [(parallel [(call (match_operand 0 "" "")
8825		    (const_int 0))
8826	      (match_operand 1 "" "")
8827	      (match_operand 2 "" "")])]
8828  "TARGET_EITHER"
8829  "
8830  {
8831    int i;
8832    rtx par = gen_rtx_PARALLEL (VOIDmode,
8833				rtvec_alloc (XVECLEN (operands[2], 0)));
8834    rtx addr = gen_reg_rtx (Pmode);
8835    rtx mem;
8836    int size = 0;
8837
8838    emit_move_insn (addr, XEXP (operands[1], 0));
8839    mem = change_address (operands[1], BLKmode, addr);
8840
8841    for (i = 0; i < XVECLEN (operands[2], 0); i++)
8842      {
8843	rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8844
8845	/* Default code only uses r0 as a return value, but we could
8846	   be using anything up to 4 registers.  */
8847	if (REGNO (src) == R0_REGNUM)
8848	  src = gen_rtx_REG (TImode, R0_REGNUM);
8849
8850        XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8851						 GEN_INT (size));
8852        size += GET_MODE_SIZE (GET_MODE (src));
8853      }
8854
8855    emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8856				    const0_rtx));
8857
8858    size = 0;
8859
8860    for (i = 0; i < XVECLEN (par, 0); i++)
8861      {
8862	HOST_WIDE_INT offset = 0;
8863	rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8864
8865	if (size != 0)
8866	  emit_move_insn (addr, plus_constant (addr, size));
8867
8868	mem = change_address (mem, GET_MODE (reg), NULL);
8869	if (REGNO (reg) == R0_REGNUM)
8870	  {
8871	    /* On thumb we have to use a write-back instruction.  */
8872	    emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8873			TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8874	    size = TARGET_ARM ? 16 : 0;
8875	  }
8876	else
8877	  {
8878	    emit_move_insn (mem, reg);
8879	    size = GET_MODE_SIZE (GET_MODE (reg));
8880	  }
8881      }
8882
8883    /* The optimizer does not know that the call sets the function value
8884       registers we stored in the result block.  We avoid problems by
8885       claiming that all hard registers are used and clobbered at this
8886       point.  */
8887    emit_insn (gen_blockage ());
8888
8889    DONE;
8890  }"
8891)
8892
8893(define_expand "untyped_return"
8894  [(match_operand:BLK 0 "memory_operand" "")
8895   (match_operand 1 "" "")]
8896  "TARGET_EITHER"
8897  "
8898  {
8899    int i;
8900    rtx addr = gen_reg_rtx (Pmode);
8901    rtx mem;
8902    int size = 0;
8903
8904    emit_move_insn (addr, XEXP (operands[0], 0));
8905    mem = change_address (operands[0], BLKmode, addr);
8906
8907    for (i = 0; i < XVECLEN (operands[1], 0); i++)
8908      {
8909	HOST_WIDE_INT offset = 0;
8910	rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8911
8912	if (size != 0)
8913	  emit_move_insn (addr, plus_constant (addr, size));
8914
8915	mem = change_address (mem, GET_MODE (reg), NULL);
8916	if (REGNO (reg) == R0_REGNUM)
8917	  {
8918	    /* On thumb we have to use a write-back instruction.  */
8919	    emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8920			TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8921	    size = TARGET_ARM ? 16 : 0;
8922	  }
8923	else
8924	  {
8925	    emit_move_insn (reg, mem);
8926	    size = GET_MODE_SIZE (GET_MODE (reg));
8927	  }
8928      }
8929
8930    /* Emit USE insns before the return.  */
8931    for (i = 0; i < XVECLEN (operands[1], 0); i++)
8932      emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8933
8934    /* Construct the return.  */
8935    expand_naked_return ();
8936
8937    DONE;
8938  }"
8939)
8940
8941;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8942;; all of memory.  This blocks insns from being moved across this point.
8943
8944(define_insn "blockage"
8945  [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8946  "TARGET_EITHER"
8947  ""
8948  [(set_attr "length" "0")
8949   (set_attr "type" "block")]
8950)
8951
8952(define_expand "casesi"
8953  [(match_operand:SI 0 "s_register_operand" "")	; index to jump on
8954   (match_operand:SI 1 "const_int_operand" "")	; lower bound
8955   (match_operand:SI 2 "const_int_operand" "")	; total range
8956   (match_operand:SI 3 "" "")			; table label
8957   (match_operand:SI 4 "" "")]			; Out of range label
8958  "TARGET_32BIT || optimize_size || flag_pic"
8959  "
8960  {
8961    enum insn_code code;
8962    if (operands[1] != const0_rtx)
8963      {
8964	rtx reg = gen_reg_rtx (SImode);
8965
8966	emit_insn (gen_addsi3 (reg, operands[0],
8967			       GEN_INT (-INTVAL (operands[1]))));
8968	operands[0] = reg;
8969      }
8970
8971    if (TARGET_ARM)
8972      code = CODE_FOR_arm_casesi_internal;
8973    else if (TARGET_THUMB1)
8974      code = CODE_FOR_thumb1_casesi_internal_pic;
8975    else if (flag_pic)
8976      code = CODE_FOR_thumb2_casesi_internal_pic;
8977    else
8978      code = CODE_FOR_thumb2_casesi_internal;
8979
8980    if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8981      operands[2] = force_reg (SImode, operands[2]);
8982
8983    emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8984					  operands[3], operands[4]));
8985    DONE;
8986  }"
8987)
8988
8989;; The USE in this pattern is needed to tell flow analysis that this is
8990;; a CASESI insn.  It has no other purpose.
8991(define_insn "arm_casesi_internal"
8992  [(parallel [(set (pc)
8993	       (if_then_else
8994		(leu (match_operand:SI 0 "s_register_operand" "r")
8995		     (match_operand:SI 1 "arm_rhs_operand" "rI"))
8996		(mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8997				 (label_ref (match_operand 2 "" ""))))
8998		(label_ref (match_operand 3 "" ""))))
8999	      (clobber (reg:CC CC_REGNUM))
9000	      (use (label_ref (match_dup 2)))])]
9001  "TARGET_ARM"
9002  "*
9003    if (flag_pic)
9004      return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9005    return   \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9006  "
9007  [(set_attr "conds" "clob")
9008   (set_attr "length" "12")]
9009)
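
;; A note on the dispatch sequence above: in ARM state the PC reads as the
;; address of the current instruction plus 8, so "ldrls pc, [pc, %0, asl #2]"
;; indexes a table of addresses starting immediately after the following
;; "b %l3", which is where the jump table at label %2 is emitted; the PIC
;; variant "addls pc, pc, %0, asl #2" instead steps into a table of branch
;; instructions at the same spot.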
9010
9011(define_expand "thumb1_casesi_internal_pic"
9012  [(match_operand:SI 0 "s_register_operand" "")
9013   (match_operand:SI 1 "thumb1_cmp_operand" "")
9014   (match_operand 2 "" "")
9015   (match_operand 3 "" "")]
9016  "TARGET_THUMB1"
9017  {
9018    rtx reg0;
9019    rtx test = gen_rtx_GTU (VOIDmode, operands[0], operands[1]);
9020    emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[1],
9021				    operands[3]));
9022    reg0 = gen_rtx_REG (SImode, 0);
9023    emit_move_insn (reg0, operands[0]);
9024    emit_jump_insn (gen_thumb1_casesi_dispatch (operands[2]/*, operands[3]*/));
9025    DONE;
9026  }
9027)
9028
9029(define_insn "thumb1_casesi_dispatch"
9030  [(parallel [(set (pc) (unspec [(reg:SI 0)
9031				 (label_ref (match_operand 0 "" ""))
9032;;				 (label_ref (match_operand 1 "" ""))
9033]
9034			 UNSPEC_THUMB1_CASESI))
9035	      (clobber (reg:SI IP_REGNUM))
9036              (clobber (reg:SI LR_REGNUM))])]
9037  "TARGET_THUMB1"
9038  "* return thumb1_output_casesi(operands);"
9039  [(set_attr "length" "4")]
9040)
9041
9042(define_expand "indirect_jump"
9043  [(set (pc)
9044	(match_operand:SI 0 "s_register_operand" ""))]
9045  "TARGET_EITHER"
9046  "
9047  /* Thumb-2 doesn't have mov pc, reg.  Explicitly set the low bit of the
9048     address and use bx.  */
9049  if (TARGET_THUMB2)
9050    {
9051      rtx tmp;
9052      tmp = gen_reg_rtx (SImode);
9053      emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9054      operands[0] = tmp;
9055    }
9056  "
9057)
9058
9059;; NB Never uses BX.
9060(define_insn "*arm_indirect_jump"
9061  [(set (pc)
9062	(match_operand:SI 0 "s_register_operand" "r"))]
9063  "TARGET_ARM"
9064  "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9065  [(set_attr "predicable" "yes")]
9066)
9067
9068(define_insn "*load_indirect_jump"
9069  [(set (pc)
9070	(match_operand:SI 0 "memory_operand" "m"))]
9071  "TARGET_ARM"
9072  "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9073  [(set_attr "type" "load1")
9074   (set_attr "pool_range" "4096")
9075   (set_attr "neg_pool_range" "4084")
9076   (set_attr "predicable" "yes")]
9077)
9078
9079;; NB Never uses BX.
9080(define_insn "*thumb1_indirect_jump"
9081  [(set (pc)
9082	(match_operand:SI 0 "register_operand" "l*r"))]
9083  "TARGET_THUMB1"
9084  "mov\\tpc, %0"
9085  [(set_attr "conds" "clob")
9086   (set_attr "length" "2")]
9087)
9088
9089
9090;; Misc insns
9091
9092(define_insn "nop"
9093  [(const_int 0)]
9094  "TARGET_EITHER"
9095  "*
9096  if (TARGET_UNIFIED_ASM)
9097    return \"nop\";
9098  if (TARGET_ARM)
9099    return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
9100  return  \"mov\\tr8, r8\";
9101  "
9102  [(set (attr "length")
9103	(if_then_else (eq_attr "is_thumb" "yes")
9104		      (const_int 2)
9105		      (const_int 4)))]
9106)
9107
9108
9109;; Patterns to allow combination of arithmetic, cond code and shifts
9110
9111(define_insn "*arith_shiftsi"
9112  [(set (match_operand:SI 0 "s_register_operand" "=r")
9113        (match_operator:SI 1 "shiftable_operator"
9114          [(match_operator:SI 3 "shift_operator"
9115             [(match_operand:SI 4 "s_register_operand" "r")
9116              (match_operand:SI 5 "reg_or_int_operand" "rI")])
9117           (match_operand:SI 2 "s_register_operand" "r")]))]
9118  "TARGET_ARM"
9119  "%i1%?\\t%0, %2, %4%S3"
9120  [(set_attr "predicable" "yes")
9121   (set_attr "shift" "4")
9122   (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9123		      (const_string "alu_shift")
9124		      (const_string "alu_shift_reg")))]
9125)
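
;; As an illustration of the output template above (assuming %i1 expands to
;; the mnemonic of operator 1 and %S3 to the shift applied to operand 4), an
;; RTL expression such as (plus:SI (ashift:SI r1 (const_int 2)) r2) would be
;; emitted as something like
;;	add	r0, r2, r1, lsl #2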
9126
9127(define_split
9128  [(set (match_operand:SI 0 "s_register_operand" "")
9129	(match_operator:SI 1 "shiftable_operator"
9130	 [(match_operator:SI 2 "shiftable_operator"
9131	   [(match_operator:SI 3 "shift_operator"
9132	     [(match_operand:SI 4 "s_register_operand" "")
9133	      (match_operand:SI 5 "reg_or_int_operand" "")])
9134	    (match_operand:SI 6 "s_register_operand" "")])
9135	  (match_operand:SI 7 "arm_rhs_operand" "")]))
9136   (clobber (match_operand:SI 8 "s_register_operand" ""))]
9137  "TARGET_ARM"
9138  [(set (match_dup 8)
9139	(match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9140			 (match_dup 6)]))
9141   (set (match_dup 0)
9142	(match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9143  "")
9144
9145(define_insn "*arith_shiftsi_compare0"
9146  [(set (reg:CC_NOOV CC_REGNUM)
9147        (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9148		          [(match_operator:SI 3 "shift_operator"
9149		            [(match_operand:SI 4 "s_register_operand" "r")
9150		             (match_operand:SI 5 "reg_or_int_operand" "rI")])
9151		           (match_operand:SI 2 "s_register_operand" "r")])
9152			 (const_int 0)))
9153   (set (match_operand:SI 0 "s_register_operand" "=r")
9154	(match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9155			 (match_dup 2)]))]
9156  "TARGET_ARM"
9157  "%i1%.\\t%0, %2, %4%S3"
9158  [(set_attr "conds" "set")
9159   (set_attr "shift" "4")
9160   (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9161		      (const_string "alu_shift")
9162		      (const_string "alu_shift_reg")))]
9163)
9164
9165(define_insn "*arith_shiftsi_compare0_scratch"
9166  [(set (reg:CC_NOOV CC_REGNUM)
9167        (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
9168		          [(match_operator:SI 3 "shift_operator"
9169		            [(match_operand:SI 4 "s_register_operand" "r")
9170		             (match_operand:SI 5 "reg_or_int_operand" "rI")])
9171		           (match_operand:SI 2 "s_register_operand" "r")])
9172			 (const_int 0)))
9173   (clobber (match_scratch:SI 0 "=r"))]
9174  "TARGET_ARM"
9175  "%i1%.\\t%0, %2, %4%S3"
9176  [(set_attr "conds" "set")
9177   (set_attr "shift" "4")
9178   (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9179		      (const_string "alu_shift")
9180		      (const_string "alu_shift_reg")))]
9181)
9182
9183(define_insn "*sub_shiftsi"
9184  [(set (match_operand:SI 0 "s_register_operand" "=r")
9185	(minus:SI (match_operand:SI 1 "s_register_operand" "r")
9186		  (match_operator:SI 2 "shift_operator"
9187		   [(match_operand:SI 3 "s_register_operand" "r")
9188		    (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9189  "TARGET_ARM"
9190  "sub%?\\t%0, %1, %3%S2"
9191  [(set_attr "predicable" "yes")
9192   (set_attr "shift" "3")
9193   (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9194		      (const_string "alu_shift")
9195		      (const_string "alu_shift_reg")))]
9196)
9197
9198(define_insn "*sub_shiftsi_compare0"
9199  [(set (reg:CC_NOOV CC_REGNUM)
9200	(compare:CC_NOOV
9201	 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9202		   (match_operator:SI 2 "shift_operator"
9203		    [(match_operand:SI 3 "s_register_operand" "r")
9204		     (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9205	 (const_int 0)))
9206   (set (match_operand:SI 0 "s_register_operand" "=r")
9207	(minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9208						 (match_dup 4)])))]
9209  "TARGET_ARM"
9210  "sub%.\\t%0, %1, %3%S2"
9211  [(set_attr "conds" "set")
9212   (set_attr "shift" "3")
9213   (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9214		      (const_string "alu_shift")
9215		      (const_string "alu_shift_reg")))]
9216)
9217
9218(define_insn "*sub_shiftsi_compare0_scratch"
9219  [(set (reg:CC_NOOV CC_REGNUM)
9220	(compare:CC_NOOV
9221	 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9222		   (match_operator:SI 2 "shift_operator"
9223		    [(match_operand:SI 3 "s_register_operand" "r")
9224		     (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9225	 (const_int 0)))
9226   (clobber (match_scratch:SI 0 "=r"))]
9227  "TARGET_ARM"
9228  "sub%.\\t%0, %1, %3%S2"
9229  [(set_attr "conds" "set")
9230   (set_attr "shift" "3")
9231   (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9232		      (const_string "alu_shift")
9233		      (const_string "alu_shift_reg")))]
9234)
9235
9236
9237
9238(define_insn "*and_scc"
9239  [(set (match_operand:SI 0 "s_register_operand" "=r")
9240	(and:SI (match_operator:SI 1 "arm_comparison_operator"
9241		 [(match_operand 3 "cc_register" "") (const_int 0)])
9242		(match_operand:SI 2 "s_register_operand" "r")))]
9243  "TARGET_ARM"
9244  "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9245  [(set_attr "conds" "use")
9246   (set_attr "length" "8")]
9247)
9248
9249(define_insn "*ior_scc"
9250  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9251	(ior:SI (match_operator:SI 2 "arm_comparison_operator"
9252		 [(match_operand 3 "cc_register" "") (const_int 0)])
9253		(match_operand:SI 1 "s_register_operand" "0,?r")))]
9254  "TARGET_ARM"
9255  "@
9256   orr%d2\\t%0, %1, #1
9257   mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9258  [(set_attr "conds" "use")
9259   (set_attr "length" "4,8")]
9260)
9261
9262(define_insn "*compare_scc"
9263  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9264	(match_operator:SI 1 "arm_comparison_operator"
9265	 [(match_operand:SI 2 "s_register_operand" "r,r")
9266	  (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9267   (clobber (reg:CC CC_REGNUM))]
9268  "TARGET_ARM"
9269  "*
9270    if (operands[3] == const0_rtx)
9271      {
9272	if (GET_CODE (operands[1]) == LT)
9273	  return \"mov\\t%0, %2, lsr #31\";
9274
9275	if (GET_CODE (operands[1]) == GE)
9276	  return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9277
9278	if (GET_CODE (operands[1]) == EQ)
9279	  return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9280      }
9281
9282    if (GET_CODE (operands[1]) == NE)
9283      {
9284        if (which_alternative == 1)
9285	  return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9286        return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9287      }
9288    if (which_alternative == 1)
9289      output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9290    else
9291      output_asm_insn (\"cmp\\t%2, %3\", operands);
9292    return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9293  "
9294  [(set_attr "conds" "clob")
9295   (set_attr "length" "12")]
9296)
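
;; Some of the shortcuts used above, for reference: "mov %0, %2, lsr #31"
;; copies the sign bit of %2 into bit 0, which is exactly (x < 0) as a 0/1
;; value; and for (x == 0) the sequence "rsbs %0, %2, #1; movcc %0, #0"
;; computes 1 - x, which avoids a borrow (carry set) only for x equal to 0
;; or 1, leaving 1 for x == 0 and 0 in every other case.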
9297
9298(define_insn "*cond_move"
9299  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9300	(if_then_else:SI (match_operator 3 "equality_operator"
9301			  [(match_operator 4 "arm_comparison_operator"
9302			    [(match_operand 5 "cc_register" "") (const_int 0)])
9303			   (const_int 0)])
9304			 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9305			 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9306  "TARGET_ARM"
9307  "*
9308    if (GET_CODE (operands[3]) == NE)
9309      {
9310        if (which_alternative != 1)
9311	  output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9312        if (which_alternative != 0)
9313	  output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9314        return \"\";
9315      }
9316    if (which_alternative != 0)
9317      output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9318    if (which_alternative != 1)
9319      output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9320    return \"\";
9321  "
9322  [(set_attr "conds" "use")
9323   (set_attr "length" "4,4,8")]
9324)
9325
9326(define_insn "*cond_arith"
9327  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9328        (match_operator:SI 5 "shiftable_operator"
9329	 [(match_operator:SI 4 "arm_comparison_operator"
9330           [(match_operand:SI 2 "s_register_operand" "r,r")
9331	    (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9332          (match_operand:SI 1 "s_register_operand" "0,?r")]))
9333   (clobber (reg:CC CC_REGNUM))]
9334  "TARGET_ARM"
9335  "*
9336    if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9337      return \"%i5\\t%0, %1, %2, lsr #31\";
9338
9339    output_asm_insn (\"cmp\\t%2, %3\", operands);
9340    if (GET_CODE (operands[5]) == AND)
9341      output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9342    else if (GET_CODE (operands[5]) == MINUS)
9343      output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9344    else if (which_alternative != 0)
9345      output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9346    return \"%i5%d4\\t%0, %1, #1\";
9347  "
9348  [(set_attr "conds" "clob")
9349   (set_attr "length" "12")]
9350)
9351
9352(define_insn "*cond_sub"
9353  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9354        (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9355		  (match_operator:SI 4 "arm_comparison_operator"
9356                   [(match_operand:SI 2 "s_register_operand" "r,r")
9357		    (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9358   (clobber (reg:CC CC_REGNUM))]
9359  "TARGET_ARM"
9360  "*
9361    output_asm_insn (\"cmp\\t%2, %3\", operands);
9362    if (which_alternative != 0)
9363      output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9364    return \"sub%d4\\t%0, %1, #1\";
9365  "
9366  [(set_attr "conds" "clob")
9367   (set_attr "length" "8,12")]
9368)
9369
9370;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9371(define_insn "*cmp_ite0"
9372  [(set (match_operand 6 "dominant_cc_register" "")
9373	(compare
9374	 (if_then_else:SI
9375	  (match_operator 4 "arm_comparison_operator"
9376	   [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9377	    (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9378	  (match_operator:SI 5 "arm_comparison_operator"
9379	   [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9380	    (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9381	  (const_int 0))
9382	 (const_int 0)))]
9383  "TARGET_ARM"
9384  "*
9385  {
9386    static const char * const opcodes[4][2] =
9387    {
9388      {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9389       \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9390      {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9391       \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9392      {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9393       \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9394      {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9395       \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9396    };
9397    int swap =
9398      comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9399
9400    return opcodes[which_alternative][swap];
9401  }"
9402  [(set_attr "conds" "set")
9403   (set_attr "length" "8")]
9404)
9405
9406(define_insn "*cmp_ite1"
9407  [(set (match_operand 6 "dominant_cc_register" "")
9408	(compare
9409	 (if_then_else:SI
9410	  (match_operator 4 "arm_comparison_operator"
9411	   [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9412	    (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9413	  (match_operator:SI 5 "arm_comparison_operator"
9414	   [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9415	    (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9416	  (const_int 1))
9417	 (const_int 0)))]
9418  "TARGET_ARM"
9419  "*
9420  {
9421    static const char * const opcodes[4][2] =
9422    {
9423      {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9424       \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9425      {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9426       \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9427      {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9428       \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9429      {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9430       \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9431    };
9432    int swap =
9433      comparison_dominates_p (GET_CODE (operands[5]),
9434			      reverse_condition (GET_CODE (operands[4])));
9435
9436    return opcodes[which_alternative][swap];
9437  }"
9438  [(set_attr "conds" "set")
9439   (set_attr "length" "8")]
9440)
9441
9442(define_insn "*cmp_and"
9443  [(set (match_operand 6 "dominant_cc_register" "")
9444	(compare
9445	 (and:SI
9446	  (match_operator 4 "arm_comparison_operator"
9447	   [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9448	    (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9449	  (match_operator:SI 5 "arm_comparison_operator"
9450	   [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9451	    (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9452	 (const_int 0)))]
9453  "TARGET_ARM"
9454  "*
9455  {
9456    static const char *const opcodes[4][2] =
9457    {
9458      {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9459       \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9460      {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9461       \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9462      {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9463       \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9464      {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9465       \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9466    };
9467    int swap =
9468      comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9469
9470    return opcodes[which_alternative][swap];
9471  }"
9472  [(set_attr "conds" "set")
9473   (set_attr "predicable" "no")
9474   (set_attr "length" "8")]
9475)
9476
9477(define_insn "*cmp_ior"
9478  [(set (match_operand 6 "dominant_cc_register" "")
9479	(compare
9480	 (ior:SI
9481	  (match_operator 4 "arm_comparison_operator"
9482	   [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9483	    (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9484	  (match_operator:SI 5 "arm_comparison_operator"
9485	   [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9486	    (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9487	 (const_int 0)))]
9488  "TARGET_ARM"
9489  "*
9490{
9491  static const char *const opcodes[4][2] =
9492  {
9493    {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9494     \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9495    {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9496     \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9497    {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9498     \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9499    {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9500     \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9501  };
9502  int swap =
9503    comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9504
9505  return opcodes[which_alternative][swap];
9506}
9507"
9508  [(set_attr "conds" "set")
9509   (set_attr "length" "8")]
9510)
9511
9512(define_insn_and_split "*ior_scc_scc"
9513  [(set (match_operand:SI 0 "s_register_operand" "=r")
9514	(ior:SI (match_operator:SI 3 "arm_comparison_operator"
9515		 [(match_operand:SI 1 "s_register_operand" "r")
9516		  (match_operand:SI 2 "arm_add_operand" "rIL")])
9517		(match_operator:SI 6 "arm_comparison_operator"
9518		 [(match_operand:SI 4 "s_register_operand" "r")
9519		  (match_operand:SI 5 "arm_add_operand" "rIL")])))
9520   (clobber (reg:CC CC_REGNUM))]
9521  "TARGET_ARM
9522   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9523       != CCmode)"
9524  "#"
9525  "TARGET_ARM && reload_completed"
9526  [(set (match_dup 7)
9527	(compare
9528	 (ior:SI
9529	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9530	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9531	 (const_int 0)))
9532   (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9533  "operands[7]
9534     = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9535						  DOM_CC_X_OR_Y),
9536		    CC_REGNUM);"
9537  [(set_attr "conds" "clob")
9538   (set_attr "length" "16")])
9539
9540; If the above pattern is followed by a CMP insn, then the compare is
9541; redundant, since we can rework the conditional instruction that follows.
9542(define_insn_and_split "*ior_scc_scc_cmp"
9543  [(set (match_operand 0 "dominant_cc_register" "")
9544	(compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9545			  [(match_operand:SI 1 "s_register_operand" "r")
9546			   (match_operand:SI 2 "arm_add_operand" "rIL")])
9547			 (match_operator:SI 6 "arm_comparison_operator"
9548			  [(match_operand:SI 4 "s_register_operand" "r")
9549			   (match_operand:SI 5 "arm_add_operand" "rIL")]))
9550		 (const_int 0)))
9551   (set (match_operand:SI 7 "s_register_operand" "=r")
9552	(ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9553		(match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9554  "TARGET_ARM"
9555  "#"
9556  "TARGET_ARM && reload_completed"
9557  [(set (match_dup 0)
9558	(compare
9559	 (ior:SI
9560	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9561	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9562	 (const_int 0)))
9563   (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9564  ""
9565  [(set_attr "conds" "set")
9566   (set_attr "length" "16")])
9567
9568(define_insn_and_split "*and_scc_scc"
9569  [(set (match_operand:SI 0 "s_register_operand" "=r")
9570	(and:SI (match_operator:SI 3 "arm_comparison_operator"
9571		 [(match_operand:SI 1 "s_register_operand" "r")
9572		  (match_operand:SI 2 "arm_add_operand" "rIL")])
9573		(match_operator:SI 6 "arm_comparison_operator"
9574		 [(match_operand:SI 4 "s_register_operand" "r")
9575		  (match_operand:SI 5 "arm_add_operand" "rIL")])))
9576   (clobber (reg:CC CC_REGNUM))]
9577  "TARGET_ARM
9578   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9579       != CCmode)"
9580  "#"
9581  "TARGET_ARM && reload_completed
9582   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9583       != CCmode)"
9584  [(set (match_dup 7)
9585	(compare
9586	 (and:SI
9587	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9588	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9589	 (const_int 0)))
9590   (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9591  "operands[7]
9592     = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9593						  DOM_CC_X_AND_Y),
9594		    CC_REGNUM);"
9595  [(set_attr "conds" "clob")
9596   (set_attr "length" "16")])
9597
9598; If the above pattern is followed by a CMP insn, then the compare is
9599; redundant, since we can rework the conditional instruction that follows.
9600(define_insn_and_split "*and_scc_scc_cmp"
9601  [(set (match_operand 0 "dominant_cc_register" "")
9602	(compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9603			  [(match_operand:SI 1 "s_register_operand" "r")
9604			   (match_operand:SI 2 "arm_add_operand" "rIL")])
9605			 (match_operator:SI 6 "arm_comparison_operator"
9606			  [(match_operand:SI 4 "s_register_operand" "r")
9607			   (match_operand:SI 5 "arm_add_operand" "rIL")]))
9608		 (const_int 0)))
9609   (set (match_operand:SI 7 "s_register_operand" "=r")
9610	(and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9611		(match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9612  "TARGET_ARM"
9613  "#"
9614  "TARGET_ARM && reload_completed"
9615  [(set (match_dup 0)
9616	(compare
9617	 (and:SI
9618	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9619	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9620	 (const_int 0)))
9621   (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9622  ""
9623  [(set_attr "conds" "set")
9624   (set_attr "length" "16")])
9625
9626;; If there is no dominance in the comparison, then we can still save an
9627;; instruction in the AND case, since we know that the second compare
9628;; need only zero the value if it is false (if it is true, the value is
9629;; already correct).
9630(define_insn_and_split "*and_scc_scc_nodom"
9631  [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9632	(and:SI (match_operator:SI 3 "arm_comparison_operator"
9633		 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9634		  (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9635		(match_operator:SI 6 "arm_comparison_operator"
9636		 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9637		  (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9638   (clobber (reg:CC CC_REGNUM))]
9639  "TARGET_ARM
9640   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9641       == CCmode)"
9642  "#"
9643  "TARGET_ARM && reload_completed"
9644  [(parallel [(set (match_dup 0)
9645		   (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9646	      (clobber (reg:CC CC_REGNUM))])
9647   (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9648   (set (match_dup 0)
9649	(if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9650			 (match_dup 0)
9651			 (const_int 0)))]
9652  "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9653					      operands[4], operands[5]),
9654			      CC_REGNUM);
9655   operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9656				  operands[5]);"
9657  [(set_attr "conds" "clob")
9658   (set_attr "length" "20")])
9659
9660(define_split
9661  [(set (reg:CC_NOOV CC_REGNUM)
9662	(compare:CC_NOOV (ior:SI
9663			  (and:SI (match_operand:SI 0 "s_register_operand" "")
9664				  (const_int 1))
9665			  (match_operator:SI 1 "arm_comparison_operator"
9666			   [(match_operand:SI 2 "s_register_operand" "")
9667			    (match_operand:SI 3 "arm_add_operand" "")]))
9668			 (const_int 0)))
9669   (clobber (match_operand:SI 4 "s_register_operand" ""))]
9670  "TARGET_ARM"
9671  [(set (match_dup 4)
9672	(ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9673		(match_dup 0)))
9674   (set (reg:CC_NOOV CC_REGNUM)
9675	(compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9676			 (const_int 0)))]
9677  "")
9678
9679(define_split
9680  [(set (reg:CC_NOOV CC_REGNUM)
9681	(compare:CC_NOOV (ior:SI
9682			  (match_operator:SI 1 "arm_comparison_operator"
9683			   [(match_operand:SI 2 "s_register_operand" "")
9684			    (match_operand:SI 3 "arm_add_operand" "")])
9685			  (and:SI (match_operand:SI 0 "s_register_operand" "")
9686				  (const_int 1)))
9687			 (const_int 0)))
9688   (clobber (match_operand:SI 4 "s_register_operand" ""))]
9689  "TARGET_ARM"
9690  [(set (match_dup 4)
9691	(ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9692		(match_dup 0)))
9693   (set (reg:CC_NOOV CC_REGNUM)
9694	(compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9695			 (const_int 0)))]
9696  "")
9697;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9698
9699(define_insn "*negscc"
9700  [(set (match_operand:SI 0 "s_register_operand" "=r")
9701	(neg:SI (match_operator 3 "arm_comparison_operator"
9702		 [(match_operand:SI 1 "s_register_operand" "r")
9703		  (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9704   (clobber (reg:CC CC_REGNUM))]
9705  "TARGET_ARM"
9706  "*
9707  if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9708    return \"mov\\t%0, %1, asr #31\";
9709
9710  if (GET_CODE (operands[3]) == NE)
9711    return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9712
9713  output_asm_insn (\"cmp\\t%1, %2\", operands);
9714  output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9715  return \"mvn%d3\\t%0, #0\";
9716  "
9717  [(set_attr "conds" "clob")
9718   (set_attr "length" "12")]
9719)
9720
9721(define_insn "movcond"
9722  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9723	(if_then_else:SI
9724	 (match_operator 5 "arm_comparison_operator"
9725	  [(match_operand:SI 3 "s_register_operand" "r,r,r")
9726	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9727	 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9728	 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9729   (clobber (reg:CC CC_REGNUM))]
9730  "TARGET_ARM"
9731  "*
9732  if (GET_CODE (operands[5]) == LT
9733      && (operands[4] == const0_rtx))
9734    {
9735      if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9736	{
9737	  if (operands[2] == const0_rtx)
9738	    return \"and\\t%0, %1, %3, asr #31\";
9739	  return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9740	}
9741      else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9742	{
9743	  if (operands[1] == const0_rtx)
9744	    return \"bic\\t%0, %2, %3, asr #31\";
9745	  return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9746	}
9747      /* The only case that falls through to here is when both ops 1 & 2
9748	 are constants.  */
9749    }
9750
9751  if (GET_CODE (operands[5]) == GE
9752      && (operands[4] == const0_rtx))
9753    {
9754      if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9755	{
9756	  if (operands[2] == const0_rtx)
9757	    return \"bic\\t%0, %1, %3, asr #31\";
9758	  return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9759	}
9760      else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9761	{
9762	  if (operands[1] == const0_rtx)
9763	    return \"and\\t%0, %2, %3, asr #31\";
9764	  return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9765	}
9766      /* The only case that falls through to here is when both ops 1 & 2
9767	 are constants.  */
9768    }
9769  if (GET_CODE (operands[4]) == CONST_INT
9770      && !const_ok_for_arm (INTVAL (operands[4])))
9771    output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9772  else
9773    output_asm_insn (\"cmp\\t%3, %4\", operands);
9774  if (which_alternative != 0)
9775    output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9776  if (which_alternative != 1)
9777    output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9778  return \"\";
9779  "
9780  [(set_attr "conds" "clob")
9781   (set_attr "length" "8,8,12")]
9782)
9783
9784;; ??? The patterns below need checking for Thumb-2 usefulness.
9785
9786(define_insn "*ifcompare_plus_move"
9787  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9788	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
9789			  [(match_operand:SI 4 "s_register_operand" "r,r")
9790			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9791			 (plus:SI
9792			  (match_operand:SI 2 "s_register_operand" "r,r")
9793			  (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9794			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9795   (clobber (reg:CC CC_REGNUM))]
9796  "TARGET_ARM"
9797  "#"
9798  [(set_attr "conds" "clob")
9799   (set_attr "length" "8,12")]
9800)
9801
9802(define_insn "*if_plus_move"
9803  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9804	(if_then_else:SI
9805	 (match_operator 4 "arm_comparison_operator"
9806	  [(match_operand 5 "cc_register" "") (const_int 0)])
9807	 (plus:SI
9808	  (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9809	  (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9810	 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9811  "TARGET_ARM"
9812  "@
9813   add%d4\\t%0, %2, %3
9814   sub%d4\\t%0, %2, #%n3
9815   add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9816   sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9817  [(set_attr "conds" "use")
9818   (set_attr "length" "4,4,8,8")
9819   (set_attr "type" "*,*,*,*")]
9820)
9821
9822(define_insn "*ifcompare_move_plus"
9823  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9824	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
9825			  [(match_operand:SI 4 "s_register_operand" "r,r")
9826			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9827			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9828			 (plus:SI
9829			  (match_operand:SI 2 "s_register_operand" "r,r")
9830			  (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9831   (clobber (reg:CC CC_REGNUM))]
9832  "TARGET_ARM"
9833  "#"
9834  [(set_attr "conds" "clob")
9835   (set_attr "length" "8,12")]
9836)
9837
9838(define_insn "*if_move_plus"
9839  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9840	(if_then_else:SI
9841	 (match_operator 4 "arm_comparison_operator"
9842	  [(match_operand 5 "cc_register" "") (const_int 0)])
9843	 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9844	 (plus:SI
9845	  (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9846	  (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9847  "TARGET_ARM"
9848  "@
9849   add%D4\\t%0, %2, %3
9850   sub%D4\\t%0, %2, #%n3
9851   add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9852   sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9853  [(set_attr "conds" "use")
9854   (set_attr "length" "4,4,8,8")
9855   (set_attr "type" "*,*,*,*")]
9856)
9857
9858(define_insn "*ifcompare_arith_arith"
9859  [(set (match_operand:SI 0 "s_register_operand" "=r")
9860	(if_then_else:SI (match_operator 9 "arm_comparison_operator"
9861			  [(match_operand:SI 5 "s_register_operand" "r")
9862			   (match_operand:SI 6 "arm_add_operand" "rIL")])
9863			 (match_operator:SI 8 "shiftable_operator"
9864			  [(match_operand:SI 1 "s_register_operand" "r")
9865			   (match_operand:SI 2 "arm_rhs_operand" "rI")])
9866			 (match_operator:SI 7 "shiftable_operator"
9867			  [(match_operand:SI 3 "s_register_operand" "r")
9868			   (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9869   (clobber (reg:CC CC_REGNUM))]
9870  "TARGET_ARM"
9871  "#"
9872  [(set_attr "conds" "clob")
9873   (set_attr "length" "12")]
9874)
9875
9876(define_insn "*if_arith_arith"
9877  [(set (match_operand:SI 0 "s_register_operand" "=r")
9878	(if_then_else:SI (match_operator 5 "arm_comparison_operator"
9879			  [(match_operand 8 "cc_register" "") (const_int 0)])
9880			 (match_operator:SI 6 "shiftable_operator"
9881			  [(match_operand:SI 1 "s_register_operand" "r")
9882			   (match_operand:SI 2 "arm_rhs_operand" "rI")])
9883			 (match_operator:SI 7 "shiftable_operator"
9884			  [(match_operand:SI 3 "s_register_operand" "r")
9885			   (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9886  "TARGET_ARM"
9887  "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9888  [(set_attr "conds" "use")
9889   (set_attr "length" "8")]
9890)
9891
9892(define_insn "*ifcompare_arith_move"
9893  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9894	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
9895			  [(match_operand:SI 2 "s_register_operand" "r,r")
9896			   (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9897			 (match_operator:SI 7 "shiftable_operator"
9898			  [(match_operand:SI 4 "s_register_operand" "r,r")
9899			   (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9900			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9901   (clobber (reg:CC CC_REGNUM))]
9902  "TARGET_ARM"
9903  "*
9904  /* If we have an operation where (op x 0) is the identity operation, the
9905     conditional operator is LT or GE, we are comparing against zero, and
9906     everything is in registers, then we can do this in two instructions.  */
9907  if (operands[3] == const0_rtx
9908      && GET_CODE (operands[7]) != AND
9909      && GET_CODE (operands[5]) == REG
9910      && GET_CODE (operands[1]) == REG
9911      && REGNO (operands[1]) == REGNO (operands[4])
9912      && REGNO (operands[4]) != REGNO (operands[0]))
9913    {
9914      if (GET_CODE (operands[6]) == LT)
9915	return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9916      else if (GET_CODE (operands[6]) == GE)
9917	return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9918    }
9919  if (GET_CODE (operands[3]) == CONST_INT
9920      && !const_ok_for_arm (INTVAL (operands[3])))
9921    output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9922  else
9923    output_asm_insn (\"cmp\\t%2, %3\", operands);
9924  output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9925  if (which_alternative != 0)
9926    return \"mov%D6\\t%0, %1\";
9927  return \"\";
9928  "
9929  [(set_attr "conds" "clob")
9930   (set_attr "length" "8,12")]
9931)
9932
9933(define_insn "*if_arith_move"
9934  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9935	(if_then_else:SI (match_operator 4 "arm_comparison_operator"
9936			  [(match_operand 6 "cc_register" "") (const_int 0)])
9937			 (match_operator:SI 5 "shiftable_operator"
9938			  [(match_operand:SI 2 "s_register_operand" "r,r")
9939			   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9940			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9941  "TARGET_ARM"
9942  "@
9943   %I5%d4\\t%0, %2, %3
9944   %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9945  [(set_attr "conds" "use")
9946   (set_attr "length" "4,8")
9947   (set_attr "type" "*,*")]
9948)
9949
9950(define_insn "*ifcompare_move_arith"
9951  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9952	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
9953			  [(match_operand:SI 4 "s_register_operand" "r,r")
9954			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9955			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9956			 (match_operator:SI 7 "shiftable_operator"
9957			  [(match_operand:SI 2 "s_register_operand" "r,r")
9958			   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9959   (clobber (reg:CC CC_REGNUM))]
9960  "TARGET_ARM"
9961  "*
9962  /* If we have an operation where (op x 0) is the identity operation, the
9963     conditional operator is LT or GE, we are comparing against zero, and
9964     everything is in registers, then we can do this in two instructions.  */
9965  if (operands[5] == const0_rtx
9966      && GET_CODE (operands[7]) != AND
9967      && GET_CODE (operands[3]) == REG
9968      && GET_CODE (operands[1]) == REG
9969      && REGNO (operands[1]) == REGNO (operands[2])
9970      && REGNO (operands[2]) != REGNO (operands[0]))
9971    {
9972      if (GET_CODE (operands[6]) == GE)
9973	return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9974      else if (GET_CODE (operands[6]) == LT)
9975	return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9976    }
9977
9978  if (GET_CODE (operands[5]) == CONST_INT
9979      && !const_ok_for_arm (INTVAL (operands[5])))
9980    output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9981  else
9982    output_asm_insn (\"cmp\\t%4, %5\", operands);
9983
9984  if (which_alternative != 0)
9985    output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9986  return \"%I7%D6\\t%0, %2, %3\";
9987  "
9988  [(set_attr "conds" "clob")
9989   (set_attr "length" "8,12")]
9990)
9991
9992(define_insn "*if_move_arith"
9993  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9994	(if_then_else:SI
9995	 (match_operator 4 "arm_comparison_operator"
9996	  [(match_operand 6 "cc_register" "") (const_int 0)])
9997	 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9998	 (match_operator:SI 5 "shiftable_operator"
9999	  [(match_operand:SI 2 "s_register_operand" "r,r")
10000	   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10001  "TARGET_ARM"
10002  "@
10003   %I5%D4\\t%0, %2, %3
10004   %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10005  [(set_attr "conds" "use")
10006   (set_attr "length" "4,8")
10007   (set_attr "type" "*,*")]
10008)
10009
10010(define_insn "*ifcompare_move_not"
10011  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10012	(if_then_else:SI
10013	 (match_operator 5 "arm_comparison_operator"
10014	  [(match_operand:SI 3 "s_register_operand" "r,r")
10015	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10016	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10017	 (not:SI
10018	  (match_operand:SI 2 "s_register_operand" "r,r"))))
10019   (clobber (reg:CC CC_REGNUM))]
10020  "TARGET_ARM"
10021  "#"
10022  [(set_attr "conds" "clob")
10023   (set_attr "length" "8,12")]
10024)
10025
10026(define_insn "*if_move_not"
10027  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10028	(if_then_else:SI
10029	 (match_operator 4 "arm_comparison_operator"
10030	  [(match_operand 3 "cc_register" "") (const_int 0)])
10031	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10032	 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10033  "TARGET_ARM"
10034  "@
10035   mvn%D4\\t%0, %2
10036   mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10037   mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10038  [(set_attr "conds" "use")
10039   (set_attr "length" "4,8,8")]
10040)
10041
10042(define_insn "*ifcompare_not_move"
10043  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10044	(if_then_else:SI
10045	 (match_operator 5 "arm_comparison_operator"
10046	  [(match_operand:SI 3 "s_register_operand" "r,r")
10047	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10048	 (not:SI
10049	  (match_operand:SI 2 "s_register_operand" "r,r"))
10050	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10051   (clobber (reg:CC CC_REGNUM))]
10052  "TARGET_ARM"
10053  "#"
10054  [(set_attr "conds" "clob")
10055   (set_attr "length" "8,12")]
10056)
10057
10058(define_insn "*if_not_move"
10059  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10060	(if_then_else:SI
10061	 (match_operator 4 "arm_comparison_operator"
10062	  [(match_operand 3 "cc_register" "") (const_int 0)])
10063	 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10064	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10065  "TARGET_ARM"
10066  "@
10067   mvn%d4\\t%0, %2
10068   mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10069   mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10070  [(set_attr "conds" "use")
10071   (set_attr "length" "4,8,8")]
10072)
10073
10074(define_insn "*ifcompare_shift_move"
10075  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10076	(if_then_else:SI
10077	 (match_operator 6 "arm_comparison_operator"
10078	  [(match_operand:SI 4 "s_register_operand" "r,r")
10079	   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10080	 (match_operator:SI 7 "shift_operator"
10081	  [(match_operand:SI 2 "s_register_operand" "r,r")
10082	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10083	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10084   (clobber (reg:CC CC_REGNUM))]
10085  "TARGET_ARM"
10086  "#"
10087  [(set_attr "conds" "clob")
10088   (set_attr "length" "8,12")]
10089)
10090
10091(define_insn "*if_shift_move"
10092  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10093	(if_then_else:SI
10094	 (match_operator 5 "arm_comparison_operator"
10095	  [(match_operand 6 "cc_register" "") (const_int 0)])
10096	 (match_operator:SI 4 "shift_operator"
10097	  [(match_operand:SI 2 "s_register_operand" "r,r,r")
10098	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10099	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10100  "TARGET_ARM"
10101  "@
10102   mov%d5\\t%0, %2%S4
10103   mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10104   mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10105  [(set_attr "conds" "use")
10106   (set_attr "shift" "2")
10107   (set_attr "length" "4,8,8")
10108   (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10109		      (const_string "alu_shift")
10110		      (const_string "alu_shift_reg")))]
10111)
10112
10113(define_insn "*ifcompare_move_shift"
10114  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10115	(if_then_else:SI
10116	 (match_operator 6 "arm_comparison_operator"
10117	  [(match_operand:SI 4 "s_register_operand" "r,r")
10118	   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10119	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10120	 (match_operator:SI 7 "shift_operator"
10121	  [(match_operand:SI 2 "s_register_operand" "r,r")
10122	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10123   (clobber (reg:CC CC_REGNUM))]
10124  "TARGET_ARM"
10125  "#"
10126  [(set_attr "conds" "clob")
10127   (set_attr "length" "8,12")]
10128)
10129
10130(define_insn "*if_move_shift"
10131  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10132	(if_then_else:SI
10133	 (match_operator 5 "arm_comparison_operator"
10134	  [(match_operand 6 "cc_register" "") (const_int 0)])
10135	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10136	 (match_operator:SI 4 "shift_operator"
10137	  [(match_operand:SI 2 "s_register_operand" "r,r,r")
10138	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10139  "TARGET_ARM"
10140  "@
10141   mov%D5\\t%0, %2%S4
10142   mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10143   mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10144  [(set_attr "conds" "use")
10145   (set_attr "shift" "2")
10146   (set_attr "length" "4,8,8")
10147   (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
10148		      (const_string "alu_shift")
10149		      (const_string "alu_shift_reg")))]
10150)
10151
10152(define_insn "*ifcompare_shift_shift"
10153  [(set (match_operand:SI 0 "s_register_operand" "=r")
10154	(if_then_else:SI
10155	 (match_operator 7 "arm_comparison_operator"
10156	  [(match_operand:SI 5 "s_register_operand" "r")
10157	   (match_operand:SI 6 "arm_add_operand" "rIL")])
10158	 (match_operator:SI 8 "shift_operator"
10159	  [(match_operand:SI 1 "s_register_operand" "r")
10160	   (match_operand:SI 2 "arm_rhs_operand" "rM")])
10161	 (match_operator:SI 9 "shift_operator"
10162	  [(match_operand:SI 3 "s_register_operand" "r")
10163	   (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10164   (clobber (reg:CC CC_REGNUM))]
10165  "TARGET_ARM"
10166  "#"
10167  [(set_attr "conds" "clob")
10168   (set_attr "length" "12")]
10169)
10170
10171(define_insn "*if_shift_shift"
10172  [(set (match_operand:SI 0 "s_register_operand" "=r")
10173	(if_then_else:SI
10174	 (match_operator 5 "arm_comparison_operator"
10175	  [(match_operand 8 "cc_register" "") (const_int 0)])
10176	 (match_operator:SI 6 "shift_operator"
10177	  [(match_operand:SI 1 "s_register_operand" "r")
10178	   (match_operand:SI 2 "arm_rhs_operand" "rM")])
10179	 (match_operator:SI 7 "shift_operator"
10180	  [(match_operand:SI 3 "s_register_operand" "r")
10181	   (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10182  "TARGET_ARM"
10183  "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10184  [(set_attr "conds" "use")
10185   (set_attr "shift" "1")
10186   (set_attr "length" "8")
10187   (set (attr "type") (if_then_else
10188		        (and (match_operand 2 "const_int_operand" "")
10189                             (match_operand 4 "const_int_operand" ""))
10190		      (const_string "alu_shift")
10191		      (const_string "alu_shift_reg")))]
10192)
10193
10194(define_insn "*ifcompare_not_arith"
10195  [(set (match_operand:SI 0 "s_register_operand" "=r")
10196	(if_then_else:SI
10197	 (match_operator 6 "arm_comparison_operator"
10198	  [(match_operand:SI 4 "s_register_operand" "r")
10199	   (match_operand:SI 5 "arm_add_operand" "rIL")])
10200	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10201	 (match_operator:SI 7 "shiftable_operator"
10202	  [(match_operand:SI 2 "s_register_operand" "r")
10203	   (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10204   (clobber (reg:CC CC_REGNUM))]
10205  "TARGET_ARM"
10206  "#"
10207  [(set_attr "conds" "clob")
10208   (set_attr "length" "12")]
10209)
10210
10211(define_insn "*if_not_arith"
10212  [(set (match_operand:SI 0 "s_register_operand" "=r")
10213	(if_then_else:SI
10214	 (match_operator 5 "arm_comparison_operator"
10215	  [(match_operand 4 "cc_register" "") (const_int 0)])
10216	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10217	 (match_operator:SI 6 "shiftable_operator"
10218	  [(match_operand:SI 2 "s_register_operand" "r")
10219	   (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10220  "TARGET_ARM"
10221  "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10222  [(set_attr "conds" "use")
10223   (set_attr "length" "8")]
10224)
10225
10226(define_insn "*ifcompare_arith_not"
10227  [(set (match_operand:SI 0 "s_register_operand" "=r")
10228	(if_then_else:SI
10229	 (match_operator 6 "arm_comparison_operator"
10230	  [(match_operand:SI 4 "s_register_operand" "r")
10231	   (match_operand:SI 5 "arm_add_operand" "rIL")])
10232	 (match_operator:SI 7 "shiftable_operator"
10233	  [(match_operand:SI 2 "s_register_operand" "r")
10234	   (match_operand:SI 3 "arm_rhs_operand" "rI")])
10235	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10236   (clobber (reg:CC CC_REGNUM))]
10237  "TARGET_ARM"
10238  "#"
10239  [(set_attr "conds" "clob")
10240   (set_attr "length" "12")]
10241)
10242
10243(define_insn "*if_arith_not"
10244  [(set (match_operand:SI 0 "s_register_operand" "=r")
10245	(if_then_else:SI
10246	 (match_operator 5 "arm_comparison_operator"
10247	  [(match_operand 4 "cc_register" "") (const_int 0)])
10248	 (match_operator:SI 6 "shiftable_operator"
10249	  [(match_operand:SI 2 "s_register_operand" "r")
10250	   (match_operand:SI 3 "arm_rhs_operand" "rI")])
10251	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10252  "TARGET_ARM"
10253  "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10254  [(set_attr "conds" "use")
10255   (set_attr "length" "8")]
10256)
10257
10258(define_insn "*ifcompare_neg_move"
10259  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10260	(if_then_else:SI
10261	 (match_operator 5 "arm_comparison_operator"
10262	  [(match_operand:SI 3 "s_register_operand" "r,r")
10263	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10264	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10265	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10266   (clobber (reg:CC CC_REGNUM))]
10267  "TARGET_ARM"
10268  "#"
10269  [(set_attr "conds" "clob")
10270   (set_attr "length" "8,12")]
10271)
10272
10273(define_insn "*if_neg_move"
10274  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10275	(if_then_else:SI
10276	 (match_operator 4 "arm_comparison_operator"
10277	  [(match_operand 3 "cc_register" "") (const_int 0)])
10278	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10279	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10280  "TARGET_ARM"
10281  "@
10282   rsb%d4\\t%0, %2, #0
10283   mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10284   mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10285  [(set_attr "conds" "use")
10286   (set_attr "length" "4,8,8")]
10287)
10288
10289(define_insn "*ifcompare_move_neg"
10290  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10291	(if_then_else:SI
10292	 (match_operator 5 "arm_comparison_operator"
10293	  [(match_operand:SI 3 "s_register_operand" "r,r")
10294	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10295	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10296	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10297   (clobber (reg:CC CC_REGNUM))]
10298  "TARGET_ARM"
10299  "#"
10300  [(set_attr "conds" "clob")
10301   (set_attr "length" "8,12")]
10302)
10303
10304(define_insn "*if_move_neg"
10305  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10306	(if_then_else:SI
10307	 (match_operator 4 "arm_comparison_operator"
10308	  [(match_operand 3 "cc_register" "") (const_int 0)])
10309	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10310	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10311  "TARGET_ARM"
10312  "@
10313   rsb%D4\\t%0, %2, #0
10314   mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10315   mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10316  [(set_attr "conds" "use")
10317   (set_attr "length" "4,8,8")]
10318)
10319
10320(define_insn "*arith_adjacentmem"
10321  [(set (match_operand:SI 0 "s_register_operand" "=r")
10322	(match_operator:SI 1 "shiftable_operator"
10323	 [(match_operand:SI 2 "memory_operand" "m")
10324	  (match_operand:SI 3 "memory_operand" "m")]))
10325   (clobber (match_scratch:SI 4 "=r"))]
10326  "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10327  "*
10328  {
10329    rtx ldm[3];
10330    rtx arith[4];
10331    rtx base_reg;
10332    HOST_WIDE_INT val1 = 0, val2 = 0;
10333
10334    if (REGNO (operands[0]) > REGNO (operands[4]))
10335      {
10336	ldm[1] = operands[4];
10337	ldm[2] = operands[0];
10338      }
10339    else
10340      {
10341	ldm[1] = operands[0];
10342	ldm[2] = operands[4];
10343      }
10344
10345    base_reg = XEXP (operands[2], 0);
10346
10347    if (!REG_P (base_reg))
10348      {
10349	val1 = INTVAL (XEXP (base_reg, 1));
10350	base_reg = XEXP (base_reg, 0);
10351      }
10352
10353    if (!REG_P (XEXP (operands[3], 0)))
10354      val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10355
10356    arith[0] = operands[0];
10357    arith[3] = operands[1];
10358
10359    if (val1 < val2)
10360      {
10361	arith[1] = ldm[1];
10362	arith[2] = ldm[2];
10363      }
10364    else
10365      {
10366	arith[1] = ldm[2];
10367	arith[2] = ldm[1];
10368      }
10369
10370    ldm[0] = base_reg;
10371    if (val1 != 0 && val2 != 0)
10372      {
10373	rtx ops[3];
10374
10375	if (val1 == 4 || val2 == 4)
10376	  /* Other val must be 8, since we know they are adjacent and neither
10377	     is zero.  */
10378	  output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10379	else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10380	  {
10381	    ldm[0] = ops[0] = operands[4];
10382	    ops[1] = base_reg;
10383	    ops[2] = GEN_INT (val1);
10384	    output_add_immediate (ops);
10385	    if (val1 < val2)
10386	      output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10387	    else
10388	      output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10389	  }
10390	else
10391	  {
10392	    /* Offset is out of range for a single add, so use two ldr.  */
10393	    ops[0] = ldm[1];
10394	    ops[1] = base_reg;
10395	    ops[2] = GEN_INT (val1);
10396	    output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10397	    ops[0] = ldm[2];
10398	    ops[2] = GEN_INT (val2);
10399	    output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10400	  }
10401      }
10402    else if (val1 != 0)
10403      {
10404	if (val1 < val2)
10405	  output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10406	else
10407	  output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10408      }
10409    else
10410      {
10411	if (val1 < val2)
10412	  output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10413	else
10414	  output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10415      }
10416    output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10417    return \"\";
10418  }"
10419  [(set_attr "length" "12")
10420   (set_attr "predicable" "yes")
10421   (set_attr "type" "load1")]
10422)
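;; Illustrative example only: a function such as
;;   int sum_pair (int *p) { return p[0] + p[1]; }
;; loads two adjacent words, so the pattern above can fetch both values with a
;; single ldm and then apply the arithmetic operation to the loaded registers.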
10423
10424; This pattern is never tried by combine, so do it as a peephole
10425
10426(define_peephole2
10427  [(set (match_operand:SI 0 "arm_general_register_operand" "")
10428	(match_operand:SI 1 "arm_general_register_operand" ""))
10429   (set (reg:CC CC_REGNUM)
10430	(compare:CC (match_dup 1) (const_int 0)))]
10431  "TARGET_ARM"
10432  [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10433	      (set (match_dup 0) (match_dup 1))])]
10434  ""
10435)
10436
10437; Peepholes to spot possible load- and store-multiples; if the ordering is
10438; reversed, check that the memory references aren't volatile.
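; For example (illustrative only), four consecutive word loads such as
;   a = p[0]; b = p[1]; c = p[2]; d = p[3];
; can be recognised by load_multiple_sequence and emitted as one ldm by
; emit_ldm_seq when the registers and offsets are suitable; the store
; peepholes below do the same for stm via store_multiple_sequence.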
10439
10440(define_peephole
10441  [(set (match_operand:SI 0 "s_register_operand" "=rk")
10442        (match_operand:SI 4 "memory_operand" "m"))
10443   (set (match_operand:SI 1 "s_register_operand" "=rk")
10444        (match_operand:SI 5 "memory_operand" "m"))
10445   (set (match_operand:SI 2 "s_register_operand" "=rk")
10446        (match_operand:SI 6 "memory_operand" "m"))
10447   (set (match_operand:SI 3 "s_register_operand" "=rk")
10448        (match_operand:SI 7 "memory_operand" "m"))]
10449  "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10450  "*
10451  return emit_ldm_seq (operands, 4);
10452  "
10453)
10454
10455(define_peephole
10456  [(set (match_operand:SI 0 "s_register_operand" "=rk")
10457        (match_operand:SI 3 "memory_operand" "m"))
10458   (set (match_operand:SI 1 "s_register_operand" "=rk")
10459        (match_operand:SI 4 "memory_operand" "m"))
10460   (set (match_operand:SI 2 "s_register_operand" "=rk")
10461        (match_operand:SI 5 "memory_operand" "m"))]
10462  "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10463  "*
10464  return emit_ldm_seq (operands, 3);
10465  "
10466)
10467
10468(define_peephole
10469  [(set (match_operand:SI 0 "s_register_operand" "=rk")
10470        (match_operand:SI 2 "memory_operand" "m"))
10471   (set (match_operand:SI 1 "s_register_operand" "=rk")
10472        (match_operand:SI 3 "memory_operand" "m"))]
10473  "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10474  "*
10475  return emit_ldm_seq (operands, 2);
10476  "
10477)
10478
10479(define_peephole
10480  [(set (match_operand:SI 4 "memory_operand" "=m")
10481        (match_operand:SI 0 "s_register_operand" "rk"))
10482   (set (match_operand:SI 5 "memory_operand" "=m")
10483        (match_operand:SI 1 "s_register_operand" "rk"))
10484   (set (match_operand:SI 6 "memory_operand" "=m")
10485        (match_operand:SI 2 "s_register_operand" "rk"))
10486   (set (match_operand:SI 7 "memory_operand" "=m")
10487        (match_operand:SI 3 "s_register_operand" "rk"))]
10488  "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10489  "*
10490  return emit_stm_seq (operands, 4);
10491  "
10492)
10493
10494(define_peephole
10495  [(set (match_operand:SI 3 "memory_operand" "=m")
10496        (match_operand:SI 0 "s_register_operand" "rk"))
10497   (set (match_operand:SI 4 "memory_operand" "=m")
10498        (match_operand:SI 1 "s_register_operand" "rk"))
10499   (set (match_operand:SI 5 "memory_operand" "=m")
10500        (match_operand:SI 2 "s_register_operand" "rk"))]
10501  "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10502  "*
10503  return emit_stm_seq (operands, 3);
10504  "
10505)
10506
10507(define_peephole
10508  [(set (match_operand:SI 2 "memory_operand" "=m")
10509        (match_operand:SI 0 "s_register_operand" "rk"))
10510   (set (match_operand:SI 3 "memory_operand" "=m")
10511        (match_operand:SI 1 "s_register_operand" "rk"))]
10512  "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10513  "*
10514  return emit_stm_seq (operands, 2);
10515  "
10516)
10517
10518(define_split
10519  [(set (match_operand:SI 0 "s_register_operand" "")
10520	(and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10521		       (const_int 0))
10522		(neg:SI (match_operator:SI 2 "arm_comparison_operator"
10523			 [(match_operand:SI 3 "s_register_operand" "")
10524			  (match_operand:SI 4 "arm_rhs_operand" "")]))))
10525   (clobber (match_operand:SI 5 "s_register_operand" ""))]
10526  "TARGET_ARM"
10527  [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10528   (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10529			      (match_dup 5)))]
10530  ""
10531)
10532
10533;; This split can be used because CC_Z mode implies that the following
10534;; branch will be an equality, or an unsigned inequality, so the sign
10535;; extension is not needed.
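;; Illustrative example only: a byte test such as
;;   if (*(unsigned char *) p == 0x12) ...
;; only cares about the low 8 bits, so the shifted QImode comparison matched
;; below can instead be done as a zero_extend followed by a compare against
;; the constant shifted back down by 24 bits.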
10536
10537(define_split
10538  [(set (reg:CC_Z CC_REGNUM)
10539	(compare:CC_Z
10540	 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10541		    (const_int 24))
10542	 (match_operand 1 "const_int_operand" "")))
10543   (clobber (match_scratch:SI 2 ""))]
10544  "TARGET_ARM
10545   && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10546       == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10547  [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10548   (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10549  "
10550  operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10551  "
10552)
10553;; ??? Check the patterns above for Thumb-2 usefulness
10554
10555(define_expand "prologue"
10556  [(clobber (const_int 0))]
10557  "TARGET_EITHER"
10558  "if (TARGET_32BIT)
10559     arm_expand_prologue ();
10560   else
10561     thumb1_expand_prologue ();
10562  DONE;
10563  "
10564)
10565
10566(define_expand "epilogue"
10567  [(clobber (const_int 0))]
10568  "TARGET_EITHER"
10569  "
10570  if (crtl->calls_eh_return)
10571    emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10572  if (TARGET_THUMB1)
10573    thumb1_expand_epilogue ();
10574  else if (USE_RETURN_INSN (FALSE))
10575    {
10576      emit_jump_insn (gen_return ());
10577      DONE;
10578    }
10579  emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10580	gen_rtvec (1,
10581		gen_rtx_RETURN (VOIDmode)),
10582	VUNSPEC_EPILOGUE));
10583  DONE;
10584  "
10585)
10586
10587;; Note - although unspec_volatiles USE all hard registers,
10588;; USEs are ignored after reload has completed.  Thus we need
10589;; to add an unspec of the link register to ensure that flow
10590;; does not think that it is unused by the sibcall branch that
10591;; will replace the standard function epilogue.
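;; Illustrative example only: with sibling-call optimisation, a tail call like
;;   extern int g (int);
;;   int f (int x) { return g (x + 1); }
;; replaces the normal epilogue-plus-return with a branch to g; the pattern
;; below keeps the link register visibly live so that it is not considered
;; dead across that branch.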
10592(define_insn "sibcall_epilogue"
10593  [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10594              (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10595  "TARGET_32BIT"
10596  "*
10597  if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10598    return output_return_instruction (const_true_rtx, FALSE, FALSE);
10599  return arm_output_epilogue (next_nonnote_insn (insn));
10600  "
10601;; Length is the absolute worst case
10602  [(set_attr "length" "44")
10603   (set_attr "type" "block")
10604   ;; We don't clobber the conditions, but the potential length of this
10605   ;; operation is sufficient to make conditionalizing the sequence
10606   ;; unlikely to be profitable.
10607   (set_attr "conds" "clob")]
10608)
10609
10610(define_insn "*epilogue_insns"
10611  [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10612  "TARGET_EITHER"
10613  "*
10614  if (TARGET_32BIT)
10615    return arm_output_epilogue (NULL);
10616  else /* TARGET_THUMB1 */
10617    return thumb_unexpanded_epilogue ();
10618  "
10619  ; Length is the absolute worst case
10620  [(set_attr "length" "44")
10621   (set_attr "type" "block")
10622   ;; We don't clobber the conditions, but the potential length of this
10623   ;; operation is sufficient to make conditionalizing the sequence
10624   ;; unlikely to be profitable.
10625   (set_attr "conds" "clob")]
10626)
10627
10628(define_expand "eh_epilogue"
10629  [(use (match_operand:SI 0 "register_operand" ""))
10630   (use (match_operand:SI 1 "register_operand" ""))
10631   (use (match_operand:SI 2 "register_operand" ""))]
10632  "TARGET_EITHER"
10633  "
10634  {
10635    cfun->machine->eh_epilogue_sp_ofs = operands[1];
10636    if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10637      {
10638	rtx ra = gen_rtx_REG (Pmode, 2);
10639
10640	emit_move_insn (ra, operands[2]);
10641	operands[2] = ra;
10642      }
10643    /* This is a hack -- we may have crystallized the function type too
10644       early.  */
10645    cfun->machine->func_type = 0;
10646  }"
10647)
10648
10649;; This split is only used during output to reduce the number of patterns
10650;; that need assembler instructions added to them.  We allowed the setting
10651;; of the conditions to be implicit during rtl generation so that
10652;; the conditional compare patterns would work.  However, this conflicts to
10653;; some extent with the conditional data operations, so we have to split them
10654;; up again here.
10655
10656;; ??? Need to audit these splitters for Thumb-2.  Why isn't normal
10657;; conditional execution sufficient?
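;; For example (illustrative only), a guarded assignment such as
;;   if (a < b) x = y;
;; is an if_then_else that leaves x unchanged on one arm; the splits below
;; turn such forms into an explicit comparison followed by conditionally
;; executed moves once reload has completed.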
10658
10659(define_split
10660  [(set (match_operand:SI 0 "s_register_operand" "")
10661	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
10662			  [(match_operand 2 "" "") (match_operand 3 "" "")])
10663			 (match_dup 0)
10664			 (match_operand 4 "" "")))
10665   (clobber (reg:CC CC_REGNUM))]
10666  "TARGET_ARM && reload_completed"
10667  [(set (match_dup 5) (match_dup 6))
10668   (cond_exec (match_dup 7)
10669	      (set (match_dup 0) (match_dup 4)))]
10670  "
10671  {
10672    enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10673					     operands[2], operands[3]);
10674    enum rtx_code rc = GET_CODE (operands[1]);
10675
10676    operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10677    operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10678    if (mode == CCFPmode || mode == CCFPEmode)
10679      rc = reverse_condition_maybe_unordered (rc);
10680    else
10681      rc = reverse_condition (rc);
10682
10683    operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10684  }"
10685)
10686
10687(define_split
10688  [(set (match_operand:SI 0 "s_register_operand" "")
10689	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
10690			  [(match_operand 2 "" "") (match_operand 3 "" "")])
10691			 (match_operand 4 "" "")
10692			 (match_dup 0)))
10693   (clobber (reg:CC CC_REGNUM))]
10694  "TARGET_ARM && reload_completed"
10695  [(set (match_dup 5) (match_dup 6))
10696   (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10697	      (set (match_dup 0) (match_dup 4)))]
10698  "
10699  {
10700    enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10701					     operands[2], operands[3]);
10702
10703    operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10704    operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10705  }"
10706)
10707
10708(define_split
10709  [(set (match_operand:SI 0 "s_register_operand" "")
10710	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
10711			  [(match_operand 2 "" "") (match_operand 3 "" "")])
10712			 (match_operand 4 "" "")
10713			 (match_operand 5 "" "")))
10714   (clobber (reg:CC CC_REGNUM))]
10715  "TARGET_ARM && reload_completed"
10716  [(set (match_dup 6) (match_dup 7))
10717   (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10718	      (set (match_dup 0) (match_dup 4)))
10719   (cond_exec (match_dup 8)
10720	      (set (match_dup 0) (match_dup 5)))]
10721  "
10722  {
10723    enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10724					     operands[2], operands[3]);
10725    enum rtx_code rc = GET_CODE (operands[1]);
10726
10727    operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10728    operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10729    if (mode == CCFPmode || mode == CCFPEmode)
10730      rc = reverse_condition_maybe_unordered (rc);
10731    else
10732      rc = reverse_condition (rc);
10733
10734    operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10735  }"
10736)
10737
10738(define_split
10739  [(set (match_operand:SI 0 "s_register_operand" "")
10740	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
10741			  [(match_operand:SI 2 "s_register_operand" "")
10742			   (match_operand:SI 3 "arm_add_operand" "")])
10743			 (match_operand:SI 4 "arm_rhs_operand" "")
10744			 (not:SI
10745			  (match_operand:SI 5 "s_register_operand" ""))))
10746   (clobber (reg:CC CC_REGNUM))]
10747  "TARGET_ARM && reload_completed"
10748  [(set (match_dup 6) (match_dup 7))
10749   (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10750	      (set (match_dup 0) (match_dup 4)))
10751   (cond_exec (match_dup 8)
10752	      (set (match_dup 0) (not:SI (match_dup 5))))]
10753  "
10754  {
10755    enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10756					     operands[2], operands[3]);
10757    enum rtx_code rc = GET_CODE (operands[1]);
10758
10759    operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10760    operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10761    if (mode == CCFPmode || mode == CCFPEmode)
10762      rc = reverse_condition_maybe_unordered (rc);
10763    else
10764      rc = reverse_condition (rc);
10765
10766    operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10767  }"
10768)
10769
10770(define_insn "*cond_move_not"
10771  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10772	(if_then_else:SI (match_operator 4 "arm_comparison_operator"
10773			  [(match_operand 3 "cc_register" "") (const_int 0)])
10774			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10775			 (not:SI
10776			  (match_operand:SI 2 "s_register_operand" "r,r"))))]
10777  "TARGET_ARM"
10778  "@
10779   mvn%D4\\t%0, %2
10780   mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10781  [(set_attr "conds" "use")
10782   (set_attr "length" "4,8")]
10783)
10784
10785;; The next two patterns occur when an AND operation is followed by a
10786;; scc insn sequence
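;; For instance (illustrative only), isolating one bit as an all-ones or zero
;; mask, e.g.
;;   int bit4_mask (int x) { return -((x >> 4) & 1); }
;; corresponds to a one-bit sign_extract and can be emitted as an ands of the
;; selected bit followed by a conditional mvn, as in the first pattern below.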
10787
10788(define_insn "*sign_extract_onebit"
10789  [(set (match_operand:SI 0 "s_register_operand" "=r")
10790	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10791			 (const_int 1)
10792			 (match_operand:SI 2 "const_int_operand" "n")))
10793    (clobber (reg:CC CC_REGNUM))]
10794  "TARGET_ARM"
10795  "*
10796    operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10797    output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10798    return \"mvnne\\t%0, #0\";
10799  "
10800  [(set_attr "conds" "clob")
10801   (set_attr "length" "8")]
10802)
10803
10804(define_insn "*not_signextract_onebit"
10805  [(set (match_operand:SI 0 "s_register_operand" "=r")
10806	(not:SI
10807	 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10808			  (const_int 1)
10809			  (match_operand:SI 2 "const_int_operand" "n"))))
10810   (clobber (reg:CC CC_REGNUM))]
10811  "TARGET_ARM"
10812  "*
10813    operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10814    output_asm_insn (\"tst\\t%1, %2\", operands);
10815    output_asm_insn (\"mvneq\\t%0, #0\", operands);
10816    return \"movne\\t%0, #0\";
10817  "
10818  [(set_attr "conds" "clob")
10819   (set_attr "length" "12")]
10820)
10821;; ??? The above patterns need auditing for Thumb-2
10822
10823;; Push multiple registers to the stack.  Registers are in parallel (use ...)
10824;; expressions.  For simplicity, the first register is also in the unspec
10825;; part.
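;; As an illustrative example, pushing several registers in ARM state emits
;; something like
;;   stmfd sp!, {r4, r5, lr}
;; whereas a single register on ARM is stored with a pre-decremented str; in
;; Thumb state a push instruction is used, as the output code below chooses.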
10826(define_insn "*push_multi"
10827  [(match_parallel 2 "multi_register_push"
10828    [(set (match_operand:BLK 0 "memory_operand" "=m")
10829	  (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
10830		      UNSPEC_PUSH_MULT))])]
10831  "TARGET_32BIT"
10832  "*
10833  {
10834    int num_saves = XVECLEN (operands[2], 0);
10835
10836    /* For the StrongARM at least, it is faster to
10837       use STR to store only a single register.
10838       In Thumb mode always use push, and the assembler will pick
10839       something appropriate.  */
10840    if (num_saves == 1 && TARGET_ARM)
10841      output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10842    else
10843      {
10844	int i;
10845	char pattern[100];
10846
10847	if (TARGET_ARM)
10848	    strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10849	else
10850	    strcpy (pattern, \"push\\t{%1\");
10851
10852	for (i = 1; i < num_saves; i++)
10853	  {
10854	    strcat (pattern, \", %|\");
10855	    strcat (pattern,
10856		    reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10857	  }
10858
10859	strcat (pattern, \"}\");
10860	output_asm_insn (pattern, operands);
10861      }
10862
10863    return \"\";
10864  }"
10865  [(set_attr "type" "store4")]
10866)
10867
10868(define_insn "stack_tie"
10869  [(set (mem:BLK (scratch))
10870	(unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10871		     (match_operand:SI 1 "s_register_operand" "rk")]
10872		    UNSPEC_PRLG_STK))]
10873  ""
10874  ""
10875  [(set_attr "length" "0")]
10876)
10877
10878;; Similarly for the floating point registers
10879(define_insn "*push_fp_multi"
10880  [(match_parallel 2 "multi_register_push"
10881    [(set (match_operand:BLK 0 "memory_operand" "=m")
10882	  (unspec:BLK [(match_operand:XF 1 "f_register_operand" "")]
10883		      UNSPEC_PUSH_MULT))])]
10884  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10885  "*
10886  {
10887    char pattern[100];
10888
10889    sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10890    output_asm_insn (pattern, operands);
10891    return \"\";
10892  }"
10893  [(set_attr "type" "f_store")]
10894)
10895
10896;; Special patterns for dealing with the constant pool
10897
10898(define_insn "align_4"
10899  [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10900  "TARGET_EITHER"
10901  "*
10902  assemble_align (32);
10903  return \"\";
10904  "
10905)
10906
10907(define_insn "align_8"
10908  [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10909  "TARGET_EITHER"
10910  "*
10911  assemble_align (64);
10912  return \"\";
10913  "
10914)
10915
10916(define_insn "consttable_end"
10917  [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10918  "TARGET_EITHER"
10919  "*
10920  making_const_table = FALSE;
10921  return \"\";
10922  "
10923)
10924
10925(define_insn "consttable_1"
10926  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10927  "TARGET_THUMB1"
10928  "*
10929  making_const_table = TRUE;
10930  assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10931  assemble_zeros (3);
10932  return \"\";
10933  "
10934  [(set_attr "length" "4")]
10935)
10936
10937(define_insn "consttable_2"
10938  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10939  "TARGET_THUMB1"
10940  "*
10941  making_const_table = TRUE;
10942  gcc_assert (GET_MODE_CLASS (GET_MODE (operands[0])) != MODE_FLOAT);
10943  assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10944  assemble_zeros (2);
10945  return \"\";
10946  "
10947  [(set_attr "length" "4")]
10948)
10949
10950(define_insn "consttable_4"
10951  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10952  "TARGET_EITHER"
10953  "*
10954  {
10955    rtx x = operands[0];
10956    making_const_table = TRUE;
10957    switch (GET_MODE_CLASS (GET_MODE (x)))
10958      {
10959      case MODE_FLOAT:
10960 	if (GET_MODE (x) == HFmode)
10961 	  arm_emit_fp16_const (x);
10962 	else
10963 	  {
10964 	    REAL_VALUE_TYPE r;
10965 	    REAL_VALUE_FROM_CONST_DOUBLE (r, x);
10966 	    assemble_real (r, GET_MODE (x), BITS_PER_WORD);
10967 	  }
10968 	break;
10969      default:
10970	/* XXX: Sometimes gcc does something really dumb and ends up with
10971	   a HIGH in a constant pool entry, usually because it's trying to
10972	   load into a VFP register.  We know this will always be used in
10973	   combination with a LO_SUM which ignores the high bits, so just
10974	   strip off the HIGH.  */
10975	if (GET_CODE (x) == HIGH)
10976	  x = XEXP (x, 0);
10977        assemble_integer (x, 4, BITS_PER_WORD, 1);
10978	mark_symbol_refs_as_used (x);
10979        break;
10980      }
10981    return \"\";
10982  }"
10983  [(set_attr "length" "4")]
10984)
10985
10986(define_insn "consttable_8"
10987  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10988  "TARGET_EITHER"
10989  "*
10990  {
10991    making_const_table = TRUE;
10992    switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10993      {
10994       case MODE_FLOAT:
10995        {
10996          REAL_VALUE_TYPE r;
10997          REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10998          assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10999          break;
11000        }
11001      default:
11002        assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11003        break;
11004      }
11005    return \"\";
11006  }"
11007  [(set_attr "length" "8")]
11008)
11009
11010(define_insn "consttable_16"
11011  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11012  "TARGET_EITHER"
11013  "*
11014  {
11015    making_const_table = TRUE;
11016    switch (GET_MODE_CLASS (GET_MODE (operands[0])))
11017      {
11018       case MODE_FLOAT:
11019        {
11020          REAL_VALUE_TYPE r;
11021          REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
11022          assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
11023          break;
11024        }
11025      default:
11026        assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11027        break;
11028      }
11029    return \"\";
11030  }"
11031  [(set_attr "length" "16")]
11032)
11033
11034;; Miscellaneous Thumb patterns
11035
11036(define_expand "tablejump"
11037  [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
11038	      (use (label_ref (match_operand 1 "" "")))])]
11039  "TARGET_THUMB1"
11040  "
11041  if (flag_pic)
11042    {
11043      /* Hopefully, CSE will eliminate this copy.  */
11044      rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
11045      rtx reg2 = gen_reg_rtx (SImode);
11046
11047      emit_insn (gen_addsi3 (reg2, operands[0], reg1));
11048      operands[0] = reg2;
11049    }
11050  "
11051)
11052
11053;; NB never uses BX.
11054(define_insn "*thumb1_tablejump"
11055  [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
11056   (use (label_ref (match_operand 1 "" "")))]
11057  "TARGET_THUMB1"
11058  "mov\\t%|pc, %0"
11059  [(set_attr "length" "2")]
11060)
11061
11062;; V5 instructions.
11063
11064(define_insn "clzsi2"
11065  [(set (match_operand:SI 0 "s_register_operand" "=r")
11066	(clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11067  "TARGET_32BIT && arm_arch5"
11068  "clz%?\\t%0, %1"
11069  [(set_attr "predicable" "yes")
11070   (set_attr "insn" "clz")])
11071
11072(define_insn "rbitsi2"
11073  [(set (match_operand:SI 0 "s_register_operand" "=r")
11074	(unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11075  "TARGET_32BIT && arm_arch_thumb2"
11076  "rbit%?\\t%0, %1"
11077  [(set_attr "predicable" "yes")
11078   (set_attr "insn" "clz")])
11079
11080(define_expand "ctzsi2"
11081 [(set (match_operand:SI           0 "s_register_operand" "")
11082       (ctz:SI (match_operand:SI  1 "s_register_operand" "")))]
11083  "TARGET_32BIT && arm_arch_thumb2"
11084  "
11085   {
11086     rtx tmp = gen_reg_rtx (SImode);
11087     emit_insn (gen_rbitsi2 (tmp, operands[1]));
11088     emit_insn (gen_clzsi2 (operands[0], tmp));
11089   }
11090   DONE;
11091  "
11092)
11093
11094;; V5E instructions.
11095
11096(define_insn "prefetch"
11097  [(prefetch (match_operand:SI 0 "address_operand" "p")
11098	     (match_operand:SI 1 "" "")
11099	     (match_operand:SI 2 "" ""))]
11100  "TARGET_32BIT && arm_arch5e"
11101  "pld\\t%a0")
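;; For example (illustrative only), __builtin_prefetch (p) can be expanded via
;; the pattern above into a single pld instruction, e.g.
;;   pld [r0]
;; on cores implementing ARMv5E or later.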
11102
11103;; General predication pattern
11104
11105(define_cond_exec
11106  [(match_operator 0 "arm_comparison_operator"
11107    [(match_operand 1 "cc_register" "")
11108     (const_int 0)])]
11109  "TARGET_32BIT"
11110  ""
11111)
11112
11113(define_insn "prologue_use"
11114  [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
11115  ""
11116  "%@ %0 needed for prologue"
11117  [(set_attr "length" "0")]
11118)
11119
11120
11121;; Patterns for exception handling
11122
11123(define_expand "eh_return"
11124  [(use (match_operand 0 "general_operand" ""))]
11125  "TARGET_EITHER"
11126  "
11127  {
11128    if (TARGET_32BIT)
11129      emit_insn (gen_arm_eh_return (operands[0]));
11130    else
11131      emit_insn (gen_thumb_eh_return (operands[0]));
11132    DONE;
11133  }"
11134)
11135
11136;; We can't expand this before we know where the link register is stored.
11137(define_insn_and_split "arm_eh_return"
11138  [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11139		    VUNSPEC_EH_RETURN)
11140   (clobber (match_scratch:SI 1 "=&r"))]
11141  "TARGET_ARM"
11142  "#"
11143  "&& reload_completed"
11144  [(const_int 0)]
11145  "
11146  {
11147    arm_set_return_address (operands[0], operands[1]);
11148    DONE;
11149  }"
11150)
11151
11152(define_insn_and_split "thumb_eh_return"
11153  [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
11154		    VUNSPEC_EH_RETURN)
11155   (clobber (match_scratch:SI 1 "=&l"))]
11156  "TARGET_THUMB1"
11157  "#"
11158  "&& reload_completed"
11159  [(const_int 0)]
11160  "
11161  {
11162    thumb_set_return_address (operands[0], operands[1]);
11163    DONE;
11164  }"
11165)
11166
11167
11168;; TLS support
11169
11170(define_insn "load_tp_hard"
11171  [(set (match_operand:SI 0 "register_operand" "=r")
11172	(unspec:SI [(const_int 0)] UNSPEC_TLS))]
11173  "TARGET_HARD_TP"
11174  "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11175  [(set_attr "predicable" "yes")]
11176)
11177
11178;; Doesn't clobber R1-R3.  Must use r0 for the first operand.
11179(define_insn "load_tp_soft"
11180  [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11181   (clobber (reg:SI LR_REGNUM))
11182   (clobber (reg:SI IP_REGNUM))
11183   (clobber (reg:CC CC_REGNUM))]
11184  "TARGET_SOFT_TP"
11185  "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11186  [(set_attr "conds" "clob")]
11187)
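;; Illustrative example only: reading a thread-local variable, e.g.
;;   __thread int counter;
;;   int get_counter (void) { return counter; }
;; needs the thread pointer, which is obtained either directly from CP15 by
;; load_tp_hard or through a call to __aeabi_read_tp by load_tp_soft above.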
11188
11189(define_insn "*arm_movtas_ze"
11190  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
11191                   (const_int 16)
11192                   (const_int 16))
11193        (match_operand:SI 1 "const_int_operand" ""))]
11194  "arm_arch_thumb2"
11195  "movt%?\t%0, %c1"
11196 [(set_attr "predicable" "yes")
11197   (set_attr "length" "4")]
11198)
11199
11200(define_insn "*arm_rev"
11201  [(set (match_operand:SI 0 "s_register_operand" "=r")
11202	(bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11203  "TARGET_32BIT && arm_arch6"
11204  "rev%?\t%0, %1"
11205  [(set_attr "predicable" "yes")
11206   (set_attr "length" "4")]
11207)
11208
11209(define_insn "*thumb1_rev"
11210  [(set (match_operand:SI 0 "s_register_operand" "=l")
11211	(bswap:SI (match_operand:SI 1 "s_register_operand" "l")))]
11212  "TARGET_THUMB1 && arm_arch6"
11213   "rev\t%0, %1"
11214  [(set_attr "length" "2")]
11215)
11216
11217(define_expand "arm_legacy_rev"
11218  [(set (match_operand:SI 2 "s_register_operand" "")
11219	(xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
11220			     (const_int 16))
11221		(match_dup 1)))
11222   (set (match_dup 2)
11223	(lshiftrt:SI (match_dup 2)
11224		     (const_int 8)))
11225   (set (match_operand:SI 3 "s_register_operand" "")
11226	(rotatert:SI (match_dup 1)
11227		     (const_int 8)))
11228   (set (match_dup 2)
11229	(and:SI (match_dup 2)
11230		(const_int -65281)))
11231   (set (match_operand:SI 0 "s_register_operand" "")
11232	(xor:SI (match_dup 3)
11233		(match_dup 2)))]
11234  "TARGET_32BIT"
11235  ""
11236)
11237
11238;; Reuse temporaries to keep register pressure down.
11239(define_expand "thumb_legacy_rev"
11240  [(set (match_operand:SI 2 "s_register_operand" "")
11241     (ashift:SI (match_operand:SI 1 "s_register_operand" "")
11242                (const_int 24)))
11243   (set (match_operand:SI 3 "s_register_operand" "")
11244     (lshiftrt:SI (match_dup 1)
11245		  (const_int 24)))
11246   (set (match_dup 3)
11247     (ior:SI (match_dup 3)
11248	     (match_dup 2)))
11249   (set (match_operand:SI 4 "s_register_operand" "")
11250     (const_int 16))
11251   (set (match_operand:SI 5 "s_register_operand" "")
11252     (rotatert:SI (match_dup 1)
11253		  (match_dup 4)))
11254   (set (match_dup 2)
11255     (ashift:SI (match_dup 5)
11256                (const_int 24)))
11257   (set (match_dup 5)
11258     (lshiftrt:SI (match_dup 5)
11259		  (const_int 24)))
11260   (set (match_dup 5)
11261     (ior:SI (match_dup 5)
11262	     (match_dup 2)))
11263   (set (match_dup 5)
11264     (rotatert:SI (match_dup 5)
11265		  (match_dup 4)))
11266   (set (match_operand:SI 0 "s_register_operand" "")
11267     (ior:SI (match_dup 5)
11268             (match_dup 3)))]
11269  "TARGET_THUMB"
11270  ""
11271)
11272
11273(define_expand "bswapsi2"
11274  [(set (match_operand:SI 0 "s_register_operand" "=r")
11275  	(bswap:SI (match_operand:SI 1 "s_register_operand" "r")))]
11276"TARGET_EITHER && (arm_arch6 || !optimize_size)"
11277"
11278    if (!arm_arch6)
11279      {
11280	rtx op2 = gen_reg_rtx (SImode);
11281	rtx op3 = gen_reg_rtx (SImode);
11282
11283	if (TARGET_THUMB)
11284	  {
11285	    rtx op4 = gen_reg_rtx (SImode);
11286	    rtx op5 = gen_reg_rtx (SImode);
11287
11288	    emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11289					     op2, op3, op4, op5));
11290	  }
11291	else
11292	  {
11293	    emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11294					   op2, op3));
11295	  }
11296
11297	DONE;
11298      }
11299  "
11300)
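;; Illustrative example only: a byte swap written as
;;   unsigned int swap32 (unsigned int x) { return __builtin_bswap32 (x); }
;; becomes a single rev instruction on ARMv6 and later, while on earlier
;; architectures the expander above falls back to the multi-instruction
;; legacy_rev sequences defined earlier in this file.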
11301
11302;; Load the FPA co-processor patterns
11303(include "fpa.md")
11304;; Load the Maverick co-processor patterns
11305(include "cirrus.md")
11306;; Vector bits common to IWMMXT and Neon
11307(include "vec-common.md")
11308;; Load the Intel Wireless Multimedia Extension patterns
11309(include "iwmmxt.md")
11310;; Load the VFP co-processor patterns
11311(include "vfp.md")
11312;; Thumb-2 patterns
11313(include "thumb2.md")
11314;; Neon patterns
11315(include "neon.md")
11316
11317