xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/config/arm/arm.md (revision 8feb0f0b7eaff0608f8350bbfa3098827b4bb91b)
1;;- Machine description for ARM for GNU compiler
2;;  Copyright (C) 1991-2020 Free Software Foundation, Inc.
3;;  Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4;;  and Martin Simmons (@harleqn.co.uk).
5;;  More major hacks by Richard Earnshaw (rearnsha@arm.com).
6
7;; This file is part of GCC.
8
9;; GCC is free software; you can redistribute it and/or modify it
10;; under the terms of the GNU General Public License as published
11;; by the Free Software Foundation; either version 3, or (at your
12;; option) any later version.
13
14;; GCC is distributed in the hope that it will be useful, but WITHOUT
15;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16;; or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
17;; License for more details.
18
19;; You should have received a copy of the GNU General Public License
20;; along with GCC; see the file COPYING3.  If not see
21;; <http://www.gnu.org/licenses/>.
22
23;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
24
25
26;;---------------------------------------------------------------------------
27;; Constants
28
29;; Register numbers -- All machine registers should be defined here
30(define_constants
31  [(R0_REGNUM         0)	; First CORE register
32   (R1_REGNUM	      1)	; Second CORE register
33   (R4_REGNUM	      4)	; Fifth CORE register
34   (FDPIC_REGNUM      9)	; FDPIC register
35   (IP_REGNUM	     12)	; Scratch register
36   (SP_REGNUM	     13)	; Stack pointer
37   (LR_REGNUM        14)	; Return address register
38   (PC_REGNUM	     15)	; Program counter
39   (LAST_ARM_REGNUM  15)	;
40   (CC_REGNUM       100)	; Condition code pseudo register
41   (VFPCC_REGNUM    101)	; VFP Condition code pseudo register
42   (APSRQ_REGNUM    104)	; Q bit pseudo register
43   (APSRGE_REGNUM   105)	; GE bits pseudo register
44   (VPR_REGNUM      106)	; Vector Predication Register - MVE register.
45  ]
46)
47;; 3rd operand to select_dominance_cc_mode
48(define_constants
49  [(DOM_CC_X_AND_Y  0)
50   (DOM_CC_NX_OR_Y  1)
51   (DOM_CC_X_OR_Y   2)
52  ]
53)
54;; conditional compare combination
55(define_constants
56  [(CMP_CMP 0)
57   (CMN_CMP 1)
58   (CMP_CMN 2)
59   (CMN_CMN 3)
60   (NUM_OF_COND_CMP 4)
61  ]
62)
63
64
65;;---------------------------------------------------------------------------
66;; Attributes
67
68;; Processor type.  This is created automatically from arm-cores.def.
69(include "arm-tune.md")
70
71;; Instruction classification types
72(include "types.md")
73
74; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
75; generating ARM code.  This is used to control the length of some insn
76; patterns that share the same RTL in both ARM and Thumb code.
77(define_attr "is_thumb" "yes,no"
78  (const (if_then_else (symbol_ref "TARGET_THUMB")
79		       (const_string "yes") (const_string "no"))))
80
81; IS_ARCH6 is set to 'yes' when we are generating code form ARMv6.
82(define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
83
84; IS_THUMB1 is set to 'yes' iff we are generating Thumb-1 code.
85(define_attr "is_thumb1" "yes,no"
86  (const (if_then_else (symbol_ref "TARGET_THUMB1")
87		       (const_string "yes") (const_string "no"))))
88
89; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
90; The arm_restrict_it flag enables the "short IT" feature which
91; restricts IT blocks to a single 16-bit instruction.
92; This attribute should only be used on 16-bit Thumb-2 instructions
93; which may be predicated (the "predicable" attribute must be set).
94(define_attr "predicable_short_it" "no,yes" (const_string "no"))
95
96; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
97; This attribute should only be used on instructions which may emit
98; an IT block in their expansion which is not a short IT.
99(define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
100
101; Mark an instruction sequence as the required way of loading a
102; constant when -mpure-code is enabled (which implies
103; arm_disable_literal_pool)
104(define_attr "required_for_purecode" "no,yes" (const_string "no"))
105
106;; Operand number of an input operand that is shifted.  Zero if the
107;; given instruction does not shift one of its input operands.
108(define_attr "shift" "" (const_int 0))
109
110;; [For compatibility with AArch64 in pipeline models]
111;; Attribute that specifies whether or not the instruction touches fp
112;; registers.
113(define_attr "fp" "no,yes" (const_string "no"))
114
115; Floating Point Unit.  If we only have floating point emulation, then there
116; is no point in scheduling the floating point insns.  (Well, for best
117; performance we should try and group them together).
118(define_attr "fpu" "none,vfp"
119  (const (symbol_ref "arm_fpu_attr")))
120
121; Predicated means that the insn form is conditionally executed based on a
122; predicate.  We default to 'no' because no Thumb patterns match this rule
123; and not all ARM insns do.
124(define_attr "predicated" "yes,no" (const_string "no"))
125
126; LENGTH of an instruction (in bytes)
127(define_attr "length" ""
128  (const_int 4))
129
130; The architecture which supports the instruction (or alternative).
131; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
132; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode.  "v6"
133; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
134; arm_arch6.  "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
135; Baseline.  "fix_vlldm" is for fixing the v8-m/v8.1-m VLLDM erratum.
136; This attribute is used to compute attribute "enabled",
137; use type "any" to enable an alternative in all cases.
138(define_attr "arch" "any, a, t, 32, t1, t2, v6,nov6, v6t2, \
139		     v8mb, fix_vlldm, iwmmxt, iwmmxt2, armv6_or_vfpv3, \
140		     neon, mve"
141  (const_string "any"))
142
143(define_attr "arch_enabled" "no,yes"
144  (cond [(eq_attr "arch" "any")
145	 (const_string "yes")
146
147	 (and (eq_attr "arch" "a")
148	      (match_test "TARGET_ARM"))
149	 (const_string "yes")
150
151	 (and (eq_attr "arch" "t")
152	      (match_test "TARGET_THUMB"))
153	 (const_string "yes")
154
155	 (and (eq_attr "arch" "t1")
156	      (match_test "TARGET_THUMB1"))
157	 (const_string "yes")
158
159	 (and (eq_attr "arch" "t2")
160	      (match_test "TARGET_THUMB2"))
161	 (const_string "yes")
162
163	 (and (eq_attr "arch" "32")
164	      (match_test "TARGET_32BIT"))
165	 (const_string "yes")
166
167	 (and (eq_attr "arch" "v6")
168	      (match_test "TARGET_32BIT && arm_arch6"))
169	 (const_string "yes")
170
171	 (and (eq_attr "arch" "nov6")
172	      (match_test "TARGET_32BIT && !arm_arch6"))
173	 (const_string "yes")
174
175	 (and (eq_attr "arch" "v6t2")
176	      (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
177	 (const_string "yes")
178
179	 (and (eq_attr "arch" "v8mb")
180	      (match_test "TARGET_THUMB1 && arm_arch8"))
181	 (const_string "yes")
182
183	 (and (eq_attr "arch" "fix_vlldm")
184	      (match_test "fix_vlldm"))
185	 (const_string "yes")
186
187	 (and (eq_attr "arch" "iwmmxt2")
188	      (match_test "TARGET_REALLY_IWMMXT2"))
189	 (const_string "yes")
190
191	 (and (eq_attr "arch" "armv6_or_vfpv3")
192	      (match_test "arm_arch6 || TARGET_VFP3"))
193	 (const_string "yes")
194
195	 (and (eq_attr "arch" "neon")
196	      (match_test "TARGET_NEON"))
197	 (const_string "yes")
198
199	 (and (eq_attr "arch" "mve")
200	      (match_test "TARGET_HAVE_MVE"))
201	 (const_string "yes")
202	]
203
204	(const_string "no")))
205
206(define_attr "opt" "any,speed,size"
207  (const_string "any"))
208
209(define_attr "opt_enabled" "no,yes"
210  (cond [(eq_attr "opt" "any")
211         (const_string "yes")
212
213	 (and (eq_attr "opt" "speed")
214	      (match_test "optimize_function_for_speed_p (cfun)"))
215	 (const_string "yes")
216
217	 (and (eq_attr "opt" "size")
218	      (match_test "optimize_function_for_size_p (cfun)"))
219	 (const_string "yes")]
220	(const_string "no")))
221
222(define_attr "use_literal_pool" "no,yes"
223   (cond [(and (eq_attr "type" "f_loads,f_loadd")
224	       (match_test "CONSTANT_P (operands[1])"))
225	  (const_string "yes")]
226	 (const_string "no")))
227
228; Enable all alternatives that are both arch_enabled and insn_enabled.
229; FIXME:: opt_enabled has been temporarily removed till the time we have
230; an attribute that allows the use of such alternatives.
231; This depends on caching of speed_p, size_p on a per
232; alternative basis. The problem is that the enabled attribute
233; cannot depend on any state that is not cached or is not constant
234; for a compilation unit. We probably need a generic "hot/cold"
235; alternative which if implemented can help with this. We disable this
236; until such a time as this is implemented and / or the improvements or
237; regressions with removing this attribute are double checked.
238; See ashldi3_neon and <shift>di3_neon in neon.md.
239
240 (define_attr "enabled" "no,yes"
241   (cond [(and (eq_attr "predicable_short_it" "no")
242	       (and (eq_attr "predicated" "yes")
243	            (match_test "arm_restrict_it")))
244	  (const_string "no")
245
246	  (and (eq_attr "enabled_for_short_it" "no")
247	       (match_test "arm_restrict_it"))
248	  (const_string "no")
249
250	  (and (eq_attr "required_for_purecode" "yes")
251	       (not (match_test "arm_disable_literal_pool")))
252	  (const_string "no")
253
254	  (eq_attr "arch_enabled" "no")
255	  (const_string "no")]
256	 (const_string "yes")))
257
258; POOL_RANGE is how far away from a constant pool entry that this insn
259; can be placed.  If the distance is zero, then this insn will never
260; reference the pool.
261; Note that for Thumb constant pools the PC value is rounded down to the
262; nearest multiple of four.  Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
263; Thumb insns) should be set to <max_range> - 2.
264; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
265; before its address.  It is set to <max_range> - (8 + <data_size>).
266(define_attr "arm_pool_range" "" (const_int 0))
267(define_attr "thumb2_pool_range" "" (const_int 0))
268(define_attr "arm_neg_pool_range" "" (const_int 0))
269(define_attr "thumb2_neg_pool_range" "" (const_int 0))
270
271(define_attr "pool_range" ""
272  (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
273	(attr "arm_pool_range")))
274(define_attr "neg_pool_range" ""
275  (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
276	(attr "arm_neg_pool_range")))
277
278; An assembler sequence may clobber the condition codes without us knowing.
279; If such an insn references the pool, then we have no way of knowing how,
280; so use the most conservative value for pool_range.
281(define_asm_attributes
282 [(set_attr "conds" "clob")
283  (set_attr "length" "4")
284  (set_attr "pool_range" "250")])
285
286; Load scheduling, set from the arm_ld_sched variable
287; initialized by arm_option_override()
288(define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
289
290; condition codes: this one is used by final_prescan_insn to speed up
291; conditionalizing instructions.  It saves having to scan the rtl to see if
292; it uses or alters the condition codes.
293;
294; USE means that the condition codes are used by the insn in the process of
295;   outputting code, this means (at present) that we can't use the insn in
296;   inlined branches
297;
298; SET means that the purpose of the insn is to set the condition codes in a
299;   well defined manner.
300;
301; CLOB means that the condition codes are altered in an undefined manner, if
302;   they are altered at all
303;
304; UNCONDITIONAL means the instruction cannot be conditionally executed and
305;   that the instruction does not use or alter the condition codes.
306;
307; NOCOND means that the instruction does not use or alter the condition
308;   codes but can be converted into a conditionally exectuted instruction.
309
310(define_attr "conds" "use,set,clob,unconditional,nocond"
311	(if_then_else
312	 (ior (eq_attr "is_thumb1" "yes")
313	      (eq_attr "type" "call"))
314	 (const_string "clob")
315         (if_then_else
316	  (ior (eq_attr "is_neon_type" "yes")
317	       (eq_attr "is_mve_type" "yes"))
318	  (const_string "unconditional")
319	  (const_string "nocond"))))
320
321; Predicable means that the insn can be conditionally executed based on
322; an automatically added predicate (additional patterns are generated by
323; gen...).  We default to 'no' because no Thumb patterns match this rule
324; and not all ARM patterns do.
325(define_attr "predicable" "no,yes" (const_string "no"))
326
327; Only model the write buffer for ARM6 and ARM7.  Earlier processors don't
328; have one.  Later ones, such as StrongARM, have write-back caches, so don't
329; suffer blockages enough to warrant modelling this (and it can adversely
330; affect the schedule).
331(define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
332
333; WRITE_CONFLICT implies that a read following an unrelated write is likely
334; to stall the processor.  Used with model_wbuf above.
335(define_attr "write_conflict" "no,yes"
336  (if_then_else (eq_attr "type"
337		 "block,call,load_4")
338		(const_string "yes")
339		(const_string "no")))
340
341; Classify the insns into those that take one cycle and those that take more
342; than one on the main cpu execution unit.
343(define_attr "core_cycles" "single,multi"
344  (if_then_else (eq_attr "type"
345    "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
346    alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
347    alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
348    logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
349    logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
350    wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
351    wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
352    wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
353    wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
354    wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
355    wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
356    wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
357    wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
358    wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
359    wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
360    wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
361		(const_string "single")
362	        (const_string "multi")))
363
364;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
365;; distant label.  Only applicable to Thumb code.
366(define_attr "far_jump" "yes,no" (const_string "no"))
367
368
369;; The number of machine instructions this pattern expands to.
370;; Used for Thumb-2 conditional execution.
371(define_attr "ce_count" "" (const_int 1))
372
373;;---------------------------------------------------------------------------
374;; Unspecs
375
376(include "unspecs.md")
377
378;;---------------------------------------------------------------------------
379;; Mode iterators
380
381(include "iterators.md")
382
383;;---------------------------------------------------------------------------
384;; Predicates
385
386(include "predicates.md")
387(include "constraints.md")
388
389;;---------------------------------------------------------------------------
390;; Pipeline descriptions
391
392(define_attr "tune_cortexr4" "yes,no"
393  (const (if_then_else
394	  (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
395	  (const_string "yes")
396	  (const_string "no"))))
397
398;; True if the generic scheduling description should be used.
399
400(define_attr "generic_sched" "yes,no"
401  (const (if_then_else
402          (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
403                                arm926ejs,arm10e,arm1026ejs,arm1136js,\
404                                arm1136jfs,cortexa5,cortexa7,cortexa8,\
405                                cortexa9,cortexa12,cortexa15,cortexa17,\
406                                cortexa53,cortexa57,cortexm4,cortexm7,\
407				exynosm1,marvell_pj4,xgene1")
408	       (eq_attr "tune_cortexr4" "yes"))
409          (const_string "no")
410          (const_string "yes"))))
411
412(define_attr "generic_vfp" "yes,no"
413  (const (if_then_else
414	  (and (eq_attr "fpu" "vfp")
415	       (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
416                                cortexa8,cortexa9,cortexa53,cortexm4,\
417                                cortexm7,marvell_pj4,xgene1")
418	       (eq_attr "tune_cortexr4" "no"))
419	  (const_string "yes")
420	  (const_string "no"))))
421
422(include "marvell-f-iwmmxt.md")
423(include "arm-generic.md")
424(include "arm926ejs.md")
425(include "arm1020e.md")
426(include "arm1026ejs.md")
427(include "arm1136jfs.md")
428(include "fa526.md")
429(include "fa606te.md")
430(include "fa626te.md")
431(include "fmp626.md")
432(include "fa726te.md")
433(include "cortex-a5.md")
434(include "cortex-a7.md")
435(include "cortex-a8.md")
436(include "cortex-a9.md")
437(include "cortex-a15.md")
438(include "cortex-a17.md")
439(include "cortex-a53.md")
440(include "cortex-a57.md")
441(include "cortex-r4.md")
442(include "cortex-r4f.md")
443(include "cortex-m7.md")
444(include "cortex-m4.md")
445(include "cortex-m4-fpu.md")
446(include "exynos-m1.md")
447(include "vfp11.md")
448(include "marvell-pj4.md")
449(include "xgene1.md")
450
451;; define_subst and associated attributes
452
453(define_subst "add_setq"
454  [(set (match_operand:SI 0 "" "")
455        (match_operand:SI 1 "" ""))]
456  ""
457  [(set (match_dup 0)
458        (match_dup 1))
459   (set (reg:CC APSRQ_REGNUM)
460	(unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))])
461
462(define_subst_attr "add_clobber_q_name" "add_setq" "" "_setq")
463(define_subst_attr "add_clobber_q_pred" "add_setq" "!ARM_Q_BIT_READ"
464		   "ARM_Q_BIT_READ")
465
466;;---------------------------------------------------------------------------
467;; Insn patterns
468;;
469;; Addition insns.
470
471;; Note: For DImode insns, there is normally no reason why operands should
472;; not be in the same register, what we don't want is for something being
473;; written to partially overlap something that is an input.
474
475(define_expand "adddi3"
476 [(parallel
477   [(set (match_operand:DI           0 "s_register_operand")
478	  (plus:DI (match_operand:DI 1 "s_register_operand")
479		   (match_operand:DI 2 "reg_or_int_operand")))
480    (clobber (reg:CC CC_REGNUM))])]
481  "TARGET_EITHER"
482  "
483  if (TARGET_THUMB1)
484    {
485      if (!REG_P (operands[2]))
486	operands[2] = force_reg (DImode, operands[2]);
487    }
488  else
489    {
490      rtx lo_result, hi_result, lo_dest, hi_dest;
491      rtx lo_op1, hi_op1, lo_op2, hi_op2;
492      arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
493			      &lo_op2, &hi_op2);
494      lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
495      hi_result = hi_dest = gen_highpart (SImode, operands[0]);
496
497      if (lo_op2 == const0_rtx)
498	{
499	  lo_dest = lo_op1;
500	  if (!arm_add_operand (hi_op2, SImode))
501	    hi_op2 = force_reg (SImode, hi_op2);
502	  /* Assume hi_op2 won't also be zero.  */
503	  emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
504	}
505      else
506	{
507	  if (!arm_add_operand (lo_op2, SImode))
508	    lo_op2 = force_reg (SImode, lo_op2);
509	  if (!arm_not_operand (hi_op2, SImode))
510	    hi_op2 = force_reg (SImode, hi_op2);
511
512	  emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
513	  rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
514				   const0_rtx);
515	  if (hi_op2 == const0_rtx)
516	    emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
517	  else
518	    emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
519	}
520
521      if (lo_result != lo_dest)
522	emit_move_insn (lo_result, lo_dest);
523      if (hi_result != hi_dest)
524	emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
525      DONE;
526    }
527  "
528)
529
530(define_expand "addvsi4"
531  [(match_operand:SI 0 "s_register_operand")
532   (match_operand:SI 1 "s_register_operand")
533   (match_operand:SI 2 "arm_add_operand")
534   (match_operand 3 "")]
535  "TARGET_32BIT"
536{
537  if (CONST_INT_P (operands[2]))
538    emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
539  else
540    emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
541  arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
542
543  DONE;
544})
545
546(define_expand "addvdi4"
547  [(match_operand:DI 0 "s_register_operand")
548   (match_operand:DI 1 "s_register_operand")
549   (match_operand:DI 2 "reg_or_int_operand")
550   (match_operand 3 "")]
551  "TARGET_32BIT"
552{
553  rtx lo_result, hi_result;
554  rtx lo_op1, hi_op1, lo_op2, hi_op2;
555  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
556			  &lo_op2, &hi_op2);
557  lo_result = gen_lowpart (SImode, operands[0]);
558  hi_result = gen_highpart (SImode, operands[0]);
559
560  if (lo_op2 == const0_rtx)
561    {
562      emit_move_insn (lo_result, lo_op1);
563      if (!arm_add_operand (hi_op2, SImode))
564	hi_op2 = force_reg (SImode, hi_op2);
565
566      emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
567    }
568  else
569    {
570      if (!arm_add_operand (lo_op2, SImode))
571	lo_op2 = force_reg (SImode, lo_op2);
572      if (!arm_not_operand (hi_op2, SImode))
573	hi_op2 = force_reg (SImode, hi_op2);
574
575      emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
576
577      if (hi_op2 == const0_rtx)
578        emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
579      else if (CONST_INT_P (hi_op2))
580        emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
581      else
582        emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
583
584      arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
585    }
586
587  DONE;
588})
589
590(define_expand "addsi3_cin_vout_reg"
591  [(parallel
592    [(set (match_dup 3)
593	  (compare:CC_V
594	   (plus:DI
595	    (plus:DI (match_dup 4)
596		     (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
597	    (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
598	   (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
599				    (match_dup 2)))))
600     (set (match_operand:SI 0 "s_register_operand")
601	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
602		   (match_dup 2)))])]
603  "TARGET_32BIT"
604  {
605    operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
606    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
607    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
608    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
609  }
610)
611
612(define_insn "*addsi3_cin_vout_reg_insn"
613  [(set (reg:CC_V CC_REGNUM)
614	(compare:CC_V
615	 (plus:DI
616	  (plus:DI
617	   (match_operand:DI 3 "arm_carry_operation" "")
618	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
619	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
620	 (sign_extend:DI
621	  (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
622			    (match_dup 1))
623		   (match_dup 2)))))
624   (set (match_operand:SI 0 "s_register_operand" "=l,r")
625	(plus:SI (plus:SI (match_dup 4) (match_dup 1))
626		 (match_dup 2)))]
627  "TARGET_32BIT"
628  "@
629   adcs%?\\t%0, %0, %2
630   adcs%?\\t%0, %1, %2"
631  [(set_attr "type" "alus_sreg")
632   (set_attr "arch" "t2,*")
633   (set_attr "length" "2,4")]
634)
635
636(define_expand "addsi3_cin_vout_imm"
637  [(parallel
638    [(set (match_dup 3)
639	  (compare:CC_V
640	   (plus:DI
641	    (plus:DI (match_dup 4)
642		     (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
643	    (match_dup 2))
644	   (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
645				    (match_dup 2)))))
646     (set (match_operand:SI 0 "s_register_operand")
647	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
648		   (match_operand 2 "arm_adcimm_operand")))])]
649  "TARGET_32BIT"
650  {
651    operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
652    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
653    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
654    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
655  }
656)
657
658(define_insn "*addsi3_cin_vout_imm_insn"
659  [(set (reg:CC_V CC_REGNUM)
660	(compare:CC_V
661	 (plus:DI
662	  (plus:DI
663	   (match_operand:DI 3 "arm_carry_operation" "")
664	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
665	  (match_operand 2 "arm_adcimm_operand" "I,K"))
666	 (sign_extend:DI
667	  (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
668			    (match_dup 1))
669		   (match_dup 2)))))
670   (set (match_operand:SI 0 "s_register_operand" "=r,r")
671	(plus:SI (plus:SI (match_dup 4) (match_dup 1))
672		 (match_dup 2)))]
673  "TARGET_32BIT"
674  "@
675   adcs%?\\t%0, %1, %2
676   sbcs%?\\t%0, %1, #%B2"
677  [(set_attr "type" "alus_imm")]
678)
679
680(define_expand "addsi3_cin_vout_0"
681  [(parallel
682    [(set (match_dup 2)
683	  (compare:CC_V
684	   (plus:DI (match_dup 3)
685		    (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
686	   (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
687     (set (match_operand:SI 0 "s_register_operand")
688	  (plus:SI (match_dup 4) (match_dup 1)))])]
689  "TARGET_32BIT"
690  {
691    operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
692    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
693    operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
694    operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
695  }
696)
697
698(define_insn "*addsi3_cin_vout_0_insn"
699  [(set (reg:CC_V CC_REGNUM)
700	(compare:CC_V
701	 (plus:DI
702	  (match_operand:DI 2 "arm_carry_operation" "")
703	  (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
704	 (sign_extend:DI (plus:SI
705			  (match_operand:SI 3 "arm_carry_operation" "")
706			  (match_dup 1)))))
707   (set (match_operand:SI 0 "s_register_operand" "=r")
708	(plus:SI (match_dup 3) (match_dup 1)))]
709  "TARGET_32BIT"
710  "adcs%?\\t%0, %1, #0"
711  [(set_attr "type" "alus_imm")]
712)
713
714(define_expand "uaddvsi4"
715  [(match_operand:SI 0 "s_register_operand")
716   (match_operand:SI 1 "s_register_operand")
717   (match_operand:SI 2 "arm_add_operand")
718   (match_operand 3 "")]
719  "TARGET_32BIT"
720{
721  emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
722  arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
723
724  DONE;
725})
726
727(define_expand "uaddvdi4"
728  [(match_operand:DI 0 "s_register_operand")
729   (match_operand:DI 1 "s_register_operand")
730   (match_operand:DI 2 "reg_or_int_operand")
731   (match_operand 3 "")]
732  "TARGET_32BIT"
733{
734  rtx lo_result, hi_result;
735  rtx lo_op1, hi_op1, lo_op2, hi_op2;
736  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
737			  &lo_op2, &hi_op2);
738  lo_result = gen_lowpart (SImode, operands[0]);
739  hi_result = gen_highpart (SImode, operands[0]);
740
741  if (lo_op2 == const0_rtx)
742    {
743      emit_move_insn (lo_result, lo_op1);
744      if (!arm_add_operand (hi_op2, SImode))
745	hi_op2 = force_reg (SImode, hi_op2);
746
747      emit_insn (gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
748    }
749  else
750    {
751      if (!arm_add_operand (lo_op2, SImode))
752	lo_op2 = force_reg (SImode, lo_op2);
753      if (!arm_not_operand (hi_op2, SImode))
754	hi_op2 = force_reg (SImode, hi_op2);
755
756      emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
757
758      if (hi_op2 == const0_rtx)
759        emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
760      else if (CONST_INT_P (hi_op2))
761        emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
762      else
763        emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
764
765      arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
766    }
767
768  DONE;
769})
770
771(define_expand "addsi3_cin_cout_reg"
772  [(parallel
773    [(set (match_dup 3)
774	  (compare:CC_ADC
775	   (plus:DI
776	    (plus:DI (match_dup 4)
777		     (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
778	    (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
779	   (const_int 4294967296)))
780     (set (match_operand:SI 0 "s_register_operand")
781	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
782		   (match_dup 2)))])]
783  "TARGET_32BIT"
784  {
785    operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
786    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
787    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
788    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
789  }
790)
791
792(define_insn "*addsi3_cin_cout_reg_insn"
793  [(set (reg:CC_ADC CC_REGNUM)
794	(compare:CC_ADC
795	 (plus:DI
796	  (plus:DI
797	   (match_operand:DI 3 "arm_carry_operation" "")
798	   (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
799	  (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
800	(const_int 4294967296)))
801   (set (match_operand:SI 0 "s_register_operand" "=l,r")
802	(plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
803			  (match_dup 1))
804		 (match_dup 2)))]
805  "TARGET_32BIT"
806  "@
807   adcs%?\\t%0, %0, %2
808   adcs%?\\t%0, %1, %2"
809  [(set_attr "type" "alus_sreg")
810   (set_attr "arch" "t2,*")
811   (set_attr "length" "2,4")]
812)
813
814(define_expand "addsi3_cin_cout_imm"
815  [(parallel
816    [(set (match_dup 3)
817	  (compare:CC_ADC
818	   (plus:DI
819	    (plus:DI (match_dup 4)
820		     (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
821	    (match_dup 6))
822	   (const_int 4294967296)))
823     (set (match_operand:SI 0 "s_register_operand")
824	  (plus:SI (plus:SI (match_dup 5) (match_dup 1))
825		   (match_operand:SI 2 "arm_adcimm_operand")))])]
826  "TARGET_32BIT"
827  {
828    operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
829    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
830    operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
831    operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
832    operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
833  }
834)
835
836(define_insn "*addsi3_cin_cout_imm_insn"
837  [(set (reg:CC_ADC CC_REGNUM)
838	(compare:CC_ADC
839	 (plus:DI
840	  (plus:DI
841	   (match_operand:DI 3 "arm_carry_operation" "")
842	   (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
843	  (match_operand:DI 5 "const_int_operand" "n,n"))
844	(const_int 4294967296)))
845   (set (match_operand:SI 0 "s_register_operand" "=r,r")
846	(plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
847			  (match_dup 1))
848		 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
849  "TARGET_32BIT
850   && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
851  "@
852   adcs%?\\t%0, %1, %2
853   sbcs%?\\t%0, %1, #%B2"
854  [(set_attr "type" "alus_imm")]
855)
856
857(define_expand "addsi3_cin_cout_0"
858  [(parallel
859    [(set (match_dup 2)
860	  (compare:CC_ADC
861	   (plus:DI (match_dup 3)
862		    (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
863	   (const_int 4294967296)))
864     (set (match_operand:SI 0 "s_register_operand")
865	  (plus:SI (match_dup 4) (match_dup 1)))])]
866  "TARGET_32BIT"
867  {
868    operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
869    rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
870    operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
871    operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
872  }
873)
874
875(define_insn "*addsi3_cin_cout_0_insn"
876  [(set (reg:CC_ADC CC_REGNUM)
877	(compare:CC_ADC
878	 (plus:DI
879	  (match_operand:DI 2 "arm_carry_operation" "")
880	  (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
881	(const_int 4294967296)))
882   (set (match_operand:SI 0 "s_register_operand" "=r")
883	(plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
884  "TARGET_32BIT"
885  "adcs%?\\t%0, %1, #0"
886  [(set_attr "type" "alus_imm")]
887)
888
889(define_expand "addsi3"
890  [(set (match_operand:SI          0 "s_register_operand")
891	(plus:SI (match_operand:SI 1 "s_register_operand")
892		 (match_operand:SI 2 "reg_or_int_operand")))]
893  "TARGET_EITHER"
894  "
895  if (TARGET_32BIT && CONST_INT_P (operands[2]))
896    {
897      arm_split_constant (PLUS, SImode, NULL_RTX,
898	                  INTVAL (operands[2]), operands[0], operands[1],
899			  optimize && can_create_pseudo_p ());
900      DONE;
901    }
902  "
903)
904
905; If there is a scratch available, this will be faster than synthesizing the
906; addition.
907(define_peephole2
908  [(match_scratch:SI 3 "r")
909   (set (match_operand:SI          0 "arm_general_register_operand" "")
910	(plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
911		 (match_operand:SI 2 "const_int_operand"  "")))]
912  "TARGET_32BIT &&
913   !(const_ok_for_arm (INTVAL (operands[2]))
914     || const_ok_for_arm (-INTVAL (operands[2])))
915    && const_ok_for_arm (~INTVAL (operands[2]))"
916  [(set (match_dup 3) (match_dup 2))
917   (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
918  ""
919)
920
921;; The r/r/k alternative is required when reloading the address
922;;  (plus (reg rN) (reg sp)) into (reg rN).  In this case reload will
923;; put the duplicated register first, and not try the commutative version.
924(define_insn_and_split "*arm_addsi3"
925  [(set (match_operand:SI          0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
926	(plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
927		 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
928  "TARGET_32BIT"
929  "@
930   add%?\\t%0, %0, %2
931   add%?\\t%0, %1, %2
932   add%?\\t%0, %1, %2
933   add%?\\t%0, %1, %2
934   add%?\\t%0, %1, %2
935   add%?\\t%0, %1, %2
936   add%?\\t%0, %2, %1
937   add%?\\t%0, %1, %2
938   addw%?\\t%0, %1, %2
939   addw%?\\t%0, %1, %2
940   sub%?\\t%0, %1, #%n2
941   sub%?\\t%0, %1, #%n2
942   sub%?\\t%0, %1, #%n2
943   subw%?\\t%0, %1, #%n2
944   subw%?\\t%0, %1, #%n2
945   #"
946  "TARGET_32BIT
947   && CONST_INT_P (operands[2])
948   && !const_ok_for_op (INTVAL (operands[2]), PLUS)
949   && (reload_completed || !arm_eliminable_register (operands[1]))"
950  [(clobber (const_int 0))]
951  "
952  arm_split_constant (PLUS, SImode, curr_insn,
953	              INTVAL (operands[2]), operands[0],
954		      operands[1], 0);
955  DONE;
956  "
957  [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
958   (set_attr "predicable" "yes")
959   (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
960   (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
961   (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
962		      (const_string "alu_imm")
963		      (const_string "alu_sreg")))
964 ]
965)
966
967(define_insn "addsi3_compareV_reg"
968  [(set (reg:CC_V CC_REGNUM)
969	(compare:CC_V
970	  (plus:DI
971	    (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
972	    (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
973	  (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
974   (set (match_operand:SI 0 "register_operand" "=l,r,r")
975	(plus:SI (match_dup 1) (match_dup 2)))]
976  "TARGET_32BIT"
977  "adds%?\\t%0, %1, %2"
978  [(set_attr "conds" "set")
979   (set_attr "arch" "t2,t2,*")
980   (set_attr "length" "2,2,4")
981   (set_attr "type" "alus_sreg")]
982)
983
984(define_insn "*addsi3_compareV_reg_nosum"
985  [(set (reg:CC_V CC_REGNUM)
986	(compare:CC_V
987	  (plus:DI
988	    (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
989	    (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
990	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
991  "TARGET_32BIT"
992  "cmn%?\\t%0, %1"
993  [(set_attr "conds" "set")
994   (set_attr "arch" "t2,*")
995   (set_attr "length" "2,4")
996   (set_attr "type" "alus_sreg")]
997)
998
999(define_insn "subvsi3_intmin"
1000  [(set (reg:CC_V CC_REGNUM)
1001	(compare:CC_V
1002	  (plus:DI
1003	    (sign_extend:DI
1004	     (match_operand:SI 1 "register_operand" "r"))
1005	    (const_int 2147483648))
1006	  (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
1007   (set (match_operand:SI 0 "register_operand" "=r")
1008	(plus:SI (match_dup 1) (const_int -2147483648)))]
1009  "TARGET_32BIT"
1010  "subs%?\\t%0, %1, #-2147483648"
1011  [(set_attr "conds" "set")
1012   (set_attr "type" "alus_imm")]
1013)
1014
1015(define_insn "addsi3_compareV_imm"
1016  [(set (reg:CC_V CC_REGNUM)
1017	(compare:CC_V
1018	  (plus:DI
1019	    (sign_extend:DI
1020	     (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
1021	    (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
1022	  (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
1023   (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
1024	(plus:SI (match_dup 1) (match_dup 2)))]
1025  "TARGET_32BIT
1026   && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
1027  "@
1028   adds%?\\t%0, %1, %2
1029   adds%?\\t%0, %0, %2
1030   subs%?\\t%0, %1, #%n2
1031   subs%?\\t%0, %0, #%n2
1032   adds%?\\t%0, %1, %2
1033   subs%?\\t%0, %1, #%n2"
1034  [(set_attr "conds" "set")
1035   (set_attr "arch" "t2,t2,t2,t2,*,*")
1036   (set_attr "length" "2,2,2,2,4,4")
1037   (set_attr "type" "alus_imm")]
1038)
1039
1040(define_insn "addsi3_compareV_imm_nosum"
1041  [(set (reg:CC_V CC_REGNUM)
1042	(compare:CC_V
1043	  (plus:DI
1044	    (sign_extend:DI
1045	     (match_operand:SI 0 "register_operand" "l,r,r"))
1046	    (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
1047	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1048  "TARGET_32BIT
1049   && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
1050  "@
1051   cmp%?\\t%0, #%n1
1052   cmn%?\\t%0, %1
1053   cmp%?\\t%0, #%n1"
1054  [(set_attr "conds" "set")
1055   (set_attr "arch" "t2,*,*")
1056   (set_attr "length" "2,4,4")
1057   (set_attr "type" "alus_imm")]
1058)
1059
1060;; We can handle more constants efficently if we can clobber either a scratch
1061;; or the other source operand.  We deliberately leave this late as in
1062;; high register pressure situations it's not worth forcing any reloads.
1063(define_peephole2
1064  [(match_scratch:SI 2 "l")
1065   (set (reg:CC_V CC_REGNUM)
1066	(compare:CC_V
1067	  (plus:DI
1068	    (sign_extend:DI
1069	     (match_operand:SI 0 "low_register_operand"))
1070	    (match_operand 1 "const_int_operand"))
1071	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1072  "TARGET_THUMB2
1073   && satisfies_constraint_Pd (operands[1])"
1074  [(parallel[
1075    (set (reg:CC_V CC_REGNUM)
1076	 (compare:CC_V
1077	  (plus:DI (sign_extend:DI (match_dup 0))
1078		   (sign_extend:DI (match_dup 1)))
1079	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1080    (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1081)
1082
1083(define_peephole2
1084  [(set (reg:CC_V CC_REGNUM)
1085	(compare:CC_V
1086	  (plus:DI
1087	    (sign_extend:DI
1088	     (match_operand:SI 0 "low_register_operand"))
1089	    (match_operand 1 "const_int_operand"))
1090	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1091  "TARGET_THUMB2
1092   && dead_or_set_p (peep2_next_insn (0), operands[0])
1093   && satisfies_constraint_Py (operands[1])"
1094  [(parallel[
1095    (set (reg:CC_V CC_REGNUM)
1096	 (compare:CC_V
1097	  (plus:DI (sign_extend:DI (match_dup 0))
1098		   (sign_extend:DI (match_dup 1)))
1099	  (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1100    (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
1101)
1102
1103(define_insn "addsi3_compare0"
1104  [(set (reg:CC_NZ CC_REGNUM)
1105	(compare:CC_NZ
1106	 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1107		  (match_operand:SI 2 "arm_add_operand"    "I,L,r"))
1108	 (const_int 0)))
1109   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1110	(plus:SI (match_dup 1) (match_dup 2)))]
1111  "TARGET_ARM"
1112  "@
1113   adds%?\\t%0, %1, %2
1114   subs%?\\t%0, %1, #%n2
1115   adds%?\\t%0, %1, %2"
1116  [(set_attr "conds" "set")
1117   (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1118)
1119
1120(define_insn "*addsi3_compare0_scratch"
1121  [(set (reg:CC_NZ CC_REGNUM)
1122	(compare:CC_NZ
1123	 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1124		  (match_operand:SI 1 "arm_add_operand"    "I,L, r"))
1125	 (const_int 0)))]
1126  "TARGET_ARM"
1127  "@
1128   cmn%?\\t%0, %1
1129   cmp%?\\t%0, #%n1
1130   cmn%?\\t%0, %1"
1131  [(set_attr "conds" "set")
1132   (set_attr "predicable" "yes")
1133   (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
1134)
1135
1136(define_insn "*compare_negsi_si"
1137  [(set (reg:CC_Z CC_REGNUM)
1138	(compare:CC_Z
1139	 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1140	 (match_operand:SI 1 "s_register_operand" "l,r")))]
1141  "TARGET_32BIT"
1142  "cmn%?\\t%1, %0"
1143  [(set_attr "conds" "set")
1144   (set_attr "predicable" "yes")
1145   (set_attr "arch" "t2,*")
1146   (set_attr "length" "2,4")
1147   (set_attr "predicable_short_it" "yes,no")
1148   (set_attr "type" "alus_sreg")]
1149)
1150
1151;; This is the canonicalization of subsi3_compare when the
1152;; addend is a constant.
1153(define_insn "cmpsi2_addneg"
1154  [(set (reg:CC CC_REGNUM)
1155	(compare:CC
1156	 (match_operand:SI 1 "s_register_operand" "r,r")
1157	 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1158   (set (match_operand:SI 0 "s_register_operand" "=r,r")
1159	(plus:SI (match_dup 1)
1160		 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1161  "TARGET_32BIT
1162   && (INTVAL (operands[2])
1163       == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1164{
1165  /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1166     in different condition codes (like cmn rather than like cmp), so that
1167     alternative comes first.  Both alternatives can match for any 0x??000000
1168     where except for 0 and INT_MIN it doesn't matter what we choose, and also
1169     for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
1170     as it is shorter.  */
1171  if (which_alternative == 0 && operands[3] != const1_rtx)
1172    return "subs%?\\t%0, %1, #%n3";
1173  else
1174    return "adds%?\\t%0, %1, %3";
1175}
1176  [(set_attr "conds" "set")
1177   (set_attr "type" "alus_sreg")]
1178)
1179
1180;; Convert the sequence
1181;;  sub  rd, rn, #1
1182;;  cmn  rd, #1	(equivalent to cmp rd, #-1)
1183;;  bne  dest
1184;; into
1185;;  subs rd, rn, #1
1186;;  bcs  dest	((unsigned)rn >= 1)
1187;; similarly for the beq variant using bcc.
1188;; This is a common looping idiom (while (n--))
1189(define_peephole2
1190  [(set (match_operand:SI 0 "arm_general_register_operand" "")
1191	(plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1192		 (const_int -1)))
1193   (set (match_operand 2 "cc_register" "")
1194	(compare (match_dup 0) (const_int -1)))
1195   (set (pc)
1196	(if_then_else (match_operator 3 "equality_operator"
1197		       [(match_dup 2) (const_int 0)])
1198		      (match_operand 4 "" "")
1199		      (match_operand 5 "" "")))]
1200  "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1201  [(parallel[
1202    (set (match_dup 2)
1203	 (compare:CC
1204	  (match_dup 1) (const_int 1)))
1205    (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1206   (set (pc)
1207	(if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1208		      (match_dup 4)
1209		      (match_dup 5)))]
1210  "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1211   operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1212				  ? GEU : LTU),
1213				 VOIDmode,
1214				 operands[2], const0_rtx);"
1215)
1216
1217;; The next four insns work because they compare the result with one of
1218;; the operands, and we know that the use of the condition code is
1219;; either GEU or LTU, so we can use the carry flag from the addition
1220;; instead of doing the compare a second time.
1221(define_insn "addsi3_compare_op1"
1222  [(set (reg:CC_C CC_REGNUM)
1223	(compare:CC_C
1224	 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1225		  (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1226	 (match_dup 1)))
1227   (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1228	(plus:SI (match_dup 1) (match_dup 2)))]
1229  "TARGET_32BIT"
1230  "@
1231   adds%?\\t%0, %1, %2
1232   adds%?\\t%0, %0, %2
1233   subs%?\\t%0, %1, #%n2
1234   subs%?\\t%0, %0, #%n2
1235   adds%?\\t%0, %1, %2
1236   subs%?\\t%0, %1, #%n2"
1237  [(set_attr "conds" "set")
1238   (set_attr "arch" "t2,t2,t2,t2,*,*")
1239   (set_attr "length" "2,2,2,2,4,4")
1240   (set (attr "type")
1241	(if_then_else (match_operand 2 "const_int_operand")
1242		      (const_string "alu_imm")
1243		      (const_string "alu_sreg")))]
1244)
1245
1246(define_insn "*addsi3_compare_op2"
1247  [(set (reg:CC_C CC_REGNUM)
1248	(compare:CC_C
1249	 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1250		  (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1251	 (match_dup 2)))
1252   (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1253	(plus:SI (match_dup 1) (match_dup 2)))]
1254  "TARGET_32BIT"
1255  "@
1256   adds%?\\t%0, %1, %2
1257   adds%?\\t%0, %0, %2
1258   subs%?\\t%0, %1, #%n2
1259   subs%?\\t%0, %0, #%n2
1260   adds%?\\t%0, %1, %2
1261   subs%?\\t%0, %1, #%n2"
1262  [(set_attr "conds" "set")
1263   (set_attr "arch" "t2,t2,t2,t2,*,*")
1264   (set_attr "length" "2,2,2,2,4,4")
1265   (set (attr "type")
1266	(if_then_else (match_operand 2 "const_int_operand")
1267		      (const_string "alu_imm")
1268		      (const_string "alu_sreg")))]
1269)
1270
1271(define_insn "*compare_addsi2_op0"
1272  [(set (reg:CC_C CC_REGNUM)
1273        (compare:CC_C
1274          (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1275                   (match_operand:SI 1 "arm_add_operand"    "l,Pw,rI,L"))
1276          (match_dup 0)))]
1277  "TARGET_32BIT"
1278  "@
1279   cmn%?\\t%0, %1
1280   cmp%?\\t%0, #%n1
1281   cmn%?\\t%0, %1
1282   cmp%?\\t%0, #%n1"
1283  [(set_attr "conds" "set")
1284   (set_attr "predicable" "yes")
1285   (set_attr "arch" "t2,t2,*,*")
1286   (set_attr "predicable_short_it" "yes,yes,no,no")
1287   (set_attr "length" "2,2,4,4")
1288   (set (attr "type")
1289	(if_then_else (match_operand 1 "const_int_operand")
1290		      (const_string "alu_imm")
1291		      (const_string "alu_sreg")))]
1292)
1293
1294(define_insn "*compare_addsi2_op1"
1295  [(set (reg:CC_C CC_REGNUM)
1296        (compare:CC_C
1297          (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1298                   (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1299          (match_dup 1)))]
1300  "TARGET_32BIT"
1301  "@
1302   cmn%?\\t%0, %1
1303   cmp%?\\t%0, #%n1
1304   cmn%?\\t%0, %1
1305   cmp%?\\t%0, #%n1"
1306  [(set_attr "conds" "set")
1307   (set_attr "predicable" "yes")
1308   (set_attr "arch" "t2,t2,*,*")
1309   (set_attr "predicable_short_it" "yes,yes,no,no")
1310   (set_attr "length" "2,2,4,4")
1311   (set (attr "type")
1312	(if_then_else (match_operand 1 "const_int_operand")
1313		      (const_string "alu_imm")
1314		      (const_string "alu_sreg")))]
1315 )
1316
1317(define_insn "addsi3_carryin"
1318  [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1319        (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1320                          (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1321                 (match_operand:SI 3 "arm_carry_operation" "")))]
1322  "TARGET_32BIT"
1323  "@
1324   adc%?\\t%0, %1, %2
1325   adc%?\\t%0, %1, %2
1326   sbc%?\\t%0, %1, #%B2"
1327  [(set_attr "conds" "use")
1328   (set_attr "predicable" "yes")
1329   (set_attr "arch" "t2,*,*")
1330   (set_attr "length" "4")
1331   (set_attr "predicable_short_it" "yes,no,no")
1332   (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1333)
1334
1335;; Canonicalization of the above when the immediate is zero.
1336(define_insn "add0si3_carryin"
1337  [(set (match_operand:SI 0 "s_register_operand" "=r")
1338	(plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1339		 (match_operand:SI 1 "arm_not_operand" "r")))]
1340  "TARGET_32BIT"
1341  "adc%?\\t%0, %1, #0"
1342  [(set_attr "conds" "use")
1343   (set_attr "predicable" "yes")
1344   (set_attr "length" "4")
1345   (set_attr "type" "adc_imm")]
1346)
1347
1348(define_insn "*addsi3_carryin_alt2"
1349  [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1350        (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1351                          (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1352                 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1353  "TARGET_32BIT"
1354  "@
1355   adc%?\\t%0, %1, %2
1356   adc%?\\t%0, %1, %2
1357   sbc%?\\t%0, %1, #%B2"
1358  [(set_attr "conds" "use")
1359   (set_attr "predicable" "yes")
1360   (set_attr "arch" "t2,*,*")
1361   (set_attr "length" "4")
1362   (set_attr "predicable_short_it" "yes,no,no")
1363   (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1364)
1365
1366(define_insn "*addsi3_carryin_shift"
1367  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1368	(plus:SI (plus:SI
1369		  (match_operator:SI 2 "shift_operator"
1370		    [(match_operand:SI 3 "s_register_operand" "r,r")
1371		     (match_operand:SI 4 "shift_amount_operand" "M,r")])
1372		  (match_operand:SI 5 "arm_carry_operation" ""))
1373		 (match_operand:SI 1 "s_register_operand" "r,r")))]
1374  "TARGET_32BIT"
1375  "adc%?\\t%0, %1, %3%S2"
1376  [(set_attr "conds" "use")
1377   (set_attr "arch" "32,a")
1378   (set_attr "shift" "3")
1379   (set_attr "predicable" "yes")
1380   (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1381)
1382
1383(define_insn "*addsi3_carryin_clobercc"
1384  [(set (match_operand:SI 0 "s_register_operand" "=r")
1385	(plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1386			  (match_operand:SI 2 "arm_rhs_operand" "rI"))
1387		 (match_operand:SI 3 "arm_carry_operation" "")))
1388   (clobber (reg:CC CC_REGNUM))]
1389   "TARGET_32BIT"
1390   "adcs%?\\t%0, %1, %2"
1391   [(set_attr "conds" "set")
1392    (set_attr "type" "adcs_reg")]
1393)
1394
1395(define_expand "subvsi4"
1396  [(match_operand:SI 0 "s_register_operand")
1397   (match_operand:SI 1 "arm_rhs_operand")
1398   (match_operand:SI 2 "arm_add_operand")
1399   (match_operand 3 "")]
1400  "TARGET_32BIT"
1401{
1402  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1403    {
1404      /* If both operands are constants we can decide the result statically.  */
1405      wi::overflow_type overflow;
1406      wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1407			      rtx_mode_t (operands[2], SImode),
1408			      SIGNED, &overflow);
1409      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1410      if (overflow != wi::OVF_NONE)
1411	emit_jump_insn (gen_jump (operands[3]));
1412      DONE;
1413    }
1414  else if (CONST_INT_P (operands[2]))
1415    {
1416      operands[2] = GEN_INT (-INTVAL (operands[2]));
1417      /* Special case for INT_MIN.  */
1418      if (INTVAL (operands[2]) == 0x80000000)
1419	emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
1420      else
1421	emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
1422					  operands[2]));
1423    }
1424  else if (CONST_INT_P (operands[1]))
1425    emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
1426  else
1427    emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
1428
1429  arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1430  DONE;
1431})
1432
1433(define_expand "subvdi4"
1434  [(match_operand:DI 0 "s_register_operand")
1435   (match_operand:DI 1 "reg_or_int_operand")
1436   (match_operand:DI 2 "reg_or_int_operand")
1437   (match_operand 3 "")]
1438  "TARGET_32BIT"
1439{
1440  rtx lo_result, hi_result;
1441  rtx lo_op1, hi_op1, lo_op2, hi_op2;
1442  lo_result = gen_lowpart (SImode, operands[0]);
1443  hi_result = gen_highpart (SImode, operands[0]);
1444  machine_mode mode = CCmode;
1445
1446  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1447    {
1448      /* If both operands are constants we can decide the result statically.  */
1449      wi::overflow_type overflow;
1450      wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1451			      rtx_mode_t (operands[2], DImode),
1452			      SIGNED, &overflow);
1453      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1454      if (overflow != wi::OVF_NONE)
1455	emit_jump_insn (gen_jump (operands[3]));
1456      DONE;
1457    }
1458  else if (CONST_INT_P (operands[1]))
1459    {
1460      arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1461			      &lo_op1, &hi_op1);
1462      if (const_ok_for_arm (INTVAL (lo_op1)))
1463	{
1464	  emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1465					  GEN_INT (~UINTVAL (lo_op1))));
1466	  /* We could potentially use RSC here in Arm state, but not
1467	     in Thumb, so it's probably not worth the effort of handling
1468	     this.  */
1469	  hi_op1 = force_reg (SImode, hi_op1);
1470	  mode = CC_RSBmode;
1471	  goto highpart;
1472	}
1473      operands[1] = force_reg (DImode, operands[1]);
1474    }
1475
1476  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1477			  &lo_op2, &hi_op2);
1478  if (lo_op2 == const0_rtx)
1479    {
1480      emit_move_insn (lo_result, lo_op1);
1481      if (!arm_add_operand (hi_op2, SImode))
1482        hi_op2 = force_reg (SImode, hi_op2);
1483      emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1484      DONE;
1485    }
1486
1487  if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1488    lo_op2 = force_reg (SImode, lo_op2);
1489  if (CONST_INT_P (lo_op2))
1490    emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1491				  gen_int_mode (-INTVAL (lo_op2), SImode)));
1492  else
1493    emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1494
1495 highpart:
1496  if (!arm_not_operand (hi_op2, SImode))
1497    hi_op2 = force_reg (SImode, hi_op2);
1498  rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1499  if (CONST_INT_P (hi_op2))
1500    emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1501				       gen_rtx_LTU (SImode, ccreg, const0_rtx),
1502				       gen_rtx_LTU (DImode, ccreg,
1503						    const0_rtx)));
1504  else
1505    emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
1506				   gen_rtx_LTU (SImode, ccreg, const0_rtx),
1507				   gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1508  arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
1509
1510  DONE;
1511})
1512
1513(define_expand "usubvsi4"
1514  [(match_operand:SI 0 "s_register_operand")
1515   (match_operand:SI 1 "arm_rhs_operand")
1516   (match_operand:SI 2 "arm_add_operand")
1517   (match_operand 3 "")]
1518  "TARGET_32BIT"
1519{
1520  machine_mode mode = CCmode;
1521  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1522    {
1523      /* If both operands are constants we can decide the result statically.  */
1524      wi::overflow_type overflow;
1525      wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1526			      rtx_mode_t (operands[2], SImode),
1527			      UNSIGNED, &overflow);
1528      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1529      if (overflow != wi::OVF_NONE)
1530	emit_jump_insn (gen_jump (operands[3]));
1531      DONE;
1532    }
1533  else if (CONST_INT_P (operands[2]))
1534    emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1535				  gen_int_mode (-INTVAL (operands[2]),
1536						SImode)));
1537  else if (CONST_INT_P (operands[1]))
1538    {
1539      mode = CC_RSBmode;
1540      emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1541				      GEN_INT (~UINTVAL (operands[1]))));
1542    }
1543  else
1544    emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1545  arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
1546
1547  DONE;
1548})
1549
1550(define_expand "usubvdi4"
1551  [(match_operand:DI 0 "s_register_operand")
1552   (match_operand:DI 1 "reg_or_int_operand")
1553   (match_operand:DI 2 "reg_or_int_operand")
1554   (match_operand 3 "")]
1555  "TARGET_32BIT"
1556{
1557  rtx lo_result, hi_result;
1558  rtx lo_op1, hi_op1, lo_op2, hi_op2;
1559  lo_result = gen_lowpart (SImode, operands[0]);
1560  hi_result = gen_highpart (SImode, operands[0]);
1561  machine_mode mode = CCmode;
1562
1563  if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1564    {
1565      /* If both operands are constants, we can decide the result statically.  */
1566      wi::overflow_type overflow;
1567      wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1568			      rtx_mode_t (operands[2], DImode),
1569			      UNSIGNED, &overflow);
1570      emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1571      if (overflow != wi::OVF_NONE)
1572	emit_jump_insn (gen_jump (operands[3]));
1573      DONE;
1574    }
1575  else if (CONST_INT_P (operands[1]))
1576    {
1577      arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1578			      &lo_op1, &hi_op1);
1579      if (const_ok_for_arm (INTVAL (lo_op1)))
1580	{
1581	  emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1582					  GEN_INT (~UINTVAL (lo_op1))));
1583	  /* We could potentially use RSC here in Arm state, but not
1584	     in Thumb, so it's probably not worth the effort of handling
1585	     this.  */
1586	  hi_op1 = force_reg (SImode, hi_op1);
1587	  mode = CC_RSBmode;
1588	  goto highpart;
1589	}
1590      operands[1] = force_reg (DImode, operands[1]);
1591    }
1592
1593  arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1594			  &lo_op2, &hi_op2);
1595  if (lo_op2 == const0_rtx)
1596    {
1597      emit_move_insn (lo_result, lo_op1);
1598      if (!arm_add_operand (hi_op2, SImode))
1599        hi_op2 = force_reg (SImode, hi_op2);
1600      emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1601      DONE;
1602    }
1603
1604  if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1605    lo_op2 = force_reg (SImode, lo_op2);
1606  if (CONST_INT_P (lo_op2))
1607    emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1608				  gen_int_mode (-INTVAL (lo_op2), SImode)));
1609  else
1610    emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1611
1612 highpart:
1613  if (!arm_not_operand (hi_op2, SImode))
1614    hi_op2 = force_reg (SImode, hi_op2);
1615  rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1616  if (CONST_INT_P (hi_op2))
1617    emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1618					GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1619					gen_rtx_LTU (SImode, ccreg, const0_rtx),
1620					gen_rtx_LTU (DImode, ccreg,
1621						     const0_rtx)));
1622  else
1623    emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1624				    gen_rtx_LTU (SImode, ccreg, const0_rtx),
1625				    gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1626  arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
1627
1628  DONE;
1629})
1630
1631(define_insn "subsi3_compare1"
1632  [(set (reg:CC CC_REGNUM)
1633	(compare:CC
1634	  (match_operand:SI 1 "register_operand" "r")
1635	  (match_operand:SI 2 "register_operand" "r")))
1636   (set (match_operand:SI 0 "register_operand" "=r")
1637	(minus:SI (match_dup 1) (match_dup 2)))]
1638  "TARGET_32BIT"
1639  "subs%?\\t%0, %1, %2"
1640  [(set_attr "conds" "set")
1641   (set_attr "type" "alus_sreg")]
1642)
1643
1644(define_insn "subvsi3"
1645  [(set (reg:CC_V CC_REGNUM)
1646	(compare:CC_V
1647	 (minus:DI
1648	  (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
1649	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
1650	 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1651   (set (match_operand:SI 0 "s_register_operand" "=l,r")
1652	(minus:SI (match_dup 1) (match_dup 2)))]
1653  "TARGET_32BIT"
1654  "subs%?\\t%0, %1, %2"
1655  [(set_attr "conds" "set")
1656   (set_attr "arch" "t2,*")
1657   (set_attr "length" "2,4")
1658   (set_attr "type" "alus_sreg")]
1659)
1660
1661(define_insn "subvsi3_imm1"
1662  [(set (reg:CC_V CC_REGNUM)
1663	(compare:CC_V
1664	 (minus:DI
1665	  (match_operand 1 "arm_immediate_operand" "I")
1666	  (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1667	 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1668   (set (match_operand:SI 0 "s_register_operand" "=r")
1669	(minus:SI (match_dup 1) (match_dup 2)))]
1670  "TARGET_32BIT"
1671  "rsbs%?\\t%0, %2, %1"
1672  [(set_attr "conds" "set")
1673   (set_attr "type" "alus_imm")]
1674)
1675
1676(define_insn "subsi3_carryin"
1677  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1678	(minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1679			    (match_operand:SI 2 "s_register_operand" "r,r,r"))
1680		  (match_operand:SI 3 "arm_borrow_operation" "")))]
1681  "TARGET_32BIT"
1682  "@
1683   sbc%?\\t%0, %1, %2
1684   rsc%?\\t%0, %2, %1
1685   sbc%?\\t%0, %2, %2, lsl #1"
1686  [(set_attr "conds" "use")
1687   (set_attr "arch" "*,a,t2")
1688   (set_attr "predicable" "yes")
1689   (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
1690)
1691
1692;; Special canonicalization of the above when operand1 == (const_int 1):
1693 ;; in this case the 'borrow' needs to be treated like subtracting from the carry.
1694(define_insn "rsbsi_carryin_reg"
1695  [(set (match_operand:SI 0 "s_register_operand" "=r")
1696	(minus:SI (match_operand:SI 1 "arm_carry_operation" "")
1697		  (match_operand:SI 2 "s_register_operand" "r")))]
1698  "TARGET_ARM"
1699  "rsc%?\\t%0, %2, #1"
1700  [(set_attr "conds" "use")
1701   (set_attr "predicable" "yes")
1702   (set_attr "type" "adc_imm")]
1703)
1704
1705;; SBC performs Rn - Rm - ~C, but -Rm = ~Rm + 1 => Rn + ~Rm + 1 - ~C
1706;; => Rn + ~Rm + C, which is essentially ADC Rd, Rn, ~Rm
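;; A worked example (illustrative only): with Rn = 5, Rm = 3 and the carry
;; flag clear (a borrow pending), SBC gives 5 - 3 - 1 = 1, while the ADC form
;; gives 5 + ~3 + 0 = 5 + 0xfffffffc = 1 modulo 2^32.  With the carry set,
;; both forms give 5 - 3 = 2, so the two are interchangeable.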
1707(define_insn "*add_not_cin"
1708  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1709	(plus:SI
1710	 (plus:SI (not:SI (match_operand:SI 1 "s_register_operand" "r,r"))
1711		  (match_operand:SI 3 "arm_carry_operation" ""))
1712	 (match_operand:SI 2 "arm_rhs_operand" "r,I")))]
1713  "TARGET_ARM || (TARGET_THUMB2 && !CONST_INT_P (operands[2]))"
1714  "@
1715   sbc%?\\t%0, %2, %1
1716   rsc%?\\t%0, %1, %2"
1717  [(set_attr "conds" "use")
1718   (set_attr "predicable" "yes")
1719   (set_attr "arch" "*,a")
1720   (set_attr "type" "adc_reg,adc_imm")]
1721)
1722
1723;; On Arm we can also use the same trick when the non-inverted operand is
1724;; shifted, using RSC.
1725(define_insn "add_not_shift_cin"
1726  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1727	(plus:SI
1728	 (plus:SI (match_operator:SI 3 "shift_operator"
1729		   [(match_operand:SI 1 "s_register_operand" "r,r")
1730		    (match_operand:SI 2 "shift_amount_operand" "M,r")])
1731		  (not:SI (match_operand:SI 4 "s_register_operand" "r,r")))
1732	 (match_operand:SI 5 "arm_carry_operation" "")))]
1733  "TARGET_ARM"
1734  "rsc%?\\t%0, %4, %1%S3"
1735  [(set_attr "conds" "use")
1736   (set_attr "predicable" "yes")
1737   (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1738)
1739
1740(define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1741  [(set (reg:<CC_EXTEND> CC_REGNUM)
1742	(compare:<CC_EXTEND>
1743	 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1744	 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1745		  (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1746   (clobber (match_scratch:SI 0 "=l,r"))]
1747  "TARGET_32BIT"
1748  "sbcs\\t%0, %1, %2"
1749  [(set_attr "conds" "set")
1750   (set_attr "arch" "t2,*")
1751   (set_attr "length" "2,4")
1752   (set_attr "type" "adc_reg")]
1753)
1754
1755;; Similar to the above, but handling a constant which has a different
1756;; canonicalization.
1757(define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1758  [(set (reg:<CC_EXTEND> CC_REGNUM)
1759	(compare:<CC_EXTEND>
1760	 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1761	 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1762		  (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1763   (clobber (match_scratch:SI 0 "=l,r"))]
1764  "TARGET_32BIT"
1765  "@
1766   sbcs\\t%0, %1, %2
1767   adcs\\t%0, %1, #%B2"
1768  [(set_attr "conds" "set")
1769   (set_attr "type" "adc_imm")]
1770)
1771
1772;; Further canonicalization when the constant is zero.
1773(define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1774  [(set (reg:<CC_EXTEND> CC_REGNUM)
1775	(compare:<CC_EXTEND>
1776	 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1777	 (match_operand:DI 2 "arm_borrow_operation" "")))
1778   (clobber (match_scratch:SI 0 "=l,r"))]
1779  "TARGET_32BIT"
1780  "sbcs\\t%0, %1, #0"
1781  [(set_attr "conds" "set")
1782   (set_attr "type" "adc_imm")]
1783)
1784
1785(define_insn "*subsi3_carryin_const"
1786  [(set (match_operand:SI 0 "s_register_operand" "=r")
1787	(minus:SI (plus:SI
1788		   (match_operand:SI 1 "s_register_operand" "r")
1789		   (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1790		  (match_operand:SI 3 "arm_borrow_operation" "")))]
1791  "TARGET_32BIT"
1792  "sbc\\t%0, %1, #%n2"
1793  [(set_attr "conds" "use")
1794   (set_attr "type" "adc_imm")]
1795)
1796
1797(define_insn "*subsi3_carryin_const0"
1798  [(set (match_operand:SI 0 "s_register_operand" "=r")
1799	(minus:SI (match_operand:SI 1 "s_register_operand" "r")
1800		  (match_operand:SI 2 "arm_borrow_operation" "")))]
1801  "TARGET_32BIT"
1802  "sbc\\t%0, %1, #0"
1803  [(set_attr "conds" "use")
1804   (set_attr "type" "adc_imm")]
1805)
1806
1807(define_insn "*subsi3_carryin_shift"
1808  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1809	(minus:SI (minus:SI
1810		   (match_operand:SI 1 "s_register_operand" "r,r")
1811		   (match_operator:SI 2 "shift_operator"
1812		    [(match_operand:SI 3 "s_register_operand" "r,r")
1813		     (match_operand:SI 4 "shift_amount_operand" "M,r")]))
1814		  (match_operand:SI 5 "arm_borrow_operation" "")))]
1815  "TARGET_32BIT"
1816  "sbc%?\\t%0, %1, %3%S2"
1817  [(set_attr "conds" "use")
1818   (set_attr "arch" "32,a")
1819   (set_attr "shift" "3")
1820   (set_attr "predicable" "yes")
1821   (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1822)
1823
1824(define_insn "*subsi3_carryin_shift_alt"
1825  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1826	(minus:SI (minus:SI
1827		   (match_operand:SI 1 "s_register_operand" "r,r")
1828		   (match_operand:SI 5 "arm_borrow_operation" ""))
1829		  (match_operator:SI 2 "shift_operator"
1830		   [(match_operand:SI 3 "s_register_operand" "r,r")
1831		    (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
1832  "TARGET_32BIT"
1833  "sbc%?\\t%0, %1, %3%S2"
1834  [(set_attr "conds" "use")
1835   (set_attr "arch" "32,a")
1836   (set_attr "shift" "3")
1837   (set_attr "predicable" "yes")
1838   (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1839)
1840
1841;; No RSC in Thumb2
1842(define_insn "*rsbsi3_carryin_shift"
1843  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1844	(minus:SI (minus:SI
1845		   (match_operator:SI 2 "shift_operator"
1846		    [(match_operand:SI 3 "s_register_operand" "r,r")
1847		     (match_operand:SI 4 "shift_amount_operand" "M,r")])
1848		   (match_operand:SI 1 "s_register_operand" "r,r"))
1849		  (match_operand:SI 5 "arm_borrow_operation" "")))]
1850  "TARGET_ARM"
1851  "rsc%?\\t%0, %1, %3%S2"
1852  [(set_attr "conds" "use")
1853   (set_attr "predicable" "yes")
1854   (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1855)
1856
1857(define_insn "*rsbsi3_carryin_shift_alt"
1858  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1859	(minus:SI (minus:SI
1860		   (match_operator:SI 2 "shift_operator"
1861		    [(match_operand:SI 3 "s_register_operand" "r,r")
1862		     (match_operand:SI 4 "shift_amount_operand" "M,r")])
1863		    (match_operand:SI 5 "arm_borrow_operation" ""))
1864		  (match_operand:SI 1 "s_register_operand" "r,r")))]
1865  "TARGET_ARM"
1866  "rsc%?\\t%0, %1, %3%S2"
1867  [(set_attr "conds" "use")
1868   (set_attr "predicable" "yes")
1869   (set_attr "type" "alu_shift_imm,alu_shift_reg")]
1870)
1871
1872 ; Transform ((x << y) - 1) to ~(~(x - 1) << y), where x is a constant.
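; A numeric check (illustrative only): with x = 4 and y = 3,
; (4 << 3) - 1 = 31, and ~(~(4 - 1) << 3) = ~(0xfffffffc << 3)
; = ~0xffffffe0 = 0x1f = 31, so the two forms agree.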
1873(define_split
1874  [(set (match_operand:SI 0 "s_register_operand" "")
1875	(plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1876			    (match_operand:SI 2 "s_register_operand" ""))
1877		 (const_int -1)))
1878   (clobber (match_operand:SI 3 "s_register_operand" ""))]
1879  "TARGET_32BIT"
1880  [(set (match_dup 3) (match_dup 1))
1881   (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1882  "
1883  operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
1884")
1885
1886(define_expand "addsf3"
1887  [(set (match_operand:SF          0 "s_register_operand")
1888	(plus:SF (match_operand:SF 1 "s_register_operand")
1889		 (match_operand:SF 2 "s_register_operand")))]
1890  "TARGET_32BIT && TARGET_HARD_FLOAT"
1891  "
1892")
1893
1894(define_expand "adddf3"
1895  [(set (match_operand:DF          0 "s_register_operand")
1896	(plus:DF (match_operand:DF 1 "s_register_operand")
1897		 (match_operand:DF 2 "s_register_operand")))]
1898  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
1899  "
1900")
1901
1902(define_expand "subdi3"
1903 [(parallel
1904   [(set (match_operand:DI            0 "s_register_operand")
1905	  (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1906		    (match_operand:DI 2 "s_register_operand")))
1907    (clobber (reg:CC CC_REGNUM))])]
1908  "TARGET_EITHER"
1909  "
1910  if (TARGET_THUMB1)
1911    {
1912      if (!REG_P (operands[1]))
1913	operands[1] = force_reg (DImode, operands[1]);
1914    }
1915  else
1916    {
1917      rtx lo_result, hi_result, lo_dest, hi_dest;
1918      rtx lo_op1, hi_op1, lo_op2, hi_op2;
1919      rtx condition;
1920
1921      /* Since operands[1] may be an integer, pass it second, so that
1922	 any necessary simplifications will be done on the decomposed
1923	 constant.  */
1924      arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1925			      &lo_op1, &hi_op1);
1926      lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1927      hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1928
1929      if (!arm_rhs_operand (lo_op1, SImode))
1930	lo_op1 = force_reg (SImode, lo_op1);
1931
1932      if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1933	  || !arm_rhs_operand (hi_op1, SImode))
1934	hi_op1 = force_reg (SImode, hi_op1);
1935
1936      rtx cc_reg;
1937      if (lo_op1 == const0_rtx)
1938	{
1939	  cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1940	  emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1941	}
1942      else if (CONST_INT_P (lo_op1))
1943	{
1944	  cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1945	  emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1946					  GEN_INT (~UINTVAL (lo_op1))));
1947	}
1948      else
1949	{
1950	  cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1951	  emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1952	}
1953
1954      condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1955
1956      if (hi_op1 == const0_rtx)
1957        emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1958      else
1959	emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1960
1961      if (lo_result != lo_dest)
1962	emit_move_insn (lo_result, lo_dest);
1963
1964      if (hi_result != hi_dest)
1965	emit_move_insn (hi_result, hi_dest);
1966
1967      DONE;
1968    }
1969  "
1970)
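
;; Schematically (illustrative only; the exact insns depend on the operand
;; forms chosen above), subtracting the register pair hi2:lo2 from hi1:lo1
;; becomes a two-instruction sequence such as:
;;	subs	lo, lo1, lo2	@ C := no-borrow from the low word
;;	sbc	hi, hi1, hi2	@ the high word consumes the borrow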
1971
1972(define_expand "subsi3"
1973  [(set (match_operand:SI           0 "s_register_operand")
1974	(minus:SI (match_operand:SI 1 "reg_or_int_operand")
1975		  (match_operand:SI 2 "s_register_operand")))]
1976  "TARGET_EITHER"
1977  "
1978  if (CONST_INT_P (operands[1]))
1979    {
1980      if (TARGET_32BIT)
1981        {
1982	  if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1983	    operands[1] = force_reg (SImode, operands[1]);
1984	  else
1985	    {
1986	      arm_split_constant (MINUS, SImode, NULL_RTX,
1987				  INTVAL (operands[1]), operands[0],
1988				  operands[2],
1989				  optimize && can_create_pseudo_p ());
1990	      DONE;
1991	    }
1992	}
1993      else /* TARGET_THUMB1 */
1994        operands[1] = force_reg (SImode, operands[1]);
1995    }
1996  "
1997)
1998
1999; ??? Check Thumb-2 split length
2000(define_insn_and_split "*arm_subsi3_insn"
2001  [(set (match_operand:SI           0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
2002	(minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
2003		  (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
2004  "TARGET_32BIT"
2005  "@
2006   sub%?\\t%0, %1, %2
2007   sub%?\\t%0, %2
2008   sub%?\\t%0, %1, %2
2009   rsb%?\\t%0, %2, %1
2010   rsb%?\\t%0, %2, %1
2011   sub%?\\t%0, %1, %2
2012   sub%?\\t%0, %1, %2
2013   sub%?\\t%0, %1, %2
2014   #"
2015  "&& (CONST_INT_P (operands[1])
2016       && !const_ok_for_arm (INTVAL (operands[1])))"
2017  [(clobber (const_int 0))]
2018  "
2019  arm_split_constant (MINUS, SImode, curr_insn,
2020                      INTVAL (operands[1]), operands[0], operands[2], 0);
2021  DONE;
2022  "
2023  [(set_attr "length" "4,4,4,4,4,4,4,4,16")
2024   (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
2025   (set_attr "predicable" "yes")
2026   (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
2027   (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
2028)
2029
2030(define_peephole2
2031  [(match_scratch:SI 3 "r")
2032   (set (match_operand:SI 0 "arm_general_register_operand" "")
2033	(minus:SI (match_operand:SI 1 "const_int_operand" "")
2034		  (match_operand:SI 2 "arm_general_register_operand" "")))]
2035  "TARGET_32BIT
2036   && !const_ok_for_arm (INTVAL (operands[1]))
2037   && const_ok_for_arm (~INTVAL (operands[1]))"
2038  [(set (match_dup 3) (match_dup 1))
2039   (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
2040  ""
2041)
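
;; For example (illustrative only): for 0xffffff00 - r2, the constant is not
;; a valid ARM immediate but its complement 0xff is, so the peephole above
;; permits the sequence
;;	mvn	r3, #255
;;	sub	r0, r3, r2
;; instead of synthesising the constant with a longer sequence.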
2042
2043(define_insn "subsi3_compare0"
2044  [(set (reg:CC_NZ CC_REGNUM)
2045	(compare:CC_NZ
2046	 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2047		   (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
2048	 (const_int 0)))
2049   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2050	(minus:SI (match_dup 1) (match_dup 2)))]
2051  "TARGET_32BIT"
2052  "@
2053   subs%?\\t%0, %1, %2
2054   subs%?\\t%0, %1, %2
2055   rsbs%?\\t%0, %2, %1"
2056  [(set_attr "conds" "set")
2057   (set_attr "type"  "alus_imm,alus_sreg,alus_sreg")]
2058)
2059
2060(define_insn "subsi3_compare"
2061  [(set (reg:CC CC_REGNUM)
2062	(compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
2063		    (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
2064   (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2065	(minus:SI (match_dup 1) (match_dup 2)))]
2066  "TARGET_32BIT"
2067  "@
2068   subs%?\\t%0, %1, %2
2069   subs%?\\t%0, %1, %2
2070   rsbs%?\\t%0, %2, %1"
2071  [(set_attr "conds" "set")
2072   (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
2073)
2074
2075;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
2076;; rather than (0 cmp reg).  This gives the same results for unsigned
2077 ;; and equality compares, which is what we mostly need here.
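;; For example (illustrative only): the C and Z flags set by rsbs rd, r, #7
;; match those of a comparison of ~r with ~7, because ~r - ~7 = 7 - r and the
;; unsigned ordering of ~r and ~7 is the reverse of that of r and 7.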
2078(define_insn "rsb_imm_compare"
2079  [(set (reg:CC_RSB CC_REGNUM)
2080	(compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2081			(match_operand 3 "const_int_operand" "")))
2082   (set (match_operand:SI 0 "s_register_operand" "=r")
2083	(minus:SI (match_operand 1 "arm_immediate_operand" "I")
2084		  (match_dup 2)))]
2085  "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
2086  "rsbs\\t%0, %2, %1"
2087  [(set_attr "conds" "set")
2088   (set_attr "type" "alus_imm")]
2089)
2090
2091;; Similarly, but the result is unused.
2092(define_insn "rsb_imm_compare_scratch"
2093  [(set (reg:CC_RSB CC_REGNUM)
2094	(compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2095			(match_operand 1 "arm_not_immediate_operand" "K")))
2096   (clobber (match_scratch:SI 0 "=r"))]
2097  "TARGET_32BIT"
2098  "rsbs\\t%0, %2, #%B1"
2099  [(set_attr "conds" "set")
2100   (set_attr "type" "alus_imm")]
2101)
2102
2103;; Compare the sum of a value plus a carry against a constant.  Uses
2104 ;; RSC, so the result is swapped.  Only available in Arm state.
2105(define_insn "rscsi3_<CC_EXTEND>out_scratch"
2106  [(set (reg:CC_SWP CC_REGNUM)
2107	(compare:CC_SWP
2108	 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
2109		  (match_operand:DI 3 "arm_borrow_operation" ""))
2110	 (match_operand 1 "arm_immediate_operand" "I")))
2111   (clobber (match_scratch:SI 0 "=r"))]
2112  "TARGET_ARM"
2113  "rscs\\t%0, %2, %1"
2114  [(set_attr "conds" "set")
2115   (set_attr "type" "alus_imm")]
2116)
2117
2118(define_insn "usubvsi3_borrow"
2119  [(set (reg:CC_B CC_REGNUM)
2120	(compare:CC_B
2121	 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2122	 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
2123	          (zero_extend:DI
2124		   (match_operand:SI 2 "s_register_operand" "l,r")))))
2125   (set (match_operand:SI 0 "s_register_operand" "=l,r")
2126	(minus:SI (match_dup 1)
2127		  (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
2128			   (match_dup 2))))]
2129  "TARGET_32BIT"
2130  "sbcs%?\\t%0, %1, %2"
2131  [(set_attr "conds" "set")
2132   (set_attr "arch" "t2,*")
2133   (set_attr "length" "2,4")]
2134)
2135
2136(define_insn "usubvsi3_borrow_imm"
2137  [(set (reg:CC_B CC_REGNUM)
2138	(compare:CC_B
2139	 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2140	 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
2141		  (match_operand:DI 3 "const_int_operand" "n,n"))))
2142   (set (match_operand:SI 0 "s_register_operand" "=r,r")
2143	(minus:SI (match_dup 1)
2144		  (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
2145			   (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
2146  "TARGET_32BIT
2147   && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
2148  "@
2149  sbcs%?\\t%0, %1, %2
2150  adcs%?\\t%0, %1, #%B2"
2151  [(set_attr "conds" "set")
2152   (set_attr "type" "alus_imm")]
2153)
2154
2155(define_insn "subvsi3_borrow"
2156  [(set (reg:CC_V CC_REGNUM)
2157	(compare:CC_V
2158	 (minus:DI
2159	  (minus:DI
2160	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2161	   (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
2162	  (match_operand:DI 4 "arm_borrow_operation" ""))
2163	 (sign_extend:DI
2164	  (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2165		    (match_operand:SI 3 "arm_borrow_operation" "")))))
2166   (set (match_operand:SI 0 "s_register_operand" "=l,r")
2167	(minus:SI (minus:SI (match_dup 1) (match_dup 2))
2168		  (match_dup 3)))]
2169  "TARGET_32BIT"
2170  "sbcs%?\\t%0, %1, %2"
2171  [(set_attr "conds" "set")
2172   (set_attr "arch" "t2,*")
2173   (set_attr "length" "2,4")]
2174)
2175
2176(define_insn "subvsi3_borrow_imm"
2177  [(set (reg:CC_V CC_REGNUM)
2178	(compare:CC_V
2179	 (minus:DI
2180	  (minus:DI
2181	   (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2182	   (match_operand 2 "arm_adcimm_operand" "I,K"))
2183	  (match_operand:DI 4 "arm_borrow_operation" ""))
2184	 (sign_extend:DI
2185	  (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2186		    (match_operand:SI 3 "arm_borrow_operation" "")))))
2187   (set (match_operand:SI 0 "s_register_operand" "=r,r")
2188	(minus:SI (minus:SI (match_dup 1) (match_dup 2))
2189		  (match_dup 3)))]
2190  "TARGET_32BIT
2191   && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
2192  "@
2193  sbcs%?\\t%0, %1, %2
2194  adcs%?\\t%0, %1, #%B2"
2195  [(set_attr "conds" "set")
2196   (set_attr "type" "alus_imm")]
2197)
2198
2199(define_expand "subsf3"
2200  [(set (match_operand:SF           0 "s_register_operand")
2201	(minus:SF (match_operand:SF 1 "s_register_operand")
2202		  (match_operand:SF 2 "s_register_operand")))]
2203  "TARGET_32BIT && TARGET_HARD_FLOAT"
2204  "
2205")
2206
2207(define_expand "subdf3"
2208  [(set (match_operand:DF           0 "s_register_operand")
2209	(minus:DF (match_operand:DF 1 "s_register_operand")
2210		  (match_operand:DF 2 "s_register_operand")))]
2211  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2212  "
2213")
2214
2215
2216;; Multiplication insns
2217
2218(define_expand "mulhi3"
2219  [(set (match_operand:HI 0 "s_register_operand")
2220	(mult:HI (match_operand:HI 1 "s_register_operand")
2221		 (match_operand:HI 2 "s_register_operand")))]
2222  "TARGET_DSP_MULTIPLY"
2223  "
2224  {
2225    rtx result = gen_reg_rtx (SImode);
2226    emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
2227    emit_move_insn (operands[0], gen_lowpart (HImode, result));
2228    DONE;
2229  }"
2230)
2231
2232(define_expand "mulsi3"
2233  [(set (match_operand:SI          0 "s_register_operand")
2234	(mult:SI (match_operand:SI 2 "s_register_operand")
2235		 (match_operand:SI 1 "s_register_operand")))]
2236  "TARGET_EITHER"
2237  ""
2238)
2239
2240 ;; Use `&' and then `0' to prevent operands 0 and 2 from being the same.
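;; (Pre-ARMv6 MUL gives an unpredictable result when the destination is the
;; same register as Rm, hence the earlyclobber in the nov6 alternatives of
;; the pattern below; ARMv6 and later lift that restriction.)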
2241(define_insn "*mul"
2242  [(set (match_operand:SI          0 "s_register_operand" "=l,r,&r,&r")
2243	(mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
2244		 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
2245  "TARGET_32BIT"
2246  "mul%?\\t%0, %2, %1"
2247  [(set_attr "type" "mul")
2248   (set_attr "predicable" "yes")
2249   (set_attr "arch" "t2,v6,nov6,nov6")
2250   (set_attr "length" "4")
2251   (set_attr "predicable_short_it" "yes,no,*,*")]
2252)
2253 ;; MLA and MLS instructions. Use operand 1 for the accumulator to prefer
2254;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
2255;; reusing the same register.
2256
2257(define_insn "*mla"
2258  [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
2259	(plus:SI
2260	  (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
2261		   (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
2262	  (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
2263  "TARGET_32BIT"
2264  "mla%?\\t%0, %3, %2, %1"
2265  [(set_attr "type" "mla")
2266   (set_attr "predicable" "yes")
2267   (set_attr "arch" "v6,nov6,nov6,nov6")]
2268)
2269
2270(define_insn "*mls"
2271  [(set (match_operand:SI 0 "s_register_operand" "=r")
2272	(minus:SI
2273	  (match_operand:SI 1 "s_register_operand" "r")
2274	  (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2275		   (match_operand:SI 2 "s_register_operand" "r"))))]
2276  "TARGET_32BIT && arm_arch_thumb2"
2277  "mls%?\\t%0, %3, %2, %1"
2278  [(set_attr "type" "mla")
2279   (set_attr "predicable" "yes")]
2280)
2281
2282(define_insn "*mulsi3_compare0"
2283  [(set (reg:CC_NZ CC_REGNUM)
2284	(compare:CC_NZ (mult:SI
2285			  (match_operand:SI 2 "s_register_operand" "r,r")
2286			  (match_operand:SI 1 "s_register_operand" "%0,r"))
2287			 (const_int 0)))
2288   (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2289	(mult:SI (match_dup 2) (match_dup 1)))]
2290  "TARGET_ARM && !arm_arch6"
2291  "muls%?\\t%0, %2, %1"
2292  [(set_attr "conds" "set")
2293   (set_attr "type" "muls")]
2294)
2295
2296(define_insn "*mulsi3_compare0_v6"
2297  [(set (reg:CC_NZ CC_REGNUM)
2298	(compare:CC_NZ (mult:SI
2299			  (match_operand:SI 2 "s_register_operand" "r")
2300			  (match_operand:SI 1 "s_register_operand" "r"))
2301			 (const_int 0)))
2302   (set (match_operand:SI 0 "s_register_operand" "=r")
2303	(mult:SI (match_dup 2) (match_dup 1)))]
2304  "TARGET_ARM && arm_arch6 && optimize_size"
2305  "muls%?\\t%0, %2, %1"
2306  [(set_attr "conds" "set")
2307   (set_attr "type" "muls")]
2308)
2309
2310(define_insn "*mulsi_compare0_scratch"
2311  [(set (reg:CC_NZ CC_REGNUM)
2312	(compare:CC_NZ (mult:SI
2313			  (match_operand:SI 2 "s_register_operand" "r,r")
2314			  (match_operand:SI 1 "s_register_operand" "%0,r"))
2315			 (const_int 0)))
2316   (clobber (match_scratch:SI 0 "=&r,&r"))]
2317  "TARGET_ARM && !arm_arch6"
2318  "muls%?\\t%0, %2, %1"
2319  [(set_attr "conds" "set")
2320   (set_attr "type" "muls")]
2321)
2322
2323(define_insn "*mulsi_compare0_scratch_v6"
2324  [(set (reg:CC_NZ CC_REGNUM)
2325	(compare:CC_NZ (mult:SI
2326			  (match_operand:SI 2 "s_register_operand" "r")
2327			  (match_operand:SI 1 "s_register_operand" "r"))
2328			 (const_int 0)))
2329   (clobber (match_scratch:SI 0 "=r"))]
2330  "TARGET_ARM && arm_arch6 && optimize_size"
2331  "muls%?\\t%0, %2, %1"
2332  [(set_attr "conds" "set")
2333   (set_attr "type" "muls")]
2334)
2335
2336(define_insn "*mulsi3addsi_compare0"
2337  [(set (reg:CC_NZ CC_REGNUM)
2338	(compare:CC_NZ
2339	 (plus:SI (mult:SI
2340		   (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2341		   (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2342		  (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2343	 (const_int 0)))
2344   (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2345	(plus:SI (mult:SI (match_dup 2) (match_dup 1))
2346		 (match_dup 3)))]
2347  "TARGET_ARM && arm_arch6"
2348  "mlas%?\\t%0, %2, %1, %3"
2349  [(set_attr "conds" "set")
2350   (set_attr "type" "mlas")]
2351)
2352
2353(define_insn "*mulsi3addsi_compare0_v6"
2354  [(set (reg:CC_NZ CC_REGNUM)
2355	(compare:CC_NZ
2356	 (plus:SI (mult:SI
2357		   (match_operand:SI 2 "s_register_operand" "r")
2358		   (match_operand:SI 1 "s_register_operand" "r"))
2359		  (match_operand:SI 3 "s_register_operand" "r"))
2360	 (const_int 0)))
2361   (set (match_operand:SI 0 "s_register_operand" "=r")
2362	(plus:SI (mult:SI (match_dup 2) (match_dup 1))
2363		 (match_dup 3)))]
2364  "TARGET_ARM && arm_arch6 && optimize_size"
2365  "mlas%?\\t%0, %2, %1, %3"
2366  [(set_attr "conds" "set")
2367   (set_attr "type" "mlas")]
2368)
2369
2370(define_insn "*mulsi3addsi_compare0_scratch"
2371  [(set (reg:CC_NZ CC_REGNUM)
2372	(compare:CC_NZ
2373	 (plus:SI (mult:SI
2374		   (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2375		   (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2376		  (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2377	 (const_int 0)))
2378   (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2379  "TARGET_ARM && !arm_arch6"
2380  "mlas%?\\t%0, %2, %1, %3"
2381  [(set_attr "conds" "set")
2382   (set_attr "type" "mlas")]
2383)
2384
2385(define_insn "*mulsi3addsi_compare0_scratch_v6"
2386  [(set (reg:CC_NZ CC_REGNUM)
2387	(compare:CC_NZ
2388	 (plus:SI (mult:SI
2389		   (match_operand:SI 2 "s_register_operand" "r")
2390		   (match_operand:SI 1 "s_register_operand" "r"))
2391		  (match_operand:SI 3 "s_register_operand" "r"))
2392	 (const_int 0)))
2393   (clobber (match_scratch:SI 0 "=r"))]
2394  "TARGET_ARM && arm_arch6 && optimize_size"
2395  "mlas%?\\t%0, %2, %1, %3"
2396  [(set_attr "conds" "set")
2397   (set_attr "type" "mlas")]
2398)
2399
2400;; 32x32->64 widening multiply.
2401 ;; The only difference between the v3-5 and v6+ versions is that the former
2402 ;; require the output registers not to overlap with either input.
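;; For example (illustrative only), a 32x32->64 unsigned multiply of r2 and
;; r3 into the register pair r1:r0 is a single instruction,
;;	umull	r0, r1, r2, r3	@ r1:r0 = r2 * r3
;; with smull as the signed counterpart.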
2403
2404(define_expand "<Us>mulsidi3"
2405  [(set (match_operand:DI 0 "s_register_operand")
2406	(mult:DI
2407	 (SE:DI (match_operand:SI 1 "s_register_operand"))
2408	 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2409  "TARGET_32BIT"
2410  {
2411      emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2412			       gen_highpart (SImode, operands[0]),
2413			       operands[1], operands[2]));
2414      DONE;
2415  }
2416)
2417
2418(define_insn "<US>mull"
2419  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2420	(mult:SI
2421	 (match_operand:SI 2 "s_register_operand" "%r,r")
2422	 (match_operand:SI 3 "s_register_operand" "r,r")))
2423   (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2424	(truncate:SI
2425	 (lshiftrt:DI
2426	  (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2427	  (const_int 32))))]
2428  "TARGET_32BIT"
2429  "<US>mull%?\\t%0, %1, %2, %3"
2430  [(set_attr "type" "umull")
2431   (set_attr "predicable" "yes")
2432   (set_attr "arch" "v6,nov6")]
2433)
2434
2435(define_expand "<Us>maddsidi4"
2436  [(set (match_operand:DI 0 "s_register_operand")
2437	(plus:DI
2438	 (mult:DI
2439	  (SE:DI (match_operand:SI 1 "s_register_operand"))
2440	  (SE:DI (match_operand:SI 2 "s_register_operand")))
2441	 (match_operand:DI 3 "s_register_operand")))]
2442  "TARGET_32BIT"
2443  {
2444      emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2445			       gen_lowpart (SImode, operands[3]),
2446			       gen_highpart (SImode, operands[0]),
2447			       gen_highpart (SImode, operands[3]),
2448			       operands[1], operands[2]));
2449      DONE;
2450  }
2451)
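
;; Illustrative only: the expansion above turns a 64-bit multiply-accumulate
;; such as acc += (uint64_t) a * b into a single
;;	umlal	acc_lo, acc_hi, a, b
;; (or smlal for the signed variant), with the DImode accumulator split into
;; its SImode halves by gen_lowpart/gen_highpart.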
2452
2453(define_insn "<US>mlal"
2454  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2455	(plus:SI
2456	 (mult:SI
2457	  (match_operand:SI 4 "s_register_operand" "%r,r")
2458	  (match_operand:SI 5 "s_register_operand" "r,r"))
2459	 (match_operand:SI 1 "s_register_operand" "0,0")))
2460   (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2461	(plus:SI
2462	 (truncate:SI
2463	  (lshiftrt:DI
2464	   (plus:DI
2465	    (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2466	    (zero_extend:DI (match_dup 1)))
2467	   (const_int 32)))
2468	 (match_operand:SI 3 "s_register_operand" "2,2")))]
2469  "TARGET_32BIT"
2470  "<US>mlal%?\\t%0, %2, %4, %5"
2471  [(set_attr "type" "umlal")
2472   (set_attr "predicable" "yes")
2473   (set_attr "arch" "v6,nov6")]
2474)
2475
2476(define_expand "<US>mulsi3_highpart"
2477  [(parallel
2478    [(set (match_operand:SI 0 "s_register_operand")
2479	  (truncate:SI
2480	   (lshiftrt:DI
2481	    (mult:DI
2482	     (SE:DI (match_operand:SI 1 "s_register_operand"))
2483	     (SE:DI (match_operand:SI 2 "s_register_operand")))
2484	    (const_int 32))))
2485     (clobber (match_scratch:SI 3 ""))])]
2486  "TARGET_32BIT"
2487  ""
2488)
2489
2490(define_insn "*<US>mull_high"
2491  [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2492	(truncate:SI
2493	 (lshiftrt:DI
2494	  (mult:DI
2495	   (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2496	   (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2497	  (const_int 32))))
2498   (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2499  "TARGET_32BIT"
2500  "<US>mull%?\\t%3, %0, %2, %1"
2501  [(set_attr "type" "umull")
2502   (set_attr "predicable" "yes")
2503   (set_attr "arch" "v6,nov6,nov6")]
2504)
2505
2506(define_insn "mulhisi3"
2507  [(set (match_operand:SI 0 "s_register_operand" "=r")
2508	(mult:SI (sign_extend:SI
2509		  (match_operand:HI 1 "s_register_operand" "%r"))
2510		 (sign_extend:SI
2511		  (match_operand:HI 2 "s_register_operand" "r"))))]
2512  "TARGET_DSP_MULTIPLY"
2513  "smulbb%?\\t%0, %1, %2"
2514  [(set_attr "type" "smulxy")
2515   (set_attr "predicable" "yes")]
2516)
2517
2518(define_insn "*mulhisi3tb"
2519  [(set (match_operand:SI 0 "s_register_operand" "=r")
2520	(mult:SI (ashiftrt:SI
2521		  (match_operand:SI 1 "s_register_operand" "r")
2522		  (const_int 16))
2523		 (sign_extend:SI
2524		  (match_operand:HI 2 "s_register_operand" "r"))))]
2525  "TARGET_DSP_MULTIPLY"
2526  "smultb%?\\t%0, %1, %2"
2527  [(set_attr "type" "smulxy")
2528   (set_attr "predicable" "yes")]
2529)
2530
2531(define_insn "*mulhisi3bt"
2532  [(set (match_operand:SI 0 "s_register_operand" "=r")
2533	(mult:SI (sign_extend:SI
2534		  (match_operand:HI 1 "s_register_operand" "r"))
2535		 (ashiftrt:SI
2536		  (match_operand:SI 2 "s_register_operand" "r")
2537		  (const_int 16))))]
2538  "TARGET_DSP_MULTIPLY"
2539  "smulbt%?\\t%0, %1, %2"
2540  [(set_attr "type" "smulxy")
2541   (set_attr "predicable" "yes")]
2542)
2543
2544(define_insn "*mulhisi3tt"
2545  [(set (match_operand:SI 0 "s_register_operand" "=r")
2546	(mult:SI (ashiftrt:SI
2547		  (match_operand:SI 1 "s_register_operand" "r")
2548		  (const_int 16))
2549		 (ashiftrt:SI
2550		  (match_operand:SI 2 "s_register_operand" "r")
2551		  (const_int 16))))]
2552  "TARGET_DSP_MULTIPLY"
2553  "smultt%?\\t%0, %1, %2"
2554  [(set_attr "type" "smulxy")
2555   (set_attr "predicable" "yes")]
2556)
2557
2558(define_expand "maddhisi4"
2559  [(set (match_operand:SI 0 "s_register_operand")
2560	(plus:SI (mult:SI (sign_extend:SI
2561			   (match_operand:HI 1 "s_register_operand"))
2562			  (sign_extend:SI
2563			   (match_operand:HI 2 "s_register_operand")))
2564		 (match_operand:SI 3 "s_register_operand")))]
2565  "TARGET_DSP_MULTIPLY"
2566  {
2567    /* If this function reads the Q bit from ACLE intrinsics, break up the
2568       multiplication and accumulation, as an overflow during accumulation will
2569       clobber the Q flag.  */
2570    if (ARM_Q_BIT_READ)
2571      {
2572	rtx tmp = gen_reg_rtx (SImode);
2573	emit_insn (gen_mulhisi3 (tmp, operands[1], operands[2]));
2574	emit_insn (gen_addsi3 (operands[0], tmp, operands[3]));
2575	DONE;
2576      }
2577  }
2578)
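
;; Illustrative only: when the Q flag is live (ARM_Q_BIT_READ) the expander
;; above avoids SMLABB, whose accumulate step can set Q, and instead emits
;; the equivalent of
;;	smulbb	tmp, r1, r2
;;	add	r0, tmp, r3
;; so that a saturation flag read by an ACLE intrinsic is not clobbered.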
2579
2580(define_insn "*arm_maddhisi4"
2581  [(set (match_operand:SI 0 "s_register_operand" "=r")
2582	(plus:SI (mult:SI (sign_extend:SI
2583			   (match_operand:HI 1 "s_register_operand" "r"))
2584			  (sign_extend:SI
2585			   (match_operand:HI 2 "s_register_operand" "r")))
2586		 (match_operand:SI 3 "s_register_operand" "r")))]
2587  "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2588  "smlabb%?\\t%0, %1, %2, %3"
2589  [(set_attr "type" "smlaxy")
2590   (set_attr "predicable" "yes")]
2591)
2592
2593(define_insn "arm_smlabb_setq"
2594  [(set (match_operand:SI 0 "s_register_operand" "=r")
2595	(plus:SI (mult:SI (sign_extend:SI
2596			   (match_operand:HI 1 "s_register_operand" "r"))
2597			  (sign_extend:SI
2598			   (match_operand:HI 2 "s_register_operand" "r")))
2599		 (match_operand:SI 3 "s_register_operand" "r")))
2600   (set (reg:CC APSRQ_REGNUM)
2601	(unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2602  "TARGET_DSP_MULTIPLY"
2603  "smlabb%?\\t%0, %1, %2, %3"
2604  [(set_attr "type" "smlaxy")
2605   (set_attr "predicable" "yes")]
2606)
2607
2608(define_expand "arm_smlabb"
2609 [(match_operand:SI 0 "s_register_operand")
2610  (match_operand:SI 1 "s_register_operand")
2611  (match_operand:SI 2 "s_register_operand")
2612  (match_operand:SI 3 "s_register_operand")]
2613  "TARGET_DSP_MULTIPLY"
2614  {
2615    rtx mult1 = gen_lowpart (HImode, operands[1]);
2616    rtx mult2 = gen_lowpart (HImode, operands[2]);
2617    if (ARM_Q_BIT_READ)
2618      emit_insn (gen_arm_smlabb_setq (operands[0], mult1, mult2, operands[3]));
2619    else
2620      emit_insn (gen_maddhisi4 (operands[0], mult1, mult2, operands[3]));
2621    DONE;
2622  }
2623)
2624
2625 ;; Note: there is no maddhisi4bt because this one is the canonical form.
2626(define_insn "maddhisi4tb"
2627  [(set (match_operand:SI 0 "s_register_operand" "=r")
2628	(plus:SI (mult:SI (ashiftrt:SI
2629			   (match_operand:SI 1 "s_register_operand" "r")
2630			   (const_int 16))
2631			  (sign_extend:SI
2632			   (match_operand:HI 2 "s_register_operand" "r")))
2633		 (match_operand:SI 3 "s_register_operand" "r")))]
2634  "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2635  "smlatb%?\\t%0, %1, %2, %3"
2636  [(set_attr "type" "smlaxy")
2637   (set_attr "predicable" "yes")]
2638)
2639
2640(define_insn "arm_smlatb_setq"
2641  [(set (match_operand:SI 0 "s_register_operand" "=r")
2642	(plus:SI (mult:SI (ashiftrt:SI
2643			   (match_operand:SI 1 "s_register_operand" "r")
2644			   (const_int 16))
2645			  (sign_extend:SI
2646			   (match_operand:HI 2 "s_register_operand" "r")))
2647		 (match_operand:SI 3 "s_register_operand" "r")))
2648   (set (reg:CC APSRQ_REGNUM)
2649	(unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2650  "TARGET_DSP_MULTIPLY"
2651  "smlatb%?\\t%0, %1, %2, %3"
2652  [(set_attr "type" "smlaxy")
2653   (set_attr "predicable" "yes")]
2654)
2655
2656(define_expand "arm_smlatb"
2657 [(match_operand:SI 0 "s_register_operand")
2658  (match_operand:SI 1 "s_register_operand")
2659  (match_operand:SI 2 "s_register_operand")
2660  (match_operand:SI 3 "s_register_operand")]
2661  "TARGET_DSP_MULTIPLY"
2662  {
2663    rtx mult2 = gen_lowpart (HImode, operands[2]);
2664    if (ARM_Q_BIT_READ)
2665      emit_insn (gen_arm_smlatb_setq (operands[0], operands[1],
2666				      mult2, operands[3]));
2667    else
2668      emit_insn (gen_maddhisi4tb (operands[0], operands[1],
2669				  mult2, operands[3]));
2670    DONE;
2671  }
2672)
2673
2674(define_insn "maddhisi4tt"
2675  [(set (match_operand:SI 0 "s_register_operand" "=r")
2676	(plus:SI (mult:SI (ashiftrt:SI
2677			   (match_operand:SI 1 "s_register_operand" "r")
2678			   (const_int 16))
2679			  (ashiftrt:SI
2680			   (match_operand:SI 2 "s_register_operand" "r")
2681			   (const_int 16)))
2682		 (match_operand:SI 3 "s_register_operand" "r")))]
2683  "TARGET_DSP_MULTIPLY && !ARM_Q_BIT_READ"
2684  "smlatt%?\\t%0, %1, %2, %3"
2685  [(set_attr "type" "smlaxy")
2686   (set_attr "predicable" "yes")]
2687)
2688
2689(define_insn "arm_smlatt_setq"
2690  [(set (match_operand:SI 0 "s_register_operand" "=r")
2691	(plus:SI (mult:SI (ashiftrt:SI
2692			   (match_operand:SI 1 "s_register_operand" "r")
2693			   (const_int 16))
2694			  (ashiftrt:SI
2695			   (match_operand:SI 2 "s_register_operand" "r")
2696			   (const_int 16)))
2697		 (match_operand:SI 3 "s_register_operand" "r")))
2698   (set (reg:CC APSRQ_REGNUM)
2699	(unspec:CC [(reg:CC APSRQ_REGNUM)] UNSPEC_Q_SET))]
2700  "TARGET_DSP_MULTIPLY"
2701  "smlatt%?\\t%0, %1, %2, %3"
2702  [(set_attr "type" "smlaxy")
2703   (set_attr "predicable" "yes")]
2704)
2705
2706(define_expand "arm_smlatt"
2707 [(match_operand:SI 0 "s_register_operand")
2708  (match_operand:SI 1 "s_register_operand")
2709  (match_operand:SI 2 "s_register_operand")
2710  (match_operand:SI 3 "s_register_operand")]
2711  "TARGET_DSP_MULTIPLY"
2712  {
2713    if (ARM_Q_BIT_READ)
2714      emit_insn (gen_arm_smlatt_setq (operands[0], operands[1],
2715				      operands[2], operands[3]));
2716    else
2717      emit_insn (gen_maddhisi4tt (operands[0], operands[1],
2718				  operands[2], operands[3]));
2719    DONE;
2720  }
2721)
2722
2723(define_insn "maddhidi4"
2724  [(set (match_operand:DI 0 "s_register_operand" "=r")
2725	(plus:DI
2726	  (mult:DI (sign_extend:DI
2727		    (match_operand:HI 1 "s_register_operand" "r"))
2728		   (sign_extend:DI
2729		    (match_operand:HI 2 "s_register_operand" "r")))
2730	  (match_operand:DI 3 "s_register_operand" "0")))]
2731  "TARGET_DSP_MULTIPLY"
2732  "smlalbb%?\\t%Q0, %R0, %1, %2"
2733  [(set_attr "type" "smlalxy")
2734   (set_attr "predicable" "yes")])
2735
2736 ;; Note: there is no maddhidi4bt because this one is the canonical form.
2737(define_insn "*maddhidi4tb"
2738  [(set (match_operand:DI 0 "s_register_operand" "=r")
2739	(plus:DI
2740	  (mult:DI (sign_extend:DI
2741		    (ashiftrt:SI
2742		     (match_operand:SI 1 "s_register_operand" "r")
2743		     (const_int 16)))
2744		   (sign_extend:DI
2745		    (match_operand:HI 2 "s_register_operand" "r")))
2746	  (match_operand:DI 3 "s_register_operand" "0")))]
2747  "TARGET_DSP_MULTIPLY"
2748  "smlaltb%?\\t%Q0, %R0, %1, %2"
2749  [(set_attr "type" "smlalxy")
2750   (set_attr "predicable" "yes")])
2751
2752(define_insn "*maddhidi4tt"
2753  [(set (match_operand:DI 0 "s_register_operand" "=r")
2754	(plus:DI
2755	  (mult:DI (sign_extend:DI
2756		    (ashiftrt:SI
2757		     (match_operand:SI 1 "s_register_operand" "r")
2758		     (const_int 16)))
2759		   (sign_extend:DI
2760		    (ashiftrt:SI
2761		     (match_operand:SI 2 "s_register_operand" "r")
2762		     (const_int 16))))
2763	  (match_operand:DI 3 "s_register_operand" "0")))]
2764  "TARGET_DSP_MULTIPLY"
2765  "smlaltt%?\\t%Q0, %R0, %1, %2"
2766  [(set_attr "type" "smlalxy")
2767   (set_attr "predicable" "yes")])
2768
2769(define_insn "arm_<smlaw_op><add_clobber_q_name>_insn"
2770  [(set (match_operand:SI 0 "s_register_operand" "=r")
2771	(unspec:SI
2772	   [(match_operand:SI 1 "s_register_operand" "r")
2773	    (match_operand:SI 2 "s_register_operand" "r")
2774	    (match_operand:SI 3 "s_register_operand" "r")]
2775	   SMLAWBT))]
2776  "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
2777  "<smlaw_op>%?\\t%0, %1, %2, %3"
2778  [(set_attr "type" "smlaxy")
2779   (set_attr "predicable" "yes")]
2780)
2781
2782(define_expand "arm_<smlaw_op>"
2783  [(set (match_operand:SI 0 "s_register_operand")
2784	(unspec:SI
2785	   [(match_operand:SI 1 "s_register_operand")
2786	    (match_operand:SI 2 "s_register_operand")
2787	    (match_operand:SI 3 "s_register_operand")]
2788	   SMLAWBT))]
2789  "TARGET_DSP_MULTIPLY"
2790  {
2791    if (ARM_Q_BIT_READ)
2792      emit_insn (gen_arm_<smlaw_op>_setq_insn (operands[0], operands[1],
2793					       operands[2], operands[3]));
2794    else
2795      emit_insn (gen_arm_<smlaw_op>_insn (operands[0], operands[1],
2796					  operands[2], operands[3]));
2797    DONE;
2798  }
2799)
2800
2801(define_expand "mulsf3"
2802  [(set (match_operand:SF          0 "s_register_operand")
2803	(mult:SF (match_operand:SF 1 "s_register_operand")
2804		 (match_operand:SF 2 "s_register_operand")))]
2805  "TARGET_32BIT && TARGET_HARD_FLOAT"
2806  "
2807")
2808
2809(define_expand "muldf3"
2810  [(set (match_operand:DF          0 "s_register_operand")
2811	(mult:DF (match_operand:DF 1 "s_register_operand")
2812		 (match_operand:DF 2 "s_register_operand")))]
2813  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2814  "
2815")
2816
2817;; Division insns
2818
2819(define_expand "divsf3"
2820  [(set (match_operand:SF 0 "s_register_operand")
2821	(div:SF (match_operand:SF 1 "s_register_operand")
2822		(match_operand:SF 2 "s_register_operand")))]
2823  "TARGET_32BIT && TARGET_HARD_FLOAT"
2824  "")
2825
2826(define_expand "divdf3"
2827  [(set (match_operand:DF 0 "s_register_operand")
2828	(div:DF (match_operand:DF 1 "s_register_operand")
2829		(match_operand:DF 2 "s_register_operand")))]
2830  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2831  "")
2832
2833
2834; Expand logical operations.  The mid-end expander does not split off memory
2835; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2836; So an explicit expander is needed to generate better code.
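; For example (illustrative only), (a & b) on DImode register pairs
; a_hi:a_lo and b_hi:b_lo simply becomes two SImode operations,
;	and	lo, a_lo, b_lo
;	and	hi, a_hi, b_hi
; generated here as separate SImode sets rather than left for a later split.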
2837
2838(define_expand "<LOGICAL:optab>di3"
2839  [(set (match_operand:DI	  0 "s_register_operand")
2840	(LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2841		    (match_operand:DI 2 "arm_<optab>di_operand")))]
2842  "TARGET_32BIT"
2843  {
2844      rtx low  = simplify_gen_binary (<CODE>, SImode,
2845				      gen_lowpart (SImode, operands[1]),
2846				      gen_lowpart (SImode, operands[2]));
2847      rtx high = simplify_gen_binary (<CODE>, SImode,
2848				      gen_highpart (SImode, operands[1]),
2849				      gen_highpart_mode (SImode, DImode,
2850							 operands[2]));
2851
2852      emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2853      emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2854      DONE;
2855  }
2856)
2857
2858(define_expand "one_cmpldi2"
2859  [(set (match_operand:DI 0 "s_register_operand")
2860	(not:DI (match_operand:DI 1 "s_register_operand")))]
2861  "TARGET_32BIT"
2862  {
2863      rtx low  = simplify_gen_unary (NOT, SImode,
2864				     gen_lowpart (SImode, operands[1]),
2865				     SImode);
2866      rtx high = simplify_gen_unary (NOT, SImode,
2867				     gen_highpart_mode (SImode, DImode,
2868							operands[1]),
2869				     SImode);
2870
2871      emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2872      emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2873      DONE;
2874  }
2875)
2876
2877;; Split DImode and, ior, xor operations.  Simply perform the logical
2878;; operation on the upper and lower halves of the registers.
2879;; This is needed for atomic operations in arm_split_atomic_op.
2880;; Avoid splitting IWMMXT instructions.
2881(define_split
2882  [(set (match_operand:DI 0 "s_register_operand" "")
2883	(match_operator:DI 6 "logical_binary_operator"
2884	  [(match_operand:DI 1 "s_register_operand" "")
2885	   (match_operand:DI 2 "s_register_operand" "")]))]
2886  "TARGET_32BIT && reload_completed
2887   && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2888  [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2889   (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2890  "
2891  {
2892    operands[3] = gen_highpart (SImode, operands[0]);
2893    operands[0] = gen_lowpart (SImode, operands[0]);
2894    operands[4] = gen_highpart (SImode, operands[1]);
2895    operands[1] = gen_lowpart (SImode, operands[1]);
2896    operands[5] = gen_highpart (SImode, operands[2]);
2897    operands[2] = gen_lowpart (SImode, operands[2]);
2898  }"
2899)
2900
2901;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2902;; Unconditionally split since there is no SIMD DImode NOT pattern.
2903(define_split
2904  [(set (match_operand:DI 0 "s_register_operand")
2905	(not:DI (match_operand:DI 1 "s_register_operand")))]
2906  "TARGET_32BIT"
2907  [(set (match_dup 0) (not:SI (match_dup 1)))
2908   (set (match_dup 2) (not:SI (match_dup 3)))]
2909  "
2910  {
2911    operands[2] = gen_highpart (SImode, operands[0]);
2912    operands[0] = gen_lowpart (SImode, operands[0]);
2913    operands[3] = gen_highpart (SImode, operands[1]);
2914    operands[1] = gen_lowpart (SImode, operands[1]);
2915  }"
2916)
2917
2918(define_expand "andsi3"
2919  [(set (match_operand:SI         0 "s_register_operand")
2920	(and:SI (match_operand:SI 1 "s_register_operand")
2921		(match_operand:SI 2 "reg_or_int_operand")))]
2922  "TARGET_EITHER"
2923  "
2924  if (TARGET_32BIT)
2925    {
2926      if (CONST_INT_P (operands[2]))
2927        {
2928	  if (INTVAL (operands[2]) == 255 && arm_arch6)
2929	    {
2930	      operands[1] = convert_to_mode (QImode, operands[1], 1);
2931	      emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2932							 operands[1]));
2933	      DONE;
2934	    }
2935	  else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2936	    operands[2] = force_reg (SImode, operands[2]);
2937	  else
2938	    {
2939	      arm_split_constant (AND, SImode, NULL_RTX,
2940				  INTVAL (operands[2]), operands[0],
2941				  operands[1],
2942				  optimize && can_create_pseudo_p ());
2943
2944	      DONE;
2945	    }
2946        }
2947    }
2948  else /* TARGET_THUMB1 */
2949    {
2950      if (!CONST_INT_P (operands[2]))
2951        {
2952          rtx tmp = force_reg (SImode, operands[2]);
2953	  if (rtx_equal_p (operands[0], operands[1]))
2954	    operands[2] = tmp;
2955	  else
2956	    {
2957              operands[2] = operands[1];
2958              operands[1] = tmp;
2959	    }
2960        }
2961      else
2962        {
2963          int i;
2964
2965          if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2966  	    {
2967	      operands[2] = force_reg (SImode,
2968				       GEN_INT (~INTVAL (operands[2])));
2969
2970	      emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2971
2972	      DONE;
2973	    }
2974
2975          for (i = 9; i <= 31; i++)
2976	    {
2977	      if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2978	        {
2979	          emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2980			 	        const0_rtx));
2981	          DONE;
2982	        }
2983	      else if ((HOST_WIDE_INT_1 << i) - 1
2984		       == ~INTVAL (operands[2]))
2985	        {
2986	          rtx shift = GEN_INT (i);
2987	          rtx reg = gen_reg_rtx (SImode);
2988
2989	          emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2990	          emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2991
2992	          DONE;
2993	        }
2994	    }
2995
2996          operands[2] = force_reg (SImode, operands[2]);
2997        }
2998    }
2999  "
3000)
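
;; Illustrative examples of the Thumb-1 special cases above (not generated
;; verbatim): for x & 0xffffff00 the complement 0xff fits in 8 bits, so the
;; constant 255 is loaded into a register and a BIC clears the low byte;
;; for x & 0xfffff000 the mask clears the low 12 bits, which is done with a
;; shift pair,
;;	lsrs	r3, r0, #12
;;	lsls	r0, r3, #12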
3001
3002; ??? Check split length for Thumb-2
3003(define_insn_and_split "*arm_andsi3_insn"
3004  [(set (match_operand:SI         0 "s_register_operand" "=r,l,r,r,r")
3005	(and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3006		(match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3007  "TARGET_32BIT"
3008  "@
3009   and%?\\t%0, %1, %2
3010   and%?\\t%0, %1, %2
3011   bic%?\\t%0, %1, #%B2
3012   and%?\\t%0, %1, %2
3013   #"
3014  "TARGET_32BIT
3015   && CONST_INT_P (operands[2])
3016   && !(const_ok_for_arm (INTVAL (operands[2]))
3017	|| const_ok_for_arm (~INTVAL (operands[2])))"
3018  [(clobber (const_int 0))]
3019  "
3020  arm_split_constant  (AND, SImode, curr_insn,
3021	               INTVAL (operands[2]), operands[0], operands[1], 0);
3022  DONE;
3023  "
3024  [(set_attr "length" "4,4,4,4,16")
3025   (set_attr "predicable" "yes")
3026   (set_attr "predicable_short_it" "no,yes,no,no,no")
3027   (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
3028)
3029
3030(define_insn "*andsi3_compare0"
3031  [(set (reg:CC_NZ CC_REGNUM)
3032	(compare:CC_NZ
3033	 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
3034		 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
3035	 (const_int 0)))
3036   (set (match_operand:SI          0 "s_register_operand" "=r,r,r")
3037	(and:SI (match_dup 1) (match_dup 2)))]
3038  "TARGET_32BIT"
3039  "@
3040   ands%?\\t%0, %1, %2
3041   bics%?\\t%0, %1, #%B2
3042   ands%?\\t%0, %1, %2"
3043  [(set_attr "conds" "set")
3044   (set_attr "type" "logics_imm,logics_imm,logics_reg")]
3045)
3046
3047(define_insn "*andsi3_compare0_scratch"
3048  [(set (reg:CC_NZ CC_REGNUM)
3049	(compare:CC_NZ
3050	 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
3051		 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
3052	 (const_int 0)))
3053   (clobber (match_scratch:SI 2 "=X,r,X"))]
3054  "TARGET_32BIT"
3055  "@
3056   tst%?\\t%0, %1
3057   bics%?\\t%2, %0, #%B1
3058   tst%?\\t%0, %1"
3059  [(set_attr "conds" "set")
3060   (set_attr "type"  "logics_imm,logics_imm,logics_reg")]
3061)
3062
3063(define_insn "*zeroextractsi_compare0_scratch"
3064  [(set (reg:CC_NZ CC_REGNUM)
3065	(compare:CC_NZ (zero_extract:SI
3066			  (match_operand:SI 0 "s_register_operand" "r")
3067			  (match_operand 1 "const_int_operand" "n")
3068			  (match_operand 2 "const_int_operand" "n"))
3069			 (const_int 0)))]
3070  "TARGET_32BIT
3071  && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
3072      && INTVAL (operands[1]) > 0
3073      && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
3074      && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
3075  "*
3076  operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
3077			 << INTVAL (operands[2]));
3078  output_asm_insn (\"tst%?\\t%0, %1\", operands);
3079  return \"\";
3080  "
3081  [(set_attr "conds" "set")
3082   (set_attr "predicable" "yes")
3083   (set_attr "type" "logics_imm")]
3084)
3085
3086(define_insn_and_split "*ne_zeroextractsi"
3087  [(set (match_operand:SI 0 "s_register_operand" "=r")
3088	(ne:SI (zero_extract:SI
3089		(match_operand:SI 1 "s_register_operand" "r")
3090		(match_operand:SI 2 "const_int_operand" "n")
3091		(match_operand:SI 3 "const_int_operand" "n"))
3092	       (const_int 0)))
3093   (clobber (reg:CC CC_REGNUM))]
3094  "TARGET_32BIT
3095   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3096       && INTVAL (operands[2]) > 0
3097       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3098       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3099  "#"
3100  "TARGET_32BIT
3101   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3102       && INTVAL (operands[2]) > 0
3103       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3104       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
3105  [(parallel [(set (reg:CC_NZ CC_REGNUM)
3106		   (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3107				    (const_int 0)))
3108	      (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3109   (set (match_dup 0)
3110	(if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3111			 (match_dup 0) (const_int 1)))]
3112  "
3113  operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3114			 << INTVAL (operands[3]));
3115  "
3116  [(set_attr "conds" "clob")
3117   (set (attr "length")
3118	(if_then_else (eq_attr "is_thumb" "yes")
3119		      (const_int 12)
3120		      (const_int 8)))
3121   (set_attr "type" "multiple")]
3122)
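
;; Illustrative only: for (x >> 1) & 7 != 0 (a 3-bit field at bit 1) the
;; split above produces roughly
;;	ands	r0, r1, #14	@ 14 == ((1 << 3) - 1) << 1
;;	movne	r0, #1
;; i.e. an AND with the shifted mask followed by a conditional set to 1.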
3123
3124(define_insn_and_split "*ne_zeroextractsi_shifted"
3125  [(set (match_operand:SI 0 "s_register_operand" "=r")
3126	(ne:SI (zero_extract:SI
3127		(match_operand:SI 1 "s_register_operand" "r")
3128		(match_operand:SI 2 "const_int_operand" "n")
3129		(const_int 0))
3130	       (const_int 0)))
3131   (clobber (reg:CC CC_REGNUM))]
3132  "TARGET_ARM"
3133  "#"
3134  "TARGET_ARM"
3135  [(parallel [(set (reg:CC_NZ CC_REGNUM)
3136		   (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3137				    (const_int 0)))
3138	      (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3139   (set (match_dup 0)
3140	(if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3141			 (match_dup 0) (const_int 1)))]
3142  "
3143  operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3144  "
3145  [(set_attr "conds" "clob")
3146   (set_attr "length" "8")
3147   (set_attr "type" "multiple")]
3148)
3149
3150(define_insn_and_split "*ite_ne_zeroextractsi"
3151  [(set (match_operand:SI 0 "s_register_operand" "=r")
3152	(if_then_else:SI (ne (zero_extract:SI
3153			      (match_operand:SI 1 "s_register_operand" "r")
3154			      (match_operand:SI 2 "const_int_operand" "n")
3155			      (match_operand:SI 3 "const_int_operand" "n"))
3156			     (const_int 0))
3157			 (match_operand:SI 4 "arm_not_operand" "rIK")
3158			 (const_int 0)))
3159   (clobber (reg:CC CC_REGNUM))]
3160  "TARGET_ARM
3161   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3162       && INTVAL (operands[2]) > 0
3163       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3164       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3165   && !reg_overlap_mentioned_p (operands[0], operands[4])"
3166  "#"
3167  "TARGET_ARM
3168   && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
3169       && INTVAL (operands[2]) > 0
3170       && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
3171       && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
3172   && !reg_overlap_mentioned_p (operands[0], operands[4])"
3173  [(parallel [(set (reg:CC_NZ CC_REGNUM)
3174		   (compare:CC_NZ (and:SI (match_dup 1) (match_dup 2))
3175				    (const_int 0)))
3176	      (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
3177   (set (match_dup 0)
3178	(if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3179			 (match_dup 0) (match_dup 4)))]
3180  "
3181  operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
3182			 << INTVAL (operands[3]));
3183  "
3184  [(set_attr "conds" "clob")
3185   (set_attr "length" "8")
3186   (set_attr "type" "multiple")]
3187)
3188
3189(define_insn_and_split "*ite_ne_zeroextractsi_shifted"
3190  [(set (match_operand:SI 0 "s_register_operand" "=r")
3191	(if_then_else:SI (ne (zero_extract:SI
3192			      (match_operand:SI 1 "s_register_operand" "r")
3193			      (match_operand:SI 2 "const_int_operand" "n")
3194			      (const_int 0))
3195			     (const_int 0))
3196			 (match_operand:SI 3 "arm_not_operand" "rIK")
3197			 (const_int 0)))
3198   (clobber (reg:CC CC_REGNUM))]
3199  "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3200  "#"
3201  "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
3202  [(parallel [(set (reg:CC_NZ CC_REGNUM)
3203		   (compare:CC_NZ (ashift:SI (match_dup 1) (match_dup 2))
3204				    (const_int 0)))
3205	      (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
3206   (set (match_dup 0)
3207	(if_then_else:SI (eq (reg:CC_NZ CC_REGNUM) (const_int 0))
3208			 (match_dup 0) (match_dup 3)))]
3209  "
3210  operands[2] = GEN_INT (32 - INTVAL (operands[2]));
3211  "
3212  [(set_attr "conds" "clob")
3213   (set_attr "length" "8")
3214   (set_attr "type" "multiple")]
3215)
3216
3217;; ??? Use the Thumb-2 bitfield insert/extract instructions where available.
3218(define_split
3219  [(set (match_operand:SI 0 "s_register_operand" "")
3220	(match_operator:SI 1 "shiftable_operator"
3221	 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3222			   (match_operand:SI 3 "const_int_operand" "")
3223			   (match_operand:SI 4 "const_int_operand" ""))
3224	  (match_operand:SI 5 "s_register_operand" "")]))
3225   (clobber (match_operand:SI 6 "s_register_operand" ""))]
3226  "TARGET_ARM"
3227  [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3228   (set (match_dup 0)
3229	(match_op_dup 1
3230	 [(lshiftrt:SI (match_dup 6) (match_dup 4))
3231	  (match_dup 5)]))]
3232  "{
3233     HOST_WIDE_INT temp = INTVAL (operands[3]);
3234
3235     operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3236     operands[4] = GEN_INT (32 - temp);
3237   }"
3238)
3239
3240(define_split
3241  [(set (match_operand:SI 0 "s_register_operand" "")
3242	(match_operator:SI 1 "shiftable_operator"
3243	 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3244			   (match_operand:SI 3 "const_int_operand" "")
3245			   (match_operand:SI 4 "const_int_operand" ""))
3246	  (match_operand:SI 5 "s_register_operand" "")]))
3247   (clobber (match_operand:SI 6 "s_register_operand" ""))]
3248  "TARGET_ARM"
3249  [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3250   (set (match_dup 0)
3251	(match_op_dup 1
3252	 [(ashiftrt:SI (match_dup 6) (match_dup 4))
3253	  (match_dup 5)]))]
3254  "{
3255     HOST_WIDE_INT temp = INTVAL (operands[3]);
3256
3257     operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3258     operands[4] = GEN_INT (32 - temp);
3259   }"
3260)
3261
3262;;; ??? This pattern is bogus.  If operand3 has bits outside the range
3263;;; represented by the bitfield, then this will produce incorrect results.
3264;;; Somewhere, the value needs to be truncated.  On targets like the m68k,
3265;;; which have a real bit-field insert instruction, the truncation happens
3266;;; in the bit-field insert instruction itself.  Since arm does not have a
3267;;; bit-field insert instruction, we would have to emit code here to truncate
3268;;; the value before we insert.  This loses some of the advantage of having
3269;;; this insv pattern, so it needs to be reevaluated.
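;;; For reference, a minimal C sketch of the insertion including the missing
;;; truncation (illustrative only, assuming width < 32):
;;;   mask = (1u << width) - 1;
;;;   word = (word & ~(mask << start)) | ((value & mask) << start);
;;; omitting the "value & mask" step lets bits of the value above the field
;;; clobber the bits next to it, which is exactly the problem described above.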
3270
3271(define_expand "insv"
3272  [(set (zero_extract (match_operand 0 "nonimmediate_operand")
3273                      (match_operand 1 "general_operand")
3274                      (match_operand 2 "general_operand"))
3275        (match_operand 3 "reg_or_int_operand"))]
3276  "TARGET_ARM || arm_arch_thumb2"
3277  "
3278  {
3279    int start_bit = INTVAL (operands[2]);
3280    int width = INTVAL (operands[1]);
3281    HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
3282    rtx target, subtarget;
3283
3284    if (arm_arch_thumb2)
3285      {
3286        if (unaligned_access && MEM_P (operands[0])
3287	    && s_register_operand (operands[3], GET_MODE (operands[3]))
3288	    && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
3289	  {
3290	    rtx base_addr;
3291
3292	    if (BYTES_BIG_ENDIAN)
3293	      start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
3294			  - start_bit;
3295
3296	    if (width == 32)
3297	      {
3298	        base_addr = adjust_address (operands[0], SImode,
3299					    start_bit / BITS_PER_UNIT);
3300		emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
3301	      }
3302	    else
3303	      {
3304	        rtx tmp = gen_reg_rtx (HImode);
3305
3306	        base_addr = adjust_address (operands[0], HImode,
3307					    start_bit / BITS_PER_UNIT);
3308		emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
3309		emit_insn (gen_unaligned_storehi (base_addr, tmp));
3310	      }
3311	    DONE;
3312	  }
3313	else if (s_register_operand (operands[0], GET_MODE (operands[0])))
3314	  {
3315	    bool use_bfi = TRUE;
3316
3317	    if (CONST_INT_P (operands[3]))
3318	      {
3319		HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
3320
3321		if (val == 0)
3322		  {
3323		    emit_insn (gen_insv_zero (operands[0], operands[1],
3324					      operands[2]));
3325		    DONE;
3326		  }
3327
3328		/* See if the set can be done with a single orr instruction.  */
3329		if (val == mask && const_ok_for_arm (val << start_bit))
3330		  use_bfi = FALSE;
3331	      }
3332
3333	    if (use_bfi)
3334	      {
3335		if (!REG_P (operands[3]))
3336		  operands[3] = force_reg (SImode, operands[3]);
3337
3338		emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
3339					operands[3]));
3340		DONE;
3341	      }
3342	  }
3343	else
3344	  FAIL;
3345      }
3346
3347    if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3348      FAIL;
3349
3350    target = copy_rtx (operands[0]);
3351    /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3352       subreg as the final target.  */
3353    if (GET_CODE (target) == SUBREG)
3354      {
3355	subtarget = gen_reg_rtx (SImode);
3356	if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3357	    < GET_MODE_SIZE (SImode))
3358	  target = SUBREG_REG (target);
3359      }
3360    else
3361      subtarget = target;
3362
3363    if (CONST_INT_P (operands[3]))
3364      {
3365	/* Since we are inserting a known constant, we may be able to
3366	   reduce the number of bits that we have to clear so that
3367	   the mask becomes simple.  */
3368	/* ??? This code does not check to see if the new mask is actually
3369	   simpler.  It may not be.  */
3370	rtx op1 = gen_reg_rtx (SImode);
3371	/* ??? Truncate operand3 to fit in the bitfield.  See comment before
3372	   start of this pattern.  */
3373	HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3374	HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3375
3376	emit_insn (gen_andsi3 (op1, operands[0],
3377			       gen_int_mode (~mask2, SImode)));
3378	emit_insn (gen_iorsi3 (subtarget, op1,
3379			       gen_int_mode (op3_value << start_bit, SImode)));
3380      }
3381    else if (start_bit == 0
3382	     && !(const_ok_for_arm (mask)
3383		  || const_ok_for_arm (~mask)))
3384      {
3385	/* A trick: since we are setting the bottom bits of the word,
3386	   we can shift operand[3] up, operand[0] down, OR them together
3387	   and rotate the result back again.  This takes 3 insns, and
3388	   the third might be mergeable into another op.  */
3389	/* The shift up copes with the possibility that operand[3] is
3390	   wider than the bitfield.  */
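	/* Illustrative walk-through (assumes the field width is below 32):
	   op0 ends up with the value in the top WIDTH bits, op1 holds the
	   untouched upper bits of operand[0] in its low bits, and rotating
	   the OR of the two left by WIDTH puts everything back in place with
	   the new value in the bottom of the word.  */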
3391	rtx op0 = gen_reg_rtx (SImode);
3392	rtx op1 = gen_reg_rtx (SImode);
3393
3394	emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3395	emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3396	emit_insn (gen_iorsi3  (op1, op1, op0));
3397	emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3398      }
3399    else if ((width + start_bit == 32)
3400	     && !(const_ok_for_arm (mask)
3401		  || const_ok_for_arm (~mask)))
3402      {
3403	/* Similar trick, but slightly less efficient.  */
3404
3405	rtx op0 = gen_reg_rtx (SImode);
3406	rtx op1 = gen_reg_rtx (SImode);
3407
3408	emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3409	emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3410	emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3411	emit_insn (gen_iorsi3 (subtarget, op1, op0));
3412      }
3413    else
3414      {
3415	rtx op0 = gen_int_mode (mask, SImode);
3416	rtx op1 = gen_reg_rtx (SImode);
3417	rtx op2 = gen_reg_rtx (SImode);
3418
3419	if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3420	  {
3421	    rtx tmp = gen_reg_rtx (SImode);
3422
3423	    emit_insn (gen_movsi (tmp, op0));
3424	    op0 = tmp;
3425	  }
3426
3427	/* Mask out any bits in operand[3] that are not needed.  */
3428	emit_insn (gen_andsi3 (op1, operands[3], op0));
3429
3430	if (CONST_INT_P (op0)
3431	    && (const_ok_for_arm (mask << start_bit)
3432		|| const_ok_for_arm (~(mask << start_bit))))
3433	  {
3434	    op0 = gen_int_mode (~(mask << start_bit), SImode);
3435	    emit_insn (gen_andsi3 (op2, operands[0], op0));
3436	  }
3437	else
3438	  {
3439	    if (CONST_INT_P (op0))
3440	      {
3441		rtx tmp = gen_reg_rtx (SImode);
3442
3443		emit_insn (gen_movsi (tmp, op0));
3444		op0 = tmp;
3445	      }
3446
3447	    if (start_bit != 0)
3448	      emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3449
3450	    emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3451	  }
3452
3453	if (start_bit != 0)
3454          emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3455
3456	emit_insn (gen_iorsi3 (subtarget, op1, op2));
3457      }
3458
3459    if (subtarget != target)
3460      {
3461	/* If TARGET is still a SUBREG, then it must be wider than a word,
3462	   so we must be careful only to set the subword we were asked to.  */
3463	if (GET_CODE (target) == SUBREG)
3464	  emit_move_insn (target, subtarget);
3465	else
3466	  emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
3467      }
3468
3469    DONE;
3470  }"
3471)
3472
3473(define_insn "insv_zero"
3474  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3475                         (match_operand:SI 1 "const_int_M_operand" "M")
3476                         (match_operand:SI 2 "const_int_M_operand" "M"))
3477        (const_int 0))]
3478  "arm_arch_thumb2"
3479  "bfc%?\t%0, %2, %1"
3480  [(set_attr "length" "4")
3481   (set_attr "predicable" "yes")
3482   (set_attr "type" "bfm")]
3483)
3484
3485(define_insn "insv_t2"
3486  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3487                         (match_operand:SI 1 "const_int_M_operand" "M")
3488                         (match_operand:SI 2 "const_int_M_operand" "M"))
3489        (match_operand:SI 3 "s_register_operand" "r"))]
3490  "arm_arch_thumb2"
3491  "bfi%?\t%0, %3, %2, %1"
3492  [(set_attr "length" "4")
3493   (set_attr "predicable" "yes")
3494   (set_attr "type" "bfm")]
3495)
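;; For example (illustrative registers): bfc r0, #4, #8 clears bits 11:4 of
;; r0, while bfi r0, r1, #4, #8 copies the low 8 bits of r1 into that field.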
3496
3497(define_insn "andsi_notsi_si"
3498  [(set (match_operand:SI 0 "s_register_operand" "=r")
3499	(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3500		(match_operand:SI 1 "s_register_operand" "r")))]
3501  "TARGET_32BIT"
3502  "bic%?\\t%0, %1, %2"
3503  [(set_attr "predicable" "yes")
3504   (set_attr "type" "logic_reg")]
3505)
3506
3507(define_insn "andsi_not_shiftsi_si"
3508  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3509	(and:SI (not:SI (match_operator:SI 4 "shift_operator"
3510			 [(match_operand:SI 2 "s_register_operand" "r,r")
3511			  (match_operand:SI 3 "shift_amount_operand" "M,r")]))
3512		(match_operand:SI 1 "s_register_operand" "r,r")))]
3513  "TARGET_32BIT"
3514  "bic%?\\t%0, %1, %2%S4"
3515  [(set_attr "predicable" "yes")
3516   (set_attr "shift" "2")
3517   (set_attr "arch" "32,a")
3518   (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3519)
3520
3521;; Shifted bics pattern used to set up the CC status register without
3522;; reusing the bics output.  The pattern restricts the Thumb-2 shift
3523;; operand because bics in Thumb-2 does not support a shift by register.
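;; For example (illustrative registers): bics r4, r3, r1, lsl #2 sets the
;; flags from r3 & ~(r1 << 2); the result written to the scratch is unused.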
3524(define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3525  [(set (reg:CC_NZ CC_REGNUM)
3526	(compare:CC_NZ
3527		(and:SI (not:SI (match_operator:SI 0 "shift_operator"
3528			[(match_operand:SI 1 "s_register_operand" "r,r")
3529			 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3530			(match_operand:SI 3 "s_register_operand" "r,r"))
3531		(const_int 0)))
3532   (clobber (match_scratch:SI 4 "=r,r"))]
3533  "TARGET_32BIT"
3534  "bics%?\\t%4, %3, %1%S0"
3535  [(set_attr "predicable" "yes")
3536   (set_attr "arch" "32,a")
3537   (set_attr "conds" "set")
3538   (set_attr "shift" "1")
3539   (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3540)
3541
3542;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
3543;; reused later.
3544(define_insn "andsi_not_shiftsi_si_scc"
3545  [(parallel [(set (reg:CC_NZ CC_REGNUM)
3546	(compare:CC_NZ
3547		(and:SI (not:SI (match_operator:SI 0 "shift_operator"
3548			[(match_operand:SI 1 "s_register_operand" "r,r")
3549			 (match_operand:SI 2 "shift_amount_operand" "M,r")]))
3550			(match_operand:SI 3 "s_register_operand" "r,r"))
3551		(const_int 0)))
3552	(set (match_operand:SI 4 "s_register_operand" "=r,r")
3553	     (and:SI (not:SI (match_op_dup 0
3554		     [(match_dup 1)
3555		      (match_dup 2)]))
3556		     (match_dup 3)))])]
3557  "TARGET_32BIT"
3558  "bics%?\\t%4, %3, %1%S0"
3559  [(set_attr "predicable" "yes")
3560   (set_attr "arch" "32,a")
3561   (set_attr "conds" "set")
3562   (set_attr "shift" "1")
3563   (set_attr "type" "logic_shift_imm,logic_shift_reg")]
3564)
3565
3566(define_insn "*andsi_notsi_si_compare0"
3567  [(set (reg:CC_NZ CC_REGNUM)
3568	(compare:CC_NZ
3569	 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3570		 (match_operand:SI 1 "s_register_operand" "r"))
3571	 (const_int 0)))
3572   (set (match_operand:SI 0 "s_register_operand" "=r")
3573	(and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3574  "TARGET_32BIT"
3575  "bics\\t%0, %1, %2"
3576  [(set_attr "conds" "set")
3577   (set_attr "type" "logics_shift_reg")]
3578)
3579
3580(define_insn "*andsi_notsi_si_compare0_scratch"
3581  [(set (reg:CC_NZ CC_REGNUM)
3582	(compare:CC_NZ
3583	 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3584		 (match_operand:SI 1 "s_register_operand" "r"))
3585	 (const_int 0)))
3586   (clobber (match_scratch:SI 0 "=r"))]
3587  "TARGET_32BIT"
3588  "bics\\t%0, %1, %2"
3589  [(set_attr "conds" "set")
3590   (set_attr "type" "logics_shift_reg")]
3591)
3592
3593(define_expand "iorsi3"
3594  [(set (match_operand:SI         0 "s_register_operand")
3595	(ior:SI (match_operand:SI 1 "s_register_operand")
3596		(match_operand:SI 2 "reg_or_int_operand")))]
3597  "TARGET_EITHER"
3598  "
3599  if (CONST_INT_P (operands[2]))
3600    {
3601      if (TARGET_32BIT)
3602        {
3603	  if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3604	    operands[2] = force_reg (SImode, operands[2]);
3605	  else
3606	    {
3607	      arm_split_constant (IOR, SImode, NULL_RTX,
3608				  INTVAL (operands[2]), operands[0],
3609				  operands[1],
3610				  optimize && can_create_pseudo_p ());
3611	      DONE;
3612	    }
3613	}
3614      else /* TARGET_THUMB1 */
3615        {
3616          rtx tmp = force_reg (SImode, operands[2]);
3617	  if (rtx_equal_p (operands[0], operands[1]))
3618	    operands[2] = tmp;
3619	  else
3620	    {
3621              operands[2] = operands[1];
3622              operands[1] = tmp;
3623	    }
3624        }
3625    }
3626  "
3627)
3628
3629(define_insn_and_split "*iorsi3_insn"
3630  [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3631	(ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3632		(match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3633  "TARGET_32BIT"
3634  "@
3635   orr%?\\t%0, %1, %2
3636   orr%?\\t%0, %1, %2
3637   orn%?\\t%0, %1, #%B2
3638   orr%?\\t%0, %1, %2
3639   #"
3640  "TARGET_32BIT
3641   && CONST_INT_P (operands[2])
3642   && !(const_ok_for_arm (INTVAL (operands[2]))
3643        || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3644  [(clobber (const_int 0))]
3645{
3646  arm_split_constant (IOR, SImode, curr_insn,
3647                      INTVAL (operands[2]), operands[0], operands[1], 0);
3648  DONE;
3649}
3650  [(set_attr "length" "4,4,4,4,16")
3651   (set_attr "arch" "32,t2,t2,32,32")
3652   (set_attr "predicable" "yes")
3653   (set_attr "predicable_short_it" "no,yes,no,no,no")
3654   (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
3655)
3656
3657(define_peephole2
3658  [(match_scratch:SI 3 "r")
3659   (set (match_operand:SI 0 "arm_general_register_operand" "")
3660	(ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3661		(match_operand:SI 2 "const_int_operand" "")))]
3662  "TARGET_ARM
3663   && !const_ok_for_arm (INTVAL (operands[2]))
3664   && const_ok_for_arm (~INTVAL (operands[2]))"
3665  [(set (match_dup 3) (match_dup 2))
3666   (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
3667  ""
3668)
3669
3670(define_insn "*iorsi3_compare0"
3671  [(set (reg:CC_NZ CC_REGNUM)
3672	(compare:CC_NZ
3673	 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3674		 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3675	 (const_int 0)))
3676   (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3677	(ior:SI (match_dup 1) (match_dup 2)))]
3678  "TARGET_32BIT"
3679  "orrs%?\\t%0, %1, %2"
3680  [(set_attr "conds" "set")
3681   (set_attr "arch" "*,t2,*")
3682   (set_attr "length" "4,2,4")
3683   (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3684)
3685
3686(define_insn "*iorsi3_compare0_scratch"
3687  [(set (reg:CC_NZ CC_REGNUM)
3688	(compare:CC_NZ
3689	 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3690		 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3691	 (const_int 0)))
3692   (clobber (match_scratch:SI 0 "=r,l,r"))]
3693  "TARGET_32BIT"
3694  "orrs%?\\t%0, %1, %2"
3695  [(set_attr "conds" "set")
3696   (set_attr "arch" "*,t2,*")
3697   (set_attr "length" "4,2,4")
3698   (set_attr "type" "logics_imm,logics_reg,logics_reg")]
3699)
3700
3701(define_expand "xorsi3"
3702  [(set (match_operand:SI         0 "s_register_operand")
3703	(xor:SI (match_operand:SI 1 "s_register_operand")
3704		(match_operand:SI 2 "reg_or_int_operand")))]
3705  "TARGET_EITHER"
3706  "if (CONST_INT_P (operands[2]))
3707    {
3708      if (TARGET_32BIT)
3709        {
3710	  if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3711	    operands[2] = force_reg (SImode, operands[2]);
3712	  else
3713	    {
3714	      arm_split_constant (XOR, SImode, NULL_RTX,
3715				  INTVAL (operands[2]), operands[0],
3716				  operands[1],
3717				  optimize && can_create_pseudo_p ());
3718	      DONE;
3719	    }
3720	}
3721      else /* TARGET_THUMB1 */
3722        {
3723          rtx tmp = force_reg (SImode, operands[2]);
3724	  if (rtx_equal_p (operands[0], operands[1]))
3725	    operands[2] = tmp;
3726	  else
3727	    {
3728              operands[2] = operands[1];
3729              operands[1] = tmp;
3730	    }
3731        }
3732    }"
3733)
3734
3735(define_insn_and_split "*arm_xorsi3"
3736  [(set (match_operand:SI         0 "s_register_operand" "=r,l,r,r")
3737	(xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3738		(match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3739  "TARGET_32BIT"
3740  "@
3741   eor%?\\t%0, %1, %2
3742   eor%?\\t%0, %1, %2
3743   eor%?\\t%0, %1, %2
3744   #"
3745  "TARGET_32BIT
3746   && CONST_INT_P (operands[2])
3747   && !const_ok_for_arm (INTVAL (operands[2]))"
3748  [(clobber (const_int 0))]
3749{
3750  arm_split_constant (XOR, SImode, curr_insn,
3751                      INTVAL (operands[2]), operands[0], operands[1], 0);
3752  DONE;
3753}
3754  [(set_attr "length" "4,4,4,16")
3755   (set_attr "predicable" "yes")
3756   (set_attr "predicable_short_it" "no,yes,no,no")
3757   (set_attr "type"  "logic_imm,logic_reg,logic_reg,multiple")]
3758)
3759
3760(define_insn "*xorsi3_compare0"
3761  [(set (reg:CC_NZ CC_REGNUM)
3762	(compare:CC_NZ (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3763				 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3764			 (const_int 0)))
3765   (set (match_operand:SI 0 "s_register_operand" "=r,r")
3766	(xor:SI (match_dup 1) (match_dup 2)))]
3767  "TARGET_32BIT"
3768  "eors%?\\t%0, %1, %2"
3769  [(set_attr "conds" "set")
3770   (set_attr "type" "logics_imm,logics_reg")]
3771)
3772
3773(define_insn "*xorsi3_compare0_scratch"
3774  [(set (reg:CC_NZ CC_REGNUM)
3775	(compare:CC_NZ (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3776				 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3777			 (const_int 0)))]
3778  "TARGET_32BIT"
3779  "teq%?\\t%0, %1"
3780  [(set_attr "conds" "set")
3781   (set_attr "type" "logics_imm,logics_reg")]
3782)
3783
3784; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C)
3785; followed by (NOT D), we can sometimes merge the final NOT into one of the
3786; following insns.
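; The equivalence is just De Morgan's law applied twice:
;   (NOT A AND NOT B) OR C  =  NOT ((A OR B) AND NOT C)
; so computing D and then negating it yields the original expression.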
3787
3788(define_split
3789  [(set (match_operand:SI 0 "s_register_operand" "")
3790	(ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3791			(not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3792		(match_operand:SI 3 "arm_rhs_operand" "")))
3793   (clobber (match_operand:SI 4 "s_register_operand" ""))]
3794  "TARGET_32BIT"
3795  [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3796			      (not:SI (match_dup 3))))
3797   (set (match_dup 0) (not:SI (match_dup 4)))]
3798  ""
3799)
3800
3801(define_insn_and_split "*andsi_iorsi3_notsi"
3802  [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3803	(and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3804			(match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3805		(not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3806  "TARGET_32BIT"
3807  "#"   ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3808  "&& reload_completed"
3809  [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3810   (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3811  {
3812     /* If operands[3] is a constant, make sure to fold the NOT into it
3813	to avoid creating a NOT of a CONST_INT.  */
3814    rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3815    if (CONST_INT_P (not_rtx))
3816      {
3817	operands[4] = operands[0];
3818	operands[5] = not_rtx;
3819      }
3820    else
3821      {
3822	operands[5] = operands[0];
3823	operands[4] = not_rtx;
3824      }
3825  }
3826  [(set_attr "length" "8")
3827   (set_attr "ce_count" "2")
3828   (set_attr "predicable" "yes")
3829   (set_attr "type" "multiple")]
3830)
3831
3832; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3833; insns are available?
3834(define_split
3835  [(set (match_operand:SI 0 "s_register_operand" "")
3836	(match_operator:SI 1 "logical_binary_operator"
3837	 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3838			   (match_operand:SI 3 "const_int_operand" "")
3839			   (match_operand:SI 4 "const_int_operand" ""))
3840	  (match_operator:SI 9 "logical_binary_operator"
3841	   [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3842			 (match_operand:SI 6 "const_int_operand" ""))
3843	    (match_operand:SI 7 "s_register_operand" "")])]))
3844   (clobber (match_operand:SI 8 "s_register_operand" ""))]
3845  "TARGET_32BIT
3846   && GET_CODE (operands[1]) == GET_CODE (operands[9])
3847   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3848  [(set (match_dup 8)
3849	(match_op_dup 1
3850	 [(ashift:SI (match_dup 2) (match_dup 4))
3851	  (match_dup 5)]))
3852   (set (match_dup 0)
3853	(match_op_dup 1
3854	 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3855	  (match_dup 7)]))]
3856  "
3857  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3858")
3859
3860(define_split
3861  [(set (match_operand:SI 0 "s_register_operand" "")
3862	(match_operator:SI 1 "logical_binary_operator"
3863	 [(match_operator:SI 9 "logical_binary_operator"
3864	   [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3865			 (match_operand:SI 6 "const_int_operand" ""))
3866	    (match_operand:SI 7 "s_register_operand" "")])
3867	  (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3868			   (match_operand:SI 3 "const_int_operand" "")
3869			   (match_operand:SI 4 "const_int_operand" ""))]))
3870   (clobber (match_operand:SI 8 "s_register_operand" ""))]
3871  "TARGET_32BIT
3872   && GET_CODE (operands[1]) == GET_CODE (operands[9])
3873   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3874  [(set (match_dup 8)
3875	(match_op_dup 1
3876	 [(ashift:SI (match_dup 2) (match_dup 4))
3877	  (match_dup 5)]))
3878   (set (match_dup 0)
3879	(match_op_dup 1
3880	 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3881	  (match_dup 7)]))]
3882  "
3883  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3884")
3885
3886(define_split
3887  [(set (match_operand:SI 0 "s_register_operand" "")
3888	(match_operator:SI 1 "logical_binary_operator"
3889	 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3890			   (match_operand:SI 3 "const_int_operand" "")
3891			   (match_operand:SI 4 "const_int_operand" ""))
3892	  (match_operator:SI 9 "logical_binary_operator"
3893	   [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3894			 (match_operand:SI 6 "const_int_operand" ""))
3895	    (match_operand:SI 7 "s_register_operand" "")])]))
3896   (clobber (match_operand:SI 8 "s_register_operand" ""))]
3897  "TARGET_32BIT
3898   && GET_CODE (operands[1]) == GET_CODE (operands[9])
3899   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3900  [(set (match_dup 8)
3901	(match_op_dup 1
3902	 [(ashift:SI (match_dup 2) (match_dup 4))
3903	  (match_dup 5)]))
3904   (set (match_dup 0)
3905	(match_op_dup 1
3906	 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3907	  (match_dup 7)]))]
3908  "
3909  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3910")
3911
3912(define_split
3913  [(set (match_operand:SI 0 "s_register_operand" "")
3914	(match_operator:SI 1 "logical_binary_operator"
3915	 [(match_operator:SI 9 "logical_binary_operator"
3916	   [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3917			 (match_operand:SI 6 "const_int_operand" ""))
3918	    (match_operand:SI 7 "s_register_operand" "")])
3919	  (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3920			   (match_operand:SI 3 "const_int_operand" "")
3921			   (match_operand:SI 4 "const_int_operand" ""))]))
3922   (clobber (match_operand:SI 8 "s_register_operand" ""))]
3923  "TARGET_32BIT
3924   && GET_CODE (operands[1]) == GET_CODE (operands[9])
3925   && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3926  [(set (match_dup 8)
3927	(match_op_dup 1
3928	 [(ashift:SI (match_dup 2) (match_dup 4))
3929	  (match_dup 5)]))
3930   (set (match_dup 0)
3931	(match_op_dup 1
3932	 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3933	  (match_dup 7)]))]
3934  "
3935  operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3936")
3937
3938
3939;; Minimum and maximum insns
3940
3941(define_expand "smaxsi3"
3942  [(parallel [
3943    (set (match_operand:SI 0 "s_register_operand")
3944	 (smax:SI (match_operand:SI 1 "s_register_operand")
3945		  (match_operand:SI 2 "arm_rhs_operand")))
3946    (clobber (reg:CC CC_REGNUM))])]
3947  "TARGET_32BIT"
3948  "
3949  if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3950    {
3951      /* No need for a clobber of the condition code register here.  */
3952      emit_insn (gen_rtx_SET (operands[0],
3953			      gen_rtx_SMAX (SImode, operands[1],
3954					    operands[2])));
3955      DONE;
3956    }
3957")
3958
3959(define_insn "*smax_0"
3960  [(set (match_operand:SI 0 "s_register_operand" "=r")
3961	(smax:SI (match_operand:SI 1 "s_register_operand" "r")
3962		 (const_int 0)))]
3963  "TARGET_32BIT"
3964  "bic%?\\t%0, %1, %1, asr #31"
3965  [(set_attr "predicable" "yes")
3966   (set_attr "type" "logic_shift_reg")]
3967)
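;; The operand "%1, asr #31" is 0 when %1 is non-negative and all-ones when
;; it is negative, so the bic above yields smax (%1, 0); the orr and and
;; forms below use the same mask for smax (%1, -1) and smin (%1, 0).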
3968
3969(define_insn "*smax_m1"
3970  [(set (match_operand:SI 0 "s_register_operand" "=r")
3971	(smax:SI (match_operand:SI 1 "s_register_operand" "r")
3972		 (const_int -1)))]
3973  "TARGET_32BIT"
3974  "orr%?\\t%0, %1, %1, asr #31"
3975  [(set_attr "predicable" "yes")
3976   (set_attr "type" "logic_shift_reg")]
3977)
3978
3979(define_insn_and_split "*arm_smax_insn"
3980  [(set (match_operand:SI          0 "s_register_operand" "=r,r")
3981	(smax:SI (match_operand:SI 1 "s_register_operand"  "%0,?r")
3982		 (match_operand:SI 2 "arm_rhs_operand"    "rI,rI")))
3983   (clobber (reg:CC CC_REGNUM))]
3984  "TARGET_ARM"
3985  "#"
3986   ; cmp\\t%1, %2\;movlt\\t%0, %2
3987   ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3988  "TARGET_ARM"
3989  [(set (reg:CC CC_REGNUM)
3990        (compare:CC (match_dup 1) (match_dup 2)))
3991   (set (match_dup 0)
3992        (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3993                         (match_dup 1)
3994                         (match_dup 2)))]
3995  ""
3996  [(set_attr "conds" "clob")
3997   (set_attr "length" "8,12")
3998   (set_attr "type" "multiple")]
3999)
4000
4001(define_expand "sminsi3"
4002  [(parallel [
4003    (set (match_operand:SI 0 "s_register_operand")
4004	 (smin:SI (match_operand:SI 1 "s_register_operand")
4005		  (match_operand:SI 2 "arm_rhs_operand")))
4006    (clobber (reg:CC CC_REGNUM))])]
4007  "TARGET_32BIT"
4008  "
4009  if (operands[2] == const0_rtx)
4010    {
4011      /* No need for a clobber of the condition code register here.  */
4012      emit_insn (gen_rtx_SET (operands[0],
4013			      gen_rtx_SMIN (SImode, operands[1],
4014					    operands[2])));
4015      DONE;
4016    }
4017")
4018
4019(define_insn "*smin_0"
4020  [(set (match_operand:SI 0 "s_register_operand" "=r")
4021	(smin:SI (match_operand:SI 1 "s_register_operand" "r")
4022		 (const_int 0)))]
4023  "TARGET_32BIT"
4024  "and%?\\t%0, %1, %1, asr #31"
4025  [(set_attr "predicable" "yes")
4026   (set_attr "type" "logic_shift_reg")]
4027)
4028
4029(define_insn_and_split "*arm_smin_insn"
4030  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4031	(smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
4032		 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
4033   (clobber (reg:CC CC_REGNUM))]
4034  "TARGET_ARM"
4035  "#"
4036    ; cmp\\t%1, %2\;movge\\t%0, %2
4037    ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
4038  "TARGET_ARM"
4039  [(set (reg:CC CC_REGNUM)
4040        (compare:CC (match_dup 1) (match_dup 2)))
4041   (set (match_dup 0)
4042        (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
4043                         (match_dup 1)
4044                         (match_dup 2)))]
4045  ""
4046  [(set_attr "conds" "clob")
4047   (set_attr "length" "8,12")
4048   (set_attr "type" "multiple,multiple")]
4049)
4050
4051(define_expand "umaxsi3"
4052  [(parallel [
4053    (set (match_operand:SI 0 "s_register_operand")
4054	 (umax:SI (match_operand:SI 1 "s_register_operand")
4055		  (match_operand:SI 2 "arm_rhs_operand")))
4056    (clobber (reg:CC CC_REGNUM))])]
4057  "TARGET_32BIT"
4058  ""
4059)
4060
4061(define_insn_and_split "*arm_umaxsi3"
4062  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4063	(umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4064		 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4065   (clobber (reg:CC CC_REGNUM))]
4066  "TARGET_ARM"
4067  "#"
4068    ; cmp\\t%1, %2\;movcc\\t%0, %2
4069    ; cmp\\t%1, %2\;movcs\\t%0, %1
4070    ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
4071  "TARGET_ARM"
4072  [(set (reg:CC CC_REGNUM)
4073        (compare:CC (match_dup 1) (match_dup 2)))
4074   (set (match_dup 0)
4075        (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
4076                         (match_dup 1)
4077                         (match_dup 2)))]
4078  ""
4079  [(set_attr "conds" "clob")
4080   (set_attr "length" "8,8,12")
4081   (set_attr "type" "store_4")]
4082)
4083
4084(define_expand "uminsi3"
4085  [(parallel [
4086    (set (match_operand:SI 0 "s_register_operand")
4087	 (umin:SI (match_operand:SI 1 "s_register_operand")
4088		  (match_operand:SI 2 "arm_rhs_operand")))
4089    (clobber (reg:CC CC_REGNUM))])]
4090  "TARGET_32BIT"
4091  ""
4092)
4093
4094(define_insn_and_split "*arm_uminsi3"
4095  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
4096	(umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
4097		 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
4098   (clobber (reg:CC CC_REGNUM))]
4099  "TARGET_ARM"
4100  "#"
4101   ; cmp\\t%1, %2\;movcs\\t%0, %2
4102   ; cmp\\t%1, %2\;movcc\\t%0, %1
4103   ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
4104  "TARGET_ARM"
4105  [(set (reg:CC CC_REGNUM)
4106        (compare:CC (match_dup 1) (match_dup 2)))
4107   (set (match_dup 0)
4108        (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
4109                         (match_dup 1)
4110                         (match_dup 2)))]
4111  ""
4112  [(set_attr "conds" "clob")
4113   (set_attr "length" "8,8,12")
4114   (set_attr "type" "store_4")]
4115)
4116
4117(define_insn "*store_minmaxsi"
4118  [(set (match_operand:SI 0 "memory_operand" "=m")
4119	(match_operator:SI 3 "minmax_operator"
4120	 [(match_operand:SI 1 "s_register_operand" "r")
4121	  (match_operand:SI 2 "s_register_operand" "r")]))
4122   (clobber (reg:CC CC_REGNUM))]
4123  "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
4124  "*
4125  operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
4126				operands[1], operands[2]);
4127  output_asm_insn (\"cmp\\t%1, %2\", operands);
4128  if (TARGET_THUMB2)
4129    output_asm_insn (\"ite\t%d3\", operands);
4130  output_asm_insn (\"str%d3\\t%1, %0\", operands);
4131  output_asm_insn (\"str%D3\\t%2, %0\", operands);
4132  return \"\";
4133  "
4134  [(set_attr "conds" "clob")
4135   (set (attr "length")
4136	(if_then_else (eq_attr "is_thumb" "yes")
4137		      (const_int 14)
4138		      (const_int 12)))
4139   (set_attr "type" "store_4")]
4140)
4141
4142; Reject the frame pointer in operand[1], since reloading this after
4143; it has been eliminated can cause carnage.
4144(define_insn "*minmax_arithsi"
4145  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4146	(match_operator:SI 4 "shiftable_operator"
4147	 [(match_operator:SI 5 "minmax_operator"
4148	   [(match_operand:SI 2 "s_register_operand" "r,r")
4149	    (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
4150	  (match_operand:SI 1 "s_register_operand" "0,?r")]))
4151   (clobber (reg:CC CC_REGNUM))]
4152  "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
4153  "*
4154  {
4155    enum rtx_code code = GET_CODE (operands[4]);
4156    bool need_else;
4157
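    /* Explanatory note: the else arm below computes %0 = %1 <op> %3, which
       is a no-op only when %0 already equals %1 (alternative 0), %3 is zero
       and the operator has a zero identity (PLUS, IOR or XOR).  */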
4158    if (which_alternative != 0 || operands[3] != const0_rtx
4159        || (code != PLUS && code != IOR && code != XOR))
4160      need_else = true;
4161    else
4162      need_else = false;
4163
4164    operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
4165				  operands[2], operands[3]);
4166    output_asm_insn (\"cmp\\t%2, %3\", operands);
4167    if (TARGET_THUMB2)
4168      {
4169	if (need_else)
4170	  output_asm_insn (\"ite\\t%d5\", operands);
4171	else
4172	  output_asm_insn (\"it\\t%d5\", operands);
4173      }
4174    output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
4175    if (need_else)
4176      output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
4177    return \"\";
4178  }"
4179  [(set_attr "conds" "clob")
4180   (set (attr "length")
4181	(if_then_else (eq_attr "is_thumb" "yes")
4182		      (const_int 14)
4183		      (const_int 12)))
4184   (set_attr "type" "multiple")]
4185)
4186
4187; Reject the frame pointer in operand[1], since reloading this after
4188; it has been eliminated can cause carnage.
4189(define_insn_and_split "*minmax_arithsi_non_canon"
4190  [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
4191	(minus:SI
4192	 (match_operand:SI 1 "s_register_operand" "0,?Ts")
4193	  (match_operator:SI 4 "minmax_operator"
4194	   [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
4195	    (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
4196   (clobber (reg:CC CC_REGNUM))]
4197  "TARGET_32BIT && !arm_eliminable_register (operands[1])
4198   && !(arm_restrict_it && CONST_INT_P (operands[3]))"
4199  "#"
4200  "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
4201  [(set (reg:CC CC_REGNUM)
4202        (compare:CC (match_dup 2) (match_dup 3)))
4203
4204   (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
4205              (set (match_dup 0)
4206                   (minus:SI (match_dup 1)
4207                             (match_dup 2))))
4208   (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
4209              (set (match_dup 0)
4210                   (match_dup 6)))]
4211  {
4212  machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
4213                                           operands[2], operands[3]);
4214  enum rtx_code rc = minmax_code (operands[4]);
4215  operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
4216                                operands[2], operands[3]);
4217
4218  if (mode == CCFPmode || mode == CCFPEmode)
4219    rc = reverse_condition_maybe_unordered (rc);
4220  else
4221    rc = reverse_condition (rc);
4222  operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
4223  if (CONST_INT_P (operands[3]))
4224    operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
4225  else
4226    operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
4227  }
4228  [(set_attr "conds" "clob")
4229   (set (attr "length")
4230	(if_then_else (eq_attr "is_thumb" "yes")
4231		      (const_int 14)
4232		      (const_int 12)))
4233   (set_attr "type" "multiple")]
4234)
4235
4236
4237(define_expand "arm_<ss_op>"
4238  [(set (match_operand:SI 0 "s_register_operand")
4239	(SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand")
4240			(match_operand:SI 2 "s_register_operand")))]
4241  "TARGET_DSP_MULTIPLY"
4242  {
4243    if (ARM_Q_BIT_READ)
4244      emit_insn (gen_arm_<ss_op>_setq_insn (operands[0],
4245					    operands[1], operands[2]));
4246    else
4247      emit_insn (gen_arm_<ss_op>_insn (operands[0], operands[1], operands[2]));
4248    DONE;
4249  }
4250)
4251
4252(define_insn "arm_<ss_op><add_clobber_q_name>_insn"
4253  [(set (match_operand:SI 0 "s_register_operand" "=r")
4254	(SSPLUSMINUS:SI (match_operand:SI 1 "s_register_operand" "r")
4255			(match_operand:SI 2 "s_register_operand" "r")))]
4256  "TARGET_DSP_MULTIPLY && <add_clobber_q_pred>"
4257  "<ss_op>%?\t%0, %1, %2"
4258  [(set_attr "predicable" "yes")
4259   (set_attr "type" "alu_dsp_reg")]
4260)
4261
4262(define_code_iterator SAT [smin smax])
4263(define_code_attr SATrev [(smin "smax") (smax "smin")])
4264(define_code_attr SATlo [(smin "1") (smax "2")])
4265(define_code_attr SAThi [(smin "2") (smax "1")])
4266
4267(define_expand "arm_ssat"
4268  [(match_operand:SI 0 "s_register_operand")
4269   (match_operand:SI 1 "s_register_operand")
4270   (match_operand:SI 2 "const_int_operand")]
4271  "TARGET_32BIT && arm_arch6"
4272  {
4273    HOST_WIDE_INT val = INTVAL (operands[2]);
4274    /* The builtin checking code should have ensured the right
4275       range for the immediate.  */
4276    gcc_assert (IN_RANGE (val, 1, 32));
4277    HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << (val - 1)) - 1;
4278    HOST_WIDE_INT lower_bound = -upper_bound - 1;
4279    rtx up_rtx = gen_int_mode (upper_bound, SImode);
4280    rtx lo_rtx = gen_int_mode (lower_bound, SImode);
4281    if (ARM_Q_BIT_READ)
4282      emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx,
4283				      up_rtx, operands[1]));
4284    else
4285      emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
4286    DONE;
4287  }
4288)
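;; For example (illustrative value): a bit-width of 8 gives the bounds
;; [-128, 127], i.e. operand 1 is clamped to the signed 8-bit range.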
4289
4290(define_expand "arm_usat"
4291  [(match_operand:SI 0 "s_register_operand")
4292   (match_operand:SI 1 "s_register_operand")
4293   (match_operand:SI 2 "const_int_operand")]
4294  "TARGET_32BIT && arm_arch6"
4295  {
4296    HOST_WIDE_INT val = INTVAL (operands[2]);
4297    /* The builtin checking code should have ensured the right
4298       range for the immediate.  */
4299    gcc_assert (IN_RANGE (val, 0, 31));
4300    HOST_WIDE_INT upper_bound = (HOST_WIDE_INT_1 << val) - 1;
4301    rtx up_rtx = gen_int_mode (upper_bound, SImode);
4302    rtx lo_rtx = CONST0_RTX (SImode);
4303    if (ARM_Q_BIT_READ)
4304      emit_insn (gen_satsi_smin_setq (operands[0], lo_rtx, up_rtx,
4305				      operands[1]));
4306    else
4307      emit_insn (gen_satsi_smin (operands[0], lo_rtx, up_rtx, operands[1]));
4308    DONE;
4309  }
4310)
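;; Likewise for arm_usat (illustrative value): a bit-width of 8 gives the
;; bounds [0, 255], clamping operand 1 to the unsigned 8-bit range.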
4311
4312(define_insn "arm_get_apsr"
4313  [(set (match_operand:SI 0 "s_register_operand" "=r")
4314	(unspec:SI [(reg:CC APSRQ_REGNUM)] UNSPEC_APSR_READ))]
4315  "TARGET_ARM_QBIT"
4316  "mrs%?\t%0, APSR"
4317  [(set_attr "predicable" "yes")
4318   (set_attr "conds" "use")]
4319)
4320
4321(define_insn "arm_set_apsr"
4322  [(set (reg:CC APSRQ_REGNUM)
4323	(unspec_volatile:CC
4324	  [(match_operand:SI 0 "s_register_operand" "r")] VUNSPEC_APSR_WRITE))]
4325  "TARGET_ARM_QBIT"
4326  "msr%?\tAPSR_nzcvq, %0"
4327  [(set_attr "predicable" "yes")
4328   (set_attr "conds" "set")]
4329)
4330
4331;; Read the APSR and extract the Q bit (bit 27)
4332(define_expand "arm_saturation_occurred"
4333  [(match_operand:SI 0 "s_register_operand")]
4334  "TARGET_ARM_QBIT"
4335  {
4336    rtx apsr = gen_reg_rtx (SImode);
4337    emit_insn (gen_arm_get_apsr (apsr));
4338    emit_insn (gen_extzv (operands[0], apsr, CONST1_RTX (SImode),
4339	       gen_int_mode (27, SImode)));
4340    DONE;
4341  }
4342)
4343
4344;; Read the APSR and set the Q bit (bit position 27) according to operand 0
4345(define_expand "arm_set_saturation"
4346  [(match_operand:SI 0 "reg_or_int_operand")]
4347  "TARGET_ARM_QBIT"
4348  {
4349    rtx apsr = gen_reg_rtx (SImode);
4350    emit_insn (gen_arm_get_apsr (apsr));
4351    rtx to_insert = gen_reg_rtx (SImode);
4352    if (CONST_INT_P (operands[0]))
4353      emit_move_insn (to_insert, operands[0] == CONST0_RTX (SImode)
4354				 ? CONST0_RTX (SImode) : CONST1_RTX (SImode));
4355    else
4356      {
4357        rtx cmp = gen_rtx_NE (SImode, operands[0], CONST0_RTX (SImode));
4358        emit_insn (gen_cstoresi4 (to_insert, cmp, operands[0],
4359				  CONST0_RTX (SImode)));
4360      }
4361    emit_insn (gen_insv (apsr, CONST1_RTX (SImode),
4362	       gen_int_mode (27, SImode), to_insert));
4363    emit_insn (gen_arm_set_apsr (apsr));
4364    DONE;
4365  }
4366)
4367
4368(define_insn "satsi_<SAT:code><add_clobber_q_name>"
4369  [(set (match_operand:SI 0 "s_register_operand" "=r")
4370        (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
4371                           (match_operand:SI 1 "const_int_operand" "i"))
4372                (match_operand:SI 2 "const_int_operand" "i")))]
4373  "TARGET_32BIT && arm_arch6 && <add_clobber_q_pred>
4374   && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4375{
4376  int mask;
4377  bool signed_sat;
4378  if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4379                               &mask, &signed_sat))
4380    gcc_unreachable ();
4381
4382  operands[1] = GEN_INT (mask);
4383  if (signed_sat)
4384    return "ssat%?\t%0, %1, %3";
4385  else
4386    return "usat%?\t%0, %1, %3";
4387}
4388  [(set_attr "predicable" "yes")
4389   (set_attr "type" "alus_imm")]
4390)
4391
4392(define_insn "*satsi_<SAT:code>_shift"
4393  [(set (match_operand:SI 0 "s_register_operand" "=r")
4394        (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
4395                             [(match_operand:SI 4 "s_register_operand" "r")
4396                              (match_operand:SI 5 "const_int_operand" "i")])
4397                           (match_operand:SI 1 "const_int_operand" "i"))
4398                (match_operand:SI 2 "const_int_operand" "i")))]
4399  "TARGET_32BIT && arm_arch6 && !ARM_Q_BIT_READ
4400   && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4401{
4402  int mask;
4403  bool signed_sat;
4404  if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4405                               &mask, &signed_sat))
4406    gcc_unreachable ();
4407
4408  operands[1] = GEN_INT (mask);
4409  if (signed_sat)
4410    return "ssat%?\t%0, %1, %4%S3";
4411  else
4412    return "usat%?\t%0, %1, %4%S3";
4413}
4414  [(set_attr "predicable" "yes")
4415   (set_attr "shift" "3")
4416   (set_attr "type" "logic_shift_reg")])
4417
4418;; Custom Datapath Extension insns.
4419(define_insn "arm_cx1<mode>"
4420   [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4421	 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4422	               (match_operand:SI 2 "const_int_ccde1_operand" "i")]
4423	    UNSPEC_CDE))]
4424   "TARGET_CDE"
4425   "cx1<cde_suffix>\\tp%c1, <cde_dest>, %2"
4426  [(set_attr "type" "coproc")]
4427)
4428
4429(define_insn "arm_cx1a<mode>"
4430   [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4431	 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4432		       (match_operand:SIDI 2 "s_register_operand" "0")
4433	               (match_operand:SI 3 "const_int_ccde1_operand" "i")]
4434	    UNSPEC_CDEA))]
4435   "TARGET_CDE"
4436   "cx1<cde_suffix>a\\tp%c1, <cde_dest>, %3"
4437  [(set_attr "type" "coproc")]
4438)
4439
4440(define_insn "arm_cx2<mode>"
4441   [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4442	 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4443		       (match_operand:SI 2 "s_register_operand" "r")
4444	               (match_operand:SI 3 "const_int_ccde2_operand" "i")]
4445	    UNSPEC_CDE))]
4446   "TARGET_CDE"
4447   "cx2<cde_suffix>\\tp%c1, <cde_dest>, %2, %3"
4448  [(set_attr "type" "coproc")]
4449)
4450
4451(define_insn "arm_cx2a<mode>"
4452   [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4453	 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4454		       (match_operand:SIDI 2 "s_register_operand" "0")
4455		       (match_operand:SI 3 "s_register_operand" "r")
4456	               (match_operand:SI 4 "const_int_ccde2_operand" "i")]
4457	    UNSPEC_CDEA))]
4458   "TARGET_CDE"
4459   "cx2<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4"
4460  [(set_attr "type" "coproc")]
4461)
4462
4463(define_insn "arm_cx3<mode>"
4464   [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4465	 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4466		       (match_operand:SI 2 "s_register_operand" "r")
4467		       (match_operand:SI 3 "s_register_operand" "r")
4468	               (match_operand:SI 4 "const_int_ccde3_operand" "i")]
4469	    UNSPEC_CDE))]
4470   "TARGET_CDE"
4471   "cx3<cde_suffix>\\tp%c1, <cde_dest>, %2, %3, %4"
4472  [(set_attr "type" "coproc")]
4473)
4474
4475(define_insn "arm_cx3a<mode>"
4476   [(set (match_operand:SIDI 0 "s_register_operand" "=r")
4477	 (unspec:SIDI [(match_operand:SI 1 "const_int_coproc_operand" "i")
4478		       (match_operand:SIDI 2 "s_register_operand" "0")
4479		       (match_operand:SI 3 "s_register_operand" "r")
4480		       (match_operand:SI 4 "s_register_operand" "r")
4481                       (match_operand:SI 5 "const_int_ccde3_operand" "i")]
4482	    UNSPEC_CDEA))]
4483   "TARGET_CDE"
4484   "cx3<cde_suffix>a\\tp%c1, <cde_dest>, %3, %4, %5"
4485  [(set_attr "type" "coproc")]
4486)
4487
4488;; Shift and rotation insns
4489
4490(define_expand "ashldi3"
4491  [(set (match_operand:DI            0 "s_register_operand")
4492        (ashift:DI (match_operand:DI 1 "s_register_operand")
4493                   (match_operand:SI 2 "reg_or_int_operand")))]
4494  "TARGET_32BIT"
4495  "
4496  if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN)
4497    {
4498      if (!reg_or_int_operand (operands[2], SImode))
4499        operands[2] = force_reg (SImode, operands[2]);
4500
4501      /* Armv8.1-M Mainline double shifts are not expanded.  */
4502      if (arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2]))
4503	  && (REG_P (operands[2]) || INTVAL(operands[2]) != 32))
4504        {
4505	  if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4506	    emit_insn (gen_movdi (operands[0], operands[1]));
4507
4508	  emit_insn (gen_thumb2_lsll (operands[0], operands[2]));
4509	  DONE;
4510	}
4511    }
4512
4513  arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4514				 operands[2], gen_reg_rtx (SImode),
4515				 gen_reg_rtx (SImode));
4516  DONE;
4517")
4518
4519(define_expand "ashlsi3"
4520  [(set (match_operand:SI            0 "s_register_operand")
4521	(ashift:SI (match_operand:SI 1 "s_register_operand")
4522		   (match_operand:SI 2 "arm_rhs_operand")))]
4523  "TARGET_EITHER"
4524  "
4525  if (CONST_INT_P (operands[2])
4526      && (UINTVAL (operands[2])) > 31)
4527    {
4528      emit_insn (gen_movsi (operands[0], const0_rtx));
4529      DONE;
4530    }
4531  "
4532)
4533
4534(define_expand "ashrdi3"
4535  [(set (match_operand:DI              0 "s_register_operand")
4536        (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
4537                     (match_operand:SI 2 "reg_or_int_operand")))]
4538  "TARGET_32BIT"
4539  "
4540  /* Armv8.1-M Mainline double shifts are not expanded.  */
4541  if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4542      && arm_reg_or_long_shift_imm (operands[2], GET_MODE (operands[2])))
4543    {
4544      if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4545	emit_insn (gen_movdi (operands[0], operands[1]));
4546
4547      emit_insn (gen_thumb2_asrl (operands[0], operands[2]));
4548      DONE;
4549    }
4550
4551  arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4552				 operands[2], gen_reg_rtx (SImode),
4553				 gen_reg_rtx (SImode));
4554  DONE;
4555")
4556
4557(define_expand "ashrsi3"
4558  [(set (match_operand:SI              0 "s_register_operand")
4559	(ashiftrt:SI (match_operand:SI 1 "s_register_operand")
4560		     (match_operand:SI 2 "arm_rhs_operand")))]
4561  "TARGET_EITHER"
4562  "
4563  if (CONST_INT_P (operands[2])
4564      && UINTVAL (operands[2]) > 31)
4565    operands[2] = GEN_INT (31);
4566  "
4567)
4568
4569(define_expand "lshrdi3"
4570  [(set (match_operand:DI              0 "s_register_operand")
4571        (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
4572                     (match_operand:SI 2 "reg_or_int_operand")))]
4573  "TARGET_32BIT"
4574  "
4575  /* Armv8.1-M Mainline double shifts are not expanded.  */
4576  if (TARGET_HAVE_MVE && !BYTES_BIG_ENDIAN
4577    && long_shift_imm (operands[2], GET_MODE (operands[2])))
4578    {
4579      if (!reg_overlap_mentioned_p(operands[0], operands[1]))
4580        emit_insn (gen_movdi (operands[0], operands[1]));
4581
4582      emit_insn (gen_thumb2_lsrl (operands[0], operands[2]));
4583      DONE;
4584    }
4585
4586  arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4587				 operands[2], gen_reg_rtx (SImode),
4588				 gen_reg_rtx (SImode));
4589  DONE;
4590")
4591
4592(define_expand "lshrsi3"
4593  [(set (match_operand:SI              0 "s_register_operand")
4594	(lshiftrt:SI (match_operand:SI 1 "s_register_operand")
4595		     (match_operand:SI 2 "arm_rhs_operand")))]
4596  "TARGET_EITHER"
4597  "
4598  if (CONST_INT_P (operands[2])
4599      && (UINTVAL (operands[2])) > 31)
4600    {
4601      emit_insn (gen_movsi (operands[0], const0_rtx));
4602      DONE;
4603    }
4604  "
4605)
4606
4607(define_expand "rotlsi3"
4608  [(set (match_operand:SI              0 "s_register_operand")
4609	(rotatert:SI (match_operand:SI 1 "s_register_operand")
4610		     (match_operand:SI 2 "reg_or_int_operand")))]
4611  "TARGET_32BIT"
4612  "
4613  if (CONST_INT_P (operands[2]))
4614    operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4615  else
4616    {
4617      rtx reg = gen_reg_rtx (SImode);
4618      emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
4619      operands[2] = reg;
4620    }
4621  "
4622)
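;; For example (illustrative value): a rotate left by 8 is rewritten as a
;; rotate right by (32 - 8) % 32 = 24, since only a rotate-right form is
;; available at the instruction level.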
4623
4624(define_expand "rotrsi3"
4625  [(set (match_operand:SI              0 "s_register_operand")
4626	(rotatert:SI (match_operand:SI 1 "s_register_operand")
4627		     (match_operand:SI 2 "arm_rhs_operand")))]
4628  "TARGET_EITHER"
4629  "
4630  if (TARGET_32BIT)
4631    {
4632      if (CONST_INT_P (operands[2])
4633          && UINTVAL (operands[2]) > 31)
4634        operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4635    }
4636  else /* TARGET_THUMB1 */
4637    {
4638      if (CONST_INT_P (operands [2]))
4639        operands [2] = force_reg (SImode, operands[2]);
4640    }
4641  "
4642)
4643
4644(define_insn "*arm_shiftsi3"
4645  [(set (match_operand:SI   0 "s_register_operand" "=l,l,r,r")
4646	(match_operator:SI  3 "shift_operator"
4647	 [(match_operand:SI 1 "s_register_operand"  "0,l,r,r")
4648	  (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
4649  "TARGET_32BIT"
4650  "* return arm_output_shift(operands, 0);"
4651  [(set_attr "predicable" "yes")
4652   (set_attr "arch" "t2,t2,*,*")
4653   (set_attr "predicable_short_it" "yes,yes,no,no")
4654   (set_attr "length" "4")
4655   (set_attr "shift" "1")
4656   (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
4657)
4658
4659(define_insn "*shiftsi3_compare0"
4660  [(set (reg:CC_NZ CC_REGNUM)
4661	(compare:CC_NZ (match_operator:SI 3 "shift_operator"
4662			  [(match_operand:SI 1 "s_register_operand" "r,r")
4663			   (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4664			 (const_int 0)))
4665   (set (match_operand:SI 0 "s_register_operand" "=r,r")
4666	(match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4667  "TARGET_32BIT"
4668  "* return arm_output_shift(operands, 1);"
4669  [(set_attr "conds" "set")
4670   (set_attr "shift" "1")
4671   (set_attr "type" "alus_shift_imm,alus_shift_reg")]
4672)
4673
4674(define_insn "*shiftsi3_compare0_scratch"
4675  [(set (reg:CC_NZ CC_REGNUM)
4676	(compare:CC_NZ (match_operator:SI 3 "shift_operator"
4677			  [(match_operand:SI 1 "s_register_operand" "r,r")
4678			   (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4679			 (const_int 0)))
4680   (clobber (match_scratch:SI 0 "=r,r"))]
4681  "TARGET_32BIT"
4682  "* return arm_output_shift(operands, 1);"
4683  [(set_attr "conds" "set")
4684   (set_attr "shift" "1")
4685   (set_attr "type" "shift_imm,shift_reg")]
4686)
4687
4688(define_insn "*not_shiftsi"
4689  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4690	(not:SI (match_operator:SI 3 "shift_operator"
4691		 [(match_operand:SI 1 "s_register_operand" "r,r")
4692		  (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
4693  "TARGET_32BIT"
4694  "mvn%?\\t%0, %1%S3"
4695  [(set_attr "predicable" "yes")
4696   (set_attr "shift" "1")
4697   (set_attr "arch" "32,a")
4698   (set_attr "type" "mvn_shift,mvn_shift_reg")])
4699
4700(define_insn "*not_shiftsi_compare0"
4701  [(set (reg:CC_NZ CC_REGNUM)
4702	(compare:CC_NZ
4703	 (not:SI (match_operator:SI 3 "shift_operator"
4704		  [(match_operand:SI 1 "s_register_operand" "r,r")
4705		   (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4706	 (const_int 0)))
4707   (set (match_operand:SI 0 "s_register_operand" "=r,r")
4708	(not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4709  "TARGET_32BIT"
4710  "mvns%?\\t%0, %1%S3"
4711  [(set_attr "conds" "set")
4712   (set_attr "shift" "1")
4713   (set_attr "arch" "32,a")
4714   (set_attr "type" "mvn_shift,mvn_shift_reg")])
4715
4716(define_insn "*not_shiftsi_compare0_scratch"
4717  [(set (reg:CC_NZ CC_REGNUM)
4718	(compare:CC_NZ
4719	 (not:SI (match_operator:SI 3 "shift_operator"
4720		  [(match_operand:SI 1 "s_register_operand" "r,r")
4721		   (match_operand:SI 2 "shift_amount_operand" "M,r")]))
4722	 (const_int 0)))
4723   (clobber (match_scratch:SI 0 "=r,r"))]
4724  "TARGET_32BIT"
4725  "mvns%?\\t%0, %1%S3"
4726  [(set_attr "conds" "set")
4727   (set_attr "shift" "1")
4728   (set_attr "arch" "32,a")
4729   (set_attr "type" "mvn_shift,mvn_shift_reg")])
4730
4731;; We don't really have extzv, but defining this using shifts helps
4732;; to reduce register pressure later on.
4733
4734(define_expand "extzv"
4735  [(set (match_operand 0 "s_register_operand")
4736	(zero_extract (match_operand 1 "nonimmediate_operand")
4737		      (match_operand 2 "const_int_operand")
4738		      (match_operand 3 "const_int_operand")))]
4739  "TARGET_THUMB1 || arm_arch_thumb2"
4740  "
4741  {
4742    HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4743    HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4744
4745    if (arm_arch_thumb2)
4746      {
4747	HOST_WIDE_INT width = INTVAL (operands[2]);
4748	HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4749
4750	if (unaligned_access && MEM_P (operands[1])
4751	    && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4752	  {
4753	    rtx base_addr;
4754
4755	    if (BYTES_BIG_ENDIAN)
4756	      bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4757		       - bitpos;
4758
4759	    if (width == 32)
4760              {
4761		base_addr = adjust_address (operands[1], SImode,
4762					    bitpos / BITS_PER_UNIT);
4763		emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4764              }
4765	    else
4766              {
4767		rtx dest = operands[0];
4768		rtx tmp = gen_reg_rtx (SImode);
4769
4770		/* We may get a paradoxical subreg here.  Strip it off.  */
4771		if (GET_CODE (dest) == SUBREG
4772		    && GET_MODE (dest) == SImode
4773		    && GET_MODE (SUBREG_REG (dest)) == HImode)
4774		  dest = SUBREG_REG (dest);
4775
4776		if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4777		  FAIL;
4778
4779		base_addr = adjust_address (operands[1], HImode,
4780					    bitpos / BITS_PER_UNIT);
4781		emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4782		emit_move_insn (gen_lowpart (SImode, dest), tmp);
4783	      }
4784	    DONE;
4785	  }
4786	else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4787	  {
4788	    emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4789				     operands[3]));
4790	    DONE;
4791	  }
4792	else
4793	  FAIL;
4794      }
4795
4796    if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4797      FAIL;
4798
4799    operands[3] = GEN_INT (rshift);
4800
4801    if (lshift == 0)
4802      {
4803        emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4804        DONE;
4805      }
4806
4807    emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4808			     operands[3], gen_reg_rtx (SImode)));
4809    DONE;
4810  }"
4811)
4812
4813;; Helper for extzv, for the Thumb-1 register-shifts case.
4814
4815(define_expand "extzv_t1"
4816  [(set (match_operand:SI 4 "s_register_operand")
4817	(ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4818		   (match_operand:SI 2 "const_int_operand")))
4819   (set (match_operand:SI 0 "s_register_operand")
4820	(lshiftrt:SI (match_dup 4)
4821		     (match_operand:SI 3 "const_int_operand")))]
4822  "TARGET_THUMB1"
4823  "")
4824
4825(define_expand "extv"
4826  [(set (match_operand 0 "s_register_operand")
4827	(sign_extract (match_operand 1 "nonimmediate_operand")
4828		      (match_operand 2 "const_int_operand")
4829		      (match_operand 3 "const_int_operand")))]
4830  "arm_arch_thumb2"
4831{
4832  HOST_WIDE_INT width = INTVAL (operands[2]);
4833  HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4834
4835  if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4836      && (bitpos % BITS_PER_UNIT)  == 0)
4837    {
4838      rtx base_addr;
4839
4840      if (BYTES_BIG_ENDIAN)
4841	bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4842
4843      if (width == 32)
4844        {
4845	  base_addr = adjust_address (operands[1], SImode,
4846				      bitpos / BITS_PER_UNIT);
4847	  emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4848        }
4849      else
4850        {
4851	  rtx dest = operands[0];
4852	  rtx tmp = gen_reg_rtx (SImode);
4853
4854	  /* We may get a paradoxical subreg here.  Strip it off.  */
4855	  if (GET_CODE (dest) == SUBREG
4856	      && GET_MODE (dest) == SImode
4857	      && GET_MODE (SUBREG_REG (dest)) == HImode)
4858	    dest = SUBREG_REG (dest);
4859
4860	  if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4861	    FAIL;
4862
4863	  base_addr = adjust_address (operands[1], HImode,
4864				      bitpos / BITS_PER_UNIT);
4865	  emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4866	  emit_move_insn (gen_lowpart (SImode, dest), tmp);
4867	}
4868
4869      DONE;
4870    }
4871  else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4872    FAIL;
4873  else if (GET_MODE (operands[0]) == SImode
4874	   && GET_MODE (operands[1]) == SImode)
4875    {
4876      emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4877				 operands[3]));
4878      DONE;
4879    }
4880
4881  FAIL;
4882})
4883
4884; Helper to expand register forms of extv with the proper modes.
4885
4886(define_expand "extv_regsi"
4887  [(set (match_operand:SI 0 "s_register_operand")
4888	(sign_extract:SI (match_operand:SI 1 "s_register_operand")
4889			 (match_operand 2 "const_int_operand")
4890			 (match_operand 3 "const_int_operand")))]
4891  ""
4892{
4893})
4894
4895; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
4896
4897(define_insn "unaligned_loaddi"
4898  [(set (match_operand:DI 0 "s_register_operand" "=r")
4899	(unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4900		   UNSPEC_UNALIGNED_LOAD))]
4901  "TARGET_32BIT && TARGET_LDRD"
4902  "*
4903  return output_move_double (operands, true, NULL);
4904  "
4905  [(set_attr "length" "8")
4906   (set_attr "type" "load_8")])
4907
4908(define_insn "unaligned_loadsi"
4909  [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4910	(unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
4911		   UNSPEC_UNALIGNED_LOAD))]
4912  "unaligned_access"
4913  "@
4914   ldr\t%0, %1\t@ unaligned
4915   ldr%?\t%0, %1\t@ unaligned
4916   ldr%?\t%0, %1\t@ unaligned"
4917  [(set_attr "arch" "t1,t2,32")
4918   (set_attr "length" "2,2,4")
4919   (set_attr "predicable" "no,yes,yes")
4920   (set_attr "predicable_short_it" "no,yes,no")
4921   (set_attr "type" "load_4")])
4922
4923;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
4924;; address (there's no immediate format).  That's tricky to support
4925;; here and we don't really need this pattern for that case, so only
4926;; enable for 32-bit ISAs.
4927(define_insn "unaligned_loadhis"
4928  [(set (match_operand:SI 0 "s_register_operand" "=r")
4929	(sign_extend:SI
4930	  (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
4931		     UNSPEC_UNALIGNED_LOAD)))]
4932  "unaligned_access && TARGET_32BIT"
4933  "ldrsh%?\t%0, %1\t@ unaligned"
4934  [(set_attr "predicable" "yes")
4935   (set_attr "type" "load_byte")])
4936
4937(define_insn "unaligned_loadhiu"
4938  [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4939	(zero_extend:SI
4940	  (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
4941		     UNSPEC_UNALIGNED_LOAD)))]
4942  "unaligned_access"
4943  "@
4944   ldrh\t%0, %1\t@ unaligned
4945   ldrh%?\t%0, %1\t@ unaligned
4946   ldrh%?\t%0, %1\t@ unaligned"
4947  [(set_attr "arch" "t1,t2,32")
4948   (set_attr "length" "2,2,4")
4949   (set_attr "predicable" "no,yes,yes")
4950   (set_attr "predicable_short_it" "no,yes,no")
4951   (set_attr "type" "load_byte")])
4952
4953(define_insn "unaligned_storedi"
4954  [(set (match_operand:DI 0 "memory_operand" "=m")
4955	(unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
4956		   UNSPEC_UNALIGNED_STORE))]
4957  "TARGET_32BIT && TARGET_LDRD"
4958  "*
4959  return output_move_double (operands, true, NULL);
4960  "
4961  [(set_attr "length" "8")
4962   (set_attr "type" "store_8")])
4963
4964(define_insn "unaligned_storesi"
4965  [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
4966	(unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
4967		   UNSPEC_UNALIGNED_STORE))]
4968  "unaligned_access"
4969  "@
4970   str\t%1, %0\t@ unaligned
4971   str%?\t%1, %0\t@ unaligned
4972   str%?\t%1, %0\t@ unaligned"
4973  [(set_attr "arch" "t1,t2,32")
4974   (set_attr "length" "2,2,4")
4975   (set_attr "predicable" "no,yes,yes")
4976   (set_attr "predicable_short_it" "no,yes,no")
4977   (set_attr "type" "store_4")])
4978
4979(define_insn "unaligned_storehi"
4980  [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
4981	(unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
4982		   UNSPEC_UNALIGNED_STORE))]
4983  "unaligned_access"
4984  "@
4985   strh\t%1, %0\t@ unaligned
4986   strh%?\t%1, %0\t@ unaligned
4987   strh%?\t%1, %0\t@ unaligned"
4988  [(set_attr "arch" "t1,t2,32")
4989   (set_attr "length" "2,2,4")
4990   (set_attr "predicable" "no,yes,yes")
4991   (set_attr "predicable_short_it" "no,yes,no")
4992   (set_attr "type" "store_4")])
4993
4994
4995(define_insn "*extv_reg"
4996  [(set (match_operand:SI 0 "s_register_operand" "=r")
4997	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4998			  (match_operand:SI 2 "const_int_operand" "n")
4999			  (match_operand:SI 3 "const_int_operand" "n")))]
5000  "arm_arch_thumb2
5001   && IN_RANGE (INTVAL (operands[3]), 0, 31)
5002   && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
5003  "sbfx%?\t%0, %1, %3, %2"
5004  [(set_attr "length" "4")
5005   (set_attr "predicable" "yes")
5006   (set_attr "type" "bfm")]
5007)
5008
5009(define_insn "extzv_t2"
5010  [(set (match_operand:SI 0 "s_register_operand" "=r")
5011	(zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
5012			  (match_operand:SI 2 "const_int_operand" "n")
5013			  (match_operand:SI 3 "const_int_operand" "n")))]
5014  "arm_arch_thumb2
5015   && IN_RANGE (INTVAL (operands[3]), 0, 31)
5016   && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
5017  "ubfx%?\t%0, %1, %3, %2"
5018  [(set_attr "length" "4")
5019   (set_attr "predicable" "yes")
5020   (set_attr "type" "bfm")]
5021)
5022
5023
5024;; Division instructions
5025(define_insn "divsi3"
5026  [(set (match_operand:SI	  0 "s_register_operand" "=r,r")
5027	(div:SI (match_operand:SI 1 "s_register_operand"  "r,r")
5028		(match_operand:SI 2 "s_register_operand"  "r,r")))]
5029  "TARGET_IDIV"
5030  "@
5031   sdiv%?\t%0, %1, %2
5032   sdiv\t%0, %1, %2"
5033  [(set_attr "arch" "32,v8mb")
5034   (set_attr "predicable" "yes")
5035   (set_attr "type" "sdiv")]
5036)
5037
5038(define_insn "udivsi3"
5039  [(set (match_operand:SI	   0 "s_register_operand" "=r,r")
5040	(udiv:SI (match_operand:SI 1 "s_register_operand"  "r,r")
5041		 (match_operand:SI 2 "s_register_operand"  "r,r")))]
5042  "TARGET_IDIV"
5043  "@
5044   udiv%?\t%0, %1, %2
5045   udiv\t%0, %1, %2"
5046  [(set_attr "arch" "32,v8mb")
5047   (set_attr "predicable" "yes")
5048   (set_attr "type" "udiv")]
5049)
5050
5051
5052;; Unary arithmetic insns
5053
5054(define_expand "negv<SIDI:mode>3"
5055  [(match_operand:SIDI 0 "s_register_operand")
5056   (match_operand:SIDI 1 "s_register_operand")
5057   (match_operand 2 "")]
5058  "TARGET_32BIT"
5059{
5060  emit_insn (gen_subv<mode>4 (operands[0], const0_rtx, operands[1],
5061			      operands[2]));
5062  DONE;
5063})
5064
5065(define_expand "negsi2"
5066  [(set (match_operand:SI         0 "s_register_operand")
5067	(neg:SI (match_operand:SI 1 "s_register_operand")))]
5068  "TARGET_EITHER"
5069  ""
5070)
5071
5072(define_insn "*arm_negsi2"
5073  [(set (match_operand:SI         0 "s_register_operand" "=l,r")
5074	(neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
5075  "TARGET_32BIT"
5076  "rsb%?\\t%0, %1, #0"
5077  [(set_attr "predicable" "yes")
5078   (set_attr "predicable_short_it" "yes,no")
5079   (set_attr "arch" "t2,*")
5080   (set_attr "length" "4")
5081   (set_attr "type" "alu_imm")]
5082)
5083
5084;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
5085;; rather than (0 cmp reg).  This gives the same results for unsigned
5086;; and equality compares which is what we mostly need here.
5087(define_insn "negsi2_0compare"
5088  [(set (reg:CC_RSB CC_REGNUM)
5089	(compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
5090			(const_int -1)))
5091   (set (match_operand:SI 0 "s_register_operand" "=l,r")
5092	(neg:SI (match_dup 1)))]
5093  "TARGET_32BIT"
5094  "@
5095   negs\\t%0, %1
5096   rsbs\\t%0, %1, #0"
5097  [(set_attr "conds" "set")
5098   (set_attr "arch" "t2,*")
5099   (set_attr "length" "2,*")
5100   (set_attr "type" "alus_imm")]
5101)
5102
5103(define_insn "negsi2_carryin"
5104  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5105	(minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
5106		  (match_operand:SI 2 "arm_borrow_operation" "")))]
5107  "TARGET_32BIT"
5108  "@
5109   rsc\\t%0, %1, #0
5110   sbc\\t%0, %1, %1, lsl #1"
5111  [(set_attr "conds" "use")
5112   (set_attr "arch" "a,t2")
5113   (set_attr "type" "adc_imm,adc_reg")]
5114)
5115
5116(define_expand "negsf2"
5117  [(set (match_operand:SF         0 "s_register_operand")
5118	(neg:SF (match_operand:SF 1 "s_register_operand")))]
5119  "TARGET_32BIT && TARGET_HARD_FLOAT"
5120  ""
5121)
5122
5123(define_expand "negdf2"
5124  [(set (match_operand:DF         0 "s_register_operand")
5125	(neg:DF (match_operand:DF 1 "s_register_operand")))]
5126  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5127  "")
5128
5129;; abssi2 doesn't really clobber the condition codes if a different register
5130;; is being set.  To keep things simple, assume during rtl manipulations that
5131;; it does, but tell the final scan operator the truth.  Similarly for
5132;; (neg (abs...))
5133
5134(define_expand "abssi2"
5135  [(parallel
5136    [(set (match_operand:SI         0 "s_register_operand")
5137	  (abs:SI (match_operand:SI 1 "s_register_operand")))
5138     (clobber (match_dup 2))])]
5139  "TARGET_EITHER"
5140  "
5141  if (TARGET_THUMB1)
5142    operands[2] = gen_rtx_SCRATCH (SImode);
5143  else
5144    operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
5145")
5146
5147(define_insn_and_split "*arm_abssi2"
5148  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5149	(abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
5150   (clobber (reg:CC CC_REGNUM))]
5151  "TARGET_ARM"
5152  "#"
5153  "&& reload_completed"
5154  [(const_int 0)]
5155  {
5156   /* if (which_alternative == 0) */
5157   if (REGNO(operands[0]) == REGNO(operands[1]))
5158     {
5159      /* Emit the pattern:
5160         cmp\\t%0, #0\;rsblt\\t%0, %0, #0
5161         [(set (reg:CC CC_REGNUM)
5162               (compare:CC (match_dup 0) (const_int 0)))
5163          (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
5164                     (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
5165      */
5166      emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5167                              gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5168      emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5169                                    (gen_rtx_LT (SImode,
5170                                                 gen_rtx_REG (CCmode, CC_REGNUM),
5171                                                 const0_rtx)),
5172                                    (gen_rtx_SET (operands[0],
5173                                                  (gen_rtx_MINUS (SImode,
5174                                                                  const0_rtx,
5175                                                                  operands[1]))))));
5176      DONE;
5177     }
5178   else
5179     {
5180      /* Emit the pattern:
5181         alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
5182         [(set (match_dup 0)
5183               (xor:SI (match_dup 1)
5184                       (ashiftrt:SI (match_dup 1) (const_int 31))))
5185          (set (match_dup 0)
5186               (minus:SI (match_dup 0)
5187                      (ashiftrt:SI (match_dup 1) (const_int 31))))]
5188      */
5189      emit_insn (gen_rtx_SET (operands[0],
5190                              gen_rtx_XOR (SImode,
5191                                           gen_rtx_ASHIFTRT (SImode,
5192                                                             operands[1],
5193                                                             GEN_INT (31)),
5194                                           operands[1])));
5195      emit_insn (gen_rtx_SET (operands[0],
5196                              gen_rtx_MINUS (SImode,
5197                                             operands[0],
5198                                             gen_rtx_ASHIFTRT (SImode,
5199                                                               operands[1],
5200                                                               GEN_INT (31)))));
5201      DONE;
5202     }
5203  }
5204  [(set_attr "conds" "clob,*")
5205   (set_attr "shift" "1")
5206   (set_attr "predicable" "no, yes")
5207   (set_attr "length" "8")
5208   (set_attr "type" "multiple")]
5209)
5210
5211(define_insn_and_split "*arm_neg_abssi2"
5212  [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
5213	(neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
5214   (clobber (reg:CC CC_REGNUM))]
5215  "TARGET_ARM"
5216  "#"
5217  "&& reload_completed"
5218  [(const_int 0)]
5219  {
5220   /* if (which_alternative == 0) */
5221   if (REGNO (operands[0]) == REGNO (operands[1]))
5222     {
5223      /* Emit the pattern:
5224         cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
5225      */
5226      emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
5227                              gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
5228      emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5229                                    gen_rtx_GT (SImode,
5230                                                gen_rtx_REG (CCmode, CC_REGNUM),
5231                                                const0_rtx),
5232                                    gen_rtx_SET (operands[0],
5233                                                 (gen_rtx_MINUS (SImode,
5234                                                                 const0_rtx,
5235                                                                 operands[1])))));
5236     }
5237   else
5238     {
5239      /* Emit the pattern:
5240         eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
5241      */
5242      emit_insn (gen_rtx_SET (operands[0],
5243                              gen_rtx_XOR (SImode,
5244                                           gen_rtx_ASHIFTRT (SImode,
5245                                                             operands[1],
5246                                                             GEN_INT (31)),
5247                                           operands[1])));
5248      emit_insn (gen_rtx_SET (operands[0],
5249                              gen_rtx_MINUS (SImode,
5250                                             gen_rtx_ASHIFTRT (SImode,
5251                                                               operands[1],
5252                                                               GEN_INT (31)),
5253                                             operands[0])));
5254     }
5255   DONE;
5256  }
5257  [(set_attr "conds" "clob,*")
5258   (set_attr "shift" "1")
5259   (set_attr "predicable" "no, yes")
5260   (set_attr "length" "8")
5261   (set_attr "type" "multiple")]
5262)
5263
5264(define_expand "abssf2"
5265  [(set (match_operand:SF         0 "s_register_operand")
5266	(abs:SF (match_operand:SF 1 "s_register_operand")))]
5267  "TARGET_32BIT && TARGET_HARD_FLOAT"
5268  "")
5269
5270(define_expand "absdf2"
5271  [(set (match_operand:DF         0 "s_register_operand")
5272	(abs:DF (match_operand:DF 1 "s_register_operand")))]
5273  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5274  "")
5275
5276(define_expand "sqrtsf2"
5277  [(set (match_operand:SF 0 "s_register_operand")
5278	(sqrt:SF (match_operand:SF 1 "s_register_operand")))]
5279  "TARGET_32BIT && TARGET_HARD_FLOAT"
5280  "")
5281
5282(define_expand "sqrtdf2"
5283  [(set (match_operand:DF 0 "s_register_operand")
5284	(sqrt:DF (match_operand:DF 1 "s_register_operand")))]
5285  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
5286  "")
5287
5288(define_expand "one_cmplsi2"
5289  [(set (match_operand:SI         0 "s_register_operand")
5290	(not:SI (match_operand:SI 1 "s_register_operand")))]
5291  "TARGET_EITHER"
5292  ""
5293)
5294
5295(define_insn "*arm_one_cmplsi2"
5296  [(set (match_operand:SI         0 "s_register_operand" "=l,r")
5297	(not:SI (match_operand:SI 1 "s_register_operand"  "l,r")))]
5298  "TARGET_32BIT"
5299  "mvn%?\\t%0, %1"
5300  [(set_attr "predicable" "yes")
5301   (set_attr "predicable_short_it" "yes,no")
5302   (set_attr "arch" "t2,*")
5303   (set_attr "length" "4")
5304   (set_attr "type" "mvn_reg")]
5305)
5306
5307(define_insn "*notsi_compare0"
5308  [(set (reg:CC_NZ CC_REGNUM)
5309	(compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5310			 (const_int 0)))
5311   (set (match_operand:SI 0 "s_register_operand" "=r")
5312	(not:SI (match_dup 1)))]
5313  "TARGET_32BIT"
5314  "mvns%?\\t%0, %1"
5315  [(set_attr "conds" "set")
5316   (set_attr "type" "mvn_reg")]
5317)
5318
5319(define_insn "*notsi_compare0_scratch"
5320  [(set (reg:CC_NZ CC_REGNUM)
5321	(compare:CC_NZ (not:SI (match_operand:SI 1 "s_register_operand" "r"))
5322			 (const_int 0)))
5323   (clobber (match_scratch:SI 0 "=r"))]
5324  "TARGET_32BIT"
5325  "mvns%?\\t%0, %1"
5326  [(set_attr "conds" "set")
5327   (set_attr "type" "mvn_reg")]
5328)
5329
5330;; Fixed <--> Floating conversion insns
5331
5332(define_expand "floatsihf2"
5333  [(set (match_operand:HF           0 "general_operand")
5334	(float:HF (match_operand:SI 1 "general_operand")))]
5335  "TARGET_EITHER"
5336  "
5337  {
5338    rtx op1 = gen_reg_rtx (SFmode);
5339    expand_float (op1, operands[1], 0);
5340    op1 = convert_to_mode (HFmode, op1, 0);
5341    emit_move_insn (operands[0], op1);
5342    DONE;
5343  }"
5344)
5345
5346(define_expand "floatdihf2"
5347  [(set (match_operand:HF           0 "general_operand")
5348	(float:HF (match_operand:DI 1 "general_operand")))]
5349  "TARGET_EITHER"
5350  "
5351  {
5352    rtx op1 = gen_reg_rtx (SFmode);
5353    expand_float (op1, operands[1], 0);
5354    op1 = convert_to_mode (HFmode, op1, 0);
5355    emit_move_insn (operands[0], op1);
5356    DONE;
5357  }"
5358)
5359
5360(define_expand "floatsisf2"
5361  [(set (match_operand:SF           0 "s_register_operand")
5362	(float:SF (match_operand:SI 1 "s_register_operand")))]
5363  "TARGET_32BIT && TARGET_HARD_FLOAT"
5364  "
5365")
5366
5367(define_expand "floatsidf2"
5368  [(set (match_operand:DF           0 "s_register_operand")
5369	(float:DF (match_operand:SI 1 "s_register_operand")))]
5370  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5371  "
5372")
5373
5374(define_expand "fix_trunchfsi2"
5375  [(set (match_operand:SI         0 "general_operand")
5376	(fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
5377  "TARGET_EITHER"
5378  "
5379  {
5380    rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5381    expand_fix (operands[0], op1, 0);
5382    DONE;
5383  }"
5384)
5385
5386(define_expand "fix_trunchfdi2"
5387  [(set (match_operand:DI         0 "general_operand")
5388	(fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
5389  "TARGET_EITHER"
5390  "
5391  {
5392    rtx op1 = convert_to_mode (SFmode, operands[1], 0);
5393    expand_fix (operands[0], op1, 0);
5394    DONE;
5395  }"
5396)
5397
5398(define_expand "fix_truncsfsi2"
5399  [(set (match_operand:SI         0 "s_register_operand")
5400	(fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
5401  "TARGET_32BIT && TARGET_HARD_FLOAT"
5402  "
5403")
5404
5405(define_expand "fix_truncdfsi2"
5406  [(set (match_operand:SI         0 "s_register_operand")
5407	(fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
5408  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5409  "
5410")
5411
5412;; Truncation insns
5413
5414(define_expand "truncdfsf2"
5415  [(set (match_operand:SF  0 "s_register_operand")
5416	(float_truncate:SF
5417	 (match_operand:DF 1 "s_register_operand")))]
5418  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5419  ""
5420)
5421
5422;; DFmode to HFmode conversions on targets without a single-step hardware
5423;; instruction for it would have to go through SFmode.  This is dangerous
5424;; as it introduces double rounding.
5425;;
5426;; Disable this pattern unless we are in an unsafe math mode, or we have
5427;; a single-step instruction.
5428
5429(define_expand "truncdfhf2"
5430  [(set (match_operand:HF  0 "s_register_operand")
5431	(float_truncate:HF
5432	 (match_operand:DF 1 "s_register_operand")))]
5433  "(TARGET_EITHER && flag_unsafe_math_optimizations)
5434   || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
5435{
5436  /* We don't have a direct instruction for this, so we must be in
5437     an unsafe math mode, and going via SFmode.  */
5438
5439  if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5440    {
5441      rtx op1;
5442      op1 = convert_to_mode (SFmode, operands[1], 0);
5443      op1 = convert_to_mode (HFmode, op1, 0);
5444      emit_move_insn (operands[0], op1);
5445      DONE;
5446    }
5447  /* Otherwise, we will pick this up as a single instruction with
5448     no intermediary rounding.  */
5449}
5450)
5451
5452;; Zero and sign extension instructions.
5453
5454(define_expand "zero_extend<mode>di2"
5455  [(set (match_operand:DI 0 "s_register_operand" "")
5456	(zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
5457  "TARGET_32BIT <qhs_zextenddi_cond>"
5458  {
5459    rtx res_lo, res_hi, op0_lo, op0_hi;
5460    res_lo = gen_lowpart (SImode, operands[0]);
5461    res_hi = gen_highpart (SImode, operands[0]);
5462    if (can_create_pseudo_p ())
5463      {
5464	op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5465	op0_hi = gen_reg_rtx (SImode);
5466      }
5467    else
5468      {
5469	op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5470	op0_hi = res_hi;
5471      }
5472    if (<MODE>mode != SImode)
5473      emit_insn (gen_rtx_SET (op0_lo,
5474			      gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5475    emit_insn (gen_movsi (op0_hi, const0_rtx));
5476    if (res_lo != op0_lo)
5477      emit_move_insn (res_lo, op0_lo);
5478    if (res_hi != op0_hi)
5479      emit_move_insn (res_hi, op0_hi);
5480    DONE;
5481  }
5482)
5483
5484(define_expand "extend<mode>di2"
5485  [(set (match_operand:DI 0 "s_register_operand" "")
5486	(sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
5487  "TARGET_32BIT <qhs_sextenddi_cond>"
5488  {
5489    rtx res_lo, res_hi, op0_lo, op0_hi;
5490    res_lo = gen_lowpart (SImode, operands[0]);
5491    res_hi = gen_highpart (SImode, operands[0]);
5492    if (can_create_pseudo_p ())
5493      {
5494	op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5495	op0_hi = gen_reg_rtx (SImode);
5496      }
5497    else
5498      {
5499	op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5500	op0_hi = res_hi;
5501      }
5502    if (<MODE>mode != SImode)
5503      emit_insn (gen_rtx_SET (op0_lo,
5504			      gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5505    emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
5506    if (res_lo != op0_lo)
5507      emit_move_insn (res_lo, op0_lo);
5508    if (res_hi != op0_hi)
5509      emit_move_insn (res_hi, op0_hi);
5510    DONE;
5511  }
5512)
5513
5514;; Splits for all extensions to DImode
5515(define_split
5516  [(set (match_operand:DI 0 "s_register_operand" "")
5517        (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5518  "TARGET_32BIT"
5519  [(set (match_dup 0) (match_dup 1))]
5520{
5521  rtx lo_part = gen_lowpart (SImode, operands[0]);
5522  machine_mode src_mode = GET_MODE (operands[1]);
5523
5524  if (src_mode == SImode)
5525    emit_move_insn (lo_part, operands[1]);
5526  else
5527    emit_insn (gen_rtx_SET (lo_part,
5528			    gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5529  operands[0] = gen_highpart (SImode, operands[0]);
5530  operands[1] = const0_rtx;
5531})
5532
5533(define_split
5534  [(set (match_operand:DI 0 "s_register_operand" "")
5535        (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5536  "TARGET_32BIT"
5537  [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5538{
5539  rtx lo_part = gen_lowpart (SImode, operands[0]);
5540  machine_mode src_mode = GET_MODE (operands[1]);
5541
5542  if (src_mode == SImode)
5543    emit_move_insn (lo_part, operands[1]);
5544  else
5545    emit_insn (gen_rtx_SET (lo_part,
5546			    gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5547  operands[1] = lo_part;
5548  operands[0] = gen_highpart (SImode, operands[0]);
5549})
5550
5551(define_expand "zero_extendhisi2"
5552  [(set (match_operand:SI 0 "s_register_operand")
5553	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5554  "TARGET_EITHER"
5555{
5556  if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5557    {
5558      emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5559      DONE;
5560    }
5561  if (!arm_arch6 && !MEM_P (operands[1]))
5562    {
5563      rtx t = gen_lowpart (SImode, operands[1]);
5564      rtx tmp = gen_reg_rtx (SImode);
5565      emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5566      emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
5567      DONE;
5568    }
5569})
5570
5571(define_split
5572  [(set (match_operand:SI 0 "s_register_operand" "")
5573	(zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5574  "!TARGET_THUMB2 && !arm_arch6"
5575  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5576   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5577{
5578  operands[2] = gen_lowpart (SImode, operands[1]);
5579})
5580
5581(define_insn "*arm_zero_extendhisi2"
5582  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5583	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5584  "TARGET_ARM && arm_arch4 && !arm_arch6"
5585  "@
5586   #
5587   ldrh%?\\t%0, %1"
5588  [(set_attr "type" "alu_shift_reg,load_byte")
5589   (set_attr "predicable" "yes")]
5590)
5591
5592(define_insn "*arm_zero_extendhisi2_v6"
5593  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5594	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5595  "TARGET_ARM && arm_arch6"
5596  "@
5597   uxth%?\\t%0, %1
5598   ldrh%?\\t%0, %1"
5599  [(set_attr "predicable" "yes")
5600   (set_attr "type" "extend,load_byte")]
5601)
5602
5603(define_insn "*arm_zero_extendhisi2addsi"
5604  [(set (match_operand:SI 0 "s_register_operand" "=r")
5605	(plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5606		 (match_operand:SI 2 "s_register_operand" "r")))]
5607  "TARGET_INT_SIMD"
5608  "uxtah%?\\t%0, %2, %1"
5609  [(set_attr "type" "alu_shift_reg")
5610   (set_attr "predicable" "yes")]
5611)
5612
5613(define_expand "zero_extendqisi2"
5614  [(set (match_operand:SI 0 "s_register_operand")
5615	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
5616  "TARGET_EITHER"
5617{
5618  if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5619    {
5620      emit_insn (gen_andsi3 (operands[0],
5621			     gen_lowpart (SImode, operands[1]),
5622					  GEN_INT (255)));
5623      DONE;
5624    }
5625  if (!arm_arch6 && !MEM_P (operands[1]))
5626    {
5627      rtx t = gen_lowpart (SImode, operands[1]);
5628      rtx tmp = gen_reg_rtx (SImode);
5629      emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5630      emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
5631      DONE;
5632    }
5633})
5634
5635(define_split
5636  [(set (match_operand:SI 0 "s_register_operand" "")
5637	(zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5638  "!arm_arch6"
5639  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5640   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5641{
5642  operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5643  if (TARGET_ARM)
5644    {
5645      emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
5646      DONE;
5647    }
5648})
5649
5650(define_insn "*arm_zero_extendqisi2"
5651  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5652	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5653  "TARGET_ARM && !arm_arch6"
5654  "@
5655   #
5656   ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5657  [(set_attr "length" "8,4")
5658   (set_attr "type" "alu_shift_reg,load_byte")
5659   (set_attr "predicable" "yes")]
5660)
5661
5662(define_insn "*arm_zero_extendqisi2_v6"
5663  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5664	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5665  "TARGET_ARM && arm_arch6"
5666  "@
5667   uxtb%?\\t%0, %1
5668   ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5669  [(set_attr "type" "extend,load_byte")
5670   (set_attr "predicable" "yes")]
5671)
5672
5673(define_insn "*arm_zero_extendqisi2addsi"
5674  [(set (match_operand:SI 0 "s_register_operand" "=r")
5675	(plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5676		 (match_operand:SI 2 "s_register_operand" "r")))]
5677  "TARGET_INT_SIMD"
5678  "uxtab%?\\t%0, %2, %1"
5679  [(set_attr "predicable" "yes")
5680   (set_attr "type" "alu_shift_reg")]
5681)
5682
5683(define_split
5684  [(set (match_operand:SI 0 "s_register_operand" "")
5685	(zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5686   (clobber (match_operand:SI 2 "s_register_operand" ""))]
5687  "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5688  [(set (match_dup 2) (match_dup 1))
5689   (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5690  ""
5691)
5692
5693(define_split
5694  [(set (match_operand:SI 0 "s_register_operand" "")
5695	(zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5696   (clobber (match_operand:SI 2 "s_register_operand" ""))]
5697  "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5698  [(set (match_dup 2) (match_dup 1))
5699   (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5700  ""
5701)
5702
5703
5704(define_split
5705  [(set (match_operand:SI 0 "s_register_operand" "")
5706	(IOR_XOR:SI (and:SI (ashift:SI
5707			     (match_operand:SI 1 "s_register_operand" "")
5708			     (match_operand:SI 2 "const_int_operand" ""))
5709			    (match_operand:SI 3 "const_int_operand" ""))
5710		    (zero_extend:SI
5711		     (match_operator 5 "subreg_lowpart_operator"
5712		      [(match_operand:SI 4 "s_register_operand" "")]))))]
5713  "TARGET_32BIT
5714   && (UINTVAL (operands[3])
5715       == (GET_MODE_MASK (GET_MODE (operands[5]))
5716           & (GET_MODE_MASK (GET_MODE (operands[5]))
5717	      << (INTVAL (operands[2])))))"
5718  [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5719				  (match_dup 4)))
5720   (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5721  "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
5722)
5723
5724(define_insn "*compareqi_eq0"
5725  [(set (reg:CC_Z CC_REGNUM)
5726	(compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5727			 (const_int 0)))]
5728  "TARGET_32BIT"
5729  "tst%?\\t%0, #255"
5730  [(set_attr "conds" "set")
5731   (set_attr "predicable" "yes")
5732   (set_attr "type" "logic_imm")]
5733)
5734
5735(define_expand "extendhisi2"
5736  [(set (match_operand:SI 0 "s_register_operand")
5737	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5738  "TARGET_EITHER"
5739{
5740  if (TARGET_THUMB1)
5741    {
5742      emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5743      DONE;
5744    }
5745  if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5746    {
5747      emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5748      DONE;
5749    }
5750
5751  if (!arm_arch6 && !MEM_P (operands[1]))
5752    {
5753      rtx t = gen_lowpart (SImode, operands[1]);
5754      rtx tmp = gen_reg_rtx (SImode);
5755      emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5756      emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
5757      DONE;
5758    }
5759})
5760
5761(define_split
5762  [(parallel
5763    [(set (match_operand:SI 0 "register_operand" "")
5764	  (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5765     (clobber (match_scratch:SI 2 ""))])]
5766  "!arm_arch6"
5767  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5768   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5769{
5770  operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5771})
5772
5773;; This pattern will only be used when ldsh is not available
5774(define_expand "extendhisi2_mem"
5775  [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5776   (set (match_dup 3)
5777	(zero_extend:SI (match_dup 7)))
5778   (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5779   (set (match_operand:SI 0 "" "")
5780	(ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5781  "TARGET_ARM"
5782  "
5783  {
5784    rtx mem1, mem2;
5785    rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5786
5787    mem1 = change_address (operands[1], QImode, addr);
5788    mem2 = change_address (operands[1], QImode,
5789			   plus_constant (Pmode, addr, 1));
5790    operands[0] = gen_lowpart (SImode, operands[0]);
5791    operands[1] = mem1;
5792    operands[2] = gen_reg_rtx (SImode);
5793    operands[3] = gen_reg_rtx (SImode);
5794    operands[6] = gen_reg_rtx (SImode);
5795    operands[7] = mem2;
5796
5797    if (BYTES_BIG_ENDIAN)
5798      {
5799	operands[4] = operands[2];
5800	operands[5] = operands[3];
5801      }
5802    else
5803      {
5804	operands[4] = operands[3];
5805	operands[5] = operands[2];
5806      }
5807  }"
5808)
5809
5810(define_split
5811  [(set (match_operand:SI 0 "register_operand" "")
5812	(sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5813  "!arm_arch6"
5814  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5815   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5816{
5817  operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5818})
5819
5820(define_insn "*arm_extendhisi2"
5821  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5822	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5823  "TARGET_ARM && arm_arch4 && !arm_arch6"
5824  "@
5825   #
5826   ldrsh%?\\t%0, %1"
5827  [(set_attr "length" "8,4")
5828   (set_attr "type" "alu_shift_reg,load_byte")
5829   (set_attr "predicable" "yes")]
5830)
5831
5832;; ??? Check Thumb-2 pool range
5833(define_insn "*arm_extendhisi2_v6"
5834  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5835	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5836  "TARGET_32BIT && arm_arch6"
5837  "@
5838   sxth%?\\t%0, %1
5839   ldrsh%?\\t%0, %1"
5840  [(set_attr "type" "extend,load_byte")
5841   (set_attr "predicable" "yes")]
5842)
5843
5844(define_insn "*arm_extendhisi2addsi"
5845  [(set (match_operand:SI 0 "s_register_operand" "=r")
5846	(plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5847		 (match_operand:SI 2 "s_register_operand" "r")))]
5848  "TARGET_INT_SIMD"
5849  "sxtah%?\\t%0, %2, %1"
5850  [(set_attr "type" "alu_shift_reg")]
5851)
5852
5853(define_expand "extendqihi2"
5854  [(set (match_dup 2)
5855	(ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5856		   (const_int 24)))
5857   (set (match_operand:HI 0 "s_register_operand")
5858	(ashiftrt:SI (match_dup 2)
5859		     (const_int 24)))]
5860  "TARGET_ARM"
5861  "
5862  {
5863    if (arm_arch4 && MEM_P (operands[1]))
5864      {
5865	emit_insn (gen_rtx_SET (operands[0],
5866				gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5867	DONE;
5868      }
5869    if (!s_register_operand (operands[1], QImode))
5870      operands[1] = copy_to_mode_reg (QImode, operands[1]);
5871    operands[0] = gen_lowpart (SImode, operands[0]);
5872    operands[1] = gen_lowpart (SImode, operands[1]);
5873    operands[2] = gen_reg_rtx (SImode);
5874  }"
5875)
5876
5877(define_insn "*arm_extendqihi_insn"
5878  [(set (match_operand:HI 0 "s_register_operand" "=r")
5879	(sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5880  "TARGET_ARM && arm_arch4"
5881  "ldrsb%?\\t%0, %1"
5882  [(set_attr "type" "load_byte")
5883   (set_attr "predicable" "yes")]
5884)
5885
5886(define_expand "extendqisi2"
5887  [(set (match_operand:SI 0 "s_register_operand")
5888	(sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5889  "TARGET_EITHER"
5890{
5891  if (!arm_arch4 && MEM_P (operands[1]))
5892    operands[1] = copy_to_mode_reg (QImode, operands[1]);
5893
5894  if (!arm_arch6 && !MEM_P (operands[1]))
5895    {
5896      rtx t = gen_lowpart (SImode, operands[1]);
5897      rtx tmp = gen_reg_rtx (SImode);
5898      emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5899      emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
5900      DONE;
5901    }
5902})
5903
5904(define_split
5905  [(set (match_operand:SI 0 "register_operand" "")
5906	(sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5907  "!arm_arch6"
5908  [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5909   (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5910{
5911  operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5912})
5913
5914(define_insn "*arm_extendqisi"
5915  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5916	(sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5917  "TARGET_ARM && arm_arch4 && !arm_arch6"
5918  "@
5919   #
5920   ldrsb%?\\t%0, %1"
5921  [(set_attr "length" "8,4")
5922   (set_attr "type" "alu_shift_reg,load_byte")
5923   (set_attr "predicable" "yes")]
5924)
5925
5926(define_insn "*arm_extendqisi_v6"
5927  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5928	(sign_extend:SI
5929	 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5930  "TARGET_ARM && arm_arch6"
5931  "@
5932   sxtb%?\\t%0, %1
5933   ldrsb%?\\t%0, %1"
5934  [(set_attr "type" "extend,load_byte")
5935   (set_attr "predicable" "yes")]
5936)
5937
5938(define_insn "*arm_extendqisi2addsi"
5939  [(set (match_operand:SI 0 "s_register_operand" "=r")
5940	(plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5941		 (match_operand:SI 2 "s_register_operand" "r")))]
5942  "TARGET_INT_SIMD"
5943  "sxtab%?\\t%0, %2, %1"
5944  [(set_attr "type" "alu_shift_reg")
5945   (set_attr "predicable" "yes")]
5946)
5947
5948(define_insn "arm_<sup>xtb16"
5949  [(set (match_operand:SI 0 "s_register_operand" "=r")
5950	(unspec:SI
5951	  [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
5952  "TARGET_INT_SIMD"
5953  "<sup>xtb16%?\\t%0, %1"
5954  [(set_attr "predicable" "yes")
5955   (set_attr "type" "alu_dsp_reg")])
5956
5957(define_insn "arm_<simd32_op>"
5958  [(set (match_operand:SI 0 "s_register_operand" "=r")
5959	(unspec:SI
5960	  [(match_operand:SI 1 "s_register_operand" "r")
5961	   (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
5962  "TARGET_INT_SIMD"
5963  "<simd32_op>%?\\t%0, %1, %2"
5964  [(set_attr "predicable" "yes")
5965   (set_attr "type" "alu_dsp_reg")])
5966
5967(define_insn "arm_usada8"
5968  [(set (match_operand:SI 0 "s_register_operand" "=r")
5969	(unspec:SI
5970	  [(match_operand:SI 1 "s_register_operand" "r")
5971	  (match_operand:SI 2 "s_register_operand" "r")
5972	  (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
5973  "TARGET_INT_SIMD"
5974  "usada8%?\\t%0, %1, %2, %3"
5975  [(set_attr "predicable" "yes")
5976   (set_attr "type" "alu_dsp_reg")])
5977
5978(define_insn "arm_<simd32_op>"
5979  [(set (match_operand:DI 0 "s_register_operand" "=r")
5980	(unspec:DI
5981	  [(match_operand:SI 1 "s_register_operand" "r")
5982	   (match_operand:SI 2 "s_register_operand" "r")
5983	   (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
5984  "TARGET_INT_SIMD"
5985  "<simd32_op>%?\\t%Q0, %R0, %1, %2"
5986  [(set_attr "predicable" "yes")
5987   (set_attr "type" "smlald")])
5988
5989(define_insn "arm_<simd32_op>"
5990  [(set (match_operand:SI 0 "s_register_operand" "=r")
5991	(unspec:SI
5992	  [(match_operand:SI 1 "s_register_operand" "r")
5993	   (match_operand:SI 2 "s_register_operand" "r")] SIMD32_GE))
5994   (set (reg:CC APSRGE_REGNUM)
5995	(unspec:CC [(reg:CC APSRGE_REGNUM)] UNSPEC_GE_SET))]
5996  "TARGET_INT_SIMD"
5997  "<simd32_op>%?\\t%0, %1, %2"
5998  [(set_attr "predicable" "yes")
5999   (set_attr "type" "alu_sreg")])
6000
6001(define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6002  [(set (match_operand:SI 0 "s_register_operand" "=r")
6003	(unspec:SI
6004	  [(match_operand:SI 1 "s_register_operand" "r")
6005	   (match_operand:SI 2 "s_register_operand" "r")
6006	   (match_operand:SI 3 "s_register_operand" "r")] SIMD32_TERNOP_Q))]
6007  "TARGET_INT_SIMD && <add_clobber_q_pred>"
6008  "<simd32_op>%?\\t%0, %1, %2, %3"
6009  [(set_attr "predicable" "yes")
6010   (set_attr "type" "alu_sreg")])
6011
6012(define_expand "arm_<simd32_op>"
6013  [(set (match_operand:SI 0 "s_register_operand")
6014	(unspec:SI
6015	  [(match_operand:SI 1 "s_register_operand")
6016	   (match_operand:SI 2 "s_register_operand")
6017	   (match_operand:SI 3 "s_register_operand")] SIMD32_TERNOP_Q))]
6018  "TARGET_INT_SIMD"
6019  {
6020    if (ARM_Q_BIT_READ)
6021      emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6022						operands[2], operands[3]));
6023    else
6024      emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6025					   operands[2], operands[3]));
6026    DONE;
6027  }
6028)
6029
6030(define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6031  [(set (match_operand:SI 0 "s_register_operand" "=r")
6032	(unspec:SI
6033	  [(match_operand:SI 1 "s_register_operand" "r")
6034	   (match_operand:SI 2 "s_register_operand" "r")] SIMD32_BINOP_Q))]
6035  "TARGET_INT_SIMD && <add_clobber_q_pred>"
6036  "<simd32_op>%?\\t%0, %1, %2"
6037  [(set_attr "predicable" "yes")
6038   (set_attr "type" "alu_sreg")])
6039
6040(define_expand "arm_<simd32_op>"
6041  [(set (match_operand:SI 0 "s_register_operand")
6042	(unspec:SI
6043	  [(match_operand:SI 1 "s_register_operand")
6044	   (match_operand:SI 2 "s_register_operand")] SIMD32_BINOP_Q))]
6045  "TARGET_INT_SIMD"
6046  {
6047    if (ARM_Q_BIT_READ)
6048      emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6049						operands[2]));
6050    else
6051      emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6052					   operands[2]));
6053    DONE;
6054  }
6055)
6056
6057(define_insn "arm_<simd32_op><add_clobber_q_name>_insn"
6058  [(set (match_operand:SI 0 "s_register_operand" "=r")
6059	(unspec:SI
6060	  [(match_operand:SI 1 "s_register_operand" "r")
6061	   (match_operand:SI 2 "<sup>sat16_imm" "i")] USSAT16))]
6062  "TARGET_INT_SIMD && <add_clobber_q_pred>"
6063  "<simd32_op>%?\\t%0, %2, %1"
6064  [(set_attr "predicable" "yes")
6065   (set_attr "type" "alu_sreg")])
6066
6067(define_expand "arm_<simd32_op>"
6068  [(set (match_operand:SI 0 "s_register_operand")
6069	(unspec:SI
6070	  [(match_operand:SI 1 "s_register_operand")
6071	   (match_operand:SI 2 "<sup>sat16_imm")] USSAT16))]
6072  "TARGET_INT_SIMD"
6073  {
6074    if (ARM_Q_BIT_READ)
6075      emit_insn (gen_arm_<simd32_op>_setq_insn (operands[0], operands[1],
6076						operands[2]));
6077    else
6078      emit_insn (gen_arm_<simd32_op>_insn (operands[0], operands[1],
6079					   operands[2]));
6080    DONE;
6081  }
6082)
6083
6084(define_insn "arm_sel"
6085  [(set (match_operand:SI 0 "s_register_operand" "=r")
6086	(unspec:SI
6087	  [(match_operand:SI 1 "s_register_operand" "r")
6088	   (match_operand:SI 2 "s_register_operand" "r")
6089	   (reg:CC APSRGE_REGNUM)] UNSPEC_SEL))]
6090  "TARGET_INT_SIMD"
6091  "sel%?\\t%0, %1, %2"
6092  [(set_attr "predicable" "yes")
6093   (set_attr "type" "alu_sreg")])
6094
6095(define_expand "extendsfdf2"
6096  [(set (match_operand:DF                  0 "s_register_operand")
6097	(float_extend:DF (match_operand:SF 1 "s_register_operand")))]
6098  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
6099  ""
6100)
6101
6102;; HFmode -> DFmode conversions where we don't have an instruction for it
6103;; must go through SFmode.
6104;;
6105;; This is always safe for an extend.
6106
6107(define_expand "extendhfdf2"
6108  [(set (match_operand:DF		   0 "s_register_operand")
6109	(float_extend:DF (match_operand:HF 1 "s_register_operand")))]
6110  "TARGET_EITHER"
6111{
6112  /* We don't have a direct instruction for this, so go via SFmode.  */
6113  if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
6114    {
6115      rtx op1;
6116      op1 = convert_to_mode (SFmode, operands[1], 0);
6117      op1 = convert_to_mode (DFmode, op1, 0);
6118      emit_insn (gen_movdf (operands[0], op1));
6119      DONE;
6120    }
6121  /* Otherwise, we're done producing RTL and will pick up the correct
6122     pattern to do this with one rounding-step in a single instruction.  */
6123}
6124)
6125
6126;; Move insns (including loads and stores)
6127
6128;; XXX Just some ideas about movti.
6129;; I don't think these are a good idea on the arm, there just aren't enough
6130;; registers
6131;;(define_expand "loadti"
6132;;  [(set (match_operand:TI 0 "s_register_operand")
6133;;	(mem:TI (match_operand:SI 1 "address_operand")))]
6134;;  "" "")
6135
6136;;(define_expand "storeti"
6137;;  [(set (mem:TI (match_operand:TI 0 "address_operand"))
6138;;	(match_operand:TI 1 "s_register_operand"))]
6139;;  "" "")
6140
6141;;(define_expand "movti"
6142;;  [(set (match_operand:TI 0 "general_operand")
6143;;	(match_operand:TI 1 "general_operand"))]
6144;;  ""
6145;;  "
6146;;{
6147;;  rtx insn;
6148;;
6149;;  if (MEM_P (operands[0]) && MEM_P (operands[1]))
6150;;    operands[1] = copy_to_reg (operands[1]);
6151;;  if (MEM_P (operands[0]))
6152;;    insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
6153;;  else if (MEM_P (operands[1]))
6154;;    insn = gen_loadti (operands[0], XEXP (operands[1], 0));
6155;;  else
6156;;    FAIL;
6157;;
6158;;  emit_insn (insn);
6159;;  DONE;
6160;;}")
6161
6162;; Recognize garbage generated above.
6163
6164;;(define_insn ""
6165;;  [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
6166;;	(match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
6167;;  ""
6168;;  "*
6169;;  {
6170;;    register mem = (which_alternative < 3);
6171;;    register const char *template;
6172;;
6173;;    operands[mem] = XEXP (operands[mem], 0);
6174;;    switch (which_alternative)
6175;;      {
6176;;      case 0: template = \"ldmdb\\t%1!, %M0\"; break;
6177;;      case 1: template = \"ldmia\\t%1!, %M0\"; break;
6178;;      case 2: template = \"ldmia\\t%1, %M0\"; break;
6179;;      case 3: template = \"stmdb\\t%0!, %M1\"; break;
6180;;      case 4: template = \"stmia\\t%0!, %M1\"; break;
6181;;      case 5: template = \"stmia\\t%0, %M1\"; break;
6182;;      }
6183;;    output_asm_insn (template, operands);
6184;;    return \"\";
6185;;  }")
6186
6187(define_expand "movdi"
6188  [(set (match_operand:DI 0 "general_operand")
6189	(match_operand:DI 1 "general_operand"))]
6190  "TARGET_EITHER"
6191  "
6192  gcc_checking_assert (aligned_operand (operands[0], DImode));
6193  gcc_checking_assert (aligned_operand (operands[1], DImode));
6194  if (can_create_pseudo_p ())
6195    {
6196      if (!REG_P (operands[0]))
6197	operands[1] = force_reg (DImode, operands[1]);
6198    }
6199  if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
6200      && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
6201    {
6202      /* Avoid LDRD's into an odd-numbered register pair in ARM state
6203	 when expanding function calls.  */
6204      gcc_assert (can_create_pseudo_p ());
6205      if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
6206	{
6207	  /* Perform load into legal reg pair first, then move.  */
6208	  rtx reg = gen_reg_rtx (DImode);
6209	  emit_insn (gen_movdi (reg, operands[1]));
6210	  operands[1] = reg;
6211	}
6212      emit_move_insn (gen_lowpart (SImode, operands[0]),
6213		      gen_lowpart (SImode, operands[1]));
6214      emit_move_insn (gen_highpart (SImode, operands[0]),
6215		      gen_highpart (SImode, operands[1]));
6216      DONE;
6217    }
6218  else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
6219	   && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
6220    {
6221      /* Avoid STRD's from an odd-numbered register pair in ARM state
6222	 when expanding function prologue.  */
6223      gcc_assert (can_create_pseudo_p ());
6224      rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
6225		       ? gen_reg_rtx (DImode)
6226		       : operands[0];
6227      emit_move_insn (gen_lowpart (SImode, split_dest),
6228		      gen_lowpart (SImode, operands[1]));
6229      emit_move_insn (gen_highpart (SImode, split_dest),
6230		      gen_highpart (SImode, operands[1]));
6231      if (split_dest != operands[0])
6232	emit_insn (gen_movdi (operands[0], split_dest));
6233      DONE;
6234    }
6235  "
6236)
6237
6238(define_insn "*arm_movdi"
6239  [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
6240	(match_operand:DI 1 "di_operand"              "rDa,Db,Dc,mi,r"))]
6241  "TARGET_32BIT
6242   && !(TARGET_HARD_FLOAT)
6243   && !(TARGET_HAVE_MVE || TARGET_HAVE_MVE_FLOAT)
6244   && !TARGET_IWMMXT
6245   && (   register_operand (operands[0], DImode)
6246       || register_operand (operands[1], DImode))"
6247  "*
6248  switch (which_alternative)
6249    {
6250    case 0:
6251    case 1:
6252    case 2:
6253      return \"#\";
6254    case 3:
6255      /* Cannot load it directly, split to load it via MOV / MOVT.  */
6256      if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6257	return \"#\";
6258      /* Fall through.  */
6259    default:
6260      return output_move_double (operands, true, NULL);
6261    }
6262  "
6263  [(set_attr "length" "8,12,16,8,8")
6264   (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6265   (set_attr "arm_pool_range" "*,*,*,1020,*")
6266   (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6267   (set_attr "thumb2_pool_range" "*,*,*,4094,*")
6268   (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6269)
6270
6271(define_split
6272  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6273	(match_operand:ANY64 1 "immediate_operand" ""))]
6274  "TARGET_32BIT
6275   && reload_completed
6276   && (arm_disable_literal_pool
6277       || (arm_const_double_inline_cost (operands[1])
6278	   <= arm_max_const_double_inline_cost ()))"
6279  [(const_int 0)]
6280  "
6281  arm_split_constant (SET, SImode, curr_insn,
6282		      INTVAL (gen_lowpart (SImode, operands[1])),
6283		      gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
6284  arm_split_constant (SET, SImode, curr_insn,
6285		      INTVAL (gen_highpart_mode (SImode,
6286						 GET_MODE (operands[0]),
6287						 operands[1])),
6288		      gen_highpart (SImode, operands[0]), NULL_RTX, 0);
6289  DONE;
6290  "
6291)
6292
6293; If optimizing for size, or if we have load delay slots, then
6294; we want to split the constant into two separate operations.
6295; In both cases this may split a trivial part into a single data op
6296; leaving a single complex constant to load.  We can also get longer
6297; offsets in a LDR which means we get better chances of sharing the pool
6298; entries.  Finally, we can normally do a better job of scheduling
6299; LDR instructions than we can with LDM.
6300; This pattern will only match if the one above did not.
6301(define_split
6302  [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
6303	(match_operand:ANY64 1 "const_double_operand" ""))]
6304  "TARGET_ARM && reload_completed
6305   && arm_const_double_by_parts (operands[1])"
6306  [(set (match_dup 0) (match_dup 1))
6307   (set (match_dup 2) (match_dup 3))]
6308  "
6309  operands[2] = gen_highpart (SImode, operands[0]);
6310  operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
6311				   operands[1]);
6312  operands[0] = gen_lowpart (SImode, operands[0]);
6313  operands[1] = gen_lowpart (SImode, operands[1]);
6314  "
6315)
6316
6317(define_split
6318  [(set (match_operand:ANY64_BF 0 "arm_general_register_operand" "")
6319	(match_operand:ANY64_BF 1 "arm_general_register_operand" ""))]
6320  "TARGET_EITHER && reload_completed"
6321  [(set (match_dup 0) (match_dup 1))
6322   (set (match_dup 2) (match_dup 3))]
6323  "
6324  operands[2] = gen_highpart (SImode, operands[0]);
6325  operands[3] = gen_highpart (SImode, operands[1]);
6326  operands[0] = gen_lowpart (SImode, operands[0]);
6327  operands[1] = gen_lowpart (SImode, operands[1]);
6328
6329  /* Handle a partial overlap.  */
6330  if (rtx_equal_p (operands[0], operands[3]))
6331    {
6332      rtx tmp0 = operands[0];
6333      rtx tmp1 = operands[1];
6334
6335      operands[0] = operands[2];
6336      operands[1] = operands[3];
6337      operands[2] = tmp0;
6338      operands[3] = tmp1;
6339    }
6340  "
6341)
6342
6343;; We can't actually do base+index doubleword loads if the index and
6344;; destination overlap.  Split here so that we at least have a chance to
6345;; schedule.
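;; For example (illustrative registers), with destination pair r4:r5, base r4
;; and index r5, the split first forms the address in the low destination
;; register and then does a plain register-addressed load, roughly:
;;   add  r4, r4, r5
;;   ldrd r4, r5, [r4]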
6346(define_split
6347  [(set (match_operand:DI 0 "s_register_operand" "")
6348	(mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
6349			 (match_operand:SI 2 "s_register_operand" ""))))]
6350  "TARGET_LDRD
6351  && reg_overlap_mentioned_p (operands[0], operands[1])
6352  && reg_overlap_mentioned_p (operands[0], operands[2])"
6353  [(set (match_dup 4)
6354	(plus:SI (match_dup 1)
6355		 (match_dup 2)))
6356   (set (match_dup 0)
6357	(mem:DI (match_dup 4)))]
6358  "
6359  operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
6360  "
6361)
6362
6363(define_expand "movsi"
6364  [(set (match_operand:SI 0 "general_operand")
6365        (match_operand:SI 1 "general_operand"))]
6366  "TARGET_EITHER"
6367  "
6368  {
6369  rtx base, offset, tmp;
6370
6371  gcc_checking_assert (aligned_operand (operands[0], SImode));
6372  gcc_checking_assert (aligned_operand (operands[1], SImode));
6373  if (TARGET_32BIT || TARGET_HAVE_MOVT)
6374    {
6375      /* Everything except mem = const or mem = mem can be done easily.  */
6376      if (MEM_P (operands[0]))
6377        operands[1] = force_reg (SImode, operands[1]);
6378      if (arm_general_register_operand (operands[0], SImode)
6379	  && CONST_INT_P (operands[1])
6380          && !(const_ok_for_arm (INTVAL (operands[1]))
6381               || const_ok_for_arm (~INTVAL (operands[1]))))
6382        {
6383	   if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
6384	     {
6385		emit_insn (gen_rtx_SET (operands[0], operands[1]));
6386		DONE;
6387	     }
6388	  else
6389	     {
6390		arm_split_constant (SET, SImode, NULL_RTX,
6391	                            INTVAL (operands[1]), operands[0], NULL_RTX,
6392			            optimize && can_create_pseudo_p ());
6393		DONE;
6394	     }
6395        }
6396    }
6397  else /* Target doesn't have MOVT...  */
6398    {
6399      if (can_create_pseudo_p ())
6400        {
6401          if (!REG_P (operands[0]))
6402	    operands[1] = force_reg (SImode, operands[1]);
6403        }
6404    }
6405
6406  split_const (operands[1], &base, &offset);
6407  if (INTVAL (offset) != 0
6408      && targetm.cannot_force_const_mem (SImode, operands[1]))
6409    {
6410      tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
6411      emit_move_insn (tmp, base);
6412      emit_insn (gen_addsi3 (operands[0], tmp, offset));
6413      DONE;
6414    }
6415
6416  tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
6417
6418  /* Recognize the case where operands[1] is a reference to thread-local
6419     data and load its address to a register.  Offsets have been split off
6420     already.  */
6421  if (arm_tls_referenced_p (operands[1]))
6422    operands[1] = legitimize_tls_address (operands[1], tmp);
6423  else if (flag_pic
6424	   && (CONSTANT_P (operands[1])
6425	       || symbol_mentioned_p (operands[1])
6426	       || label_mentioned_p (operands[1])))
6427    operands[1] =
6428      legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
6429  }
6430  "
6431)
6432
6433;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
6434;; LO_SUM adds in the high bits.  Fortunately these are opaque operations
6435;; so this does not matter.
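;; For illustration, loading the address of a symbol `sym' (an arbitrary name)
;; comes out as the usual pair
;;   movw r0, #:lower16:sym
;;   movt r0, #:upper16:sym
;; where the MOVT half corresponds to the lo_sum pattern below.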
6436(define_insn "*arm_movt"
6437  [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
6438	(lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
6439		   (match_operand:SI 2 "general_operand"      "i,i")))]
6440  "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
6441  "@
6442   movt%?\t%0, #:upper16:%c2
6443   movt\t%0, #:upper16:%c2"
6444  [(set_attr "arch"  "32,v8mb")
6445   (set_attr "predicable" "yes")
6446   (set_attr "length" "4")
6447   (set_attr "type" "alu_sreg")]
6448)
6449
6450(define_insn "*arm_movsi_insn"
6451  [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
6452	(match_operand:SI 1 "general_operand"      "rk, I,K,j,mi,rk"))]
6453  "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
6454   && (   register_operand (operands[0], SImode)
6455       || register_operand (operands[1], SImode))"
6456  "@
6457   mov%?\\t%0, %1
6458   mov%?\\t%0, %1
6459   mvn%?\\t%0, #%B1
6460   movw%?\\t%0, %1
6461   ldr%?\\t%0, %1
6462   str%?\\t%1, %0"
6463  [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
6464   (set_attr "predicable" "yes")
6465   (set_attr "arch" "*,*,*,v6t2,*,*")
6466   (set_attr "pool_range" "*,*,*,*,4096,*")
6467   (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
6468)
6469
6470(define_split
6471  [(set (match_operand:SI 0 "arm_general_register_operand" "")
6472	(match_operand:SI 1 "const_int_operand" ""))]
6473  "(TARGET_32BIT || TARGET_HAVE_MOVT)
6474  && (!(const_ok_for_arm (INTVAL (operands[1]))
6475        || const_ok_for_arm (~INTVAL (operands[1]))))"
6476  [(clobber (const_int 0))]
6477  "
6478  arm_split_constant (SET, SImode, NULL_RTX,
6479                      INTVAL (operands[1]), operands[0], NULL_RTX, 0);
6480  DONE;
6481  "
6482)
6483
6484;; A normal way to do (symbol + offset) requires at least three instructions
6485;; (depending on how big the offset is), as below:
6486;; movw r0, #:lower16:g
6487;; movt r0, #:upper16:g
6488;; adds r0, #4
6489;;
6490;; A better way would be:
6491;; movw r0, #:lower16:g+4
6492;; movt r0, #:upper16:g+4
6493;;
6494;; The limitation of this approach is that the offset must fit in a 16-bit
6495;; signed value, because the current assembler only supports REL type
6496;; relocations for this case.  If the more powerful RELA type is supported
6497;; in the future, we should update this pattern to use the better sequence.
6498(define_split
6499  [(set (match_operand:SI 0 "arm_general_register_operand" "")
6500	(const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
6501			   (match_operand:SI 2 "const_int_operand" ""))))]
6502  "TARGET_THUMB
6503   && TARGET_HAVE_MOVT
6504   && arm_disable_literal_pool
6505   && reload_completed
6506   && GET_CODE (operands[1]) == SYMBOL_REF"
6507  [(clobber (const_int 0))]
6508  "
6509    int offset = INTVAL (operands[2]);
6510
6511    if (offset < -0x8000 || offset > 0x7fff)
6512      {
6513	arm_emit_movpair (operands[0], operands[1]);
6514	emit_insn (gen_rtx_SET (operands[0],
6515				gen_rtx_PLUS (SImode, operands[0], operands[2])));
6516      }
6517    else
6518      {
6519	rtx op = gen_rtx_CONST (SImode,
6520				gen_rtx_PLUS (SImode, operands[1], operands[2]));
6521	arm_emit_movpair (operands[0], op);
6522      }
6523  "
6524)
6525
6526;; Split symbol_refs at a later stage (after cprop) instead of generating
6527;; the movt/movw pair directly at expand time.  Otherwise the corresponding
6528;; high and lo_sum would be merged back into a memory load at cprop.  When
6529;; the default is to prefer movt/movw over a load from the constant pool,
6530;; this gives better performance.
6531(define_split
6532  [(set (match_operand:SI 0 "arm_general_register_operand" "")
6533       (match_operand:SI 1 "general_operand" ""))]
6534  "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
6535   && !target_word_relocations
6536   && !arm_tls_referenced_p (operands[1])"
6537  [(clobber (const_int 0))]
6538{
6539  arm_emit_movpair (operands[0], operands[1]);
6540  DONE;
6541})
6542
6543;; When generating pic, we need to load the symbol offset into a register.
6544;; So that the optimizer does not confuse this with a normal symbol load
6545;; we use an unspec.  The offset will be loaded from a constant pool entry,
6546;; since that is the only type of relocation we can use.
6547
6548;; Wrap calculation of the whole PIC address in a single pattern for the
6549;; benefit of optimizers, particularly, PRE and HOIST.  Calculation of
6550;; a PIC address involves two loads from memory, so we want to CSE it
6551;; as often as possible.
6552;; This pattern will be split into one of the pic_load_addr_* patterns
6553;; and a move after GCSE optimizations.
6554;;
6555;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
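;; As an illustrative sketch (register names arbitrary, PIC register shown
;; as r_pic), the whole calculation amounts to two dependent loads:
;;   ldr r3, .LCn          @ GOT offset of the symbol, from the literal pool
;;   ldr r0, [r_pic, r3]   @ the symbol's address, from the GOT
;; which is why CSEing the combined pattern is worthwhile.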
6556(define_expand "calculate_pic_address"
6557  [(set (match_operand:SI 0 "register_operand")
6558	(mem:SI (plus:SI (match_operand:SI 1 "register_operand")
6559			 (unspec:SI [(match_operand:SI 2 "" "")]
6560				    UNSPEC_PIC_SYM))))]
6561  "flag_pic"
6562)
6563
6564;; Split calculate_pic_address into pic_load_addr_* and a move.
6565(define_split
6566  [(set (match_operand:SI 0 "register_operand" "")
6567	(mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6568			 (unspec:SI [(match_operand:SI 2 "" "")]
6569				    UNSPEC_PIC_SYM))))]
6570  "flag_pic"
6571  [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6572   (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6573  "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6574)
6575
6576;; operand1 is the memory address to go into
6577;; pic_load_addr_32bit.
6578;; operand2 is the PIC label to be emitted
6579;; from pic_add_dot_plus_eight.
6580;; We do this to allow hoisting of the entire insn.
6581(define_insn_and_split "pic_load_addr_unified"
6582  [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6583	(unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6584		    (match_operand:SI 2 "" "")]
6585		    UNSPEC_PIC_UNIFIED))]
6586 "flag_pic"
6587 "#"
6588 "&& reload_completed"
6589 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6590  (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6591       		     		 (match_dup 2)] UNSPEC_PIC_BASE))]
6592 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6593 [(set_attr "type" "load_4,load_4,load_4")
6594  (set_attr "pool_range" "4096,4094,1022")
6595  (set_attr "neg_pool_range" "4084,0,0")
6596  (set_attr "arch"  "a,t2,t1")
6597  (set_attr "length" "8,6,4")]
6598)
6599
6600;; The rather odd constraints on the following are to force reload to leave
6601;; the insn alone, and to force the minipool generation pass to then move
6602;; the GOT symbol to memory.
6603
6604(define_insn "pic_load_addr_32bit"
6605  [(set (match_operand:SI 0 "s_register_operand" "=r")
6606	(unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6607  "TARGET_32BIT && flag_pic"
6608  "ldr%?\\t%0, %1"
6609  [(set_attr "type" "load_4")
6610   (set (attr "pool_range")
6611	(if_then_else (eq_attr "is_thumb" "no")
6612		      (const_int 4096)
6613		      (const_int 4094)))
6614   (set (attr "neg_pool_range")
6615	(if_then_else (eq_attr "is_thumb" "no")
6616		      (const_int 4084)
6617		      (const_int 0)))]
6618)
6619
6620(define_insn "pic_load_addr_thumb1"
6621  [(set (match_operand:SI 0 "s_register_operand" "=l")
6622	(unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6623  "TARGET_THUMB1 && flag_pic"
6624  "ldr\\t%0, %1"
6625  [(set_attr "type" "load_4")
6626   (set (attr "pool_range") (const_int 1018))]
6627)
6628
6629(define_insn "pic_add_dot_plus_four"
6630  [(set (match_operand:SI 0 "register_operand" "=r")
6631	(unspec:SI [(match_operand:SI 1 "register_operand" "0")
6632		    (const_int 4)
6633		    (match_operand 2 "" "")]
6634		   UNSPEC_PIC_BASE))]
6635  "TARGET_THUMB"
6636  "*
6637  (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6638				     INTVAL (operands[2]));
6639  return \"add\\t%0, %|pc\";
6640  "
6641  [(set_attr "length" "2")
6642   (set_attr "type" "alu_sreg")]
6643)
6644
6645(define_insn "pic_add_dot_plus_eight"
6646  [(set (match_operand:SI 0 "register_operand" "=r")
6647	(unspec:SI [(match_operand:SI 1 "register_operand" "r")
6648		    (const_int 8)
6649		    (match_operand 2 "" "")]
6650		   UNSPEC_PIC_BASE))]
6651  "TARGET_ARM"
6652  "*
6653    (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6654				       INTVAL (operands[2]));
6655    return \"add%?\\t%0, %|pc, %1\";
6656  "
6657  [(set_attr "predicable" "yes")
6658   (set_attr "type" "alu_sreg")]
6659)
6660
6661(define_insn "tls_load_dot_plus_eight"
6662  [(set (match_operand:SI 0 "register_operand" "=r")
6663	(mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6664			    (const_int 8)
6665			    (match_operand 2 "" "")]
6666			   UNSPEC_PIC_BASE)))]
6667  "TARGET_ARM"
6668  "*
6669    (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6670				       INTVAL (operands[2]));
6671    return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6672  "
6673  [(set_attr "predicable" "yes")
6674   (set_attr "type" "load_4")]
6675)
6676
6677;; PIC references to local variables can generate pic_add_dot_plus_eight
6678;; followed by a load.  These sequences can be crunched down to
6679;; tls_load_dot_plus_eight by a peephole.
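;; For example (label and register names illustrative), the pair
;;   .LPIC0: add r0, pc, r1
;;           ldr r2, [r0]
;; is collapsed into the single
;;   .LPIC0: ldr r2, [pc, r1]
;; when r0 is dead after the load.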
6680
6681(define_peephole2
6682  [(set (match_operand:SI 0 "register_operand" "")
6683	(unspec:SI [(match_operand:SI 3 "register_operand" "")
6684		    (const_int 8)
6685		    (match_operand 1 "" "")]
6686		   UNSPEC_PIC_BASE))
6687   (set (match_operand:SI 2 "arm_general_register_operand" "")
6688	(mem:SI (match_dup 0)))]
6689  "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6690  [(set (match_dup 2)
6691	(mem:SI (unspec:SI [(match_dup 3)
6692			    (const_int 8)
6693			    (match_dup 1)]
6694			   UNSPEC_PIC_BASE)))]
6695  ""
6696)
6697
6698(define_insn "pic_offset_arm"
6699  [(set (match_operand:SI 0 "register_operand" "=r")
6700	(mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6701			 (unspec:SI [(match_operand:SI 2 "" "X")]
6702				    UNSPEC_PIC_OFFSET))))]
6703  "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6704  "ldr%?\\t%0, [%1,%2]"
6705  [(set_attr "type" "load_4")]
6706)
6707
6708(define_expand "builtin_setjmp_receiver"
6709  [(label_ref (match_operand 0 "" ""))]
6710  "flag_pic"
6711  "
6712{
6713  /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6714     register.  */
6715  if (arm_pic_register != INVALID_REGNUM)
6716    arm_load_pic_register (1UL << 3, NULL_RTX);
6717  DONE;
6718}")
6719
6720;; If copying one reg to another we can set the condition codes according to
6721;; its value.  Such a move is common after a return from a subroutine when
6722;; the result is being tested against zero.
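;; For example, instead of
;;   mov r0, r1
;;   cmp r0, #0
;; a single flag-setting copy does both jobs:
;;   subs r0, r1, #0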
6723
6724(define_insn "*movsi_compare0"
6725  [(set (reg:CC CC_REGNUM)
6726	(compare:CC (match_operand:SI 1 "s_register_operand" "0,0,l,rk,rk")
6727		    (const_int 0)))
6728   (set (match_operand:SI 0 "s_register_operand" "=l,rk,l,r,rk")
6729	(match_dup 1))]
6730  "TARGET_32BIT"
6731  "@
6732   cmp%?\\t%0, #0
6733   cmp%?\\t%0, #0
6734   subs%?\\t%0, %1, #0
6735   subs%?\\t%0, %1, #0
6736   subs%?\\t%0, %1, #0"
6737  [(set_attr "conds" "set")
6738   (set_attr "arch" "t2,*,t2,t2,a")
6739   (set_attr "type" "alus_imm")
6740   (set_attr "length" "2,4,2,4,4")]
6741)
6742
6743;; Subroutine to store a half word from a register into memory.
6744;; Operand 0 is the source register (HImode)
6745;; Operand 1 is the destination address in a register (SImode)
6746
6747;; In both this routine and the next, we must be careful not to spill
6748;; a memory address of reg+large_const into a separate PLUS insn, since this
6749;; can generate unrecognizable rtl.
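;; As a rough sketch (register choices arbitrary), storing the HImode value
;; in r1 at the address in r0 without STRH becomes two byte stores:
;;   strb r1, [r0]          @ low byte
;;   mov  r3, r1, asr #8
;;   strb r3, [r0, #1]      @ high byte
;; (little-endian case; storehi_bigend swaps the byte offsets).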
6750
6751(define_expand "storehi"
6752  [;; store the low byte
6753   (set (match_operand 1 "" "") (match_dup 3))
6754   ;; extract the high byte
6755   (set (match_dup 2)
6756	(ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6757   ;; store the high byte
6758   (set (match_dup 4) (match_dup 5))]
6759  "TARGET_ARM"
6760  "
6761  {
6762    rtx op1 = operands[1];
6763    rtx addr = XEXP (op1, 0);
6764    enum rtx_code code = GET_CODE (addr);
6765
6766    if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6767	|| code == MINUS)
6768      op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6769
6770    operands[4] = adjust_address (op1, QImode, 1);
6771    operands[1] = adjust_address (operands[1], QImode, 0);
6772    operands[3] = gen_lowpart (QImode, operands[0]);
6773    operands[0] = gen_lowpart (SImode, operands[0]);
6774    operands[2] = gen_reg_rtx (SImode);
6775    operands[5] = gen_lowpart (QImode, operands[2]);
6776  }"
6777)
6778
6779(define_expand "storehi_bigend"
6780  [(set (match_dup 4) (match_dup 3))
6781   (set (match_dup 2)
6782	(ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6783   (set (match_operand 1 "" "")	(match_dup 5))]
6784  "TARGET_ARM"
6785  "
6786  {
6787    rtx op1 = operands[1];
6788    rtx addr = XEXP (op1, 0);
6789    enum rtx_code code = GET_CODE (addr);
6790
6791    if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6792	|| code == MINUS)
6793      op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6794
6795    operands[4] = adjust_address (op1, QImode, 1);
6796    operands[1] = adjust_address (operands[1], QImode, 0);
6797    operands[3] = gen_lowpart (QImode, operands[0]);
6798    operands[0] = gen_lowpart (SImode, operands[0]);
6799    operands[2] = gen_reg_rtx (SImode);
6800    operands[5] = gen_lowpart (QImode, operands[2]);
6801  }"
6802)
6803
6804;; Subroutine to store a half word integer constant into memory.
6805(define_expand "storeinthi"
6806  [(set (match_operand 0 "" "")
6807	(match_operand 1 "" ""))
6808   (set (match_dup 3) (match_dup 2))]
6809  "TARGET_ARM"
6810  "
6811  {
6812    HOST_WIDE_INT value = INTVAL (operands[1]);
6813    rtx addr = XEXP (operands[0], 0);
6814    rtx op0 = operands[0];
6815    enum rtx_code code = GET_CODE (addr);
6816
6817    if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6818	|| code == MINUS)
6819      op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6820
6821    operands[1] = gen_reg_rtx (SImode);
6822    if (BYTES_BIG_ENDIAN)
6823      {
6824	emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6825	if ((value & 255) == ((value >> 8) & 255))
6826	  operands[2] = operands[1];
6827	else
6828	  {
6829	    operands[2] = gen_reg_rtx (SImode);
6830	    emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
6831	  }
6832      }
6833    else
6834      {
6835	emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6836	if ((value & 255) == ((value >> 8) & 255))
6837	  operands[2] = operands[1];
6838	else
6839	  {
6840	    operands[2] = gen_reg_rtx (SImode);
6841	    emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6842	  }
6843      }
6844
6845    operands[3] = adjust_address (op0, QImode, 1);
6846    operands[0] = adjust_address (operands[0], QImode, 0);
6847    operands[2] = gen_lowpart (QImode, operands[2]);
6848    operands[1] = gen_lowpart (QImode, operands[1]);
6849  }"
6850)
6851
6852(define_expand "storehi_single_op"
6853  [(set (match_operand:HI 0 "memory_operand")
6854	(match_operand:HI 1 "general_operand"))]
6855  "TARGET_32BIT && arm_arch4"
6856  "
6857  if (!s_register_operand (operands[1], HImode))
6858    operands[1] = copy_to_mode_reg (HImode, operands[1]);
6859  "
6860)
6861
6862(define_expand "movhi"
6863  [(set (match_operand:HI 0 "general_operand")
6864	(match_operand:HI 1 "general_operand"))]
6865  "TARGET_EITHER"
6866  "
6867  gcc_checking_assert (aligned_operand (operands[0], HImode));
6868  gcc_checking_assert (aligned_operand (operands[1], HImode));
6869  if (TARGET_ARM)
6870    {
6871      if (can_create_pseudo_p ())
6872        {
6873          if (MEM_P (operands[0]))
6874	    {
6875	      if (arm_arch4)
6876	        {
6877	          emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6878	          DONE;
6879	        }
6880	      if (CONST_INT_P (operands[1]))
6881	        emit_insn (gen_storeinthi (operands[0], operands[1]));
6882	      else
6883	        {
6884	          if (MEM_P (operands[1]))
6885		    operands[1] = force_reg (HImode, operands[1]);
6886	          if (BYTES_BIG_ENDIAN)
6887		    emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6888	          else
6889		   emit_insn (gen_storehi (operands[1], operands[0]));
6890	        }
6891	      DONE;
6892	    }
6893          /* Sign extend a constant, and keep it in an SImode reg.  */
6894          else if (CONST_INT_P (operands[1]))
6895	    {
6896	      rtx reg = gen_reg_rtx (SImode);
6897	      HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6898
6899	      /* If the constant is already valid, leave it alone.  */
6900	      if (!const_ok_for_arm (val))
6901	        {
6902	          /* If setting all the top bits will make the constant
6903		     loadable in a single instruction, then set them.
6904		     Otherwise, sign extend the number.  */
6905
6906	          if (const_ok_for_arm (~(val | ~0xffff)))
6907		    val |= ~0xffff;
6908	          else if (val & 0x8000)
6909		    val |= ~0xffff;
6910	        }
6911
6912	      emit_insn (gen_movsi (reg, GEN_INT (val)));
6913	      operands[1] = gen_lowpart (HImode, reg);
6914	    }
6915	  else if (arm_arch4 && optimize && can_create_pseudo_p ()
6916		   && MEM_P (operands[1]))
6917	    {
6918	      rtx reg = gen_reg_rtx (SImode);
6919
6920	      emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6921	      operands[1] = gen_lowpart (HImode, reg);
6922	    }
6923          else if (!arm_arch4)
6924	    {
6925	      if (MEM_P (operands[1]))
6926	        {
6927		  rtx base;
6928		  rtx offset = const0_rtx;
6929		  rtx reg = gen_reg_rtx (SImode);
6930
6931		  if ((REG_P (base = XEXP (operands[1], 0))
6932		       || (GET_CODE (base) == PLUS
6933			   && (CONST_INT_P (offset = XEXP (base, 1)))
6934                           && ((INTVAL(offset) & 1) != 1)
6935			   && REG_P (base = XEXP (base, 0))))
6936		      && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
6937		    {
6938		      rtx new_rtx;
6939
6940		      new_rtx = widen_memory_access (operands[1], SImode,
6941						     ((INTVAL (offset) & ~3)
6942						      - INTVAL (offset)));
6943		      emit_insn (gen_movsi (reg, new_rtx));
6944		      if (((INTVAL (offset) & 2) != 0)
6945			  ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6946			{
6947			  rtx reg2 = gen_reg_rtx (SImode);
6948
6949			  emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6950			  reg = reg2;
6951			}
6952		    }
6953		  else
6954		    emit_insn (gen_movhi_bytes (reg, operands[1]));
6955
6956		  operands[1] = gen_lowpart (HImode, reg);
6957	       }
6958	   }
6959        }
6960      /* Handle loading a large integer during reload.  */
6961      else if (CONST_INT_P (operands[1])
6962	       && !const_ok_for_arm (INTVAL (operands[1]))
6963	       && !const_ok_for_arm (~INTVAL (operands[1])))
6964        {
6965          /* Writing a constant to memory needs a scratch, which should
6966	     be handled with SECONDARY_RELOADs.  */
6967          gcc_assert (REG_P (operands[0]));
6968
6969          operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6970          emit_insn (gen_movsi (operands[0], operands[1]));
6971          DONE;
6972       }
6973    }
6974  else if (TARGET_THUMB2)
6975    {
6976      /* Thumb-2 can do everything except mem=mem and mem=const easily.  */
6977      if (can_create_pseudo_p ())
6978	{
6979	  if (!REG_P (operands[0]))
6980	    operands[1] = force_reg (HImode, operands[1]);
6981          /* Zero extend a constant, and keep it in an SImode reg.  */
6982          else if (CONST_INT_P (operands[1]))
6983	    {
6984	      rtx reg = gen_reg_rtx (SImode);
6985	      HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6986
6987	      emit_insn (gen_movsi (reg, GEN_INT (val)));
6988	      operands[1] = gen_lowpart (HImode, reg);
6989	    }
6990	}
6991    }
6992  else /* TARGET_THUMB1 */
6993    {
6994      if (can_create_pseudo_p ())
6995        {
6996	  if (CONST_INT_P (operands[1]))
6997	    {
6998	      rtx reg = gen_reg_rtx (SImode);
6999
7000	      emit_insn (gen_movsi (reg, operands[1]));
7001	      operands[1] = gen_lowpart (HImode, reg);
7002	    }
7003
7004          /* ??? We shouldn't really get invalid addresses here, but this can
7005	     happen if we are passed a SP (never OK for HImode/QImode) or
7006	     virtual register (also rejected as illegitimate for HImode/QImode)
7007	     relative address.  */
7008          /* ??? This should perhaps be fixed elsewhere, for instance, in
7009	     fixup_stack_1, by checking for other kinds of invalid addresses,
7010	     e.g. a bare reference to a virtual register.  This may confuse the
7011	     alpha though, which must handle this case differently.  */
7012          if (MEM_P (operands[0])
7013	      && !memory_address_p (GET_MODE (operands[0]),
7014				    XEXP (operands[0], 0)))
7015	    operands[0]
7016	      = replace_equiv_address (operands[0],
7017				       copy_to_reg (XEXP (operands[0], 0)));
7018
7019          if (MEM_P (operands[1])
7020	      && !memory_address_p (GET_MODE (operands[1]),
7021				    XEXP (operands[1], 0)))
7022	    operands[1]
7023	      = replace_equiv_address (operands[1],
7024				       copy_to_reg (XEXP (operands[1], 0)));
7025
7026	  if (MEM_P (operands[1]) && optimize > 0)
7027	    {
7028	      rtx reg = gen_reg_rtx (SImode);
7029
7030	      emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
7031	      operands[1] = gen_lowpart (HImode, reg);
7032	    }
7033
7034          if (MEM_P (operands[0]))
7035	    operands[1] = force_reg (HImode, operands[1]);
7036        }
7037      else if (CONST_INT_P (operands[1])
7038	        && !satisfies_constraint_I (operands[1]))
7039        {
7040	  /* Handle loading a large integer during reload.  */
7041
7042          /* Writing a constant to memory needs a scratch, which should
7043	     be handled with SECONDARY_RELOADs.  */
7044          gcc_assert (REG_P (operands[0]));
7045
7046          operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7047          emit_insn (gen_movsi (operands[0], operands[1]));
7048          DONE;
7049        }
7050    }
7051  "
7052)
7053
7054(define_expand "movhi_bytes"
7055  [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
7056   (set (match_dup 3)
7057	(zero_extend:SI (match_dup 6)))
7058   (set (match_operand:SI 0 "" "")
7059	 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
7060  "TARGET_ARM"
7061  "
7062  {
7063    rtx mem1, mem2;
7064    rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
7065
7066    mem1 = change_address (operands[1], QImode, addr);
7067    mem2 = change_address (operands[1], QImode,
7068			   plus_constant (Pmode, addr, 1));
7069    operands[0] = gen_lowpart (SImode, operands[0]);
7070    operands[1] = mem1;
7071    operands[2] = gen_reg_rtx (SImode);
7072    operands[3] = gen_reg_rtx (SImode);
7073    operands[6] = mem2;
7074
7075    if (BYTES_BIG_ENDIAN)
7076      {
7077	operands[4] = operands[2];
7078	operands[5] = operands[3];
7079      }
7080    else
7081      {
7082	operands[4] = operands[3];
7083	operands[5] = operands[2];
7084      }
7085  }"
7086)
7087
7088(define_expand "movhi_bigend"
7089  [(set (match_dup 2)
7090	(rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
7091		   (const_int 16)))
7092   (set (match_dup 3)
7093	(ashiftrt:SI (match_dup 2) (const_int 16)))
7094   (set (match_operand:HI 0 "s_register_operand")
7095	(match_dup 4))]
7096  "TARGET_ARM"
7097  "
7098  operands[2] = gen_reg_rtx (SImode);
7099  operands[3] = gen_reg_rtx (SImode);
7100  operands[4] = gen_lowpart (HImode, operands[3]);
7101  "
7102)
7103
7104;; Pattern to recognize the insn generated by the default case above.
7105(define_insn "*movhi_insn_arch4"
7106  [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
7107	(match_operand:HI 1 "general_operand"      "rIk,K,n,r,mi"))]
7108  "TARGET_ARM
7109   && arm_arch4 && !TARGET_HARD_FLOAT
7110   && (register_operand (operands[0], HImode)
7111       || register_operand (operands[1], HImode))"
7112  "@
7113   mov%?\\t%0, %1\\t%@ movhi
7114   mvn%?\\t%0, #%B1\\t%@ movhi
7115   movw%?\\t%0, %L1\\t%@ movhi
7116   strh%?\\t%1, %0\\t%@ movhi
7117   ldrh%?\\t%0, %1\\t%@ movhi"
7118  [(set_attr "predicable" "yes")
7119   (set_attr "pool_range" "*,*,*,*,256")
7120   (set_attr "neg_pool_range" "*,*,*,*,244")
7121   (set_attr "arch" "*,*,v6t2,*,*")
7122   (set_attr_alternative "type"
7123                         [(if_then_else (match_operand 1 "const_int_operand" "")
7124                                        (const_string "mov_imm" )
7125                                        (const_string "mov_reg"))
7126                          (const_string "mvn_imm")
7127                          (const_string "mov_imm")
7128                          (const_string "store_4")
7129                          (const_string "load_4")])]
7130)
7131
7132(define_insn "*movhi_bytes"
7133  [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
7134	(match_operand:HI 1 "arm_rhs_operand"  "I,rk,K"))]
7135  "TARGET_ARM && !TARGET_HARD_FLOAT"
7136  "@
7137   mov%?\\t%0, %1\\t%@ movhi
7138   mov%?\\t%0, %1\\t%@ movhi
7139   mvn%?\\t%0, #%B1\\t%@ movhi"
7140  [(set_attr "predicable" "yes")
7141   (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
7142)
7143
7144;; We use a DImode scratch because we may occasionally need an additional
7145;; temporary if the address isn't offsettable -- push_reload doesn't seem
7146;; to take any notice of the "o" constraints on the reload_memory_operand operand.
7147;; The reload_in<m> and reload_out<m> patterns require special constraints
7148;; to be correctly handled by the default_secondary_reload function.
7149(define_expand "reload_outhi"
7150  [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
7151	      (match_operand:HI 1 "s_register_operand"        "r")
7152	      (match_operand:DI 2 "s_register_operand"        "=&l")])]
7153  "TARGET_EITHER"
7154  "if (TARGET_ARM)
7155     arm_reload_out_hi (operands);
7156   else
7157     thumb_reload_out_hi (operands);
7158  DONE;
7159  "
7160)
7161
7162(define_expand "reload_inhi"
7163  [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
7164	      (match_operand:HI 1 "arm_reload_memory_operand" "o")
7165	      (match_operand:DI 2 "s_register_operand" "=&r")])]
7166  "TARGET_EITHER"
7167  "
7168  if (TARGET_ARM)
7169    arm_reload_in_hi (operands);
7170  else
7171    thumb_reload_out_hi (operands);
7172  DONE;
7173")
7174
7175(define_expand "movqi"
7176  [(set (match_operand:QI 0 "general_operand")
7177        (match_operand:QI 1 "general_operand"))]
7178  "TARGET_EITHER"
7179  "
7180  /* Everything except mem = const or mem = mem can be done easily.  */
7181
7182  if (can_create_pseudo_p ())
7183    {
7184      if (CONST_INT_P (operands[1]))
7185	{
7186	  rtx reg = gen_reg_rtx (SImode);
7187
7188	  /* For thumb we want an unsigned immediate, then we are more likely
7189	     to be able to use a movs insn.  */
7190	  if (TARGET_THUMB)
7191	    operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
7192
7193	  emit_insn (gen_movsi (reg, operands[1]));
7194	  operands[1] = gen_lowpart (QImode, reg);
7195	}
7196
7197      if (TARGET_THUMB)
7198	{
7199          /* ??? We shouldn't really get invalid addresses here, but this can
7200	     happen if we are passed a SP (never OK for HImode/QImode) or
7201	     virtual register (also rejected as illegitimate for HImode/QImode)
7202	     relative address.  */
7203          /* ??? This should perhaps be fixed elsewhere, for instance, in
7204	     fixup_stack_1, by checking for other kinds of invalid addresses,
7205	     e.g. a bare reference to a virtual register.  This may confuse the
7206	     alpha though, which must handle this case differently.  */
7207          if (MEM_P (operands[0])
7208	      && !memory_address_p (GET_MODE (operands[0]),
7209		  		     XEXP (operands[0], 0)))
7210	    operands[0]
7211	      = replace_equiv_address (operands[0],
7212				       copy_to_reg (XEXP (operands[0], 0)));
7213          if (MEM_P (operands[1])
7214	      && !memory_address_p (GET_MODE (operands[1]),
7215				    XEXP (operands[1], 0)))
7216	     operands[1]
7217	       = replace_equiv_address (operands[1],
7218					copy_to_reg (XEXP (operands[1], 0)));
7219	}
7220
7221      if (MEM_P (operands[1]) && optimize > 0)
7222	{
7223	  rtx reg = gen_reg_rtx (SImode);
7224
7225	  emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
7226	  operands[1] = gen_lowpart (QImode, reg);
7227	}
7228
7229      if (MEM_P (operands[0]))
7230	operands[1] = force_reg (QImode, operands[1]);
7231    }
7232  else if (TARGET_THUMB
7233	   && CONST_INT_P (operands[1])
7234	   && !satisfies_constraint_I (operands[1]))
7235    {
7236      /* Handle loading a large integer during reload.  */
7237
7238      /* Writing a constant to memory needs a scratch, which should
7239	 be handled with SECONDARY_RELOADs.  */
7240      gcc_assert (REG_P (operands[0]));
7241
7242      operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
7243      emit_insn (gen_movsi (operands[0], operands[1]));
7244      DONE;
7245    }
7246  "
7247)
7248
7249(define_insn "*arm_movqi_insn"
7250  [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
7251	(match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
7252  "TARGET_32BIT
7253   && (   register_operand (operands[0], QImode)
7254       || register_operand (operands[1], QImode))"
7255  "@
7256   mov%?\\t%0, %1
7257   mov%?\\t%0, %1
7258   mov%?\\t%0, %1
7259   mov%?\\t%0, %1
7260   mvn%?\\t%0, #%B1
7261   ldrb%?\\t%0, %1
7262   strb%?\\t%1, %0
7263   ldrb%?\\t%0, %1
7264   strb%?\\t%1, %0"
7265  [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
7266   (set_attr "predicable" "yes")
7267   (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
7268   (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
7269   (set_attr "length" "2,4,4,2,4,2,2,4,4")]
7270)
7271
7272;; HFmode and BFmode moves.
7273(define_expand "mov<mode>"
7274  [(set (match_operand:HFBF 0 "general_operand")
7275	(match_operand:HFBF 1 "general_operand"))]
7276  "TARGET_EITHER"
7277  "
7278  gcc_checking_assert (aligned_operand (operands[0], <MODE>mode));
7279  gcc_checking_assert (aligned_operand (operands[1], <MODE>mode));
7280  if (TARGET_32BIT)
7281    {
7282      if (MEM_P (operands[0]))
7283	operands[1] = force_reg (<MODE>mode, operands[1]);
7284    }
7285  else /* TARGET_THUMB1 */
7286    {
7287      if (can_create_pseudo_p ())
7288        {
7289           if (!REG_P (operands[0]))
7290	     operands[1] = force_reg (<MODE>mode, operands[1]);
7291        }
7292    }
7293  "
7294)
7295
7296(define_insn "*arm32_mov<mode>"
7297  [(set (match_operand:HFBF 0 "nonimmediate_operand" "=r,m,r,r")
7298	(match_operand:HFBF 1 "general_operand"	   " m,r,r,F"))]
7299  "TARGET_32BIT
7300   && !TARGET_HARD_FLOAT
7301   && !TARGET_HAVE_MVE
7302   && (	  s_register_operand (operands[0], <MODE>mode)
7303       || s_register_operand (operands[1], <MODE>mode))"
7304  "*
7305  switch (which_alternative)
7306    {
7307    case 0:	/* ARM register from memory */
7308      return \"ldrh%?\\t%0, %1\\t%@ __<fporbf>\";
7309    case 1:	/* memory from ARM register */
7310      return \"strh%?\\t%1, %0\\t%@ __<fporbf>\";
7311    case 2:	/* ARM register from ARM register */
7312      return \"mov%?\\t%0, %1\\t%@ __<fporbf>\";
7313    case 3:	/* ARM register from constant */
7314      {
7315	long bits;
7316	rtx ops[4];
7317
7318	bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
7319			       <MODE>mode);
7320	ops[0] = operands[0];
7321	ops[1] = GEN_INT (bits);
7322	ops[2] = GEN_INT (bits & 0xff00);
7323	ops[3] = GEN_INT (bits & 0x00ff);
7324
7325	if (arm_arch_thumb2)
7326	  output_asm_insn (\"movw%?\\t%0, %1\", ops);
7327	else
7328	  output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
7329	return \"\";
7330       }
7331    default:
7332      gcc_unreachable ();
7333    }
7334  "
7335  [(set_attr "conds" "unconditional")
7336   (set_attr "type" "load_4,store_4,mov_reg,multiple")
7337   (set_attr "length" "4,4,4,8")
7338   (set_attr "predicable" "yes")]
7339)
7340
7341(define_expand "movsf"
7342  [(set (match_operand:SF 0 "general_operand")
7343	(match_operand:SF 1 "general_operand"))]
7344  "TARGET_EITHER"
7345  "
7346  gcc_checking_assert (aligned_operand (operands[0], SFmode));
7347  gcc_checking_assert (aligned_operand (operands[1], SFmode));
7348  if (TARGET_32BIT)
7349    {
7350      if (MEM_P (operands[0]))
7351        operands[1] = force_reg (SFmode, operands[1]);
7352    }
7353  else /* TARGET_THUMB1 */
7354    {
7355      if (can_create_pseudo_p ())
7356        {
7357           if (!REG_P (operands[0]))
7358	     operands[1] = force_reg (SFmode, operands[1]);
7359        }
7360    }
7361
7362  /* Cannot load it directly, generate a load with clobber so that it can be
7363     loaded via GPR with MOV / MOVT.  */
7364  if (arm_disable_literal_pool
7365      && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7366      && CONST_DOUBLE_P (operands[1])
7367      && TARGET_VFP_BASE
7368      && !vfp3_const_double_rtx (operands[1]))
7369    {
7370      rtx clobreg = gen_reg_rtx (SFmode);
7371      emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
7372						   clobreg));
7373      DONE;
7374    }
7375  "
7376)
7377
7378;; Transform a move of a floating-point constant into a core register
7379;; into the equivalent SImode operation.
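;; For instance, moving the SFmode constant 1.0 into a core register becomes
;; an SImode move of its bit pattern 0x3f800000 (IEEE-754 single precision).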
7380(define_split
7381  [(set (match_operand:SF 0 "arm_general_register_operand" "")
7382	(match_operand:SF 1 "immediate_operand" ""))]
7383  "TARGET_EITHER
7384   && reload_completed
7385   && CONST_DOUBLE_P (operands[1])"
7386  [(set (match_dup 2) (match_dup 3))]
7387  "
7388  operands[2] = gen_lowpart (SImode, operands[0]);
7389  operands[3] = gen_lowpart (SImode, operands[1]);
7390  if (operands[2] == 0 || operands[3] == 0)
7391    FAIL;
7392  "
7393)
7394
7395(define_insn "*arm_movsf_soft_insn"
7396  [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
7397	(match_operand:SF 1 "general_operand"  "r,mE,r"))]
7398  "TARGET_32BIT
7399   && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7400   && (!MEM_P (operands[0])
7401       || register_operand (operands[1], SFmode))"
7402{
7403  switch (which_alternative)
7404    {
7405    case 0: return \"mov%?\\t%0, %1\";
7406    case 1:
7407      /* Cannot load it directly, split to load it via MOV / MOVT.  */
7408      if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7409	return \"#\";
7410      return \"ldr%?\\t%0, %1\\t%@ float\";
7411    case 2: return \"str%?\\t%1, %0\\t%@ float\";
7412    default: gcc_unreachable ();
7413    }
7414}
7415  [(set_attr "predicable" "yes")
7416   (set_attr "type" "mov_reg,load_4,store_4")
7417   (set_attr "arm_pool_range" "*,4096,*")
7418   (set_attr "thumb2_pool_range" "*,4094,*")
7419   (set_attr "arm_neg_pool_range" "*,4084,*")
7420   (set_attr "thumb2_neg_pool_range" "*,0,*")]
7421)
7422
7423;; Splitter for the above.
7424(define_split
7425  [(set (match_operand:SF 0 "s_register_operand")
7426	(match_operand:SF 1 "const_double_operand"))]
7427  "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7428  [(const_int 0)]
7429{
7430  long buf;
7431  real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
7432  rtx cst = gen_int_mode (buf, SImode);
7433  emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
7434  DONE;
7435}
7436)
7437
7438(define_expand "movdf"
7439  [(set (match_operand:DF 0 "general_operand")
7440	(match_operand:DF 1 "general_operand"))]
7441  "TARGET_EITHER"
7442  "
7443  gcc_checking_assert (aligned_operand (operands[0], DFmode));
7444  gcc_checking_assert (aligned_operand (operands[1], DFmode));
7445  if (TARGET_32BIT)
7446    {
7447      if (MEM_P (operands[0]))
7448        operands[1] = force_reg (DFmode, operands[1]);
7449    }
7450  else /* TARGET_THUMB */
7451    {
7452      if (can_create_pseudo_p ())
7453        {
7454          if (!REG_P (operands[0]))
7455	    operands[1] = force_reg (DFmode, operands[1]);
7456        }
7457    }
7458
7459  /* Cannot load it directly, generate a load with clobber so that it can be
7460     loaded via GPR with MOV / MOVT.  */
7461  if (arm_disable_literal_pool
7462      && (REG_P (operands[0]) || SUBREG_P (operands[0]))
7463      && CONSTANT_P (operands[1])
7464      && TARGET_VFP_BASE
7465      && !arm_const_double_rtx (operands[1])
7466      && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1])))
7467    {
7468      rtx clobreg = gen_reg_rtx (DFmode);
7469      emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
7470						   clobreg));
7471      DONE;
7472    }
7473  "
7474)
7475
7476;; Reloading a df mode value stored in integer regs to memory can require a
7477;; scratch reg.
7478;; Another reload_out<m> pattern that requires special constraints.
7479(define_expand "reload_outdf"
7480  [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
7481   (match_operand:DF 1 "s_register_operand" "r")
7482   (match_operand:SI 2 "s_register_operand" "=&r")]
7483  "TARGET_THUMB2"
7484  "
7485  {
7486    enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
7487
7488    if (code == REG)
7489      operands[2] = XEXP (operands[0], 0);
7490    else if (code == POST_INC || code == PRE_DEC)
7491      {
7492	operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
7493	operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
7494	emit_insn (gen_movdi (operands[0], operands[1]));
7495	DONE;
7496      }
7497    else if (code == PRE_INC)
7498      {
7499	rtx reg = XEXP (XEXP (operands[0], 0), 0);
7500
7501	emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
7502	operands[2] = reg;
7503      }
7504    else if (code == POST_DEC)
7505      operands[2] = XEXP (XEXP (operands[0], 0), 0);
7506    else
7507      emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
7508			     XEXP (XEXP (operands[0], 0), 1)));
7509
7510    emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
7511			    operands[1]));
7512
7513    if (code == POST_DEC)
7514      emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
7515
7516    DONE;
7517  }"
7518)
7519
7520(define_insn "*movdf_soft_insn"
7521  [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
7522       (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
7523  "TARGET_32BIT && TARGET_SOFT_FLOAT && !TARGET_HAVE_MVE
7524   && (   register_operand (operands[0], DFmode)
7525       || register_operand (operands[1], DFmode))"
7526  "*
7527  switch (which_alternative)
7528    {
7529    case 0:
7530    case 1:
7531    case 2:
7532      return \"#\";
7533    case 3:
7534      /* Cannot load it directly, split to load it via MOV / MOVT.  */
7535      if (!MEM_P (operands[1]) && arm_disable_literal_pool)
7536	return \"#\";
7537      /* Fall through.  */
7538    default:
7539      return output_move_double (operands, true, NULL);
7540    }
7541  "
7542  [(set_attr "length" "8,12,16,8,8")
7543   (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
7544   (set_attr "arm_pool_range" "*,*,*,1020,*")
7545   (set_attr "thumb2_pool_range" "*,*,*,1018,*")
7546   (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
7547   (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
7548)
7549
7550;; Splitter for the above.
7551(define_split
7552  [(set (match_operand:DF 0 "s_register_operand")
7553	(match_operand:DF 1 "const_double_operand"))]
7554  "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7555  [(const_int 0)]
7556{
7557  long buf[2];
7558  int order = BYTES_BIG_ENDIAN ? 1 : 0;
7559  real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
7560  unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
7561  ival |= (zext_hwi (buf[1 - order], 32) << 32);
7562  rtx cst = gen_int_mode (ival, DImode);
7563  emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
7564  DONE;
7565}
7566)
7567
7568
7569;; load- and store-multiple insns
7570;; The ARM can load/store any set of registers, provided that they are in
7571;; ascending order, but these expanders assume a contiguous set.
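;; For example (illustrative operands), a three-word load starting at r4
;; becomes a single
;;   ldm r0, {r4, r5, r6}
;; reading twelve consecutive bytes from the address in r0.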
7572
7573(define_expand "load_multiple"
7574  [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7575                          (match_operand:SI 1 "" ""))
7576                     (use (match_operand:SI 2 "" ""))])]
7577  "TARGET_32BIT"
7578{
7579  HOST_WIDE_INT offset = 0;
7580
7581  /* Support only fixed point registers.  */
7582  if (!CONST_INT_P (operands[2])
7583      || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7584      || INTVAL (operands[2]) < 2
7585      || !MEM_P (operands[1])
7586      || !REG_P (operands[0])
7587      || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7588      || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7589    FAIL;
7590
7591  operands[3]
7592    = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7593			     INTVAL (operands[2]),
7594			     force_reg (SImode, XEXP (operands[1], 0)),
7595			     FALSE, operands[1], &offset);
7596})
7597
7598(define_expand "store_multiple"
7599  [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7600                          (match_operand:SI 1 "" ""))
7601                     (use (match_operand:SI 2 "" ""))])]
7602  "TARGET_32BIT"
7603{
7604  HOST_WIDE_INT offset = 0;
7605
7606  /* Support only fixed point registers.  */
7607  if (!CONST_INT_P (operands[2])
7608      || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7609      || INTVAL (operands[2]) < 2
7610      || !REG_P (operands[1])
7611      || !MEM_P (operands[0])
7612      || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7613      || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7614    FAIL;
7615
7616  operands[3]
7617    = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7618			      INTVAL (operands[2]),
7619			      force_reg (SImode, XEXP (operands[0], 0)),
7620			      FALSE, operands[0], &offset);
7621})
7622
7623
7624(define_expand "setmemsi"
7625  [(match_operand:BLK 0 "general_operand")
7626   (match_operand:SI 1 "const_int_operand")
7627   (match_operand:SI 2 "const_int_operand")
7628   (match_operand:SI 3 "const_int_operand")]
7629  "TARGET_32BIT"
7630{
7631  if (arm_gen_setmem (operands))
7632    DONE;
7633
7634  FAIL;
7635})
7636
7637
7638;; Move a block of memory if it is word aligned and MORE than 2 words long.
7639;; We could let this apply for blocks of less than this, but it clobbers so
7640;; many registers that there is then probably a better way.
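;; For instance (illustrative registers), copying twelve aligned bytes can be
;; done with one load-multiple/store-multiple pair, roughly
;;   ldmia r1, {r3, r4, r5}
;;   stmia r0, {r3, r4, r5}
;; which already ties up three data registers besides the two pointers.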
7641
7642(define_expand "cpymemqi"
7643  [(match_operand:BLK 0 "general_operand")
7644   (match_operand:BLK 1 "general_operand")
7645   (match_operand:SI 2 "const_int_operand")
7646   (match_operand:SI 3 "const_int_operand")]
7647  ""
7648  "
7649  if (TARGET_32BIT)
7650    {
7651      if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7652          && !optimize_function_for_size_p (cfun))
7653        {
7654          if (gen_cpymem_ldrd_strd (operands))
7655            DONE;
7656          FAIL;
7657        }
7658
7659      if (arm_gen_cpymemqi (operands))
7660        DONE;
7661      FAIL;
7662    }
7663  else /* TARGET_THUMB1 */
7664    {
7665      if (   INTVAL (operands[3]) != 4
7666          || INTVAL (operands[2]) > 48)
7667        FAIL;
7668
7669      thumb_expand_cpymemqi (operands);
7670      DONE;
7671    }
7672  "
7673)
7674
7675
7676;; Compare & branch insns
7677;; The ranges are calculated as follows:
7678;; For forward branches, the address calculation returns the address of
7679;; the next instruction.  This is 2 beyond the branch instruction.
7680;; For backward branches, the address calculation returns the address of
7681;; the first instruction in this pattern (cmp).  This is 2 before the branch
7682;; instruction for the shortest sequence, and 4 before the branch instruction
7683;; if we have to jump around an unconditional branch.
7684;; To the basic branch range the PC offset must be added (this is +4).
7685;; So for forward branches we have
7686;;   (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7687;; And for backward branches we have
7688;;   (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7689;;
7690;; In 16-bit Thumb these ranges are:
7691;; For a 'b'       pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7692;; For a 'b<cond>' pos_range = 254,  neg_range = -256  giving (-250 ->256).
7693
7694;; In 32-bit Thumb these ranges are:
7695;; For a 'b'       +/- 16MB is not checked for.
7696;; For a 'b<cond>' pos_range = 1048574,  neg_range = -1048576  giving
7697;; (-1048568 -> 1048576).
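;; As a worked instance of the formulas above, the short backward 16-bit
;; 'b<cond>' case is neg_range - neg_base_offs + pc_offs
;; = -256 - (-2) + 4 = -250, matching the figure quoted above.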
7698
7699(define_expand "cbranchsi4"
7700  [(set (pc) (if_then_else
7701	      (match_operator 0 "expandable_comparison_operator"
7702	       [(match_operand:SI 1 "s_register_operand")
7703	        (match_operand:SI 2 "nonmemory_operand")])
7704	      (label_ref (match_operand 3 "" ""))
7705	      (pc)))]
7706  "TARGET_EITHER"
7707  "
7708  if (!TARGET_THUMB1)
7709    {
7710      if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7711        FAIL;
7712      emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7713				      operands[3]));
7714      DONE;
7715    }
7716  if (thumb1_cmpneg_operand (operands[2], SImode))
7717    {
7718      emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7719					      operands[3], operands[0]));
7720      DONE;
7721    }
7722  if (!thumb1_cmp_operand (operands[2], SImode))
7723    operands[2] = force_reg (SImode, operands[2]);
7724  ")
7725
7726(define_expand "cbranchsf4"
7727  [(set (pc) (if_then_else
7728	      (match_operator 0 "expandable_comparison_operator"
7729	       [(match_operand:SF 1 "s_register_operand")
7730	        (match_operand:SF 2 "vfp_compare_operand")])
7731	      (label_ref (match_operand 3 "" ""))
7732	      (pc)))]
7733  "TARGET_32BIT && TARGET_HARD_FLOAT"
7734  "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7735				   operands[3])); DONE;"
7736)
7737
7738(define_expand "cbranchdf4"
7739  [(set (pc) (if_then_else
7740	      (match_operator 0 "expandable_comparison_operator"
7741	       [(match_operand:DF 1 "s_register_operand")
7742	        (match_operand:DF 2 "vfp_compare_operand")])
7743	      (label_ref (match_operand 3 "" ""))
7744	      (pc)))]
7745  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7746  "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7747				   operands[3])); DONE;"
7748)
7749
7750(define_expand "cbranchdi4"
7751  [(set (pc) (if_then_else
7752	      (match_operator 0 "expandable_comparison_operator"
7753	       [(match_operand:DI 1 "s_register_operand")
7754	        (match_operand:DI 2 "reg_or_int_operand")])
7755	      (label_ref (match_operand 3 "" ""))
7756	      (pc)))]
7757  "TARGET_32BIT"
7758  "{
7759     if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7760       FAIL;
7761     emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7762				       operands[3]));
7763     DONE;
7764   }"
7765)
7766
7767;; Comparison and test insns
7768
7769(define_insn "*arm_cmpsi_insn"
7770  [(set (reg:CC CC_REGNUM)
7771	(compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7772		    (match_operand:SI 1 "arm_add_operand"    "Py,r,r,I,L")))]
7773  "TARGET_32BIT"
7774  "@
7775   cmp%?\\t%0, %1
7776   cmp%?\\t%0, %1
7777   cmp%?\\t%0, %1
7778   cmp%?\\t%0, %1
7779   cmn%?\\t%0, #%n1"
7780  [(set_attr "conds" "set")
7781   (set_attr "arch" "t2,t2,any,any,any")
7782   (set_attr "length" "2,2,4,4,4")
7783   (set_attr "predicable" "yes")
7784   (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7785   (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
7786)
7787
7788(define_insn "*cmpsi_shiftsi"
7789  [(set (reg:CC CC_REGNUM)
7790	(compare:CC (match_operand:SI   0 "s_register_operand" "r,r")
7791		    (match_operator:SI  3 "shift_operator"
7792		     [(match_operand:SI 1 "s_register_operand" "r,r")
7793		      (match_operand:SI 2 "shift_amount_operand" "M,r")])))]
7794  "TARGET_32BIT"
7795  "cmp\\t%0, %1%S3"
7796  [(set_attr "conds" "set")
7797   (set_attr "shift" "1")
7798   (set_attr "arch" "32,a")
7799   (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7800
7801(define_insn "*cmpsi_shiftsi_swp"
7802  [(set (reg:CC_SWP CC_REGNUM)
7803	(compare:CC_SWP (match_operator:SI 3 "shift_operator"
7804			 [(match_operand:SI 1 "s_register_operand" "r,r")
7805			  (match_operand:SI 2 "shift_amount_operand" "M,r")])
7806			(match_operand:SI 0 "s_register_operand" "r,r")))]
7807  "TARGET_32BIT"
7808  "cmp%?\\t%0, %1%S3"
7809  [(set_attr "conds" "set")
7810   (set_attr "shift" "1")
7811   (set_attr "arch" "32,a")
7812   (set_attr "type" "alus_shift_imm,alus_shift_reg")])
7813
7814(define_insn "*arm_cmpsi_negshiftsi_si"
7815  [(set (reg:CC_Z CC_REGNUM)
7816	(compare:CC_Z
7817	 (neg:SI (match_operator:SI 1 "shift_operator"
7818		    [(match_operand:SI 2 "s_register_operand" "r,r")
7819		     (match_operand:SI 3 "shift_amount_operand" "M,r")]))
7820	 (match_operand:SI 0 "s_register_operand" "r,r")))]
7821  "TARGET_32BIT"
7822  "cmn%?\\t%0, %2%S1"
7823  [(set_attr "conds" "set")
7824   (set_attr "arch" "32,a")
7825   (set_attr "shift" "2")
7826   (set_attr "type" "alus_shift_imm,alus_shift_reg")
7827   (set_attr "predicable" "yes")]
7828)
7829
7830; This insn allows redundant compares to be removed by cse; nothing should
7831; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7832; is deleted later on.  The match_dup will match the mode here, so that
7833; mode changes of the condition codes aren't lost by this even though we don't
7834; specify what they are.
7835
7836(define_insn "*deleted_compare"
7837  [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7838  "TARGET_32BIT"
7839  "\\t%@ deleted compare"
7840  [(set_attr "conds" "set")
7841   (set_attr "length" "0")
7842   (set_attr "type" "no_insn")]
7843)
7844
7845
7846;; Conditional branch insns
7847
7848(define_expand "cbranch_cc"
7849  [(set (pc)
7850	(if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7851					    (match_operand 2 "" "")])
7852		      (label_ref (match_operand 3 "" ""))
7853		      (pc)))]
7854  "TARGET_32BIT"
7855  "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7856				      operands[1], operands[2], NULL_RTX);
7857   operands[2] = const0_rtx;"
7858)
7859
7860;;
7861;; Patterns to match conditional branch insns.
7862;;
7863
7864(define_insn "arm_cond_branch"
7865  [(set (pc)
7866	(if_then_else (match_operator 1 "arm_comparison_operator"
7867		       [(match_operand 2 "cc_register" "") (const_int 0)])
7868		      (label_ref (match_operand 0 "" ""))
7869		      (pc)))]
7870  "TARGET_32BIT"
7871  {
7872    if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7873    {
7874      arm_ccfsm_state += 2;
7875      return "";
7876    }
7877    switch (get_attr_length (insn))
7878      {
7879	case 2: /* Thumb2 16-bit b{cond}.  */
7880	case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}.  */
7881	  return "b%d1\t%l0";
7882	  break;
7883
7884	/* Thumb2 b{cond} out of range.  Use 16-bit b{cond} and
7885	   unconditional branch b.  */
7886	default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%D1\t");
7887      }
7888  }
7889  [(set_attr "conds" "use")
7890   (set_attr "type" "branch")
7891   (set (attr "length")
7892    (if_then_else (match_test "!TARGET_THUMB2")
7893
7894      ;; Target is not Thumb2, therefore it is A32.  Generate b{cond}.
7895      (const_int 4)
7896
7897      ;; Check if target is within 16-bit Thumb2 b{cond} range.
7898      (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7899		         (le (minus (match_dup 0) (pc)) (const_int 256)))
7900
7901	;; Target is Thumb2, within narrow range.
7902	;; Generate b{cond}.
7903	(const_int 2)
7904
7905	;; Check if target is within 32-bit Thumb2 b{cond} range.
7906	(if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
7907			   (le (minus (match_dup 0) (pc)) (const_int 1048576)))
7908
7909	  ;; Target is Thumb2, within wide range.
7910	  ;; Generate b{cond}
7911	  (const_int 4)
7912	  ;; Target is Thumb2, out of range.
7913	  ;; Generate narrow b{cond} and unconditional branch b.
7914	  (const_int 6)))))]
7915)
7916
7917(define_insn "*arm_cond_branch_reversed"
7918  [(set (pc)
7919	(if_then_else (match_operator 1 "arm_comparison_operator"
7920		       [(match_operand 2 "cc_register" "") (const_int 0)])
7921		      (pc)
7922		      (label_ref (match_operand 0 "" ""))))]
7923  "TARGET_32BIT"
7924  {
7925    if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7926    {
7927      arm_ccfsm_state += 2;
7928      return "";
7929    }
7930    switch (get_attr_length (insn))
7931      {
7932	case 2: /* Thumb2 16-bit b{cond}.  */
7933	case 4: /* Thumb2 32-bit b{cond} or A32 b{cond}.  */
7934	  return "b%D1\t%l0";
7935	  break;
7936
7937	/* Thumb2 b{cond} out of range.  Use 16-bit b{cond} and
7938	   unconditional branch b.  */
7939	default: return arm_gen_far_branch (operands, 0, "Lbcond", "b%d1\t");
7940      }
7941  }
7942  [(set_attr "conds" "use")
7943   (set_attr "type" "branch")
7944   (set (attr "length")
7945    (if_then_else (match_test "!TARGET_THUMB2")
7946
7947      ;;Target is not Thumb2, therefore is A32.  Generate b{cond}.
7948      (const_int 4)
7949
7950      ;; Check if target is within 16-bit Thumb2 b{cond} range.
7951      (if_then_else (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7952			 (le (minus (match_dup 0) (pc)) (const_int 256)))
7953
7954	;; Target is Thumb2, within narrow range.
7955	;; Generate b{cond}.
7956	(const_int 2)
7957
7958	;; Check if target is within 32-bit Thumb2 b{cond} range.
7959	(if_then_else (and (ge (minus (match_dup 0) (pc))(const_int -1048568))
7960			   (le (minus (match_dup 0) (pc)) (const_int 1048576)))
7961
7962	  ;; Target is Thumb2, within wide range.
7963	  ;; Generate b{cond}.
7964	  (const_int 4)
7965	  ;; Target is Thumb2, out of range.
7966	  ;; Generate narrow b{cond} and unconditional branch b.
7967	  (const_int 6)))))]
7968)
7969
7970
7971
7972; scc insns
7973
7974(define_expand "cstore_cc"
7975  [(set (match_operand:SI 0 "s_register_operand")
7976	(match_operator:SI 1 "" [(match_operand 2 "" "")
7977				 (match_operand 3 "" "")]))]
7978  "TARGET_32BIT"
7979  "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7980				      operands[2], operands[3], NULL_RTX);
7981   operands[3] = const0_rtx;"
7982)
7983
7984(define_insn_and_split "*mov_scc"
7985  [(set (match_operand:SI 0 "s_register_operand" "=r")
7986	(match_operator:SI 1 "arm_comparison_operator_mode"
7987	 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7988  "TARGET_ARM"
7989  "#"   ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7990  "TARGET_ARM"
7991  [(set (match_dup 0)
7992        (if_then_else:SI (match_dup 1)
7993                         (const_int 1)
7994                         (const_int 0)))]
7995  ""
7996  [(set_attr "conds" "use")
7997   (set_attr "length" "8")
7998   (set_attr "type" "multiple")]
7999)
8000
8001(define_insn "*negscc_borrow"
8002  [(set (match_operand:SI 0 "s_register_operand" "=r")
8003	(neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
8004  "TARGET_32BIT"
8005  "sbc\\t%0, %0, %0"
8006  [(set_attr "conds" "use")
8007   (set_attr "length" "4")
8008   (set_attr "type" "adc_reg")]
8009)
8010
8011(define_insn_and_split "*mov_negscc"
8012  [(set (match_operand:SI 0 "s_register_operand" "=r")
8013	(neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
8014		 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8015  "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
8016  "#"   ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
8017  "&& true"
8018  [(set (match_dup 0)
8019        (if_then_else:SI (match_dup 1)
8020                         (match_dup 3)
8021                         (const_int 0)))]
8022  {
8023    operands[3] = GEN_INT (~0);
8024  }
8025  [(set_attr "conds" "use")
8026   (set_attr "length" "8")
8027   (set_attr "type" "multiple")]
8028)
8029
8030(define_insn_and_split "*mov_notscc"
8031  [(set (match_operand:SI 0 "s_register_operand" "=r")
8032	(not:SI (match_operator:SI 1 "arm_comparison_operator"
8033		 [(match_operand 2 "cc_register" "") (const_int 0)])))]
8034  "TARGET_ARM"
8035  "#"   ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
8036  "TARGET_ARM"
8037  [(set (match_dup 0)
8038        (if_then_else:SI (match_dup 1)
8039                         (match_dup 3)
8040                         (match_dup 4)))]
8041  {
8042    operands[3] = GEN_INT (~1);
8043    operands[4] = GEN_INT (~0);
8044  }
8045  [(set_attr "conds" "use")
8046   (set_attr "length" "8")
8047   (set_attr "type" "multiple")]
8048)
8049
8050(define_expand "cstoresi4"
8051  [(set (match_operand:SI 0 "s_register_operand")
8052	(match_operator:SI 1 "expandable_comparison_operator"
8053	 [(match_operand:SI 2 "s_register_operand")
8054	  (match_operand:SI 3 "reg_or_int_operand")]))]
8055  "TARGET_32BIT || TARGET_THUMB1"
8056  "{
8057  rtx op3, scratch, scratch2;
8058
8059  if (!TARGET_THUMB1)
8060    {
8061      if (!arm_add_operand (operands[3], SImode))
8062	operands[3] = force_reg (SImode, operands[3]);
8063      emit_insn (gen_cstore_cc (operands[0], operands[1],
8064				operands[2], operands[3]));
8065      DONE;
8066    }
8067
8068  if (operands[3] == const0_rtx)
8069    {
8070      switch (GET_CODE (operands[1]))
8071	{
8072	case EQ:
8073	  emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
8074	  break;
8075
8076	case NE:
8077	  emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
8078	  break;
8079
8080	case LE:
8081          scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
8082				  NULL_RTX, 0, OPTAB_WIDEN);
8083          scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
8084				  NULL_RTX, 0, OPTAB_WIDEN);
8085          expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8086			operands[0], 1, OPTAB_WIDEN);
8087	  break;
8088
8089        case GE:
8090          scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
8091				 NULL_RTX, 1);
8092          expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
8093			NULL_RTX, 1, OPTAB_WIDEN);
8094          break;
8095
8096        case GT:
8097          scratch = expand_binop (SImode, ashr_optab, operands[2],
8098				  GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
8099          scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
8100				  NULL_RTX, 0, OPTAB_WIDEN);
8101          expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
8102			0, OPTAB_WIDEN);
8103          break;
8104
8105	/* LT is handled by generic code.  No need for unsigned with 0.  */
8106	default:
8107	  FAIL;
8108	}
8109      DONE;
8110    }
8111
8112  switch (GET_CODE (operands[1]))
8113    {
8114    case EQ:
8115      scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8116			      NULL_RTX, 0, OPTAB_WIDEN);
8117      emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
8118      break;
8119
8120    case NE:
8121      scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
8122			      NULL_RTX, 0, OPTAB_WIDEN);
8123      emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
8124      break;
8125
8126    case LE:
8127      op3 = force_reg (SImode, operands[3]);
8128
8129      scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
8130			      NULL_RTX, 1, OPTAB_WIDEN);
8131      scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
8132			      NULL_RTX, 0, OPTAB_WIDEN);
8133      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8134					  op3, operands[2]));
8135      break;
8136
8137    case GE:
8138      op3 = operands[3];
8139      if (!thumb1_cmp_operand (op3, SImode))
8140        op3 = force_reg (SImode, op3);
8141      scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8142			      NULL_RTX, 0, OPTAB_WIDEN);
8143      scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8144			       NULL_RTX, 1, OPTAB_WIDEN);
8145      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8146					  operands[2], op3));
8147      break;
8148
8149    case LEU:
8150      op3 = force_reg (SImode, operands[3]);
8151      scratch = force_reg (SImode, const0_rtx);
8152      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8153					  op3, operands[2]));
8154      break;
8155
8156    case GEU:
8157      op3 = operands[3];
8158      if (!thumb1_cmp_operand (op3, SImode))
8159        op3 = force_reg (SImode, op3);
8160      scratch = force_reg (SImode, const0_rtx);
8161      emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8162					  operands[2], op3));
8163      break;
8164
8165    case LTU:
8166      op3 = operands[3];
8167      if (!thumb1_cmp_operand (op3, SImode))
8168        op3 = force_reg (SImode, op3);
8169      scratch = gen_reg_rtx (SImode);
8170      emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
8171      break;
8172
8173    case GTU:
8174      op3 = force_reg (SImode, operands[3]);
8175      scratch = gen_reg_rtx (SImode);
8176      emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
8177      break;
8178
8179    /* No good sequences for GT, LT.  */
8180    default:
8181      FAIL;
8182    }
8183  DONE;
8184}")
8185
8186(define_expand "cstorehf4"
8187  [(set (match_operand:SI 0 "s_register_operand")
8188	(match_operator:SI 1 "expandable_comparison_operator"
8189	 [(match_operand:HF 2 "s_register_operand")
8190	  (match_operand:HF 3 "vfp_compare_operand")]))]
8191  "TARGET_VFP_FP16INST"
8192  {
8193    if (!arm_validize_comparison (&operands[1],
8194				  &operands[2],
8195				  &operands[3]))
8196       FAIL;
8197
8198    emit_insn (gen_cstore_cc (operands[0], operands[1],
8199			      operands[2], operands[3]));
8200    DONE;
8201  }
8202)
8203
8204(define_expand "cstoresf4"
8205  [(set (match_operand:SI 0 "s_register_operand")
8206	(match_operator:SI 1 "expandable_comparison_operator"
8207	 [(match_operand:SF 2 "s_register_operand")
8208	  (match_operand:SF 3 "vfp_compare_operand")]))]
8209  "TARGET_32BIT && TARGET_HARD_FLOAT"
8210  "emit_insn (gen_cstore_cc (operands[0], operands[1],
8211			     operands[2], operands[3])); DONE;"
8212)
8213
8214(define_expand "cstoredf4"
8215  [(set (match_operand:SI 0 "s_register_operand")
8216	(match_operator:SI 1 "expandable_comparison_operator"
8217	 [(match_operand:DF 2 "s_register_operand")
8218	  (match_operand:DF 3 "vfp_compare_operand")]))]
8219  "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
8220  "emit_insn (gen_cstore_cc (operands[0], operands[1],
8221			     operands[2], operands[3])); DONE;"
8222)
8223
8224(define_expand "cstoredi4"
8225  [(set (match_operand:SI 0 "s_register_operand")
8226	(match_operator:SI 1 "expandable_comparison_operator"
8227	 [(match_operand:DI 2 "s_register_operand")
8228	  (match_operand:DI 3 "reg_or_int_operand")]))]
8229  "TARGET_32BIT"
8230  "{
8231     if (!arm_validize_comparison (&operands[1],
8232     				   &operands[2],
8233				   &operands[3]))
8234       FAIL;
8235     emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
8236		      	         operands[3]));
8237     DONE;
8238   }"
8239)
8240
8241
8242;; Conditional move insns
8243
8244(define_expand "movsicc"
8245  [(set (match_operand:SI 0 "s_register_operand")
8246	(if_then_else:SI (match_operand 1 "expandable_comparison_operator")
8247			 (match_operand:SI 2 "arm_not_operand")
8248			 (match_operand:SI 3 "arm_not_operand")))]
8249  "TARGET_32BIT"
8250  "
8251  {
8252    enum rtx_code code;
8253    rtx ccreg;
8254
8255    if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8256				  &XEXP (operands[1], 1)))
8257      FAIL;
8258
8259    code = GET_CODE (operands[1]);
8260    ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8261				 XEXP (operands[1], 1), NULL_RTX);
8262    operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8263  }"
8264)
8265
8266(define_expand "movhfcc"
8267  [(set (match_operand:HF 0 "s_register_operand")
8268	(if_then_else:HF (match_operand 1 "arm_cond_move_operator")
8269			 (match_operand:HF 2 "s_register_operand")
8270			 (match_operand:HF 3 "s_register_operand")))]
8271  "TARGET_VFP_FP16INST"
8272  "
8273  {
8274    enum rtx_code code = GET_CODE (operands[1]);
8275    rtx ccreg;
8276
8277    if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8278				  &XEXP (operands[1], 1)))
8279      FAIL;
8280
8281    code = GET_CODE (operands[1]);
8282    ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8283				 XEXP (operands[1], 1), NULL_RTX);
8284    operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8285  }"
8286)
8287
8288(define_expand "movsfcc"
8289  [(set (match_operand:SF 0 "s_register_operand")
8290	(if_then_else:SF (match_operand 1 "arm_cond_move_operator")
8291			 (match_operand:SF 2 "s_register_operand")
8292			 (match_operand:SF 3 "s_register_operand")))]
8293  "TARGET_32BIT && TARGET_HARD_FLOAT"
8294  "
8295  {
8296    enum rtx_code code = GET_CODE (operands[1]);
8297    rtx ccreg;
8298
8299    if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8300       				  &XEXP (operands[1], 1)))
8301       FAIL;
8302
8303    code = GET_CODE (operands[1]);
8304    ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8305				 XEXP (operands[1], 1), NULL_RTX);
8306    operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8307  }"
8308)
8309
8310(define_expand "movdfcc"
8311  [(set (match_operand:DF 0 "s_register_operand")
8312	(if_then_else:DF (match_operand 1 "arm_cond_move_operator")
8313			 (match_operand:DF 2 "s_register_operand")
8314			 (match_operand:DF 3 "s_register_operand")))]
8315  "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
8316  "
8317  {
8318    enum rtx_code code = GET_CODE (operands[1]);
8319    rtx ccreg;
8320
8321    if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
8322       				  &XEXP (operands[1], 1)))
8323       FAIL;
8324    code = GET_CODE (operands[1]);
8325    ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
8326				 XEXP (operands[1], 1), NULL_RTX);
8327    operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8328  }"
8329)
8330
8331(define_insn "*cmov<mode>"
8332    [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
8333	(if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
8334			  [(match_operand 2 "cc_register" "") (const_int 0)])
8335			  (match_operand:SDF 3 "s_register_operand"
8336			                      "<F_constraint>")
8337			  (match_operand:SDF 4 "s_register_operand"
8338			                      "<F_constraint>")))]
8339  "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
8340  "*
8341  {
8342    enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8343    switch (code)
8344      {
8345      case ARM_GE:
8346      case ARM_GT:
8347      case ARM_EQ:
8348      case ARM_VS:
8349        return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
8350      case ARM_LT:
8351      case ARM_LE:
8352      case ARM_NE:
8353      case ARM_VC:
8354        return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
8355      default:
8356        gcc_unreachable ();
8357      }
8358    return \"\";
8359  }"
8360  [(set_attr "conds" "use")
8361   (set_attr "type" "fcsel")]
8362)
8363
8364(define_insn "*cmovhf"
8365    [(set (match_operand:HF 0 "s_register_operand" "=t")
8366	(if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
8367			 [(match_operand 2 "cc_register" "") (const_int 0)])
8368			  (match_operand:HF 3 "s_register_operand" "t")
8369			  (match_operand:HF 4 "s_register_operand" "t")))]
8370  "TARGET_VFP_FP16INST"
8371  "*
8372  {
8373    enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
8374    switch (code)
8375      {
8376      case ARM_GE:
8377      case ARM_GT:
8378      case ARM_EQ:
8379      case ARM_VS:
8380	return \"vsel%d1.f16\\t%0, %3, %4\";
8381      case ARM_LT:
8382      case ARM_LE:
8383      case ARM_NE:
8384      case ARM_VC:
8385	return \"vsel%D1.f16\\t%0, %4, %3\";
8386      default:
8387	gcc_unreachable ();
8388      }
8389    return \"\";
8390  }"
8391  [(set_attr "conds" "use")
8392   (set_attr "type" "fcsel")]
8393)
8394
8395(define_insn_and_split "*movsicc_insn"
8396  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8397	(if_then_else:SI
8398	 (match_operator 3 "arm_comparison_operator"
8399	  [(match_operand 4 "cc_register" "") (const_int 0)])
8400	 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8401	 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8402  "TARGET_ARM"
8403  "@
8404   mov%D3\\t%0, %2
8405   mvn%D3\\t%0, #%B2
8406   mov%d3\\t%0, %1
8407   mvn%d3\\t%0, #%B1
8408   #
8409   #
8410   #
8411   #"
8412   ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8413   ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8414   ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8415   ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8416  "&& reload_completed"
8417  [(const_int 0)]
8418  {
8419    enum rtx_code rev_code;
8420    machine_mode mode;
8421    rtx rev_cond;
8422
8423    emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8424                                  operands[3],
8425                                  gen_rtx_SET (operands[0], operands[1])));
8426
8427    rev_code = GET_CODE (operands[3]);
8428    mode = GET_MODE (operands[4]);
8429    if (mode == CCFPmode || mode == CCFPEmode)
8430      rev_code = reverse_condition_maybe_unordered (rev_code);
8431    else
8432      rev_code = reverse_condition (rev_code);
8433
8434    rev_cond = gen_rtx_fmt_ee (rev_code,
8435                               VOIDmode,
8436                               operands[4],
8437                               const0_rtx);
8438    emit_insn (gen_rtx_COND_EXEC (VOIDmode,
8439                                  rev_cond,
8440                                  gen_rtx_SET (operands[0], operands[2])));
8441    DONE;
8442  }
8443  [(set_attr "length" "4,4,4,4,8,8,8,8")
8444   (set_attr "conds" "use")
8445   (set_attr_alternative "type"
8446                         [(if_then_else (match_operand 2 "const_int_operand" "")
8447                                        (const_string "mov_imm")
8448                                        (const_string "mov_reg"))
8449                          (const_string "mvn_imm")
8450                          (if_then_else (match_operand 1 "const_int_operand" "")
8451                                        (const_string "mov_imm")
8452                                        (const_string "mov_reg"))
8453                          (const_string "mvn_imm")
8454                          (const_string "multiple")
8455                          (const_string "multiple")
8456                          (const_string "multiple")
8457                          (const_string "multiple")])]
8458)
8459
8460(define_insn "*movsfcc_soft_insn"
8461  [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8462	(if_then_else:SF (match_operator 3 "arm_comparison_operator"
8463			  [(match_operand 4 "cc_register" "") (const_int 0)])
8464			 (match_operand:SF 1 "s_register_operand" "0,r")
8465			 (match_operand:SF 2 "s_register_operand" "r,0")))]
8466  "TARGET_ARM && TARGET_SOFT_FLOAT"
8467  "@
8468   mov%D3\\t%0, %2
8469   mov%d3\\t%0, %1"
8470  [(set_attr "conds" "use")
8471   (set_attr "type" "mov_reg")]
8472)
8473
8474
8475;; Jump and linkage insns
8476
8477(define_expand "jump"
8478  [(set (pc)
8479	(label_ref (match_operand 0 "" "")))]
8480  "TARGET_EITHER"
8481  ""
8482)
8483
8484(define_insn "*arm_jump"
8485  [(set (pc)
8486	(label_ref (match_operand 0 "" "")))]
8487  "TARGET_32BIT"
8488  "*
8489  {
8490    if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8491      {
8492        arm_ccfsm_state += 2;
8493        return \"\";
8494      }
8495    return \"b%?\\t%l0\";
8496  }
8497  "
8498  [(set_attr "predicable" "yes")
8499   (set (attr "length")
8500	(if_then_else
8501	   (and (match_test "TARGET_THUMB2")
8502		(and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8503		     (le (minus (match_dup 0) (pc)) (const_int 2048))))
8504	   (const_int 2)
8505	   (const_int 4)))
8506   (set_attr "type" "branch")]
8507)
8508
8509(define_expand "call"
8510  [(parallel [(call (match_operand 0 "memory_operand")
8511	            (match_operand 1 "general_operand"))
8512	      (use (match_operand 2 "" ""))
8513	      (clobber (reg:SI LR_REGNUM))])]
8514  "TARGET_EITHER"
8515  "
8516  {
8517    rtx callee, pat;
8518    tree addr = MEM_EXPR (operands[0]);
8519
8520    /* In an untyped call, we can get NULL for operand 2.  */
8521    if (operands[2] == NULL_RTX)
8522      operands[2] = const0_rtx;
8523
8524    /* Decide if we should generate indirect calls by loading the
8525       32-bit address of the callee into a register before performing the
8526       branch and link.  */
8527    callee = XEXP (operands[0], 0);
8528    if (GET_CODE (callee) == SYMBOL_REF
8529	? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8530	: !REG_P (callee))
8531      XEXP (operands[0], 0) = force_reg (Pmode, callee);
8532
8533    if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
8534	/* Indirect call: set r9 with FDPIC value of callee.  */
8535	XEXP (operands[0], 0)
8536	  = arm_load_function_descriptor (XEXP (operands[0], 0));
8537
8538    if (detect_cmse_nonsecure_call (addr))
8539      {
8540	pat = gen_nonsecure_call_internal (operands[0], operands[1],
8541					   operands[2]);
8542	emit_call_insn (pat);
8543      }
8544    else
8545      {
8546	pat = gen_call_internal (operands[0], operands[1], operands[2]);
8547	arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
8548      }
8549
8550    /* Restore FDPIC register (r9) after call.  */
8551    if (TARGET_FDPIC)
8552      {
8553	rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8554	rtx initial_fdpic_reg
8555	    = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8556
8557	emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8558							initial_fdpic_reg));
8559      }
8560
8561    DONE;
8562  }"
8563)
8564
8565(define_insn "restore_pic_register_after_call"
8566  [(set (match_operand:SI 0 "s_register_operand" "+r,r")
8567        (unspec:SI [(match_dup 0)
8568                    (match_operand:SI 1 "nonimmediate_operand" "r,m")]
8569                   UNSPEC_PIC_RESTORE))]
8570  ""
8571  "@
8572  mov\t%0, %1
8573  ldr\t%0, %1"
8574)
8575
8576(define_expand "call_internal"
8577  [(parallel [(call (match_operand 0 "memory_operand")
8578	            (match_operand 1 "general_operand"))
8579	      (use (match_operand 2 "" ""))
8580	      (clobber (reg:SI LR_REGNUM))])])
8581
8582(define_expand "nonsecure_call_internal"
8583  [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
8584			       UNSPEC_NONSECURE_MEM)
8585		    (match_operand 1 "general_operand"))
8586	      (use (match_operand 2 "" ""))
8587	      (clobber (reg:SI LR_REGNUM))])]
8588  "use_cmse"
8589  {
8590    rtx addr = XEXP (operands[0], 0);
8591    rtx tmp = REG_P (addr) ? addr : force_reg (SImode, addr);
8592
8593    if (!TARGET_HAVE_FPCXT_CMSE)
8594      {
8595	rtx r4 = gen_rtx_REG (SImode, R4_REGNUM);
8596	emit_move_insn (r4, tmp);
8597	tmp = r4;
8598      }
8599
8600    if (tmp != addr)
8601      operands[0] = replace_equiv_address (operands[0], tmp);
8602  }
8603)
8604
8605(define_insn "*call_reg_armv5"
8606  [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8607         (match_operand 1 "" ""))
8608   (use (match_operand 2 "" ""))
8609   (clobber (reg:SI LR_REGNUM))]
8610  "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8611  "blx%?\\t%0"
8612  [(set_attr "type" "call")]
8613)
8614
8615(define_insn "*call_reg_arm"
8616  [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8617         (match_operand 1 "" ""))
8618   (use (match_operand 2 "" ""))
8619   (clobber (reg:SI LR_REGNUM))]
8620  "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8621  "*
8622  return output_call (operands);
8623  "
8624  ;; length is worst case, normally it is only two
8625  [(set_attr "length" "12")
8626   (set_attr "type" "call")]
8627)
8628
8629
8630(define_expand "call_value"
8631  [(parallel [(set (match_operand       0 "" "")
8632	           (call (match_operand 1 "memory_operand")
8633		         (match_operand 2 "general_operand")))
8634	      (use (match_operand 3 "" ""))
8635	      (clobber (reg:SI LR_REGNUM))])]
8636  "TARGET_EITHER"
8637  "
8638  {
8639    rtx pat, callee;
8640    tree addr = MEM_EXPR (operands[1]);
8641
8642    /* In an untyped call, we can get NULL for operand 2.  */
8643    if (operands[3] == 0)
8644      operands[3] = const0_rtx;
8645
8646    /* Decide if we should generate indirect calls by loading the
8647       32-bit address of the callee into a register before performing the
8648       branch and link.  */
8649    callee = XEXP (operands[1], 0);
8650    if (GET_CODE (callee) == SYMBOL_REF
8651	? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8652	: !REG_P (callee))
8653      XEXP (operands[1], 0) = force_reg (Pmode, callee);
8654
8655    if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
8656	/* Indirect call: set r9 with FDPIC value of callee.  */
8657	XEXP (operands[1], 0)
8658	  = arm_load_function_descriptor (XEXP (operands[1], 0));
8659
8660    if (detect_cmse_nonsecure_call (addr))
8661      {
8662	pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
8663						 operands[2], operands[3]);
8664	emit_call_insn (pat);
8665      }
8666    else
8667      {
8668	pat = gen_call_value_internal (operands[0], operands[1],
8669				       operands[2], operands[3]);
8670	arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
8671      }
8672
8673    /* Restore FDPIC register (r9) after call.  */
8674    if (TARGET_FDPIC)
8675      {
8676	rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8677	rtx initial_fdpic_reg
8678	    = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8679
8680	emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8681							initial_fdpic_reg));
8682      }
8683
8684    DONE;
8685  }"
8686)
8687
8688(define_expand "call_value_internal"
8689  [(parallel [(set (match_operand       0 "" "")
8690	           (call (match_operand 1 "memory_operand")
8691		         (match_operand 2 "general_operand")))
8692	      (use (match_operand 3 "" ""))
8693	      (clobber (reg:SI LR_REGNUM))])])
8694
8695(define_expand "nonsecure_call_value_internal"
8696  [(parallel [(set (match_operand       0 "" "")
8697		   (call (unspec:SI [(match_operand 1 "memory_operand")]
8698				    UNSPEC_NONSECURE_MEM)
8699			 (match_operand 2 "general_operand")))
8700	      (use (match_operand 3 "" ""))
8701	      (clobber (reg:SI LR_REGNUM))])]
8702  "use_cmse"
8703  "
8704  {
8705    if (!TARGET_HAVE_FPCXT_CMSE)
8706      {
8707	rtx tmp =
8708	  copy_to_suggested_reg (XEXP (operands[1], 0),
8709				 gen_rtx_REG (SImode, R4_REGNUM),
8710				 SImode);
8711
8712	operands[1] = replace_equiv_address (operands[1], tmp);
8713      }
8714  }")
8715
8716(define_insn "*call_value_reg_armv5"
8717  [(set (match_operand 0 "" "")
8718        (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8719	      (match_operand 2 "" "")))
8720   (use (match_operand 3 "" ""))
8721   (clobber (reg:SI LR_REGNUM))]
8722  "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8723  "blx%?\\t%1"
8724  [(set_attr "type" "call")]
8725)
8726
8727(define_insn "*call_value_reg_arm"
8728  [(set (match_operand 0 "" "")
8729        (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8730	      (match_operand 2 "" "")))
8731   (use (match_operand 3 "" ""))
8732   (clobber (reg:SI LR_REGNUM))]
8733  "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8734  "*
8735  return output_call (&operands[1]);
8736  "
8737  [(set_attr "length" "12")
8738   (set_attr "type" "call")]
8739)
8740
8741;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8742;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8743
8744(define_insn "*call_symbol"
8745  [(call (mem:SI (match_operand:SI 0 "" ""))
8746	 (match_operand 1 "" ""))
8747   (use (match_operand 2 "" ""))
8748   (clobber (reg:SI LR_REGNUM))]
8749  "TARGET_32BIT
8750   && !SIBLING_CALL_P (insn)
8751   && (GET_CODE (operands[0]) == SYMBOL_REF)
8752   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8753  "*
8754  {
8755   rtx op = operands[0];
8756
8757   /* Switch mode now when possible.  */
8758   if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8759	&& arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8760      return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
8761
8762    return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8763  }"
8764  [(set_attr "type" "call")]
8765)
8766
8767(define_insn "*call_value_symbol"
8768  [(set (match_operand 0 "" "")
8769	(call (mem:SI (match_operand:SI 1 "" ""))
8770	(match_operand:SI 2 "" "")))
8771   (use (match_operand 3 "" ""))
8772   (clobber (reg:SI LR_REGNUM))]
8773  "TARGET_32BIT
8774   && !SIBLING_CALL_P (insn)
8775   && (GET_CODE (operands[1]) == SYMBOL_REF)
8776   && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8777  "*
8778  {
8779   rtx op = operands[1];
8780
8781   /* Switch mode now when possible.  */
8782   if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8783	&& arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8784      return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
8785
8786    return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8787  }"
8788  [(set_attr "type" "call")]
8789)
8790
8791(define_expand "sibcall_internal"
8792  [(parallel [(call (match_operand 0 "memory_operand")
8793		    (match_operand 1 "general_operand"))
8794	      (return)
8795	      (use (match_operand 2 "" ""))])])
8796
8797;; We may also be able to do sibcalls for Thumb, but it's much harder...
8798(define_expand "sibcall"
8799  [(parallel [(call (match_operand 0 "memory_operand")
8800		    (match_operand 1 "general_operand"))
8801	      (return)
8802	      (use (match_operand 2 "" ""))])]
8803  "TARGET_32BIT"
8804  "
8805  {
8806    rtx pat;
8807
8808    if ((!REG_P (XEXP (operands[0], 0))
8809	 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8810	|| (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8811	    && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8812     XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8813
8814    if (operands[2] == NULL_RTX)
8815      operands[2] = const0_rtx;
8816
8817    pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8818    arm_emit_call_insn (pat, operands[0], true);
8819    DONE;
8820  }"
8821)
8822
8823(define_expand "sibcall_value_internal"
8824  [(parallel [(set (match_operand 0 "" "")
8825		   (call (match_operand 1 "memory_operand")
8826			 (match_operand 2 "general_operand")))
8827	      (return)
8828	      (use (match_operand 3 "" ""))])])
8829
8830(define_expand "sibcall_value"
8831  [(parallel [(set (match_operand 0 "" "")
8832		   (call (match_operand 1 "memory_operand")
8833			 (match_operand 2 "general_operand")))
8834	      (return)
8835	      (use (match_operand 3 "" ""))])]
8836  "TARGET_32BIT"
8837  "
8838  {
8839    rtx pat;
8840
8841    if ((!REG_P (XEXP (operands[1], 0))
8842	 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8843	|| (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8844	    && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8845     XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8846
8847    if (operands[3] == NULL_RTX)
8848      operands[3] = const0_rtx;
8849
8850    pat = gen_sibcall_value_internal (operands[0], operands[1],
8851                                      operands[2], operands[3]);
8852    arm_emit_call_insn (pat, operands[1], true);
8853    DONE;
8854  }"
8855)
8856
8857(define_insn "*sibcall_insn"
8858 [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8859	(match_operand 1 "" ""))
8860  (return)
8861  (use (match_operand 2 "" ""))]
8862  "TARGET_32BIT && SIBLING_CALL_P (insn)"
8863  "*
8864  if (which_alternative == 1)
8865    return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8866  else
8867    {
8868      if (arm_arch5t || arm_arch4t)
8869	return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8870      else
8871	return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8872    }
8873  "
8874  [(set_attr "type" "call")]
8875)
8876
8877(define_insn "*sibcall_value_insn"
8878 [(set (match_operand 0 "" "")
8879       (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8880	     (match_operand 2 "" "")))
8881  (return)
8882  (use (match_operand 3 "" ""))]
8883  "TARGET_32BIT && SIBLING_CALL_P (insn)"
8884  "*
8885  if (which_alternative == 1)
8886   return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8887  else
8888    {
8889      if (arm_arch5t || arm_arch4t)
8890	return \"bx%?\\t%1\";
8891      else
8892	return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8893    }
8894  "
8895  [(set_attr "type" "call")]
8896)
8897
8898(define_expand "<return_str>return"
8899  [(RETURNS)]
8900  "(TARGET_ARM || (TARGET_THUMB2
8901                   && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8902                   && !IS_STACKALIGN (arm_current_func_type ())))
8903    <return_cond_false>"
8904  "
8905  {
8906    if (TARGET_THUMB2)
8907      {
8908        thumb2_expand_return (<return_simple_p>);
8909        DONE;
8910      }
8911  }
8912  "
8913)
8914
8915;; Often the return insn will be the same as loading from memory, so set attr
8916(define_insn "*arm_return"
8917  [(return)]
8918  "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8919  "*
8920  {
8921    if (arm_ccfsm_state == 2)
8922      {
8923        arm_ccfsm_state += 2;
8924        return \"\";
8925      }
8926    return output_return_instruction (const_true_rtx, true, false, false);
8927  }"
8928  [(set_attr "type" "load_4")
8929   (set_attr "length" "12")
8930   (set_attr "predicable" "yes")]
8931)
8932
8933(define_insn "*cond_<return_str>return"
8934  [(set (pc)
8935        (if_then_else (match_operator 0 "arm_comparison_operator"
8936		       [(match_operand 1 "cc_register" "") (const_int 0)])
8937                      (RETURNS)
8938                      (pc)))]
8939  "TARGET_ARM  <return_cond_true>"
8940  "*
8941  {
8942    if (arm_ccfsm_state == 2)
8943      {
8944        arm_ccfsm_state += 2;
8945        return \"\";
8946      }
8947    return output_return_instruction (operands[0], true, false,
8948				      <return_simple_p>);
8949  }"
8950  [(set_attr "conds" "use")
8951   (set_attr "length" "12")
8952   (set_attr "type" "load_4")]
8953)
8954
8955(define_insn "*cond_<return_str>return_inverted"
8956  [(set (pc)
8957        (if_then_else (match_operator 0 "arm_comparison_operator"
8958		       [(match_operand 1 "cc_register" "") (const_int 0)])
8959                      (pc)
8960		      (RETURNS)))]
8961  "TARGET_ARM <return_cond_true>"
8962  "*
8963  {
8964    if (arm_ccfsm_state == 2)
8965      {
8966        arm_ccfsm_state += 2;
8967        return \"\";
8968      }
8969    return output_return_instruction (operands[0], true, true,
8970				      <return_simple_p>);
8971  }"
8972  [(set_attr "conds" "use")
8973   (set_attr "length" "12")
8974   (set_attr "type" "load_4")]
8975)
8976
8977(define_insn "*arm_simple_return"
8978  [(simple_return)]
8979  "TARGET_ARM"
8980  "*
8981  {
8982    if (arm_ccfsm_state == 2)
8983      {
8984        arm_ccfsm_state += 2;
8985        return \"\";
8986      }
8987    return output_return_instruction (const_true_rtx, true, false, true);
8988  }"
8989  [(set_attr "type" "branch")
8990   (set_attr "length" "4")
8991   (set_attr "predicable" "yes")]
8992)
8993
8994;; Generate a sequence of instructions to determine if the processor is
8995;; in 26-bit or 32-bit mode, and return the appropriate return address
8996;; mask.
8997
8998(define_expand "return_addr_mask"
8999  [(set (match_dup 1)
9000      (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9001		       (const_int 0)))
9002   (set (match_operand:SI 0 "s_register_operand")
9003      (if_then_else:SI (eq (match_dup 1) (const_int 0))
9004		       (const_int -1)
9005		       (const_int 67108860)))] ; 0x03fffffc
9006  "TARGET_ARM"
9007  "
9008  operands[1] = gen_rtx_REG (CC_NZmode, CC_REGNUM);
9009  ")
9010
9011(define_insn "*check_arch2"
9012  [(set (match_operand:CC_NZ 0 "cc_register" "")
9013      (compare:CC_NZ (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
9014		       (const_int 0)))]
9015  "TARGET_ARM"
9016  "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
9017  [(set_attr "length" "8")
9018   (set_attr "conds" "set")
9019   (set_attr "type" "multiple")]
9020)
9021
9022;; Call subroutine returning any type.
9023
9024(define_expand "untyped_call"
9025  [(parallel [(call (match_operand 0 "" "")
9026		    (const_int 0))
9027	      (match_operand 1 "" "")
9028	      (match_operand 2 "" "")])]
9029  "TARGET_EITHER && !TARGET_FDPIC"
9030  "
9031  {
9032    int i;
9033    rtx par = gen_rtx_PARALLEL (VOIDmode,
9034				rtvec_alloc (XVECLEN (operands[2], 0)));
9035    rtx addr = gen_reg_rtx (Pmode);
9036    rtx mem;
9037    int size = 0;
9038
9039    emit_move_insn (addr, XEXP (operands[1], 0));
9040    mem = change_address (operands[1], BLKmode, addr);
9041
9042    for (i = 0; i < XVECLEN (operands[2], 0); i++)
9043      {
9044	rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
9045
9046	/* Default code only uses r0 as a return value, but we could
9047	   be using anything up to 4 registers.  */
9048	if (REGNO (src) == R0_REGNUM)
9049	  src = gen_rtx_REG (TImode, R0_REGNUM);
9050
9051        XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
9052						 GEN_INT (size));
9053        size += GET_MODE_SIZE (GET_MODE (src));
9054      }
9055
9056    emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
9057
9058    size = 0;
9059
9060    for (i = 0; i < XVECLEN (par, 0); i++)
9061      {
9062	HOST_WIDE_INT offset = 0;
9063	rtx reg = XEXP (XVECEXP (par, 0, i), 0);
9064
9065	if (size != 0)
9066	  emit_move_insn (addr, plus_constant (Pmode, addr, size));
9067
9068	mem = change_address (mem, GET_MODE (reg), NULL);
9069	if (REGNO (reg) == R0_REGNUM)
9070	  {
9071	    /* On thumb we have to use a write-back instruction.  */
9072	    emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
9073 		       TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9074	    size = TARGET_ARM ? 16 : 0;
9075	  }
9076	else
9077	  {
9078	    emit_move_insn (mem, reg);
9079	    size = GET_MODE_SIZE (GET_MODE (reg));
9080	  }
9081      }
9082
9083    /* The optimizer does not know that the call sets the function value
9084       registers we stored in the result block.  We avoid problems by
9085       claiming that all hard registers are used and clobbered at this
9086       point.  */
9087    emit_insn (gen_blockage ());
9088
9089    DONE;
9090  }"
9091)
9092
9093(define_expand "untyped_return"
9094  [(match_operand:BLK 0 "memory_operand")
9095   (match_operand 1 "" "")]
9096  "TARGET_EITHER && !TARGET_FDPIC"
9097  "
9098  {
9099    int i;
9100    rtx addr = gen_reg_rtx (Pmode);
9101    rtx mem;
9102    int size = 0;
9103
9104    emit_move_insn (addr, XEXP (operands[0], 0));
9105    mem = change_address (operands[0], BLKmode, addr);
9106
9107    for (i = 0; i < XVECLEN (operands[1], 0); i++)
9108      {
9109	HOST_WIDE_INT offset = 0;
9110	rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
9111
9112	if (size != 0)
9113	  emit_move_insn (addr, plus_constant (Pmode, addr, size));
9114
9115	mem = change_address (mem, GET_MODE (reg), NULL);
9116	if (REGNO (reg) == R0_REGNUM)
9117	  {
9118	    /* On thumb we have to use a write-back instruction.  */
9119	    emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
9120 		       TARGET_THUMB ? TRUE : FALSE, mem, &offset));
9121	    size = TARGET_ARM ? 16 : 0;
9122	  }
9123	else
9124	  {
9125	    emit_move_insn (reg, mem);
9126	    size = GET_MODE_SIZE (GET_MODE (reg));
9127	  }
9128      }
9129
9130    /* Emit USE insns before the return.  */
9131    for (i = 0; i < XVECLEN (operands[1], 0); i++)
9132      emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
9133
9134    /* Construct the return.  */
9135    expand_naked_return ();
9136
9137    DONE;
9138  }"
9139)
9140
9141;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
9142;; all of memory.  This blocks insns from being moved across this point.
9143
9144(define_insn "blockage"
9145  [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
9146  "TARGET_EITHER"
9147  ""
9148  [(set_attr "length" "0")
9149   (set_attr "type" "block")]
9150)
9151
9152;; Since we hard code r0 here use the 'o' constraint to prevent
9153;; provoking undefined behaviour in the hardware with putting out
9154;; auto-increment operations with potentially r0 as the base register.
9155(define_insn "probe_stack"
9156  [(set (match_operand:SI 0 "memory_operand" "=o")
9157        (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
9158  "TARGET_32BIT"
9159  "str%?\\tr0, %0"
9160  [(set_attr "type" "store_4")
9161   (set_attr "predicable" "yes")]
9162)
9163
9164(define_insn "probe_stack_range"
9165  [(set (match_operand:SI 0 "register_operand" "=r")
9166	(unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
9167			     (match_operand:SI 2 "register_operand" "r")]
9168			     VUNSPEC_PROBE_STACK_RANGE))]
9169  "TARGET_32BIT"
9170{
9171  return output_probe_stack_range (operands[0], operands[2]);
9172}
9173  [(set_attr "type" "multiple")
9174   (set_attr "conds" "clob")]
9175)
9176
9177;; Named patterns for stack smashing protection.
9178(define_expand "stack_protect_combined_set"
9179  [(parallel
9180     [(set (match_operand:SI 0 "memory_operand")
9181	   (unspec:SI [(match_operand:SI 1 "guard_operand")]
9182		      UNSPEC_SP_SET))
9183      (clobber (match_scratch:SI 2 ""))
9184      (clobber (match_scratch:SI 3 ""))])]
9185  ""
9186  ""
9187)
9188
9189;; Use a separate insn from the above expand to be able to have the mem outside
9190;; the operand #1 when register allocation comes. This is needed to avoid LRA
9191;; try to reload the guard since we need to control how PIC access is done in
9192;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
9193;; legitimize_pic_address ()).
9194(define_insn_and_split "*stack_protect_combined_set_insn"
9195  [(set (match_operand:SI 0 "memory_operand" "=m,m")
9196	(unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9197		   UNSPEC_SP_SET))
9198   (clobber (match_scratch:SI 2 "=&l,&r"))
9199   (clobber (match_scratch:SI 3 "=&l,&r"))]
9200  ""
9201  "#"
9202  "reload_completed"
9203  [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
9204					    UNSPEC_SP_SET))
9205	      (clobber (match_dup 2))])]
9206  "
9207{
9208  if (flag_pic)
9209    {
9210      rtx pic_reg;
9211
9212      if (TARGET_FDPIC)
9213	  pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9214      else
9215	  pic_reg = operands[3];
9216
9217      /* Forces recomputing of GOT base now.  */
9218      legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
9219			      true /*compute_now*/);
9220    }
9221  else
9222    {
9223      if (address_operand (operands[1], SImode))
9224	operands[2] = operands[1];
9225      else
9226	{
9227	  rtx mem = force_const_mem (SImode, operands[1]);
9228	  emit_move_insn (operands[2], mem);
9229	}
9230    }
9231}"
9232  [(set_attr "arch" "t1,32")]
9233)
9234
9235;; DO NOT SPLIT THIS INSN.  It's important for security reasons that the
9236;; canary value does not live beyond the life of this sequence.
9237(define_insn "*stack_protect_set_insn"
9238  [(set (match_operand:SI 0 "memory_operand" "=m,m")
9239	(unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
9240	 UNSPEC_SP_SET))
9241   (clobber (match_dup 1))]
9242  ""
9243  "@
9244   ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
9245   ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
9246  [(set_attr "length" "8,12")
9247   (set_attr "conds" "clob,nocond")
9248   (set_attr "type" "multiple")
9249   (set_attr "arch" "t1,32")]
9250)
9251
9252(define_expand "stack_protect_combined_test"
9253  [(parallel
9254     [(set (pc)
9255	   (if_then_else
9256		(eq (match_operand:SI 0 "memory_operand")
9257		    (unspec:SI [(match_operand:SI 1 "guard_operand")]
9258			       UNSPEC_SP_TEST))
9259		(label_ref (match_operand 2))
9260		(pc)))
9261      (clobber (match_scratch:SI 3 ""))
9262      (clobber (match_scratch:SI 4 ""))
9263      (clobber (reg:CC CC_REGNUM))])]
9264  ""
9265  ""
9266)
9267
9268;; Use a separate insn from the above expand to be able to have the mem outside
9269;; the operand #1 when register allocation comes. This is needed to avoid LRA
9270;; try to reload the guard since we need to control how PIC access is done in
9271;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
9272;; legitimize_pic_address ()).
9273(define_insn_and_split "*stack_protect_combined_test_insn"
9274  [(set (pc)
9275	(if_then_else
9276		(eq (match_operand:SI 0 "memory_operand" "m,m")
9277		    (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
9278			       UNSPEC_SP_TEST))
9279		(label_ref (match_operand 2))
9280		(pc)))
9281   (clobber (match_scratch:SI 3 "=&l,&r"))
9282   (clobber (match_scratch:SI 4 "=&l,&r"))
9283   (clobber (reg:CC CC_REGNUM))]
9284  ""
9285  "#"
9286  "reload_completed"
9287  [(const_int 0)]
9288{
9289  rtx eq;
9290
9291  if (flag_pic)
9292    {
9293      rtx pic_reg;
9294
9295      if (TARGET_FDPIC)
9296	  pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
9297      else
9298	  pic_reg = operands[4];
9299
9300      /* Forces recomputing of GOT base now.  */
9301      legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
9302			      true /*compute_now*/);
9303    }
9304  else
9305    {
9306      if (address_operand (operands[1], SImode))
9307	operands[3] = operands[1];
9308      else
9309	{
9310	  rtx mem = force_const_mem (SImode, operands[1]);
9311	  emit_move_insn (operands[3], mem);
9312	}
9313    }
9314  if (TARGET_32BIT)
9315    {
9316      emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
9317						  operands[3]));
9318      rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
9319      eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
9320      emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
9321    }
9322  else
9323    {
9324      emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
9325						     operands[3]));
9326      eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
9327      emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
9328				      operands[2]));
9329    }
9330  DONE;
9331}
9332  [(set_attr "arch" "t1,32")]
9333)
9334
9335;; DO NOT SPLIT THIS PATTERN.  It is important for security reasons that the
9336;; canary value does not live beyond the end of this sequence.
9337(define_insn "arm_stack_protect_test_insn"
9338  [(set (reg:CC_Z CC_REGNUM)
9339	(compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
9340				  (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
9341				 UNSPEC_SP_TEST)
9342		      (const_int 0)))
9343   (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
9344   (clobber (match_dup 2))]
9345  "TARGET_32BIT"
9346  "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0\;mov\t%2, #0"
9347  [(set_attr "length" "12,16")
9348   (set_attr "conds" "set")
9349   (set_attr "type" "multiple")
9350   (set_attr "arch" "t,32")]
9351)
9352
9353(define_expand "casesi"
9354  [(match_operand:SI 0 "s_register_operand")	; index to jump on
9355   (match_operand:SI 1 "const_int_operand")	; lower bound
9356   (match_operand:SI 2 "const_int_operand")	; total range
9357   (match_operand:SI 3 "" "")			; table label
9358   (match_operand:SI 4 "" "")]			; Out of range label
9359  "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
9360  "
9361  {
9362    enum insn_code code;
9363    if (operands[1] != const0_rtx)
9364      {
9365	rtx reg = gen_reg_rtx (SImode);
9366
9367	emit_insn (gen_addsi3 (reg, operands[0],
9368			       gen_int_mode (-INTVAL (operands[1]),
9369			       		     SImode)));
9370	operands[0] = reg;
9371      }
9372
9373    if (TARGET_ARM)
9374      code = CODE_FOR_arm_casesi_internal;
9375    else if (TARGET_THUMB1)
9376      code = CODE_FOR_thumb1_casesi_internal_pic;
9377    else if (flag_pic)
9378      code = CODE_FOR_thumb2_casesi_internal_pic;
9379    else
9380      code = CODE_FOR_thumb2_casesi_internal;
9381
9382    if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
9383      operands[2] = force_reg (SImode, operands[2]);
9384
9385    emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
9386					  operands[3], operands[4]));
9387    DONE;
9388  }"
9389)
9390
9391;; The USE in this pattern is needed to tell flow analysis that this is
9392;; a CASESI insn.  It has no other purpose.
9393(define_expand "arm_casesi_internal"
9394  [(parallel [(set (pc)
9395	       (if_then_else
9396		(leu (match_operand:SI 0 "s_register_operand")
9397		     (match_operand:SI 1 "arm_rhs_operand"))
9398		(match_dup 4)
9399		(label_ref:SI (match_operand 3 ""))))
9400	      (clobber (reg:CC CC_REGNUM))
9401	      (use (label_ref:SI (match_operand 2 "")))])]
9402  "TARGET_ARM"
9403{
9404  operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
9405  operands[4] = gen_rtx_PLUS (SImode, operands[4],
9406			      gen_rtx_LABEL_REF (SImode, operands[2]));
9407  operands[4] = gen_rtx_MEM (SImode, operands[4]);
9408  MEM_READONLY_P (operands[4]) = 1;
9409  MEM_NOTRAP_P (operands[4]) = 1;
9410})
9411
9412(define_insn "*arm_casesi_internal"
9413  [(parallel [(set (pc)
9414	       (if_then_else
9415		(leu (match_operand:SI 0 "s_register_operand" "r")
9416		     (match_operand:SI 1 "arm_rhs_operand" "rI"))
9417		(mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
9418				 (label_ref:SI (match_operand 2 "" ""))))
9419		(label_ref:SI (match_operand 3 "" ""))))
9420	      (clobber (reg:CC CC_REGNUM))
9421	      (use (label_ref:SI (match_dup 2)))])]
9422  "TARGET_ARM"
9423  "*
9424    if (flag_pic)
9425      return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
9426    return   \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
9427  "
9428  [(set_attr "conds" "clob")
9429   (set_attr "length" "12")
9430   (set_attr "type" "multiple")]
9431)
9432
9433(define_expand "indirect_jump"
9434  [(set (pc)
9435	(match_operand:SI 0 "s_register_operand"))]
9436  "TARGET_EITHER"
9437  "
9438  /* Thumb-2 doesn't have mov pc, reg.  Explicitly set the low bit of the
9439     address and use bx.  */
9440  if (TARGET_THUMB2)
9441    {
9442      rtx tmp;
9443      tmp = gen_reg_rtx (SImode);
9444      emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
9445      operands[0] = tmp;
9446    }
9447  "
9448)
9449
9450;; NB Never uses BX.
9451(define_insn "*arm_indirect_jump"
9452  [(set (pc)
9453	(match_operand:SI 0 "s_register_operand" "r"))]
9454  "TARGET_ARM"
9455  "mov%?\\t%|pc, %0\\t%@ indirect register jump"
9456  [(set_attr "predicable" "yes")
9457   (set_attr "type" "branch")]
9458)
9459
9460(define_insn "*load_indirect_jump"
9461  [(set (pc)
9462	(match_operand:SI 0 "memory_operand" "m"))]
9463  "TARGET_ARM"
9464  "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
9465  [(set_attr "type" "load_4")
9466   (set_attr "pool_range" "4096")
9467   (set_attr "neg_pool_range" "4084")
9468   (set_attr "predicable" "yes")]
9469)
9470
9471
9472;; Misc insns
9473
9474(define_insn "nop"
9475  [(const_int 0)]
9476  "TARGET_EITHER"
9477  "nop"
9478  [(set (attr "length")
9479	(if_then_else (eq_attr "is_thumb" "yes")
9480		      (const_int 2)
9481		      (const_int 4)))
9482   (set_attr "type" "mov_reg")]
9483)
9484
9485(define_insn "trap"
9486  [(trap_if (const_int 1) (const_int 0))]
9487  ""
9488  "*
9489  if (TARGET_ARM)
9490    return \".inst\\t0xe7f000f0\";
9491  else
9492    return \".inst\\t0xdeff\";
9493  "
9494  [(set (attr "length")
9495	(if_then_else (eq_attr "is_thumb" "yes")
9496		      (const_int 2)
9497		      (const_int 4)))
9498   (set_attr "type" "trap")
9499   (set_attr "conds" "unconditional")]
9500)
9501
9502
9503;; Patterns to allow combination of arithmetic, cond code and shifts
9504
9505(define_insn "*<arith_shift_insn>_multsi"
9506  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9507	(SHIFTABLE_OPS:SI
9508	 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
9509		  (match_operand:SI 3 "power_of_two_operand" ""))
9510	 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
9511  "TARGET_32BIT"
9512  "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
9513  [(set_attr "predicable" "yes")
9514   (set_attr "shift" "2")
9515   (set_attr "arch" "a,t2")
9516   (set_attr "type" "alu_shift_imm")])
9517
9518(define_insn "*<arith_shift_insn>_shiftsi"
9519  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9520	(SHIFTABLE_OPS:SI
9521	 (match_operator:SI 2 "shift_nomul_operator"
9522	  [(match_operand:SI 3 "s_register_operand" "r,r,r")
9523	   (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
9524	 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
9525  "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
9526  "<arith_shift_insn>%?\\t%0, %1, %3%S2"
9527  [(set_attr "predicable" "yes")
9528   (set_attr "shift" "3")
9529   (set_attr "arch" "a,t2,a")
9530   (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
9531
9532(define_split
9533  [(set (match_operand:SI 0 "s_register_operand" "")
9534	(match_operator:SI 1 "shiftable_operator"
9535	 [(match_operator:SI 2 "shiftable_operator"
9536	   [(match_operator:SI 3 "shift_operator"
9537	     [(match_operand:SI 4 "s_register_operand" "")
9538	      (match_operand:SI 5 "reg_or_int_operand" "")])
9539	    (match_operand:SI 6 "s_register_operand" "")])
9540	  (match_operand:SI 7 "arm_rhs_operand" "")]))
9541   (clobber (match_operand:SI 8 "s_register_operand" ""))]
9542  "TARGET_32BIT"
9543  [(set (match_dup 8)
9544	(match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9545			 (match_dup 6)]))
9546   (set (match_dup 0)
9547	(match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
9548  "")
9549
9550(define_insn "*arith_shiftsi_compare0"
9551  [(set (reg:CC_NZ CC_REGNUM)
9552        (compare:CC_NZ
9553	 (match_operator:SI 1 "shiftable_operator"
9554	  [(match_operator:SI 3 "shift_operator"
9555	    [(match_operand:SI 4 "s_register_operand" "r,r")
9556	     (match_operand:SI 5 "shift_amount_operand" "M,r")])
9557	   (match_operand:SI 2 "s_register_operand" "r,r")])
9558	 (const_int 0)))
9559   (set (match_operand:SI 0 "s_register_operand" "=r,r")
9560	(match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
9561			 (match_dup 2)]))]
9562  "TARGET_32BIT"
9563  "%i1s%?\\t%0, %2, %4%S3"
9564  [(set_attr "conds" "set")
9565   (set_attr "shift" "4")
9566   (set_attr "arch" "32,a")
9567   (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9568
9569(define_insn "*arith_shiftsi_compare0_scratch"
9570  [(set (reg:CC_NZ CC_REGNUM)
9571        (compare:CC_NZ
9572	 (match_operator:SI 1 "shiftable_operator"
9573	  [(match_operator:SI 3 "shift_operator"
9574	    [(match_operand:SI 4 "s_register_operand" "r,r")
9575	     (match_operand:SI 5 "shift_amount_operand" "M,r")])
9576	   (match_operand:SI 2 "s_register_operand" "r,r")])
9577	 (const_int 0)))
9578   (clobber (match_scratch:SI 0 "=r,r"))]
9579  "TARGET_32BIT"
9580  "%i1s%?\\t%0, %2, %4%S3"
9581  [(set_attr "conds" "set")
9582   (set_attr "shift" "4")
9583   (set_attr "arch" "32,a")
9584   (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9585
9586(define_insn "*sub_shiftsi"
9587  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9588	(minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9589		  (match_operator:SI 2 "shift_operator"
9590		   [(match_operand:SI 3 "s_register_operand" "r,r")
9591		    (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
9592  "TARGET_32BIT"
9593  "sub%?\\t%0, %1, %3%S2"
9594  [(set_attr "predicable" "yes")
9595   (set_attr "predicable_short_it" "no")
9596   (set_attr "shift" "3")
9597   (set_attr "arch" "32,a")
9598   (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9599
9600(define_insn "*sub_shiftsi_compare0"
9601  [(set (reg:CC_NZ CC_REGNUM)
9602	(compare:CC_NZ
9603	 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9604		   (match_operator:SI 2 "shift_operator"
9605		    [(match_operand:SI 3 "s_register_operand" "r,r")
9606		     (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9607	 (const_int 0)))
9608   (set (match_operand:SI 0 "s_register_operand" "=r,r")
9609	(minus:SI (match_dup 1)
9610		  (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
9611  "TARGET_32BIT"
9612  "subs%?\\t%0, %1, %3%S2"
9613  [(set_attr "conds" "set")
9614   (set_attr "shift" "3")
9615   (set_attr "arch" "32,a")
9616   (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9617
9618(define_insn "*sub_shiftsi_compare0_scratch"
9619  [(set (reg:CC_NZ CC_REGNUM)
9620	(compare:CC_NZ
9621	 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
9622		   (match_operator:SI 2 "shift_operator"
9623		    [(match_operand:SI 3 "s_register_operand" "r,r")
9624		     (match_operand:SI 4 "shift_amount_operand" "M,r")]))
9625	 (const_int 0)))
9626   (clobber (match_scratch:SI 0 "=r,r"))]
9627  "TARGET_32BIT"
9628  "subs%?\\t%0, %1, %3%S2"
9629  [(set_attr "conds" "set")
9630   (set_attr "shift" "3")
9631   (set_attr "arch" "32,a")
9632   (set_attr "type" "alus_shift_imm,alus_shift_reg")])
9633
9634
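;; The *and_scc and *ior_scc patterns below combine a comparison whose result
;; is already in the condition-code register with a register value, using
;; conditional execution rather than first materialising the 0/1 value.
;; A hypothetical C shape that can reach them (via combine) is:
;;   int f (int x, int a, int b) { return (a < b) & x; }
;; for which the AND form emits a conditional "mov Rd, #0" on the false
;; condition followed by a conditional "and Rd, Rm, #1" on the true one.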
9635(define_insn_and_split "*and_scc"
9636  [(set (match_operand:SI 0 "s_register_operand" "=r")
9637	(and:SI (match_operator:SI 1 "arm_comparison_operator"
9638		 [(match_operand 2 "cc_register" "") (const_int 0)])
9639		(match_operand:SI 3 "s_register_operand" "r")))]
9640  "TARGET_ARM"
9641  "#"   ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
9642  "&& reload_completed"
9643  [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
9644   (cond_exec (match_dup 4) (set (match_dup 0)
9645                                 (and:SI (match_dup 3) (const_int 1))))]
9646  {
9647    machine_mode mode = GET_MODE (operands[2]);
9648    enum rtx_code rc = GET_CODE (operands[1]);
9649
9650    /* Note that operands[4] is the same as operands[1],
9651       but with VOIDmode as the result. */
9652    operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9653    if (mode == CCFPmode || mode == CCFPEmode)
9654      rc = reverse_condition_maybe_unordered (rc);
9655    else
9656      rc = reverse_condition (rc);
9657    operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9658  }
9659  [(set_attr "conds" "use")
9660   (set_attr "type" "multiple")
9661   (set_attr "length" "8")]
9662)
9663
9664(define_insn_and_split "*ior_scc"
9665  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9666	(ior:SI (match_operator:SI 1 "arm_comparison_operator"
9667		 [(match_operand 2 "cc_register" "") (const_int 0)])
9668		(match_operand:SI 3 "s_register_operand" "0,?r")))]
9669  "TARGET_ARM"
9670  "@
9671   orr%d1\\t%0, %3, #1
9672   #"
9673  "&& reload_completed
9674   && REGNO (operands[0]) != REGNO (operands[3])"
9675  ;; && which_alternative == 1
9676  ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9677  [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9678   (cond_exec (match_dup 4) (set (match_dup 0)
9679                                 (ior:SI (match_dup 3) (const_int 1))))]
9680  {
9681    machine_mode mode = GET_MODE (operands[2]);
9682    enum rtx_code rc = GET_CODE (operands[1]);
9683
9684    /* Note that operands[4] is the same as operands[1],
9685       but with VOIDmode as the result. */
9686    operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9687    if (mode == CCFPmode || mode == CCFPEmode)
9688      rc = reverse_condition_maybe_unordered (rc);
9689    else
9690      rc = reverse_condition (rc);
9691    operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9692  }
9693  [(set_attr "conds" "use")
9694   (set_attr "length" "4,8")
9695   (set_attr "type" "logic_imm,multiple")]
9696)
9697
9698; A series of splitters for the compare_scc pattern below.  Note that
9699; order is important.
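;; As a rough guide (hypothetical C, typical optimised output), the splitters
;; below turn flag-free forms of these idioms into short sequences:
;;   int lt0 (int x) { return x < 0;  }   /* lsr  r0, r0, #31                      */
;;   int ge0 (int x) { return x >= 0; }   /* mvn  r0, r0;  lsr r0, r0, #31         */
;;   int eq0 (int x) { return x == 0; }   /* clz  r0, r0;  lsr r0, r0, #5 (ARMv5T+) */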
9700(define_split
9701  [(set (match_operand:SI 0 "s_register_operand" "")
9702	(lt:SI (match_operand:SI 1 "s_register_operand" "")
9703	       (const_int 0)))
9704   (clobber (reg:CC CC_REGNUM))]
9705  "TARGET_32BIT && reload_completed"
9706  [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
9707
9708(define_split
9709  [(set (match_operand:SI 0 "s_register_operand" "")
9710	(ge:SI (match_operand:SI 1 "s_register_operand" "")
9711	       (const_int 0)))
9712   (clobber (reg:CC CC_REGNUM))]
9713  "TARGET_32BIT && reload_completed"
9714  [(set (match_dup 0) (not:SI (match_dup 1)))
9715   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
9716
9717(define_split
9718  [(set (match_operand:SI 0 "s_register_operand" "")
9719	(eq:SI (match_operand:SI 1 "s_register_operand" "")
9720	       (const_int 0)))
9721   (clobber (reg:CC CC_REGNUM))]
9722  "arm_arch5t && TARGET_32BIT"
9723  [(set (match_dup 0) (clz:SI (match_dup 1)))
9724   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9725)
9726
9727(define_split
9728  [(set (match_operand:SI 0 "s_register_operand" "")
9729	(eq:SI (match_operand:SI 1 "s_register_operand" "")
9730	       (const_int 0)))
9731   (clobber (reg:CC CC_REGNUM))]
9732  "TARGET_32BIT && reload_completed"
9733  [(parallel
9734    [(set (reg:CC CC_REGNUM)
9735	  (compare:CC (const_int 1) (match_dup 1)))
9736     (set (match_dup 0)
9737	  (minus:SI (const_int 1) (match_dup 1)))])
9738   (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9739	      (set (match_dup 0) (const_int 0)))])
9740
9741(define_split
9742  [(set (match_operand:SI 0 "s_register_operand" "")
9743	(ne:SI (match_operand:SI 1 "s_register_operand" "")
9744	       (match_operand:SI 2 "const_int_operand" "")))
9745   (clobber (reg:CC CC_REGNUM))]
9746  "TARGET_32BIT && reload_completed"
9747  [(parallel
9748    [(set (reg:CC CC_REGNUM)
9749	  (compare:CC (match_dup 1) (match_dup 2)))
9750     (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9751   (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9752	      (set (match_dup 0) (const_int 1)))]
9753{
9754  operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
9755})
9756
9757(define_split
9758  [(set (match_operand:SI 0 "s_register_operand" "")
9759	(ne:SI (match_operand:SI 1 "s_register_operand" "")
9760	       (match_operand:SI 2 "arm_add_operand" "")))
9761   (clobber (reg:CC CC_REGNUM))]
9762  "TARGET_32BIT && reload_completed"
9763  [(parallel
9764    [(set (reg:CC_NZ CC_REGNUM)
9765	  (compare:CC_NZ (minus:SI (match_dup 1) (match_dup 2))
9766			   (const_int 0)))
9767     (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9768   (cond_exec (ne:CC_NZ (reg:CC_NZ CC_REGNUM) (const_int 0))
9769	      (set (match_dup 0) (const_int 1)))])
9770
9771(define_insn_and_split "*compare_scc"
9772  [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9773	(match_operator:SI 1 "arm_comparison_operator"
9774	 [(match_operand:SI 2 "s_register_operand" "r,r")
9775	  (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9776   (clobber (reg:CC CC_REGNUM))]
9777  "TARGET_32BIT"
9778  "#"
9779  "&& reload_completed"
9780  [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9781   (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9782   (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9783{
9784  rtx tmp1;
9785  machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9786					   operands[2], operands[3]);
9787  enum rtx_code rc = GET_CODE (operands[1]);
9788
9789  tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9790
9791  operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9792  if (mode == CCFPmode || mode == CCFPEmode)
9793    rc = reverse_condition_maybe_unordered (rc);
9794  else
9795    rc = reverse_condition (rc);
9796  operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9797}
9798  [(set_attr "type" "multiple")]
9799)
9800
9801;; Attempt to improve the sequence generated by the compare_scc splitters
9802;; not to use conditional execution.
9803
9804;; Rd = (eq (reg1) (const_int 0))  // ARMv5
9805;;	clz Rd, reg1
9806;;	lsr Rd, Rd, #5
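;; This works because clz of zero is 32 and 32 >> 5 == 1, while clz of any
;; non-zero value is at most 31, so the shift yields 0.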
9807(define_peephole2
9808  [(set (reg:CC CC_REGNUM)
9809	(compare:CC (match_operand:SI 1 "register_operand" "")
9810		    (const_int 0)))
9811   (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9812	      (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9813   (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9814	      (set (match_dup 0) (const_int 1)))]
9815  "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9816  [(set (match_dup 0) (clz:SI (match_dup 1)))
9817   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9818)
9819
9820;; Rd = (eq (reg1) (const_int 0))  // !ARMv5
9821;;	negs Rd, reg1
9822;;	adc  Rd, Rd, reg1
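;; NEGS computes 0 - reg1 and leaves the carry flag set only when no borrow
;; occurs, i.e. only when reg1 is zero; ADC then forms (-reg1) + reg1 + C,
;; which is exactly that carry bit.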
9823(define_peephole2
9824  [(set (reg:CC CC_REGNUM)
9825	(compare:CC (match_operand:SI 1 "register_operand" "")
9826		    (const_int 0)))
9827   (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9828	      (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9829   (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9830	      (set (match_dup 0) (const_int 1)))
9831   (match_scratch:SI 2 "r")]
9832  "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9833  [(parallel
9834    [(set (reg:CC CC_REGNUM)
9835	  (compare:CC (const_int 0) (match_dup 1)))
9836     (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9837   (set (match_dup 0)
9838	(plus:SI (plus:SI (match_dup 1) (match_dup 2))
9839		 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9840)
9841
9842;; Rd = (eq (reg1) (reg2/imm))	// ARMv5 and optimising for speed.
9843;;	sub  Rd, reg1, reg2
9844;;	clz  Rd, Rd
9845;;	lsr  Rd, Rd, #5
9846(define_peephole2
9847  [(set (reg:CC CC_REGNUM)
9848	(compare:CC (match_operand:SI 1 "register_operand" "")
9849		    (match_operand:SI 2 "arm_rhs_operand" "")))
9850   (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9851	      (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9852   (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9853	      (set (match_dup 0) (const_int 1)))]
9854  "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
9855  && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
9856  [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
9857   (set (match_dup 0) (clz:SI (match_dup 0)))
9858   (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
9859)
9860
9861
9862;; Rd = (eq (reg1) (reg2))	// !ARMv5 or optimising for size.
9863;;	sub  T1, reg1, reg2
9864;;	negs Rd, T1
9865;;	adc  Rd, Rd, T1
9866(define_peephole2
9867  [(set (reg:CC CC_REGNUM)
9868	(compare:CC (match_operand:SI 1 "register_operand" "")
9869		    (match_operand:SI 2 "arm_rhs_operand" "")))
9870   (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9871	      (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9872   (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9873	      (set (match_dup 0) (const_int 1)))
9874   (match_scratch:SI 3 "r")]
9875  "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9876  [(set (match_dup 3) (match_dup 4))
9877   (parallel
9878    [(set (reg:CC CC_REGNUM)
9879	  (compare:CC (const_int 0) (match_dup 3)))
9880     (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9881   (set (match_dup 0)
9882	(plus:SI (plus:SI (match_dup 0) (match_dup 3))
9883		 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9884  "
9885  if (CONST_INT_P (operands[2]))
9886    operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
9887  else
9888    operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
9889  ")
9890
9891(define_insn "*cond_move"
9892  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9893	(if_then_else:SI (match_operator 3 "equality_operator"
9894			  [(match_operator 4 "arm_comparison_operator"
9895			    [(match_operand 5 "cc_register" "") (const_int 0)])
9896			   (const_int 0)])
9897			 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9898			 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9899  "TARGET_ARM"
9900  "*
9901    if (GET_CODE (operands[3]) == NE)
9902      {
9903        if (which_alternative != 1)
9904	  output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9905        if (which_alternative != 0)
9906	  output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9907        return \"\";
9908      }
9909    if (which_alternative != 0)
9910      output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9911    if (which_alternative != 1)
9912      output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9913    return \"\";
9914  "
9915  [(set_attr "conds" "use")
9916   (set_attr_alternative "type"
9917                         [(if_then_else (match_operand 2 "const_int_operand" "")
9918                                        (const_string "mov_imm")
9919                                        (const_string "mov_reg"))
9920                          (if_then_else (match_operand 1 "const_int_operand" "")
9921                                        (const_string "mov_imm")
9922                                        (const_string "mov_reg"))
9923                          (const_string "multiple")])
9924   (set_attr "length" "4,4,8")]
9925)
9926
9927(define_insn "*cond_arith"
9928  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9929        (match_operator:SI 5 "shiftable_operator"
9930	 [(match_operator:SI 4 "arm_comparison_operator"
9931           [(match_operand:SI 2 "s_register_operand" "r,r")
9932	    (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9933          (match_operand:SI 1 "s_register_operand" "0,?r")]))
9934   (clobber (reg:CC CC_REGNUM))]
9935  "TARGET_ARM"
9936  "*
9937    if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9938      return \"%i5\\t%0, %1, %2, lsr #31\";
9939
9940    output_asm_insn (\"cmp\\t%2, %3\", operands);
9941    if (GET_CODE (operands[5]) == AND)
9942      output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9943    else if (GET_CODE (operands[5]) == MINUS)
9944      output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9945    else if (which_alternative != 0)
9946      output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9947    return \"%i5%d4\\t%0, %1, #1\";
9948  "
9949  [(set_attr "conds" "clob")
9950   (set_attr "length" "12")
9951   (set_attr "type" "multiple")]
9952)
9953
9954(define_insn "*cond_sub"
9955  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9956        (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9957		  (match_operator:SI 4 "arm_comparison_operator"
9958                   [(match_operand:SI 2 "s_register_operand" "r,r")
9959		    (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9960   (clobber (reg:CC CC_REGNUM))]
9961  "TARGET_ARM"
9962  "*
9963    output_asm_insn (\"cmp\\t%2, %3\", operands);
9964    if (which_alternative != 0)
9965      output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9966    return \"sub%d4\\t%0, %1, #1\";
9967  "
9968  [(set_attr "conds" "clob")
9969   (set_attr "length" "8,12")
9970   (set_attr "type" "multiple")]
9971)
9972
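;; The *cmp_ite0 and *cmp_ite1 patterns below compute the condition flags for
;; a combined test - roughly (cond4 && cond5) and (!cond4 || cond5)
;; respectively - using a compare followed by a conditionally executed
;; compare (guarded by an IT block on Thumb-2).  A hypothetical C shape that
;; reaches them is:
;;   if (a == b && c < d) ...
;; where only the flags of the combined condition are needed.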
9973(define_insn "*cmp_ite0"
9974  [(set (match_operand 6 "dominant_cc_register" "")
9975	(compare
9976	 (if_then_else:SI
9977	  (match_operator 4 "arm_comparison_operator"
9978	   [(match_operand:SI 0 "s_register_operand"
9979	        "l,l,l,r,r,r,r,r,r")
9980	    (match_operand:SI 1 "arm_add_operand"
9981	        "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9982	  (match_operator:SI 5 "arm_comparison_operator"
9983	   [(match_operand:SI 2 "s_register_operand"
9984	        "l,r,r,l,l,r,r,r,r")
9985	    (match_operand:SI 3 "arm_add_operand"
9986	        "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9987	  (const_int 0))
9988	 (const_int 0)))]
9989  "TARGET_32BIT"
9990  "*
9991  {
9992    static const char * const cmp1[NUM_OF_COND_CMP][2] =
9993    {
9994      {\"cmp%d5\\t%0, %1\",
9995       \"cmp%d4\\t%2, %3\"},
9996      {\"cmn%d5\\t%0, #%n1\",
9997       \"cmp%d4\\t%2, %3\"},
9998      {\"cmp%d5\\t%0, %1\",
9999       \"cmn%d4\\t%2, #%n3\"},
10000      {\"cmn%d5\\t%0, #%n1\",
10001       \"cmn%d4\\t%2, #%n3\"}
10002    };
10003    static const char * const cmp2[NUM_OF_COND_CMP][2] =
10004    {
10005      {\"cmp\\t%2, %3\",
10006       \"cmp\\t%0, %1\"},
10007      {\"cmp\\t%2, %3\",
10008       \"cmn\\t%0, #%n1\"},
10009      {\"cmn\\t%2, #%n3\",
10010       \"cmp\\t%0, %1\"},
10011      {\"cmn\\t%2, #%n3\",
10012       \"cmn\\t%0, #%n1\"}
10013    };
10014    static const char * const ite[2] =
10015    {
10016      \"it\\t%d5\",
10017      \"it\\t%d4\"
10018    };
10019    static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10020                                   CMP_CMP, CMN_CMP, CMP_CMP,
10021                                   CMN_CMP, CMP_CMN, CMN_CMN};
10022    int swap =
10023      comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10024
10025    output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10026    if (TARGET_THUMB2)
10027      output_asm_insn (ite[swap], operands);
10029    output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10030    return \"\";
10031  }"
10032  [(set_attr "conds" "set")
10033   (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10034   (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10035   (set_attr "type" "multiple")
10036   (set_attr_alternative "length"
10037      [(const_int 6)
10038       (const_int 8)
10039       (const_int 8)
10040       (const_int 8)
10041       (const_int 8)
10042       (if_then_else (eq_attr "is_thumb" "no")
10043           (const_int 8)
10044           (const_int 10))
10045       (if_then_else (eq_attr "is_thumb" "no")
10046           (const_int 8)
10047           (const_int 10))
10048       (if_then_else (eq_attr "is_thumb" "no")
10049           (const_int 8)
10050           (const_int 10))
10051       (if_then_else (eq_attr "is_thumb" "no")
10052           (const_int 8)
10053           (const_int 10))])]
10054)
10055
10056(define_insn "*cmp_ite1"
10057  [(set (match_operand 6 "dominant_cc_register" "")
10058	(compare
10059	 (if_then_else:SI
10060	  (match_operator 4 "arm_comparison_operator"
10061	   [(match_operand:SI 0 "s_register_operand"
10062	        "l,l,l,r,r,r,r,r,r")
10063	    (match_operand:SI 1 "arm_add_operand"
10064	        "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
10065	  (match_operator:SI 5 "arm_comparison_operator"
10066	   [(match_operand:SI 2 "s_register_operand"
10067	        "l,r,r,l,l,r,r,r,r")
10068	    (match_operand:SI 3 "arm_add_operand"
10069	        "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
10070	  (const_int 1))
10071	 (const_int 0)))]
10072  "TARGET_32BIT"
10073  "*
10074  {
10075    static const char * const cmp1[NUM_OF_COND_CMP][2] =
10076    {
10077      {\"cmp\\t%0, %1\",
10078       \"cmp\\t%2, %3\"},
10079      {\"cmn\\t%0, #%n1\",
10080       \"cmp\\t%2, %3\"},
10081      {\"cmp\\t%0, %1\",
10082       \"cmn\\t%2, #%n3\"},
10083      {\"cmn\\t%0, #%n1\",
10084       \"cmn\\t%2, #%n3\"}
10085    };
10086    static const char * const cmp2[NUM_OF_COND_CMP][2] =
10087    {
10088      {\"cmp%d4\\t%2, %3\",
10089       \"cmp%D5\\t%0, %1\"},
10090      {\"cmp%d4\\t%2, %3\",
10091       \"cmn%D5\\t%0, #%n1\"},
10092      {\"cmn%d4\\t%2, #%n3\",
10093       \"cmp%D5\\t%0, %1\"},
10094      {\"cmn%d4\\t%2, #%n3\",
10095       \"cmn%D5\\t%0, #%n1\"}
10096    };
10097    static const char * const ite[2] =
10098    {
10099      \"it\\t%d4\",
10100      \"it\\t%D5\"
10101    };
10102    static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
10103                                   CMP_CMP, CMN_CMP, CMP_CMP,
10104                                   CMN_CMP, CMP_CMN, CMN_CMN};
10105    int swap =
10106      comparison_dominates_p (GET_CODE (operands[5]),
10107			      reverse_condition (GET_CODE (operands[4])));
10108
10109    output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10110    if (TARGET_THUMB2)
10111      output_asm_insn (ite[swap], operands);
10113    output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10114    return \"\";
10115  }"
10116  [(set_attr "conds" "set")
10117   (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
10118   (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
10119   (set_attr_alternative "length"
10120      [(const_int 6)
10121       (const_int 8)
10122       (const_int 8)
10123       (const_int 8)
10124       (const_int 8)
10125       (if_then_else (eq_attr "is_thumb" "no")
10126           (const_int 8)
10127           (const_int 10))
10128       (if_then_else (eq_attr "is_thumb" "no")
10129           (const_int 8)
10130           (const_int 10))
10131       (if_then_else (eq_attr "is_thumb" "no")
10132           (const_int 8)
10133           (const_int 10))
10134       (if_then_else (eq_attr "is_thumb" "no")
10135           (const_int 8)
10136           (const_int 10))])
10137   (set_attr "type" "multiple")]
10138)
10139
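;; *cmp_and and *cmp_ior are the corresponding patterns for an explicit AND
;; or IOR of two comparisons: they set the flags for (cond4 && cond5) and
;; (cond4 || cond5) with a compare plus a conditional compare, provided one
;; condition dominates the other.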
10140(define_insn "*cmp_and"
10141  [(set (match_operand 6 "dominant_cc_register" "")
10142	(compare
10143	 (and:SI
10144	  (match_operator 4 "arm_comparison_operator"
10145	   [(match_operand:SI 0 "s_register_operand"
10146	        "l,l,l,r,r,r,r,r,r,r")
10147	    (match_operand:SI 1 "arm_add_operand"
10148	        "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10149	  (match_operator:SI 5 "arm_comparison_operator"
10150	   [(match_operand:SI 2 "s_register_operand"
10151	        "l,r,r,l,l,r,r,r,r,r")
10152	    (match_operand:SI 3 "arm_add_operand"
10153	        "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10154	 (const_int 0)))]
10155  "TARGET_32BIT"
10156  "*
10157  {
10158    static const char *const cmp1[NUM_OF_COND_CMP][2] =
10159    {
10160      {\"cmp%d5\\t%0, %1\",
10161       \"cmp%d4\\t%2, %3\"},
10162      {\"cmn%d5\\t%0, #%n1\",
10163       \"cmp%d4\\t%2, %3\"},
10164      {\"cmp%d5\\t%0, %1\",
10165       \"cmn%d4\\t%2, #%n3\"},
10166      {\"cmn%d5\\t%0, #%n1\",
10167       \"cmn%d4\\t%2, #%n3\"}
10168    };
10169    static const char *const cmp2[NUM_OF_COND_CMP][2] =
10170    {
10171      {\"cmp\\t%2, %3\",
10172       \"cmp\\t%0, %1\"},
10173      {\"cmp\\t%2, %3\",
10174       \"cmn\\t%0, #%n1\"},
10175      {\"cmn\\t%2, #%n3\",
10176       \"cmp\\t%0, %1\"},
10177      {\"cmn\\t%2, #%n3\",
10178       \"cmn\\t%0, #%n1\"}
10179    };
10180    static const char *const ite[2] =
10181    {
10182      \"it\\t%d5\",
10183      \"it\\t%d4\"
10184    };
10185    static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10186                                  CMP_CMP, CMN_CMP, CMP_CMP,
10187                                  CMP_CMP, CMN_CMP, CMP_CMN,
10188				  CMN_CMN};
10189    int swap =
10190      comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10191
10192    output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10193    if (TARGET_THUMB2)
10194      output_asm_insn (ite[swap], operands);
10196    output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10197    return \"\";
10198  }"
10199  [(set_attr "conds" "set")
10200   (set_attr "predicable" "no")
10201   (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10202   (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10203   (set_attr_alternative "length"
10204      [(const_int 6)
10205       (const_int 8)
10206       (const_int 8)
10207       (const_int 8)
10208       (const_int 8)
10209       (const_int 6)
10210       (if_then_else (eq_attr "is_thumb" "no")
10211           (const_int 8)
10212           (const_int 10))
10213       (if_then_else (eq_attr "is_thumb" "no")
10214           (const_int 8)
10215           (const_int 10))
10216       (if_then_else (eq_attr "is_thumb" "no")
10217           (const_int 8)
10218           (const_int 10))
10219       (if_then_else (eq_attr "is_thumb" "no")
10220           (const_int 8)
10221           (const_int 10))])
10222   (set_attr "type" "multiple")]
10223)
10224
10225(define_insn "*cmp_ior"
10226  [(set (match_operand 6 "dominant_cc_register" "")
10227	(compare
10228	 (ior:SI
10229	  (match_operator 4 "arm_comparison_operator"
10230	   [(match_operand:SI 0 "s_register_operand"
10231	        "l,l,l,r,r,r,r,r,r,r")
10232	    (match_operand:SI 1 "arm_add_operand"
10233	        "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
10234	  (match_operator:SI 5 "arm_comparison_operator"
10235	   [(match_operand:SI 2 "s_register_operand"
10236	        "l,r,r,l,l,r,r,r,r,r")
10237	    (match_operand:SI 3 "arm_add_operand"
10238	        "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
10239	 (const_int 0)))]
10240  "TARGET_32BIT"
10241  "*
10242  {
10243    static const char *const cmp1[NUM_OF_COND_CMP][2] =
10244    {
10245      {\"cmp\\t%0, %1\",
10246       \"cmp\\t%2, %3\"},
10247      {\"cmn\\t%0, #%n1\",
10248       \"cmp\\t%2, %3\"},
10249      {\"cmp\\t%0, %1\",
10250       \"cmn\\t%2, #%n3\"},
10251      {\"cmn\\t%0, #%n1\",
10252       \"cmn\\t%2, #%n3\"}
10253    };
10254    static const char *const cmp2[NUM_OF_COND_CMP][2] =
10255    {
10256      {\"cmp%D4\\t%2, %3\",
10257       \"cmp%D5\\t%0, %1\"},
10258      {\"cmp%D4\\t%2, %3\",
10259       \"cmn%D5\\t%0, #%n1\"},
10260      {\"cmn%D4\\t%2, #%n3\",
10261       \"cmp%D5\\t%0, %1\"},
10262      {\"cmn%D4\\t%2, #%n3\",
10263       \"cmn%D5\\t%0, #%n1\"}
10264    };
10265    static const char *const ite[2] =
10266    {
10267      \"it\\t%D4\",
10268      \"it\\t%D5\"
10269    };
10270    static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
10271                                  CMP_CMP, CMN_CMP, CMP_CMP,
10272				  CMP_CMP, CMN_CMP, CMP_CMN,
10273				  CMN_CMN};
10274    int swap =
10275      comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
10276
10277    output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
10278    if (TARGET_THUMB2)
10279      output_asm_insn (ite[swap], operands);
10281    output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
10282    return \"\";
10283  }
10284  "
10285  [(set_attr "conds" "set")
10286   (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
10287   (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
10288   (set_attr_alternative "length"
10289      [(const_int 6)
10290       (const_int 8)
10291       (const_int 8)
10292       (const_int 8)
10293       (const_int 8)
10294       (const_int 6)
10295       (if_then_else (eq_attr "is_thumb" "no")
10296           (const_int 8)
10297           (const_int 10))
10298       (if_then_else (eq_attr "is_thumb" "no")
10299           (const_int 8)
10300           (const_int 10))
10301       (if_then_else (eq_attr "is_thumb" "no")
10302           (const_int 8)
10303           (const_int 10))
10304       (if_then_else (eq_attr "is_thumb" "no")
10305           (const_int 8)
10306           (const_int 10))])
10307   (set_attr "type" "multiple")]
10308)
10309
10310(define_insn_and_split "*ior_scc_scc"
10311  [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10312	(ior:SI (match_operator:SI 3 "arm_comparison_operator"
10313		 [(match_operand:SI 1 "s_register_operand" "l,r")
10314		  (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10315		(match_operator:SI 6 "arm_comparison_operator"
10316		 [(match_operand:SI 4 "s_register_operand" "l,r")
10317		  (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10318   (clobber (reg:CC CC_REGNUM))]
10319  "TARGET_32BIT
10320   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
10321       != CCmode)"
10322  "#"
10323  "TARGET_32BIT && reload_completed"
10324  [(set (match_dup 7)
10325	(compare
10326	 (ior:SI
10327	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10328	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10329	 (const_int 0)))
10330   (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10331  "operands[7]
10332     = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10333						  DOM_CC_X_OR_Y),
10334		    CC_REGNUM);"
10335  [(set_attr "conds" "clob")
10336   (set_attr "enabled_for_short_it" "yes,no")
10337   (set_attr "length" "16")
10338   (set_attr "type" "multiple")]
10339)
10340
10341; If the above pattern is followed by a CMP insn, then the compare is
10342; redundant, since we can rework the conditional instruction that follows.
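;; A hypothetical C shape that exercises this is one where both the value of
;; the disjunction and its flags are wanted, e.g.:
;;   int t = (a == b) | (c == d);
;;   if (t)
;;     g ();
;;   return t;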
10343(define_insn_and_split "*ior_scc_scc_cmp"
10344  [(set (match_operand 0 "dominant_cc_register" "")
10345	(compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
10346			  [(match_operand:SI 1 "s_register_operand" "l,r")
10347			   (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10348			 (match_operator:SI 6 "arm_comparison_operator"
10349			  [(match_operand:SI 4 "s_register_operand" "l,r")
10350			   (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10351		 (const_int 0)))
10352   (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10353	(ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10354		(match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10355  "TARGET_32BIT"
10356  "#"
10357  "TARGET_32BIT && reload_completed"
10358  [(set (match_dup 0)
10359	(compare
10360	 (ior:SI
10361	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10362	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10363	 (const_int 0)))
10364   (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10365  ""
10366  [(set_attr "conds" "set")
10367   (set_attr "enabled_for_short_it" "yes,no")
10368   (set_attr "length" "16")
10369   (set_attr "type" "multiple")]
10370)
10371
10372(define_insn_and_split "*and_scc_scc"
10373  [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
10374	(and:SI (match_operator:SI 3 "arm_comparison_operator"
10375		 [(match_operand:SI 1 "s_register_operand" "l,r")
10376		  (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10377		(match_operator:SI 6 "arm_comparison_operator"
10378		 [(match_operand:SI 4 "s_register_operand" "l,r")
10379		  (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
10380   (clobber (reg:CC CC_REGNUM))]
10381  "TARGET_32BIT
10382   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10383       != CCmode)"
10384  "#"
10385  "TARGET_32BIT && reload_completed
10386   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10387       != CCmode)"
10388  [(set (match_dup 7)
10389	(compare
10390	 (and:SI
10391	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10392	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10393	 (const_int 0)))
10394   (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
10395  "operands[7]
10396     = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
10397						  DOM_CC_X_AND_Y),
10398		    CC_REGNUM);"
10399  [(set_attr "conds" "clob")
10400   (set_attr "enabled_for_short_it" "yes,no")
10401   (set_attr "length" "16")
10402   (set_attr "type" "multiple")]
10403)
10404
10405; If the above pattern is followed by a CMP insn, then the compare is
10406; redundant, since we can rework the conditional instruction that follows.
10407(define_insn_and_split "*and_scc_scc_cmp"
10408  [(set (match_operand 0 "dominant_cc_register" "")
10409	(compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
10410			  [(match_operand:SI 1 "s_register_operand" "l,r")
10411			   (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
10412			 (match_operator:SI 6 "arm_comparison_operator"
10413			  [(match_operand:SI 4 "s_register_operand" "l,r")
10414			   (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
10415		 (const_int 0)))
10416   (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
10417	(and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10418		(match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
10419  "TARGET_32BIT"
10420  "#"
10421  "TARGET_32BIT && reload_completed"
10422  [(set (match_dup 0)
10423	(compare
10424	 (and:SI
10425	  (match_op_dup 3 [(match_dup 1) (match_dup 2)])
10426	  (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
10427	 (const_int 0)))
10428   (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
10429  ""
10430  [(set_attr "conds" "set")
10431   (set_attr "enabled_for_short_it" "yes,no")
10432   (set_attr "length" "16")
10433   (set_attr "type" "multiple")]
10434)
10435
10436;; If there is no dominance in the comparison, then we can still save an
10437;; instruction in the AND case, since we know that the second compare
10438;; need only zero the value if false (if true, then the value is already
10439;; correct).
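;; The split below computes the first comparison's 0/1 result into the
;; destination, performs the second comparison, and then conditionally
;; zeroes the destination when the second condition is false.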
10440(define_insn_and_split "*and_scc_scc_nodom"
10441  [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
10442	(and:SI (match_operator:SI 3 "arm_comparison_operator"
10443		 [(match_operand:SI 1 "s_register_operand" "r,r,0")
10444		  (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
10445		(match_operator:SI 6 "arm_comparison_operator"
10446		 [(match_operand:SI 4 "s_register_operand" "r,r,r")
10447		  (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
10448   (clobber (reg:CC CC_REGNUM))]
10449  "TARGET_32BIT
10450   && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
10451       == CCmode)"
10452  "#"
10453  "TARGET_32BIT && reload_completed"
10454  [(parallel [(set (match_dup 0)
10455		   (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
10456	      (clobber (reg:CC CC_REGNUM))])
10457   (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
10458   (set (match_dup 0)
10459	(if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
10460			 (match_dup 0)
10461			 (const_int 0)))]
10462  "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
10463					      operands[4], operands[5]),
10464			      CC_REGNUM);
10465   operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
10466				  operands[5]);"
10467  [(set_attr "conds" "clob")
10468   (set_attr "length" "20")
10469   (set_attr "type" "multiple")]
10470)
10471
10472(define_split
10473  [(set (reg:CC_NZ CC_REGNUM)
10474	(compare:CC_NZ (ior:SI
10475			  (and:SI (match_operand:SI 0 "s_register_operand" "")
10476				  (const_int 1))
10477			  (match_operator:SI 1 "arm_comparison_operator"
10478			   [(match_operand:SI 2 "s_register_operand" "")
10479			    (match_operand:SI 3 "arm_add_operand" "")]))
10480			 (const_int 0)))
10481   (clobber (match_operand:SI 4 "s_register_operand" ""))]
10482  "TARGET_ARM"
10483  [(set (match_dup 4)
10484	(ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10485		(match_dup 0)))
10486   (set (reg:CC_NZ CC_REGNUM)
10487	(compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
10488			 (const_int 0)))]
10489  "")
10490
10491(define_split
10492  [(set (reg:CC_NZ CC_REGNUM)
10493	(compare:CC_NZ (ior:SI
10494			  (match_operator:SI 1 "arm_comparison_operator"
10495			   [(match_operand:SI 2 "s_register_operand" "")
10496			    (match_operand:SI 3 "arm_add_operand" "")])
10497			  (and:SI (match_operand:SI 0 "s_register_operand" "")
10498				  (const_int 1)))
10499			 (const_int 0)))
10500   (clobber (match_operand:SI 4 "s_register_operand" ""))]
10501  "TARGET_ARM"
10502  [(set (match_dup 4)
10503	(ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
10504		(match_dup 0)))
10505   (set (reg:CC_NZ CC_REGNUM)
10506	(compare:CC_NZ (and:SI (match_dup 4) (const_int 1))
10507			 (const_int 0)))]
10508  "")
10509;; ??? The conditional patterns above need checking for Thumb-2 usefulness
10510
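;; *negscc materialises -(a <cond> b), i.e. 0 or an all-ones mask.  A
;; hypothetical C idiom that produces it:
;;   int mask (int a, int b) { return -(a != b); }
;; The split special-cases LT against zero (a single "asr #31") and NE
;; (subs followed by a conditional mvn); other conditions fall back to a
;; compare plus a conditional "mov #0" and a conditional "mvn #0".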
10511(define_insn_and_split "*negscc"
10512  [(set (match_operand:SI 0 "s_register_operand" "=r")
10513	(neg:SI (match_operator 3 "arm_comparison_operator"
10514		 [(match_operand:SI 1 "s_register_operand" "r")
10515		  (match_operand:SI 2 "arm_rhs_operand" "rI")])))
10516   (clobber (reg:CC CC_REGNUM))]
10517  "TARGET_ARM"
10518  "#"
10519  "&& reload_completed"
10520  [(const_int 0)]
10521  {
10522    rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
10523
10524    if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
10525       {
10526         /* Emit mov\\t%0, %1, asr #31 */
10527         emit_insn (gen_rtx_SET (operands[0],
10528                                 gen_rtx_ASHIFTRT (SImode,
10529                                                   operands[1],
10530                                                   GEN_INT (31))));
10531         DONE;
10532       }
10533     else if (GET_CODE (operands[3]) == NE)
10534       {
10535        /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
10536        if (CONST_INT_P (operands[2]))
10537          emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
10538                                        gen_int_mode (-INTVAL (operands[2]),
10539						      SImode)));
10540        else
10541          emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
10542
10543        emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10544                                      gen_rtx_NE (SImode,
10545                                                  cc_reg,
10546                                                  const0_rtx),
10547                                      gen_rtx_SET (operands[0],
10548                                                   GEN_INT (~0))));
10549        DONE;
10550      }
10551    else
10552      {
10553        /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
10554        emit_insn (gen_rtx_SET (cc_reg,
10555                                gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
10556        enum rtx_code rc = GET_CODE (operands[3]);
10557
10558        rc = reverse_condition (rc);
10559        emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10560                                      gen_rtx_fmt_ee (rc,
10561                                                      VOIDmode,
10562                                                      cc_reg,
10563                                                      const0_rtx),
10564                                      gen_rtx_SET (operands[0], const0_rtx)));
10565        rc = GET_CODE (operands[3]);
10566        emit_insn (gen_rtx_COND_EXEC (VOIDmode,
10567                                      gen_rtx_fmt_ee (rc,
10568                                                      VOIDmode,
10569                                                      cc_reg,
10570                                                      const0_rtx),
10571                                      gen_rtx_SET (operands[0],
10572                                                   GEN_INT (~0))));
10573        DONE;
10574      }
10575     FAIL;
10576  }
10577  [(set_attr "conds" "clob")
10578   (set_attr "length" "12")
10579   (set_attr "type" "multiple")]
10580)
10581
10582(define_insn_and_split "movcond_addsi"
10583  [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
10584	(if_then_else:SI
10585	 (match_operator 5 "comparison_operator"
10586	  [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
10587	            (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
10588            (const_int 0)])
10589	 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
10590	 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
10591   (clobber (reg:CC CC_REGNUM))]
10592   "TARGET_32BIT"
10593   "#"
10594   "&& reload_completed"
10595  [(set (reg:CC_NZ CC_REGNUM)
10596	(compare:CC_NZ
10597	 (plus:SI (match_dup 3)
10598		  (match_dup 4))
10599	 (const_int 0)))
10600   (set (match_dup 0) (match_dup 1))
10601   (cond_exec (match_dup 6)
10602	      (set (match_dup 0) (match_dup 2)))]
10603  "
10604  {
10605    machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
10606					     operands[3], operands[4]);
10607    enum rtx_code rc = GET_CODE (operands[5]);
10608    operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10609    gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
10610    if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
10611      rc = reverse_condition (rc);
10612    else
10613      std::swap (operands[1], operands[2]);
10614
10615    operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10616  }
10617  "
10618  [(set_attr "conds" "clob")
10619   (set_attr "enabled_for_short_it" "no,yes,yes")
10620   (set_attr "type" "multiple")]
10621)
10622
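;; "movcond" open-codes a conditional select.  For LT/GE comparisons against
;; zero with a zero arm, the output code uses the "asr #31" mask trick:
;; x, asr #31 is all ones exactly when x is negative, so a single AND or BIC
;; forms the result.  When the other arm is non-zero, a flag-setting
;; "ands"/"bics" against the "asr #32" mask is followed by one conditional
;; move.  Otherwise it falls back to CMP (or CMN) plus conditional moves.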
10623(define_insn "movcond"
10624  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10625	(if_then_else:SI
10626	 (match_operator 5 "arm_comparison_operator"
10627	  [(match_operand:SI 3 "s_register_operand" "r,r,r")
10628	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
10629	 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10630	 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
10631   (clobber (reg:CC CC_REGNUM))]
10632  "TARGET_ARM"
10633  "*
10634  if (GET_CODE (operands[5]) == LT
10635      && (operands[4] == const0_rtx))
10636    {
10637      if (which_alternative != 1 && REG_P (operands[1]))
10638	{
10639	  if (operands[2] == const0_rtx)
10640	    return \"and\\t%0, %1, %3, asr #31\";
10641	  return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
10642	}
10643      else if (which_alternative != 0 && REG_P (operands[2]))
10644	{
10645	  if (operands[1] == const0_rtx)
10646	    return \"bic\\t%0, %2, %3, asr #31\";
10647	  return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
10648	}
10649      /* The only case that falls through to here is when both ops 1 & 2
10650	 are constants.  */
10651    }
10652
10653  if (GET_CODE (operands[5]) == GE
10654      && (operands[4] == const0_rtx))
10655    {
10656      if (which_alternative != 1 && REG_P (operands[1]))
10657	{
10658	  if (operands[2] == const0_rtx)
10659	    return \"bic\\t%0, %1, %3, asr #31\";
10660	  return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
10661	}
10662      else if (which_alternative != 0 && REG_P (operands[2]))
10663	{
10664	  if (operands[1] == const0_rtx)
10665	    return \"and\\t%0, %2, %3, asr #31\";
10666	  return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10667	}
10668      /* The only case that falls through to here is when both ops 1 & 2
10669	 are constants.  */
10670    }
10671  if (CONST_INT_P (operands[4])
10672      && !const_ok_for_arm (INTVAL (operands[4])))
10673    output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10674  else
10675    output_asm_insn (\"cmp\\t%3, %4\", operands);
10676  if (which_alternative != 0)
10677    output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10678  if (which_alternative != 1)
10679    output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10680  return \"\";
10681  "
10682  [(set_attr "conds" "clob")
10683   (set_attr "length" "8,8,12")
10684   (set_attr "type" "multiple")]
10685)
10686
10687;; ??? The patterns below need checking for Thumb-2 usefulness.
10688
10689(define_insn "*ifcompare_plus_move"
10690  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10691	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
10692			  [(match_operand:SI 4 "s_register_operand" "r,r")
10693			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10694			 (plus:SI
10695			  (match_operand:SI 2 "s_register_operand" "r,r")
10696			  (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10697			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10698   (clobber (reg:CC CC_REGNUM))]
10699  "TARGET_ARM"
10700  "#"
10701  [(set_attr "conds" "clob")
10702   (set_attr "length" "8,12")
10703   (set_attr "type" "multiple")]
10704)
10705
10706(define_insn "*if_plus_move"
10707  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10708	(if_then_else:SI
10709	 (match_operator 4 "arm_comparison_operator"
10710	  [(match_operand 5 "cc_register" "") (const_int 0)])
10711	 (plus:SI
10712	  (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10713	  (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10714	 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10715  "TARGET_ARM"
10716  "@
10717   add%d4\\t%0, %2, %3
10718   sub%d4\\t%0, %2, #%n3
10719   add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10720   sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10721  [(set_attr "conds" "use")
10722   (set_attr "length" "4,4,8,8")
10723   (set_attr_alternative "type"
10724                         [(if_then_else (match_operand 3 "const_int_operand" "")
10725                                        (const_string "alu_imm" )
10726                                        (const_string "alu_sreg"))
10727                          (const_string "alu_imm")
10728                          (const_string "multiple")
10729                          (const_string "multiple")])]
10730)
10731
10732(define_insn "*ifcompare_move_plus"
10733  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10734	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
10735			  [(match_operand:SI 4 "s_register_operand" "r,r")
10736			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10737			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10738			 (plus:SI
10739			  (match_operand:SI 2 "s_register_operand" "r,r")
10740			  (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10741   (clobber (reg:CC CC_REGNUM))]
10742  "TARGET_ARM"
10743  "#"
10744  [(set_attr "conds" "clob")
10745   (set_attr "length" "8,12")
10746   (set_attr "type" "multiple")]
10747)
10748
10749(define_insn "*if_move_plus"
10750  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10751	(if_then_else:SI
10752	 (match_operator 4 "arm_comparison_operator"
10753	  [(match_operand 5 "cc_register" "") (const_int 0)])
10754	 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10755	 (plus:SI
10756	  (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10757	  (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10758  "TARGET_ARM"
10759  "@
10760   add%D4\\t%0, %2, %3
10761   sub%D4\\t%0, %2, #%n3
10762   add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10763   sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10764  [(set_attr "conds" "use")
10765   (set_attr "length" "4,4,8,8")
10766   (set_attr_alternative "type"
10767                         [(if_then_else (match_operand 3 "const_int_operand" "")
10768                                        (const_string "alu_imm" )
10769                                        (const_string "alu_sreg"))
10770                          (const_string "alu_imm")
10771                          (const_string "multiple")
10772                          (const_string "multiple")])]
10773)
10774
10775(define_insn "*ifcompare_arith_arith"
10776  [(set (match_operand:SI 0 "s_register_operand" "=r")
10777	(if_then_else:SI (match_operator 9 "arm_comparison_operator"
10778			  [(match_operand:SI 5 "s_register_operand" "r")
10779			   (match_operand:SI 6 "arm_add_operand" "rIL")])
10780			 (match_operator:SI 8 "shiftable_operator"
10781			  [(match_operand:SI 1 "s_register_operand" "r")
10782			   (match_operand:SI 2 "arm_rhs_operand" "rI")])
10783			 (match_operator:SI 7 "shiftable_operator"
10784			  [(match_operand:SI 3 "s_register_operand" "r")
10785			   (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10786   (clobber (reg:CC CC_REGNUM))]
10787  "TARGET_ARM"
10788  "#"
10789  [(set_attr "conds" "clob")
10790   (set_attr "length" "12")
10791   (set_attr "type" "multiple")]
10792)
10793
10794(define_insn "*if_arith_arith"
10795  [(set (match_operand:SI 0 "s_register_operand" "=r")
10796	(if_then_else:SI (match_operator 5 "arm_comparison_operator"
10797			  [(match_operand 8 "cc_register" "") (const_int 0)])
10798			 (match_operator:SI 6 "shiftable_operator"
10799			  [(match_operand:SI 1 "s_register_operand" "r")
10800			   (match_operand:SI 2 "arm_rhs_operand" "rI")])
10801			 (match_operator:SI 7 "shiftable_operator"
10802			  [(match_operand:SI 3 "s_register_operand" "r")
10803			   (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10804  "TARGET_ARM"
10805  "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10806  [(set_attr "conds" "use")
10807   (set_attr "length" "8")
10808   (set_attr "type" "multiple")]
10809)
10810
10811(define_insn "*ifcompare_arith_move"
10812  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10813	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
10814			  [(match_operand:SI 2 "s_register_operand" "r,r")
10815			   (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10816			 (match_operator:SI 7 "shiftable_operator"
10817			  [(match_operand:SI 4 "s_register_operand" "r,r")
10818			   (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10819			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10820   (clobber (reg:CC CC_REGNUM))]
10821  "TARGET_ARM"
10822  "*
10823  /* If we have an operation for which (op x 0) is the identity operation,
10824     the conditional operator is LT or GE, we are comparing against zero, and
10825     everything is in registers, then we can do this in two instructions.  */
10826  if (operands[3] == const0_rtx
10827      && GET_CODE (operands[7]) != AND
10828      && REG_P (operands[5])
10829      && REG_P (operands[1])
10830      && REGNO (operands[1]) == REGNO (operands[4])
10831      && REGNO (operands[4]) != REGNO (operands[0]))
10832    {
10833      if (GET_CODE (operands[6]) == LT)
10834	return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10835      else if (GET_CODE (operands[6]) == GE)
10836	return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10837    }
10838  if (CONST_INT_P (operands[3])
10839      && !const_ok_for_arm (INTVAL (operands[3])))
10840    output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10841  else
10842    output_asm_insn (\"cmp\\t%2, %3\", operands);
10843  output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10844  if (which_alternative != 0)
10845    return \"mov%D6\\t%0, %1\";
10846  return \"\";
10847  "
10848  [(set_attr "conds" "clob")
10849   (set_attr "length" "8,12")
10850   (set_attr "type" "multiple")]
10851)
10852
10853(define_insn "*if_arith_move"
10854  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10855	(if_then_else:SI (match_operator 4 "arm_comparison_operator"
10856			  [(match_operand 6 "cc_register" "") (const_int 0)])
10857			 (match_operator:SI 5 "shiftable_operator"
10858			  [(match_operand:SI 2 "s_register_operand" "r,r")
10859			   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10860			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10861  "TARGET_ARM"
10862  "@
10863   %I5%d4\\t%0, %2, %3
10864   %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10865  [(set_attr "conds" "use")
10866   (set_attr "length" "4,8")
10867   (set_attr_alternative "type"
10868                         [(if_then_else (match_operand 3 "const_int_operand" "")
10869                                        (const_string "alu_shift_imm" )
10870                                        (const_string "alu_shift_reg"))
10871                          (const_string "multiple")])]
10872)
10873
10874(define_insn "*ifcompare_move_arith"
10875  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10876	(if_then_else:SI (match_operator 6 "arm_comparison_operator"
10877			  [(match_operand:SI 4 "s_register_operand" "r,r")
10878			   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10879			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10880			 (match_operator:SI 7 "shiftable_operator"
10881			  [(match_operand:SI 2 "s_register_operand" "r,r")
10882			   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10883   (clobber (reg:CC CC_REGNUM))]
10884  "TARGET_ARM"
10885  "*
10886  /* If we have an operation for which (op x 0) is the identity operation,
10887     the conditional operator is LT or GE, we are comparing against zero, and
10888     everything is in registers, then we can do this in two instructions.  */
10889  if (operands[5] == const0_rtx
10890      && GET_CODE (operands[7]) != AND
10891      && REG_P (operands[3])
10892      && REG_P (operands[1])
10893      && REGNO (operands[1]) == REGNO (operands[2])
10894      && REGNO (operands[2]) != REGNO (operands[0]))
10895    {
10896      if (GET_CODE (operands[6]) == GE)
10897	return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10898      else if (GET_CODE (operands[6]) == LT)
10899	return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10900    }
10901
10902  if (CONST_INT_P (operands[5])
10903      && !const_ok_for_arm (INTVAL (operands[5])))
10904    output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10905  else
10906    output_asm_insn (\"cmp\\t%4, %5\", operands);
10907
10908  if (which_alternative != 0)
10909    output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10910  return \"%I7%D6\\t%0, %2, %3\";
10911  "
10912  [(set_attr "conds" "clob")
10913   (set_attr "length" "8,12")
10914   (set_attr "type" "multiple")]
10915)
10916
10917(define_insn "*if_move_arith"
10918  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10919	(if_then_else:SI
10920	 (match_operator 4 "arm_comparison_operator"
10921	  [(match_operand 6 "cc_register" "") (const_int 0)])
10922	 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10923	 (match_operator:SI 5 "shiftable_operator"
10924	  [(match_operand:SI 2 "s_register_operand" "r,r")
10925	   (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10926  "TARGET_ARM"
10927  "@
10928   %I5%D4\\t%0, %2, %3
10929   %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10930  [(set_attr "conds" "use")
10931   (set_attr "length" "4,8")
10932   (set_attr_alternative "type"
10933                         [(if_then_else (match_operand 3 "const_int_operand" "")
10934                                        (const_string "alu_shift_imm" )
10935                                        (const_string "alu_shift_reg"))
10936                          (const_string "multiple")])]
10937)
10938
10939(define_insn "*ifcompare_move_not"
10940  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10941	(if_then_else:SI
10942	 (match_operator 5 "arm_comparison_operator"
10943	  [(match_operand:SI 3 "s_register_operand" "r,r")
10944	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10945	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10946	 (not:SI
10947	  (match_operand:SI 2 "s_register_operand" "r,r"))))
10948   (clobber (reg:CC CC_REGNUM))]
10949  "TARGET_ARM"
10950  "#"
10951  [(set_attr "conds" "clob")
10952   (set_attr "length" "8,12")
10953   (set_attr "type" "multiple")]
10954)
10955
10956(define_insn "*if_move_not"
10957  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10958	(if_then_else:SI
10959	 (match_operator 4 "arm_comparison_operator"
10960	  [(match_operand 3 "cc_register" "") (const_int 0)])
10961	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10962	 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10963  "TARGET_ARM"
10964  "@
10965   mvn%D4\\t%0, %2
10966   mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10967   mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10968  [(set_attr "conds" "use")
10970   (set_attr "length" "4,8,8")
10971   (set_attr "type" "mvn_reg,multiple,multiple")]
10972)
10973
10974(define_insn "*ifcompare_not_move"
10975  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10976	(if_then_else:SI
10977	 (match_operator 5 "arm_comparison_operator"
10978	  [(match_operand:SI 3 "s_register_operand" "r,r")
10979	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10980	 (not:SI
10981	  (match_operand:SI 2 "s_register_operand" "r,r"))
10982	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10983   (clobber (reg:CC CC_REGNUM))]
10984  "TARGET_ARM"
10985  "#"
10986  [(set_attr "conds" "clob")
10987   (set_attr "length" "8,12")
10988   (set_attr "type" "multiple")]
10989)
10990
10991(define_insn "*if_not_move"
10992  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10993	(if_then_else:SI
10994	 (match_operator 4 "arm_comparison_operator"
10995	  [(match_operand 3 "cc_register" "") (const_int 0)])
10996	 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10997	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10998  "TARGET_ARM"
10999  "@
11000   mvn%d4\\t%0, %2
11001   mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
11002   mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
11003  [(set_attr "conds" "use")
11004   (set_attr "type" "mvn_reg,multiple,multiple")
11005   (set_attr "length" "4,8,8")]
11006)
11007
11008(define_insn "*ifcompare_shift_move"
11009  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11010	(if_then_else:SI
11011	 (match_operator 6 "arm_comparison_operator"
11012	  [(match_operand:SI 4 "s_register_operand" "r,r")
11013	   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11014	 (match_operator:SI 7 "shift_operator"
11015	  [(match_operand:SI 2 "s_register_operand" "r,r")
11016	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
11017	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11018   (clobber (reg:CC CC_REGNUM))]
11019  "TARGET_ARM"
11020  "#"
11021  [(set_attr "conds" "clob")
11022   (set_attr "length" "8,12")
11023   (set_attr "type" "multiple")]
11024)
11025
11026(define_insn "*if_shift_move"
11027  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11028	(if_then_else:SI
11029	 (match_operator 5 "arm_comparison_operator"
11030	  [(match_operand 6 "cc_register" "") (const_int 0)])
11031	 (match_operator:SI 4 "shift_operator"
11032	  [(match_operand:SI 2 "s_register_operand" "r,r,r")
11033	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
11034	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
11035  "TARGET_ARM"
11036  "@
11037   mov%d5\\t%0, %2%S4
11038   mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
11039   mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
11040  [(set_attr "conds" "use")
11041   (set_attr "shift" "2")
11042   (set_attr "length" "4,8,8")
11043   (set_attr_alternative "type"
11044                         [(if_then_else (match_operand 3 "const_int_operand" "")
11045                                        (const_string "mov_shift" )
11046                                        (const_string "mov_shift_reg"))
11047                          (const_string "multiple")
11048                          (const_string "multiple")])]
11049)
11050
11051(define_insn "*ifcompare_move_shift"
11052  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11053	(if_then_else:SI
11054	 (match_operator 6 "arm_comparison_operator"
11055	  [(match_operand:SI 4 "s_register_operand" "r,r")
11056	   (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
11057	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11058	 (match_operator:SI 7 "shift_operator"
11059	  [(match_operand:SI 2 "s_register_operand" "r,r")
11060	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
11061   (clobber (reg:CC CC_REGNUM))]
11062  "TARGET_ARM"
11063  "#"
11064  [(set_attr "conds" "clob")
11065   (set_attr "length" "8,12")
11066   (set_attr "type" "multiple")]
11067)
11068
11069(define_insn "*if_move_shift"
11070  [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
11071	(if_then_else:SI
11072	 (match_operator 5 "arm_comparison_operator"
11073	  [(match_operand 6 "cc_register" "") (const_int 0)])
11074	 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
11075	 (match_operator:SI 4 "shift_operator"
11076	  [(match_operand:SI 2 "s_register_operand" "r,r,r")
11077	   (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
11078  "TARGET_ARM"
11079  "@
11080   mov%D5\\t%0, %2%S4
11081   mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
11082   mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
11083  [(set_attr "conds" "use")
11084   (set_attr "shift" "2")
11085   (set_attr "length" "4,8,8")
11086   (set_attr_alternative "type"
11087                         [(if_then_else (match_operand 3 "const_int_operand" "")
11088                                        (const_string "mov_shift" )
11089                                        (const_string "mov_shift_reg"))
11090                          (const_string "multiple")
11091                          (const_string "multiple")])]
11092)
11093
11094(define_insn "*ifcompare_shift_shift"
11095  [(set (match_operand:SI 0 "s_register_operand" "=r")
11096	(if_then_else:SI
11097	 (match_operator 7 "arm_comparison_operator"
11098	  [(match_operand:SI 5 "s_register_operand" "r")
11099	   (match_operand:SI 6 "arm_add_operand" "rIL")])
11100	 (match_operator:SI 8 "shift_operator"
11101	  [(match_operand:SI 1 "s_register_operand" "r")
11102	   (match_operand:SI 2 "arm_rhs_operand" "rM")])
11103	 (match_operator:SI 9 "shift_operator"
11104	  [(match_operand:SI 3 "s_register_operand" "r")
11105	   (match_operand:SI 4 "arm_rhs_operand" "rM")])))
11106   (clobber (reg:CC CC_REGNUM))]
11107  "TARGET_ARM"
11108  "#"
11109  [(set_attr "conds" "clob")
11110   (set_attr "length" "12")
11111   (set_attr "type" "multiple")]
11112)
11113
11114(define_insn "*if_shift_shift"
11115  [(set (match_operand:SI 0 "s_register_operand" "=r")
11116	(if_then_else:SI
11117	 (match_operator 5 "arm_comparison_operator"
11118	  [(match_operand 8 "cc_register" "") (const_int 0)])
11119	 (match_operator:SI 6 "shift_operator"
11120	  [(match_operand:SI 1 "s_register_operand" "r")
11121	   (match_operand:SI 2 "arm_rhs_operand" "rM")])
11122	 (match_operator:SI 7 "shift_operator"
11123	  [(match_operand:SI 3 "s_register_operand" "r")
11124	   (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
11125  "TARGET_ARM"
11126  "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
11127  [(set_attr "conds" "use")
11128   (set_attr "shift" "1")
11129   (set_attr "length" "8")
11130   (set (attr "type") (if_then_else
11131		        (and (match_operand 2 "const_int_operand" "")
11132                             (match_operand 4 "const_int_operand" ""))
11133		      (const_string "mov_shift")
11134		      (const_string "mov_shift_reg")))]
11135)
11136
11137(define_insn "*ifcompare_not_arith"
11138  [(set (match_operand:SI 0 "s_register_operand" "=r")
11139	(if_then_else:SI
11140	 (match_operator 6 "arm_comparison_operator"
11141	  [(match_operand:SI 4 "s_register_operand" "r")
11142	   (match_operand:SI 5 "arm_add_operand" "rIL")])
11143	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11144	 (match_operator:SI 7 "shiftable_operator"
11145	  [(match_operand:SI 2 "s_register_operand" "r")
11146	   (match_operand:SI 3 "arm_rhs_operand" "rI")])))
11147   (clobber (reg:CC CC_REGNUM))]
11148  "TARGET_ARM"
11149  "#"
11150  [(set_attr "conds" "clob")
11151   (set_attr "length" "12")
11152   (set_attr "type" "multiple")]
11153)
11154
11155(define_insn "*if_not_arith"
11156  [(set (match_operand:SI 0 "s_register_operand" "=r")
11157	(if_then_else:SI
11158	 (match_operator 5 "arm_comparison_operator"
11159	  [(match_operand 4 "cc_register" "") (const_int 0)])
11160	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
11161	 (match_operator:SI 6 "shiftable_operator"
11162	  [(match_operand:SI 2 "s_register_operand" "r")
11163	   (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
11164  "TARGET_ARM"
11165  "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
11166  [(set_attr "conds" "use")
11167   (set_attr "type" "mvn_reg")
11168   (set_attr "length" "8")]
11169)
11170
11171(define_insn "*ifcompare_arith_not"
11172  [(set (match_operand:SI 0 "s_register_operand" "=r")
11173	(if_then_else:SI
11174	 (match_operator 6 "arm_comparison_operator"
11175	  [(match_operand:SI 4 "s_register_operand" "r")
11176	   (match_operand:SI 5 "arm_add_operand" "rIL")])
11177	 (match_operator:SI 7 "shiftable_operator"
11178	  [(match_operand:SI 2 "s_register_operand" "r")
11179	   (match_operand:SI 3 "arm_rhs_operand" "rI")])
11180	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
11181   (clobber (reg:CC CC_REGNUM))]
11182  "TARGET_ARM"
11183  "#"
11184  [(set_attr "conds" "clob")
11185   (set_attr "length" "12")
11186   (set_attr "type" "multiple")]
11187)
11188
11189(define_insn "*if_arith_not"
11190  [(set (match_operand:SI 0 "s_register_operand" "=r")
11191	(if_then_else:SI
11192	 (match_operator 5 "arm_comparison_operator"
11193	  [(match_operand 4 "cc_register" "") (const_int 0)])
11194	 (match_operator:SI 6 "shiftable_operator"
11195	  [(match_operand:SI 2 "s_register_operand" "r")
11196	   (match_operand:SI 3 "arm_rhs_operand" "rI")])
11197	 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
11198  "TARGET_ARM"
11199  "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
11200  [(set_attr "conds" "use")
11201   (set_attr "type" "multiple")
11202   (set_attr "length" "8")]
11203)
11204
11205(define_insn "*ifcompare_neg_move"
11206  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11207	(if_then_else:SI
11208	 (match_operator 5 "arm_comparison_operator"
11209	  [(match_operand:SI 3 "s_register_operand" "r,r")
11210	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11211	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
11212	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
11213   (clobber (reg:CC CC_REGNUM))]
11214  "TARGET_ARM"
11215  "#"
11216  [(set_attr "conds" "clob")
11217   (set_attr "length" "8,12")
11218   (set_attr "type" "multiple")]
11219)
11220
11221(define_insn_and_split "*if_neg_move"
11222  [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11223	(if_then_else:SI
11224	 (match_operator 4 "arm_comparison_operator"
11225	  [(match_operand 3 "cc_register" "") (const_int 0)])
11226	 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
11227	 (match_operand:SI 1 "s_register_operand" "0,0")))]
11228  "TARGET_32BIT"
11229  "#"
11230  "&& reload_completed"
11231  [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
11232	      (set (match_dup 0) (neg:SI (match_dup 2))))]
11233  ""
11234  [(set_attr "conds" "use")
11235   (set_attr "length" "4")
11236   (set_attr "arch" "t2,32")
11237   (set_attr "enabled_for_short_it" "yes,no")
11238   (set_attr "type" "logic_shift_imm")]
11239)
11240
11241(define_insn "*ifcompare_move_neg"
11242  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11243	(if_then_else:SI
11244	 (match_operator 5 "arm_comparison_operator"
11245	  [(match_operand:SI 3 "s_register_operand" "r,r")
11246	   (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
11247	 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
11248	 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
11249   (clobber (reg:CC CC_REGNUM))]
11250  "TARGET_ARM"
11251  "#"
11252  [(set_attr "conds" "clob")
11253   (set_attr "length" "8,12")
11254   (set_attr "type" "multiple")]
11255)
11256
11257(define_insn_and_split "*if_move_neg"
11258  [(set (match_operand:SI 0 "s_register_operand" "=l,r")
11259	(if_then_else:SI
11260	 (match_operator 4 "arm_comparison_operator"
11261	  [(match_operand 3 "cc_register" "") (const_int 0)])
11262	 (match_operand:SI 1 "s_register_operand" "0,0")
11263	 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
11264  "TARGET_32BIT"
11265  "#"
11266  "&& reload_completed"
11267  [(cond_exec (match_dup 5)
11268	      (set (match_dup 0) (neg:SI (match_dup 2))))]
11269  {
11270    machine_mode mode = GET_MODE (operands[3]);
11271    rtx_code rc = GET_CODE (operands[4]);
11272
11273    if (mode == CCFPmode || mode == CCFPEmode)
11274      rc = reverse_condition_maybe_unordered (rc);
11275    else
11276      rc = reverse_condition (rc);
11277
11278    operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
11279  }
11280  [(set_attr "conds" "use")
11281   (set_attr "length" "4")
11282   (set_attr "arch" "t2,32")
11283   (set_attr "enabled_for_short_it" "yes,no")
11284   (set_attr "type" "logic_shift_imm")]
11285)
11286
11287(define_insn "*arith_adjacentmem"
11288  [(set (match_operand:SI 0 "s_register_operand" "=r")
11289	(match_operator:SI 1 "shiftable_operator"
11290	 [(match_operand:SI 2 "memory_operand" "m")
11291	  (match_operand:SI 3 "memory_operand" "m")]))
11292   (clobber (match_scratch:SI 4 "=r"))]
11293  "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
11294  "*
11295  {
11296    rtx ldm[3];
11297    rtx arith[4];
11298    rtx base_reg;
11299    HOST_WIDE_INT val1 = 0, val2 = 0;
11300
11301    if (REGNO (operands[0]) > REGNO (operands[4]))
11302      {
11303	ldm[1] = operands[4];
11304	ldm[2] = operands[0];
11305      }
11306    else
11307      {
11308	ldm[1] = operands[0];
11309	ldm[2] = operands[4];
11310      }
11311
11312    base_reg = XEXP (operands[2], 0);
11313
11314    if (!REG_P (base_reg))
11315      {
11316	val1 = INTVAL (XEXP (base_reg, 1));
11317	base_reg = XEXP (base_reg, 0);
11318      }
11319
11320    if (!REG_P (XEXP (operands[3], 0)))
11321      val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
11322
11323    arith[0] = operands[0];
11324    arith[3] = operands[1];
11325
11326    if (val1 < val2)
11327      {
11328	arith[1] = ldm[1];
11329	arith[2] = ldm[2];
11330      }
11331    else
11332      {
11333	arith[1] = ldm[2];
11334	arith[2] = ldm[1];
11335      }
11336
11337    ldm[0] = base_reg;
11338    if (val1 != 0 && val2 != 0)
11339      {
11340	rtx ops[3];
11341
11342	if (val1 == 4 || val2 == 4)
11343	  /* Other val must be 8, since we know they are adjacent and neither
11344	     is zero.  */
11345	  output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
11346	else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
11347	  {
11348	    ldm[0] = ops[0] = operands[4];
11349	    ops[1] = base_reg;
11350	    ops[2] = GEN_INT (val1);
11351	    output_add_immediate (ops);
11352	    if (val1 < val2)
11353	      output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11354	    else
11355	      output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11356	  }
11357	else
11358	  {
11359	    /* Offset is out of range for a single add, so use two ldr.  */
11360	    ops[0] = ldm[1];
11361	    ops[1] = base_reg;
11362	    ops[2] = GEN_INT (val1);
11363	    output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11364	    ops[0] = ldm[2];
11365	    ops[2] = GEN_INT (val2);
11366	    output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
11367	  }
11368      }
11369    else if (val1 != 0)
11370      {
11371	if (val1 < val2)
11372	  output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11373	else
11374	  output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11375      }
11376    else
11377      {
11378	if (val1 < val2)
11379	  output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
11380	else
11381	  output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
11382      }
11383    output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
11384    return \"\";
11385  }"
11386  [(set_attr "length" "12")
11387   (set_attr "predicable" "yes")
11388   (set_attr "type" "load_4")]
11389)
11390
11391; This pattern is never tried by combine, so do it as a peephole
11392
11393(define_peephole2
11394  [(set (match_operand:SI 0 "arm_general_register_operand" "")
11395	(match_operand:SI 1 "arm_general_register_operand" ""))
11396   (set (reg:CC CC_REGNUM)
11397	(compare:CC (match_dup 1) (const_int 0)))]
11398  "TARGET_ARM"
11399  [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
11400	      (set (match_dup 0) (match_dup 1))])]
11401  ""
11402)
11403
11404(define_split
11405  [(set (match_operand:SI 0 "s_register_operand" "")
11406	(and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
11407		       (const_int 0))
11408		(neg:SI (match_operator:SI 2 "arm_comparison_operator"
11409			 [(match_operand:SI 3 "s_register_operand" "")
11410			  (match_operand:SI 4 "arm_rhs_operand" "")]))))
11411   (clobber (match_operand:SI 5 "s_register_operand" ""))]
11412  "TARGET_ARM"
11413  [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
11414   (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
11415			      (match_dup 5)))]
11416  ""
11417)
11418
11419;; This split can be used because CC_Z mode implies that the following
11420;; branch will be an equality, or an unsigned inequality, so the sign
11421;; extension is not needed.
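;; For example (illustrative), comparing a byte that has been loaded and
;; shifted left by 24 against 0x2a000000 can instead compare the
;; zero-extended byte against 0x2a, which is what this split produces.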
11422
11423(define_split
11424  [(set (reg:CC_Z CC_REGNUM)
11425	(compare:CC_Z
11426	 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
11427		    (const_int 24))
11428	 (match_operand 1 "const_int_operand" "")))
11429   (clobber (match_scratch:SI 2 ""))]
11430  "TARGET_ARM
11431   && ((UINTVAL (operands[1]))
11432       == ((UINTVAL (operands[1])) >> 24) << 24)"
11433  [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
11434   (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
11435  "
11436  operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
11437  "
11438)
11439;; ??? Check the patterns above for Thumb-2 usefulness
11440
11441(define_expand "prologue"
11442  [(clobber (const_int 0))]
11443  "TARGET_EITHER"
11444  "if (TARGET_32BIT)
11445     arm_expand_prologue ();
11446   else
11447     thumb1_expand_prologue ();
11448  DONE;
11449  "
11450)
11451
11452(define_expand "epilogue"
11453  [(clobber (const_int 0))]
11454  "TARGET_EITHER"
11455  "
11456  if (crtl->calls_eh_return)
11457    emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
11458  if (TARGET_THUMB1)
11459   {
11460     thumb1_expand_epilogue ();
11461     emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
11462                     gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
11463   }
11464  else if (HAVE_return)
11465   {
11466     /* HAVE_return is testing for USE_RETURN_INSN (FALSE).  Hence,
11467        no need for explicit testing again.  */
11468     emit_jump_insn (gen_return ());
11469   }
11470  else if (TARGET_32BIT)
11471   {
11472    arm_expand_epilogue (true);
11473   }
11474  DONE;
11475  "
11476)
11477
11478;; Note - although unspec_volatiles USE all hard registers,
11479;; USEs are ignored after reload has completed.  Thus we need
11480;; to add an unspec of the link register to ensure that flow
11481;; does not think that it is unused by the sibcall branch that
11482;; will replace the standard function epilogue.
11483(define_expand "sibcall_epilogue"
11484   [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
11485               (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
11486   "TARGET_32BIT"
11487   "
11488   arm_expand_epilogue (false);
11489   DONE;
11490   "
11491)
11492
11493(define_expand "eh_epilogue"
11494  [(use (match_operand:SI 0 "register_operand"))
11495   (use (match_operand:SI 1 "register_operand"))
11496   (use (match_operand:SI 2 "register_operand"))]
11497  "TARGET_EITHER"
11498  "
11499  {
11500    cfun->machine->eh_epilogue_sp_ofs = operands[1];
11501    if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
11502      {
11503	rtx ra = gen_rtx_REG (Pmode, 2);
11504
11505	emit_move_insn (ra, operands[2]);
11506	operands[2] = ra;
11507      }
11508    /* This is a hack -- we may have crystallized the function type too
11509       early.  */
11510    cfun->machine->func_type = 0;
11511  }"
11512)
11513
11514;; This split is only used during output to reduce the number of patterns
11515;; that need assembler instructions added to them.  We allowed the setting
11516;; of the conditions to be implicit during rtl generation so that
11517;; the conditional compare patterns would work.  However, this conflicts to
11518;; some extent with the conditional data operations, so we have to split them
11519;; up again here.
11520
11521;; ??? Need to audit these splitters for Thumb-2.  Why isn't normal
11522;; conditional execution sufficient?
11523
11524(define_split
11525  [(set (match_operand:SI 0 "s_register_operand" "")
11526	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
11527			  [(match_operand 2 "" "") (match_operand 3 "" "")])
11528			 (match_dup 0)
11529			 (match_operand 4 "" "")))
11530   (clobber (reg:CC CC_REGNUM))]
11531  "TARGET_ARM && reload_completed"
11532  [(set (match_dup 5) (match_dup 6))
11533   (cond_exec (match_dup 7)
11534	      (set (match_dup 0) (match_dup 4)))]
11535  "
11536  {
11537    machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11538					     operands[2], operands[3]);
11539    enum rtx_code rc = GET_CODE (operands[1]);
11540
11541    operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11542    operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11543    if (mode == CCFPmode || mode == CCFPEmode)
11544      rc = reverse_condition_maybe_unordered (rc);
11545    else
11546      rc = reverse_condition (rc);
11547
11548    operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
11549  }"
11550)
11551
11552(define_split
11553  [(set (match_operand:SI 0 "s_register_operand" "")
11554	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
11555			  [(match_operand 2 "" "") (match_operand 3 "" "")])
11556			 (match_operand 4 "" "")
11557			 (match_dup 0)))
11558   (clobber (reg:CC CC_REGNUM))]
11559  "TARGET_ARM && reload_completed"
11560  [(set (match_dup 5) (match_dup 6))
11561   (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
11562	      (set (match_dup 0) (match_dup 4)))]
11563  "
11564  {
11565    machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11566					     operands[2], operands[3]);
11567
11568    operands[5] = gen_rtx_REG (mode, CC_REGNUM);
11569    operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11570  }"
11571)
11572
11573(define_split
11574  [(set (match_operand:SI 0 "s_register_operand" "")
11575	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
11576			  [(match_operand 2 "" "") (match_operand 3 "" "")])
11577			 (match_operand 4 "" "")
11578			 (match_operand 5 "" "")))
11579   (clobber (reg:CC CC_REGNUM))]
11580  "TARGET_ARM && reload_completed"
11581  [(set (match_dup 6) (match_dup 7))
11582   (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11583	      (set (match_dup 0) (match_dup 4)))
11584   (cond_exec (match_dup 8)
11585	      (set (match_dup 0) (match_dup 5)))]
11586  "
11587  {
11588    machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11589					     operands[2], operands[3]);
11590    enum rtx_code rc = GET_CODE (operands[1]);
11591
11592    operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11593    operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11594    if (mode == CCFPmode || mode == CCFPEmode)
11595      rc = reverse_condition_maybe_unordered (rc);
11596    else
11597      rc = reverse_condition (rc);
11598
11599    operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11600  }"
11601)
11602
11603(define_split
11604  [(set (match_operand:SI 0 "s_register_operand" "")
11605	(if_then_else:SI (match_operator 1 "arm_comparison_operator"
11606			  [(match_operand:SI 2 "s_register_operand" "")
11607			   (match_operand:SI 3 "arm_add_operand" "")])
11608			 (match_operand:SI 4 "arm_rhs_operand" "")
11609			 (not:SI
11610			  (match_operand:SI 5 "s_register_operand" ""))))
11611   (clobber (reg:CC CC_REGNUM))]
11612  "TARGET_ARM && reload_completed"
11613  [(set (match_dup 6) (match_dup 7))
11614   (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
11615	      (set (match_dup 0) (match_dup 4)))
11616   (cond_exec (match_dup 8)
11617	      (set (match_dup 0) (not:SI (match_dup 5))))]
11618  "
11619  {
11620    machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
11621					     operands[2], operands[3]);
11622    enum rtx_code rc = GET_CODE (operands[1]);
11623
11624    operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11625    operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11626    if (mode == CCFPmode || mode == CCFPEmode)
11627      rc = reverse_condition_maybe_unordered (rc);
11628    else
11629      rc = reverse_condition (rc);
11630
11631    operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
11632  }"
11633)
11634
11635(define_insn "*cond_move_not"
11636  [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11637	(if_then_else:SI (match_operator 4 "arm_comparison_operator"
11638			  [(match_operand 3 "cc_register" "") (const_int 0)])
11639			 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11640			 (not:SI
11641			  (match_operand:SI 2 "s_register_operand" "r,r"))))]
11642  "TARGET_ARM"
11643  "@
11644   mvn%D4\\t%0, %2
11645   mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11646  [(set_attr "conds" "use")
11647   (set_attr "type" "mvn_reg,multiple")
11648   (set_attr "length" "4,8")]
11649)
11650
11651;; The next two patterns occur when an AND operation is followed by a
11652;; scc insn sequence.
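;; e.g. (illustrative) testing bit 5 of r1 into a 0/-1 result, the first
;; pattern below emits:
;;   ands    r0, r1, #32
;;   mvnne   r0, #0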
11653
11654(define_insn "*sign_extract_onebit"
11655  [(set (match_operand:SI 0 "s_register_operand" "=r")
11656	(sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11657			 (const_int 1)
11658			 (match_operand:SI 2 "const_int_operand" "n")))
11659    (clobber (reg:CC CC_REGNUM))]
11660  "TARGET_ARM"
11661  "*
11662    operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11663    output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11664    return \"mvnne\\t%0, #0\";
11665  "
11666  [(set_attr "conds" "clob")
11667   (set_attr "length" "8")
11668   (set_attr "type" "multiple")]
11669)
11670
11671(define_insn "*not_signextract_onebit"
11672  [(set (match_operand:SI 0 "s_register_operand" "=r")
11673	(not:SI
11674	 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11675			  (const_int 1)
11676			  (match_operand:SI 2 "const_int_operand" "n"))))
11677   (clobber (reg:CC CC_REGNUM))]
11678  "TARGET_ARM"
11679  "*
11680    operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11681    output_asm_insn (\"tst\\t%1, %2\", operands);
11682    output_asm_insn (\"mvneq\\t%0, #0\", operands);
11683    return \"movne\\t%0, #0\";
11684  "
11685  [(set_attr "conds" "clob")
11686   (set_attr "length" "12")
11687   (set_attr "type" "multiple")]
11688)
11689;; ??? The above patterns need auditing for Thumb-2
11690
11691;; Push multiple registers to the stack.  Registers are in parallel (use ...)
11692;; expressions.  For simplicity, the first register is also in the unspec
11693;; part.
11694;; To avoid the use of a GNU extension, the length attribute is computed
11695;; by the C function arm_attr_length_push_multi.
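;; For example (illustrative), saving three registers gives
;;   push    {r4, r5, lr}
;; whereas a single register in ARM state is stored with
;;   str     r4, [sp, #-4]!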
11696(define_insn "*push_multi"
11697  [(match_parallel 2 "multi_register_push"
11698    [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11699	  (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11700		      UNSPEC_PUSH_MULT))])]
11701  ""
11702  "*
11703  {
11704    int num_saves = XVECLEN (operands[2], 0);
11705
11706    /* For the StrongARM at least it is faster to
11707       use STR to store only a single register.
11708       In Thumb mode always use push, and the assembler will pick
11709       something appropriate.  */
11710    if (num_saves == 1 && TARGET_ARM)
11711      output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11712    else
11713      {
11714	int i;
11715	char pattern[100];
11716
11717	if (TARGET_32BIT)
11718	    strcpy (pattern, \"push%?\\t{%1\");
11719	else
11720	    strcpy (pattern, \"push\\t{%1\");
11721
11722	for (i = 1; i < num_saves; i++)
11723	  {
11724	    strcat (pattern, \", %|\");
11725	    strcat (pattern,
11726		    reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11727	  }
11728
11729	strcat (pattern, \"}\");
11730	output_asm_insn (pattern, operands);
11731      }
11732
11733    return \"\";
11734  }"
11735  [(set_attr "type" "store_16")
11736   (set (attr "length")
11737	(symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
11738)
11739
11740(define_insn "stack_tie"
11741  [(set (mem:BLK (scratch))
11742	(unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11743		     (match_operand:SI 1 "s_register_operand" "rk")]
11744		    UNSPEC_PRLG_STK))]
11745  ""
11746  ""
11747  [(set_attr "length" "0")
11748   (set_attr "type" "block")]
11749)
11750
11751;; Pop (as used in epilogue RTL)
11752;;
11753(define_insn "*load_multiple_with_writeback"
11754  [(match_parallel 0 "load_multiple_operation"
11755    [(set (match_operand:SI 1 "s_register_operand" "+rk")
11756          (plus:SI (match_dup 1)
11757                   (match_operand:SI 2 "const_int_I_operand" "I")))
11758     (set (match_operand:SI 3 "s_register_operand" "=rk")
11759          (mem:SI (match_dup 1)))
11760        ])]
11761  "TARGET_32BIT && (reload_in_progress || reload_completed)"
11762  "*
11763  {
11764    arm_output_multireg_pop (operands, /*return_pc=*/false,
11765                                       /*cond=*/const_true_rtx,
11766                                       /*reverse=*/false,
11767                                       /*update=*/true);
11768    return \"\";
11769  }
11770  "
11771  [(set_attr "type" "load_16")
11772   (set_attr "predicable" "yes")
11773   (set (attr "length")
11774	(symbol_ref "arm_attr_length_pop_multi (operands,
11775						/*return_pc=*/false,
11776						/*write_back_p=*/true)"))]
11777)
11778
11779;; Pop with return (as used in epilogue RTL)
11780;;
11781;; This instruction is generated when the registers are popped at the end of
11782;; the epilogue.  Here, instead of popping the value into LR and then
11783;; generating a jump to LR, the value is popped directly into PC.  Hence, the
11784;; pattern is combined with (return).
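;; e.g. (illustrative) a typical epilogue then ends in a single
;;   pop     {r4, r5, pc}
;; (or the equivalent ldmia sp!, {r4, r5, pc}).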
11785(define_insn "*pop_multiple_with_writeback_and_return"
11786  [(match_parallel 0 "pop_multiple_return"
11787    [(return)
11788     (set (match_operand:SI 1 "s_register_operand" "+rk")
11789          (plus:SI (match_dup 1)
11790                   (match_operand:SI 2 "const_int_I_operand" "I")))
11791     (set (match_operand:SI 3 "s_register_operand" "=rk")
11792          (mem:SI (match_dup 1)))
11793        ])]
11794  "TARGET_32BIT && (reload_in_progress || reload_completed)"
11795  "*
11796  {
11797    arm_output_multireg_pop (operands, /*return_pc=*/true,
11798                                       /*cond=*/const_true_rtx,
11799                                       /*reverse=*/false,
11800                                       /*update=*/true);
11801    return \"\";
11802  }
11803  "
11804  [(set_attr "type" "load_16")
11805   (set_attr "predicable" "yes")
11806   (set (attr "length")
11807	(symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11808						/*write_back_p=*/true)"))]
11809)
11810
11811(define_insn "*pop_multiple_with_return"
11812  [(match_parallel 0 "pop_multiple_return"
11813    [(return)
11814     (set (match_operand:SI 2 "s_register_operand" "=rk")
11815          (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11816        ])]
11817  "TARGET_32BIT && (reload_in_progress || reload_completed)"
11818  "*
11819  {
11820    arm_output_multireg_pop (operands, /*return_pc=*/true,
11821                                       /*cond=*/const_true_rtx,
11822                                       /*reverse=*/false,
11823                                       /*update=*/false);
11824    return \"\";
11825  }
11826  "
11827  [(set_attr "type" "load_16")
11828   (set_attr "predicable" "yes")
11829   (set (attr "length")
11830	(symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11831						/*write_back_p=*/false)"))]
11832)
11833
11834;; Load into PC and return
11835(define_insn "*ldr_with_return"
11836  [(return)
11837   (set (reg:SI PC_REGNUM)
11838        (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11839  "TARGET_32BIT && (reload_in_progress || reload_completed)"
11840  "ldr%?\t%|pc, [%0], #4"
11841  [(set_attr "type" "load_4")
11842   (set_attr "predicable" "yes")]
11843)
11844;; Pop for floating point registers (as used in epilogue RTL)
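;; e.g. (illustrative) restoring the callee-saved VFP registers with
;;   vldm    sp!, {d8-d15}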
11845(define_insn "*vfp_pop_multiple_with_writeback"
11846  [(match_parallel 0 "pop_multiple_fp"
11847    [(set (match_operand:SI 1 "s_register_operand" "+rk")
11848          (plus:SI (match_dup 1)
11849                   (match_operand:SI 2 "const_int_I_operand" "I")))
11850     (set (match_operand:DF 3 "vfp_hard_register_operand" "")
11851          (mem:DF (match_dup 1)))])]
11852  "TARGET_32BIT && TARGET_VFP_BASE"
11853  "*
11854  {
11855    int num_regs = XVECLEN (operands[0], 0);
11856    char pattern[100];
11857    rtx op_list[2];
11858    strcpy (pattern, \"vldm\\t\");
11859    strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11860    strcat (pattern, \"!, {\");
11861    op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11862    strcat (pattern, \"%P0\");
11863    if ((num_regs - 1) > 1)
11864      {
11865        strcat (pattern, \"-%P1\");
11866        op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11867      }
11868
11869    strcat (pattern, \"}\");
11870    output_asm_insn (pattern, op_list);
11871    return \"\";
11872  }
11873  "
11874  [(set_attr "type" "load_16")
11875   (set_attr "conds" "unconditional")
11876   (set_attr "predicable" "no")]
11877)
11878
11879;; Special patterns for dealing with the constant pool
11880
11881(define_insn "align_4"
11882  [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11883  "TARGET_EITHER"
11884  "*
11885  assemble_align (32);
11886  return \"\";
11887  "
11888  [(set_attr "type" "no_insn")]
11889)
11890
11891(define_insn "align_8"
11892  [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11893  "TARGET_EITHER"
11894  "*
11895  assemble_align (64);
11896  return \"\";
11897  "
11898  [(set_attr "type" "no_insn")]
11899)
11900
11901(define_insn "consttable_end"
11902  [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11903  "TARGET_EITHER"
11904  "*
11905  making_const_table = FALSE;
11906  return \"\";
11907  "
11908  [(set_attr "type" "no_insn")]
11909)
11910
11911(define_insn "consttable_1"
11912  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11913  "TARGET_EITHER"
11914  "*
11915  making_const_table = TRUE;
11916  assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11917  assemble_zeros (3);
11918  return \"\";
11919  "
11920  [(set_attr "length" "4")
11921   (set_attr "type" "no_insn")]
11922)
11923
11924(define_insn "consttable_2"
11925  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11926  "TARGET_EITHER"
11927  "*
11928  {
11929    rtx x = operands[0];
11930    making_const_table = TRUE;
11931    switch (GET_MODE_CLASS (GET_MODE (x)))
11932      {
11933      case MODE_FLOAT:
11934	arm_emit_fp16_const (x);
11935	break;
11936      default:
11937	assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11938	assemble_zeros (2);
11939	break;
11940      }
11941    return \"\";
11942  }"
11943  [(set_attr "length" "4")
11944   (set_attr "type" "no_insn")]
11945)
11946
11947(define_insn "consttable_4"
11948  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11949  "TARGET_EITHER"
11950  "*
11951  {
11952    rtx x = operands[0];
11953    making_const_table = TRUE;
11954    scalar_float_mode float_mode;
11955    if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
11956      assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
11957    else
11958      {
11959	/* XXX: Sometimes gcc does something really dumb and ends up with
11960	   a HIGH in a constant pool entry, usually because it's trying to
11961	   load into a VFP register.  We know this will always be used in
11962	   combination with a LO_SUM which ignores the high bits, so just
11963	   strip off the HIGH.  */
11964	if (GET_CODE (x) == HIGH)
11965	  x = XEXP (x, 0);
11966        assemble_integer (x, 4, BITS_PER_WORD, 1);
11967	mark_symbol_refs_as_used (x);
11968      }
11969    return \"\";
11970  }"
11971  [(set_attr "length" "4")
11972   (set_attr "type" "no_insn")]
11973)
11974
11975(define_insn "consttable_8"
11976  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11977  "TARGET_EITHER"
11978  "*
11979  {
11980    making_const_table = TRUE;
11981    scalar_float_mode float_mode;
11982    if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11983      assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11984		     float_mode, BITS_PER_WORD);
11985    else
11986      assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11987    return \"\";
11988  }"
11989  [(set_attr "length" "8")
11990   (set_attr "type" "no_insn")]
11991)
11992
11993(define_insn "consttable_16"
11994  [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11995  "TARGET_EITHER"
11996  "*
11997  {
11998    making_const_table = TRUE;
11999    scalar_float_mode float_mode;
12000    if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
12001      assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
12002		     float_mode, BITS_PER_WORD);
12003    else
12004      assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
12005    return \"\";
12006  }"
12007  [(set_attr "length" "16")
12008   (set_attr "type" "no_insn")]
12009)
12010
12011;; V5 instructions.
12012
12013(define_insn "clzsi2"
12014  [(set (match_operand:SI 0 "s_register_operand" "=r")
12015	(clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
12016  "TARGET_32BIT && arm_arch5t"
12017  "clz%?\\t%0, %1"
12018  [(set_attr "predicable" "yes")
12019   (set_attr "type" "clz")])
12020
12021(define_insn "rbitsi2"
12022  [(set (match_operand:SI 0 "s_register_operand" "=r")
12023	(unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
12024  "TARGET_32BIT && arm_arch_thumb2"
12025  "rbit%?\\t%0, %1"
12026  [(set_attr "predicable" "yes")
12027   (set_attr "type" "clz")])
12028
12029;; Keep this as a CTZ expression until after reload and then split
12030;; into RBIT + CLZ.  Since RBIT is represented as an UNSPEC it is unlikely
12031;; to fold with any other expression.
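;; After the split the whole operation is simply (illustrative):
;;   rbit    r0, r1
;;   clz     r0, r0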
12032
12033(define_insn_and_split "ctzsi2"
12034 [(set (match_operand:SI           0 "s_register_operand" "=r")
12035       (ctz:SI (match_operand:SI  1 "s_register_operand" "r")))]
12036  "TARGET_32BIT && arm_arch_thumb2"
12037  "#"
12038  "&& reload_completed"
12039  [(const_int 0)]
12040  "
12041  emit_insn (gen_rbitsi2 (operands[0], operands[1]));
12042  emit_insn (gen_clzsi2 (operands[0], operands[0]));
12043  DONE;
12044")
12045
12046;; V5E instructions.
12047
12048(define_insn "prefetch"
12049  [(prefetch (match_operand:SI 0 "address_operand" "p")
12050	     (match_operand:SI 1 "" "")
12051	     (match_operand:SI 2 "" ""))]
12052  "TARGET_32BIT && arm_arch5te"
12053  "pld\\t%a0"
12054  [(set_attr "type" "load_4")]
12055)
12056
12057;; General predication pattern
12058
12059(define_cond_exec
12060  [(match_operator 0 "arm_comparison_operator"
12061    [(match_operand 1 "cc_register" "")
12062     (const_int 0)])]
12063  "TARGET_32BIT
12064   && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
12065  ""
12066[(set_attr "predicated" "yes")]
12067)
12068
12069(define_insn "force_register_use"
12070  [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
12071  ""
12072  "%@ %0 needed"
12073  [(set_attr "length" "0")
12074   (set_attr "type" "no_insn")]
12075)
12076
12077
12078;; Patterns for exception handling
12079
12080(define_expand "eh_return"
12081  [(use (match_operand 0 "general_operand"))]
12082  "TARGET_EITHER"
12083  "
12084  {
12085    if (TARGET_32BIT)
12086      emit_insn (gen_arm_eh_return (operands[0]));
12087    else
12088      emit_insn (gen_thumb_eh_return (operands[0]));
12089    DONE;
12090  }"
12091)
12092
12093;; We can't expand this before we know where the link register is stored.
12094(define_insn_and_split "arm_eh_return"
12095  [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
12096		    VUNSPEC_EH_RETURN)
12097   (clobber (match_scratch:SI 1 "=&r"))]
12098  "TARGET_ARM"
12099  "#"
12100  "&& reload_completed"
12101  [(const_int 0)]
12102  "
12103  {
12104    arm_set_return_address (operands[0], operands[1]);
12105    DONE;
12106  }"
12107)
12108
12109
12110;; TLS support
12111
12112(define_insn "load_tp_hard"
12113  [(set (match_operand:SI 0 "register_operand" "=r")
12114	(unspec:SI [(const_int 0)] UNSPEC_TLS))]
12115  "TARGET_HARD_TP"
12116  "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
12117  [(set_attr "predicable" "yes")
12118   (set_attr "type" "mrs")]
12119)
12120
12121;; Doesn't clobber R1-R3.  Must use r0 for the first operand.
12122(define_insn "load_tp_soft_fdpic"
12123  [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12124   (clobber (reg:SI FDPIC_REGNUM))
12125   (clobber (reg:SI LR_REGNUM))
12126   (clobber (reg:SI IP_REGNUM))
12127   (clobber (reg:CC CC_REGNUM))]
12128  "TARGET_SOFT_TP && TARGET_FDPIC"
12129  "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12130  [(set_attr "conds" "clob")
12131   (set_attr "type" "branch")]
12132)
12133
12134;; Doesn't clobber R1-R3.  Must use r0 for the first operand.
12135(define_insn "load_tp_soft"
12136  [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
12137   (clobber (reg:SI LR_REGNUM))
12138   (clobber (reg:SI IP_REGNUM))
12139   (clobber (reg:CC CC_REGNUM))]
12140  "TARGET_SOFT_TP && !TARGET_FDPIC"
12141  "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
12142  [(set_attr "conds" "clob")
12143   (set_attr "type" "branch")]
12144)
12145
12146;; tls descriptor call
12147(define_insn "tlscall"
12148  [(set (reg:SI R0_REGNUM)
12149        (unspec:SI [(reg:SI R0_REGNUM)
12150                    (match_operand:SI 0 "" "X")
12151	            (match_operand 1 "" "")] UNSPEC_TLS))
12152   (clobber (reg:SI R1_REGNUM))
12153   (clobber (reg:SI LR_REGNUM))
12154   (clobber (reg:SI CC_REGNUM))]
12155  "TARGET_GNU2_TLS"
12156  {
12157    targetm.asm_out.internal_label (asm_out_file, "LPIC",
12158				    INTVAL (operands[1]));
12159    return "bl\\t%c0(tlscall)";
12160  }
12161  [(set_attr "conds" "clob")
12162   (set_attr "length" "4")
12163   (set_attr "type" "branch")]
12164)
12165
12166;; For thread pointer builtin
12167(define_expand "get_thread_pointersi"
12168  [(match_operand:SI 0 "s_register_operand")]
12169 ""
12170 "
12171 {
12172   arm_load_tp (operands[0]);
12173   DONE;
12174 }")
12175
12176;;
12177
12178;; We only care about the lower 16 bits of the constant
12179;; being inserted into the upper 16 bits of the register.
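;; e.g. (illustrative) only the low halfword of the constant is printed, so
;; this pattern emits  movt r0, #0x1234  to set bits [31:16] of r0.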
12180(define_insn "*arm_movtas_ze"
12181  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
12182                   (const_int 16)
12183                   (const_int 16))
12184        (match_operand:SI 1 "const_int_operand" ""))]
12185  "TARGET_HAVE_MOVT"
12186  "@
12187   movt%?\t%0, %L1
12188   movt\t%0, %L1"
12189 [(set_attr "arch" "32,v8mb")
12190  (set_attr "predicable" "yes")
12191  (set_attr "length" "4")
12192  (set_attr "type" "alu_sreg")]
12193)
12194
12195(define_insn "*arm_rev"
12196  [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12197	(bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
12198  "arm_arch6"
12199  "@
12200   rev\t%0, %1
12201   rev%?\t%0, %1
12202   rev%?\t%0, %1"
12203  [(set_attr "arch" "t1,t2,32")
12204   (set_attr "length" "2,2,4")
12205   (set_attr "predicable" "no,yes,yes")
12206   (set_attr "type" "rev")]
12207)
12208
12209(define_expand "arm_legacy_rev"
12210  [(set (match_operand:SI 2 "s_register_operand")
12211	(xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
12212			     (const_int 16))
12213		(match_dup 1)))
12214   (set (match_dup 2)
12215	(lshiftrt:SI (match_dup 2)
12216		     (const_int 8)))
12217   (set (match_operand:SI 3 "s_register_operand")
12218	(rotatert:SI (match_dup 1)
12219		     (const_int 8)))
12220   (set (match_dup 2)
12221	(and:SI (match_dup 2)
12222		(const_int -65281)))
12223   (set (match_operand:SI 0 "s_register_operand")
12224	(xor:SI (match_dup 3)
12225		(match_dup 2)))]
12226  "TARGET_32BIT"
12227  ""
12228)
12229
12230;; Reuse temporaries to keep register pressure down.
12231(define_expand "thumb_legacy_rev"
12232  [(set (match_operand:SI 2 "s_register_operand")
12233     (ashift:SI (match_operand:SI 1 "s_register_operand")
12234                (const_int 24)))
12235   (set (match_operand:SI 3 "s_register_operand")
12236     (lshiftrt:SI (match_dup 1)
12237		  (const_int 24)))
12238   (set (match_dup 3)
12239     (ior:SI (match_dup 3)
12240	     (match_dup 2)))
12241   (set (match_operand:SI 4 "s_register_operand")
12242     (const_int 16))
12243   (set (match_operand:SI 5 "s_register_operand")
12244     (rotatert:SI (match_dup 1)
12245		  (match_dup 4)))
12246   (set (match_dup 2)
12247     (ashift:SI (match_dup 5)
12248                (const_int 24)))
12249   (set (match_dup 5)
12250     (lshiftrt:SI (match_dup 5)
12251		  (const_int 24)))
12252   (set (match_dup 5)
12253     (ior:SI (match_dup 5)
12254	     (match_dup 2)))
12255   (set (match_dup 5)
12256     (rotatert:SI (match_dup 5)
12257		  (match_dup 4)))
12258   (set (match_operand:SI 0 "s_register_operand")
12259     (ior:SI (match_dup 5)
12260             (match_dup 3)))]
12261  "TARGET_THUMB"
12262  ""
12263)
12264
12265;; ARM-specific expansion of signed mod by power of 2
12266;; using conditional negate.
12267;; For r0 % n where n is a power of 2 produce:
12268;; rsbs    r1, r0, #0
12269;; and     r0, r0, #(n - 1)
12270;; and     r1, r1, #(n - 1)
12271;; rsbpl   r0, r1, #0
12272
12273(define_expand "modsi3"
12274  [(match_operand:SI 0 "register_operand")
12275   (match_operand:SI 1 "register_operand")
12276   (match_operand:SI 2 "const_int_operand")]
12277  "TARGET_32BIT"
12278  {
12279    HOST_WIDE_INT val = INTVAL (operands[2]);
12280
12281    if (val <= 0
12282       || exact_log2 (val) <= 0)
12283      FAIL;
12284
12285    rtx mask = GEN_INT (val - 1);
12286
12287    /* In the special case of r0 % 2 we can do the even shorter:
12288	cmp     r0, #0
12289	and     r0, r0, #1
12290	rsblt   r0, r0, #0.  */
12291
12292    if (val == 2)
12293      {
12294	rtx cc_reg = arm_gen_compare_reg (LT,
12295					  operands[1], const0_rtx, NULL_RTX);
12296	rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
12297	rtx masked = gen_reg_rtx (SImode);
12298
12299	emit_insn (gen_andsi3 (masked, operands[1], mask));
12300	emit_move_insn (operands[0],
12301			gen_rtx_IF_THEN_ELSE (SImode, cond,
12302					      gen_rtx_NEG (SImode,
12303							   masked),
12304					      masked));
12305	DONE;
12306      }
12307
12308    rtx neg_op = gen_reg_rtx (SImode);
12309    rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
12310						      operands[1]));
12311
12312    /* Extract the condition register and mode.  */
12313    rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
12314    rtx cc_reg = SET_DEST (cmp);
12315    rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
12316
12317    emit_insn (gen_andsi3 (operands[0], operands[1], mask));
12318
12319    rtx masked_neg = gen_reg_rtx (SImode);
12320    emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
12321
12322    /* We want a conditional negate here, but emitting COND_EXEC rtxes
12323       during expand does not always work.  Do an IF_THEN_ELSE instead.  */
12324    emit_move_insn (operands[0],
12325		    gen_rtx_IF_THEN_ELSE (SImode, cond,
12326					  gen_rtx_NEG (SImode, masked_neg),
12327					  operands[0]));
12328
12329
12330    DONE;
12331  }
12332)
12333
12334(define_expand "bswapsi2"
12335  [(set (match_operand:SI 0 "s_register_operand")
12336	(bswap:SI (match_operand:SI 1 "s_register_operand")))]
12337"TARGET_EITHER && (arm_arch6 || !optimize_size)"
12338"
12339    if (!arm_arch6)
12340      {
12341	rtx op2 = gen_reg_rtx (SImode);
12342	rtx op3 = gen_reg_rtx (SImode);
12343
12344	if (TARGET_THUMB)
12345	  {
12346	    rtx op4 = gen_reg_rtx (SImode);
12347	    rtx op5 = gen_reg_rtx (SImode);
12348
12349	    emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
12350					     op2, op3, op4, op5));
12351	  }
12352	else
12353	  {
12354	    emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
12355					   op2, op3));
12356	  }
12357
12358	DONE;
12359      }
12360  "
12361)
12362
12363;; bswap16 patterns: use revsh and rev16 instructions for the signed
12364;; and unsigned variants, respectively. For rev16, expose
12365;; byte-swapping in the lower 16 bits only.
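;; e.g. (illustrative) revsh r0, r1 gives the sign-extended byte-swapped low
;; halfword of r1, while rev16 r0, r1 swaps the bytes within each halfword.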
12366(define_insn "*arm_revsh"
12367  [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
12368	(sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
12369  "arm_arch6"
12370  "@
12371  revsh\t%0, %1
12372  revsh%?\t%0, %1
12373  revsh%?\t%0, %1"
12374  [(set_attr "arch" "t1,t2,32")
12375   (set_attr "length" "2,2,4")
12376   (set_attr "type" "rev")]
12377)
12378
12379(define_insn "*arm_rev16"
12380  [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
12381	(bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
12382  "arm_arch6"
12383  "@
12384   rev16\t%0, %1
12385   rev16%?\t%0, %1
12386   rev16%?\t%0, %1"
12387  [(set_attr "arch" "t1,t2,32")
12388   (set_attr "length" "2,2,4")
12389   (set_attr "type" "rev")]
12390)
12391
12392;; There are no canonicalisation rules for the position of the lshiftrt, ashift
12393;; operations within an IOR/AND RTX, so we have two patterns matching
12394;; each valid permutation.
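;; Both forms correspond to the usual open-coded rev16 idiom (illustrative):
;;   ((x << 8) & 0xff00ff00) | ((x >> 8) & 0x00ff00ff)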
12395
12396(define_insn "arm_rev16si2"
12397  [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12398        (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
12399                                   (const_int 8))
12400                        (match_operand:SI 3 "const_int_operand" "n,n,n"))
12401                (and:SI (lshiftrt:SI (match_dup 1)
12402                                     (const_int 8))
12403                        (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
12404  "arm_arch6
12405   && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12406   && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12407  "rev16\\t%0, %1"
12408  [(set_attr "arch" "t1,t2,32")
12409   (set_attr "length" "2,2,4")
12410   (set_attr "type" "rev")]
12411)
12412
12413(define_insn "arm_rev16si2_alt"
12414  [(set (match_operand:SI 0 "register_operand" "=l,l,r")
12415        (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
12416                                     (const_int 8))
12417                        (match_operand:SI 2 "const_int_operand" "n,n,n"))
12418                (and:SI (ashift:SI (match_dup 1)
12419                                   (const_int 8))
12420                        (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
12421  "arm_arch6
12422   && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
12423   && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
12424  "rev16\\t%0, %1"
12425  [(set_attr "arch" "t1,t2,32")
12426   (set_attr "length" "2,2,4")
12427   (set_attr "type" "rev")]
12428)
12429
12430(define_expand "bswaphi2"
12431  [(set (match_operand:HI 0 "s_register_operand")
12432	(bswap:HI (match_operand:HI 1 "s_register_operand")))]
12433"arm_arch6"
12434""
12435)
12436
12437;; Patterns for LDRD/STRD in Thumb2 mode
12438
12439(define_insn "*thumb2_ldrd"
12440  [(set (match_operand:SI 0 "s_register_operand" "=r")
12441        (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12442                         (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
12443   (set (match_operand:SI 3 "s_register_operand" "=r")
12444        (mem:SI (plus:SI (match_dup 1)
12445                         (match_operand:SI 4 "const_int_operand" ""))))]
12446  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12447     && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
12448     && (operands_ok_ldrd_strd (operands[0], operands[3],
12449                                  operands[1], INTVAL (operands[2]),
12450                                  false, true))"
12451  "ldrd%?\t%0, %3, [%1, %2]"
12452  [(set_attr "type" "load_8")
12453   (set_attr "predicable" "yes")])
12454
12455(define_insn "*thumb2_ldrd_base"
12456  [(set (match_operand:SI 0 "s_register_operand" "=r")
12457        (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12458   (set (match_operand:SI 2 "s_register_operand" "=r")
12459        (mem:SI (plus:SI (match_dup 1)
12460                         (const_int 4))))]
12461  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12462     && (operands_ok_ldrd_strd (operands[0], operands[2],
12463                                  operands[1], 0, false, true))"
12464  "ldrd%?\t%0, %2, [%1]"
12465  [(set_attr "type" "load_8")
12466   (set_attr "predicable" "yes")])
12467
12468(define_insn "*thumb2_ldrd_base_neg"
12469  [(set (match_operand:SI 0 "s_register_operand" "=r")
12470	(mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
12471                         (const_int -4))))
12472   (set (match_operand:SI 2 "s_register_operand" "=r")
12473        (mem:SI (match_dup 1)))]
12474  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12475     && (operands_ok_ldrd_strd (operands[0], operands[2],
12476                                  operands[1], -4, false, true))"
12477  "ldrd%?\t%0, %2, [%1, #-4]"
12478  [(set_attr "type" "load_8")
12479   (set_attr "predicable" "yes")])
12480
12481(define_insn "*thumb2_strd"
12482  [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12483                         (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
12484        (match_operand:SI 2 "s_register_operand" "r"))
12485   (set (mem:SI (plus:SI (match_dup 0)
12486                         (match_operand:SI 3 "const_int_operand" "")))
12487        (match_operand:SI 4 "s_register_operand" "r"))]
12488  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12489     && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
12490     && (operands_ok_ldrd_strd (operands[2], operands[4],
12491                                  operands[0], INTVAL (operands[1]),
12492                                  false, false))"
12493  "strd%?\t%2, %4, [%0, %1]"
12494  [(set_attr "type" "store_8")
12495   (set_attr "predicable" "yes")])
12496
12497(define_insn "*thumb2_strd_base"
12498  [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
12499        (match_operand:SI 1 "s_register_operand" "r"))
12500   (set (mem:SI (plus:SI (match_dup 0)
12501                         (const_int 4)))
12502        (match_operand:SI 2 "s_register_operand" "r"))]
12503  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12504     && (operands_ok_ldrd_strd (operands[1], operands[2],
12505                                  operands[0], 0, false, false))"
12506  "strd%?\t%1, %2, [%0]"
12507  [(set_attr "type" "store_8")
12508   (set_attr "predicable" "yes")])
12509
12510(define_insn "*thumb2_strd_base_neg"
12511  [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
12512                         (const_int -4)))
12513        (match_operand:SI 1 "s_register_operand" "r"))
12514   (set (mem:SI (match_dup 0))
12515        (match_operand:SI 2 "s_register_operand" "r"))]
12516  "TARGET_LDRD && TARGET_THUMB2 && reload_completed
12517     && (operands_ok_ldrd_strd (operands[1], operands[2],
12518                                  operands[0], -4, false, false))"
12519  "strd%?\t%1, %2, [%0, #-4]"
12520  [(set_attr "type" "store_8")
12521   (set_attr "predicable" "yes")])
12522
12523;; ARMv8 CRC32 instructions.
12524(define_insn "arm_<crc_variant>"
12525  [(set (match_operand:SI 0 "s_register_operand" "=r")
12526        (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
12527                    (match_operand:<crc_mode> 2 "s_register_operand" "r")]
12528         CRC))]
12529  "TARGET_CRC32"
12530  "<crc_variant>\\t%0, %1, %2"
12531  [(set_attr "type" "crc")
12532   (set_attr "conds" "unconditional")]
12533)
12534
12535;; Load the load/store double peephole optimizations.
12536(include "ldrdstrd.md")
12537
12538;; Load the load/store multiple patterns
12539(include "ldmstm.md")
12540
12541;; Patterns in ldmstm.md don't cover more than 4 registers.  This pattern
12542;; covers large lists without explicit writeback generated for the APCS_FRAME
12543;; epilogue.  The operands are validated through the load_multiple_operation
12544;; match_parallel predicate rather than through constraints, so the pattern is
12545;; only enabled after reload.
12546(define_insn "*load_multiple"
12547  [(match_parallel 0 "load_multiple_operation"
12548    [(set (match_operand:SI 2 "s_register_operand" "=rk")
12549          (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
12550        ])]
12551  "TARGET_32BIT && reload_completed"
12552  "*
12553  {
12554    arm_output_multireg_pop (operands, /*return_pc=*/false,
12555                                       /*cond=*/const_true_rtx,
12556                                       /*reverse=*/false,
12557                                       /*update=*/false);
12558    return \"\";
12559  }
12560  "
12561  [(set_attr "predicable" "yes")]
12562)
12563
12564(define_expand "copysignsf3"
12565  [(match_operand:SF 0 "register_operand")
12566   (match_operand:SF 1 "register_operand")
12567   (match_operand:SF 2 "register_operand")]
12568  "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12569  "{
12570     emit_move_insn (operands[0], operands[2]);
12571     emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
12572		GEN_INT (31), GEN_INT (0),
12573		simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
12574     DONE;
12575  }"
12576)
12577
12578(define_expand "copysigndf3"
12579  [(match_operand:DF 0 "register_operand")
12580   (match_operand:DF 1 "register_operand")
12581   (match_operand:DF 2 "register_operand")]
12582  "TARGET_SOFT_FLOAT && arm_arch_thumb2"
12583  "{
12584     rtx op0_low = gen_lowpart (SImode, operands[0]);
12585     rtx op0_high = gen_highpart (SImode, operands[0]);
12586     rtx op1_low = gen_lowpart (SImode, operands[1]);
12587     rtx op1_high = gen_highpart (SImode, operands[1]);
12588     rtx op2_high = gen_highpart (SImode, operands[2]);
12589
12590     rtx scratch1 = gen_reg_rtx (SImode);
12591     rtx scratch2 = gen_reg_rtx (SImode);
12592     emit_move_insn (scratch1, op2_high);
12593     emit_move_insn (scratch2, op1_high);
12594
12595     emit_insn (gen_rtx_SET (scratch1,
12596			     gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT (31))));
12597     emit_insn (gen_insv_t2 (scratch2, GEN_INT (1), GEN_INT (31), scratch1));
12598     emit_move_insn (op0_low, op1_low);
12599     emit_move_insn (op0_high, scratch2);
12600
12601     DONE;
12602  }"
12603)
12604
12605;; movmisalign patterns for HImode and SImode.
12606(define_expand "movmisalign<mode>"
12607  [(match_operand:HSI 0 "general_operand")
12608   (match_operand:HSI 1 "general_operand")]
12609  "unaligned_access"
12610{
12611  /* This pattern is not permitted to fail during expansion: if both arguments
12612     are non-registers (e.g. memory := constant), force operand 1 into a
12613     register.  */
12614  rtx (* gen_unaligned_load)(rtx, rtx);
12615  rtx tmp_dest = operands[0];
12616  if (!s_register_operand (operands[0], <MODE>mode)
12617      && !s_register_operand (operands[1], <MODE>mode))
12618    operands[1] = force_reg (<MODE>mode, operands[1]);
12619
12620  if (<MODE>mode == HImode)
12621   {
12622    gen_unaligned_load = gen_unaligned_loadhiu;
12623    tmp_dest = gen_reg_rtx (SImode);
12624   }
12625  else
12626    gen_unaligned_load = gen_unaligned_loadsi;
12627
12628  if (MEM_P (operands[1]))
12629   {
12630    emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
12631    if (<MODE>mode == HImode)
12632      emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
12633   }
12634  else
12635    emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
12636
12637  DONE;
12638})
12639
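;; Coprocessor access patterns.  These back the ACLE coprocessor intrinsics
;; (__arm_cdp, __arm_ldc, __arm_stc, __arm_mcr, __arm_mrc, __arm_mcrr,
;; __arm_mrrc and their *2 variants, declared in arm_acle.h); the
;; arm_const_bounds calls range-check the coprocessor number and the
;; immediate opcode/register-number fields.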
12640(define_insn "arm_<cdp>"
12641  [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12642		     (match_operand:SI 1 "immediate_operand" "n")
12643		     (match_operand:SI 2 "immediate_operand" "n")
12644		     (match_operand:SI 3 "immediate_operand" "n")
12645		     (match_operand:SI 4 "immediate_operand" "n")
12646		     (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
12647  "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
12648{
12649  arm_const_bounds (operands[0], 0, 16);
12650  arm_const_bounds (operands[1], 0, 16);
12651  arm_const_bounds (operands[2], 0, (1 << 5));
12652  arm_const_bounds (operands[3], 0, (1 << 5));
12653  arm_const_bounds (operands[4], 0, (1 << 5));
12654  arm_const_bounds (operands[5], 0, 8);
12655  return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
12656}
12657  [(set_attr "length" "4")
12658   (set_attr "type" "coproc")])
12659
12660(define_insn "*ldc"
12661  [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12662		     (match_operand:SI 1 "immediate_operand" "n")
12663		     (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
12664  "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
12665{
12666  arm_const_bounds (operands[0], 0, 16);
12667  arm_const_bounds (operands[1], 0, (1 << 5));
12668  return "<ldc>\\tp%c0, CR%c1, %2";
12669}
12670  [(set_attr "length" "4")
12671   (set_attr "type" "coproc")])
12672
12673(define_insn "*stc"
12674  [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12675		     (match_operand:SI 1 "immediate_operand" "n")
12676		     (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
12677  "arm_coproc_builtin_available (VUNSPEC_<STC>)"
12678{
12679  arm_const_bounds (operands[0], 0, 16);
12680  arm_const_bounds (operands[1], 0, (1 << 5));
12681  return "<stc>\\tp%c0, CR%c1, %2";
12682}
12683  [(set_attr "length" "4")
12684   (set_attr "type" "coproc")])
12685
12686(define_expand "arm_<ldc>"
12687  [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12688		     (match_operand:SI 1 "immediate_operand")
12689		     (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
12690  "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
12691
12692(define_expand "arm_<stc>"
12693  [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12694		     (match_operand:SI 1 "immediate_operand")
12695		     (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
12696  "arm_coproc_builtin_available (VUNSPEC_<STC>)")
12697
12698(define_insn "arm_<mcr>"
12699  [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12700		     (match_operand:SI 1 "immediate_operand" "n")
12701		     (match_operand:SI 2 "s_register_operand" "r")
12702		     (match_operand:SI 3 "immediate_operand" "n")
12703		     (match_operand:SI 4 "immediate_operand" "n")
12704		     (match_operand:SI 5 "immediate_operand" "n")] MCRI)
12705   (use (match_dup 2))]
12706  "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
12707{
12708  arm_const_bounds (operands[0], 0, 16);
12709  arm_const_bounds (operands[1], 0, 8);
12710  arm_const_bounds (operands[3], 0, (1 << 5));
12711  arm_const_bounds (operands[4], 0, (1 << 5));
12712  arm_const_bounds (operands[5], 0, 8);
12713  return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
12714}
12715  [(set_attr "length" "4")
12716   (set_attr "type" "coproc")])
12717
12718(define_insn "arm_<mrc>"
12719  [(set (match_operand:SI 0 "s_register_operand" "=r")
12720	(unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
12721			  (match_operand:SI 2 "immediate_operand" "n")
12722			  (match_operand:SI 3 "immediate_operand" "n")
12723			  (match_operand:SI 4 "immediate_operand" "n")
12724			  (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
12725  "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
12726{
12727  arm_const_bounds (operands[1], 0, 16);
12728  arm_const_bounds (operands[2], 0, 8);
12729  arm_const_bounds (operands[3], 0, (1 << 5));
12730  arm_const_bounds (operands[4], 0, (1 << 5));
12731  arm_const_bounds (operands[5], 0, 8);
12732  return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
12733}
12734  [(set_attr "length" "4")
12735   (set_attr "type" "coproc")])
12736
12737(define_insn "arm_<mcrr>"
12738  [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12739		     (match_operand:SI 1 "immediate_operand" "n")
12740		     (match_operand:DI 2 "s_register_operand" "r")
12741		     (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
12742   (use (match_dup 2))]
12743  "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
12744{
12745  arm_const_bounds (operands[0], 0, 16);
12746  arm_const_bounds (operands[1], 0, 8);
12747  arm_const_bounds (operands[3], 0, (1 << 5));
12748  return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
12749}
12750  [(set_attr "length" "4")
12751   (set_attr "type" "coproc")])
12752
12753(define_insn "arm_<mrrc>"
12754  [(set (match_operand:DI 0 "s_register_operand" "=r")
12755	(unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
12756			  (match_operand:SI 2 "immediate_operand" "n")
12757			  (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
12758  "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
12759{
12760  arm_const_bounds (operands[1], 0, 16);
12761  arm_const_bounds (operands[2], 0, 8);
12762  arm_const_bounds (operands[3], 0, (1 << 5));
12763  return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
12764}
12765  [(set_attr "length" "4")
12766   (set_attr "type" "coproc")])
12767
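;; Standard speculation_barrier pattern, used when expanding
;; __builtin_speculation_safe_value.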
12768(define_expand "speculation_barrier"
12769  [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12770  "TARGET_EITHER"
12771  "
12772  /* For Thumb-1 (except Armv8 derivatives) and for pre-Armv7 we don't
12773     have a usable barrier (and probably don't need one in practice).
12774     But to be safe if such code is run on later architectures, call a
12775     helper function in libgcc that will emit the barrier appropriate
12776     for the system it is actually running on.  */
12777  if (!(arm_arch7 || arm_arch8))
12778    {
12779      arm_emit_speculation_barrier_function ();
12780      DONE;
12781    }
12782  "
12783)
12784
12785;; Generate a hard speculation barrier when we have not enabled speculation
12786;; tracking.
12787(define_insn "*speculation_barrier_insn"
12788  [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12789  "arm_arch7 || arm_arch8"
12790  "isb\;dsb\\tsy"
12791  [(set_attr "type" "block")
12792   (set_attr "length" "8")]
12793)
12794
12795;; Vector bits common to IWMMXT, Neon and MVE
12796(include "vec-common.md")
12797;; Load the Intel Wireless Multimedia Extension patterns
12798(include "iwmmxt.md")
12799;; Load the VFP co-processor patterns
12800(include "vfp.md")
12801;; Thumb-1 patterns
12802(include "thumb1.md")
12803;; Thumb-2 patterns
12804(include "thumb2.md")
12805;; Neon patterns
12806(include "neon.md")
12807;; Crypto patterns
12808(include "crypto.md")
12809;; Synchronization Primitives
12810(include "sync.md")
12811;; Fixed-point patterns
12812(include "arm-fixed.md")
12813;; M-profile Vector Extension
12814(include "mve.md")
12815