1;; Machine description for GNU compiler, VAX Version
2;; Copyright (C) 1987, 1988, 1991, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
3;; 2002, 2004, 2005, 2007, 2009 Free Software Foundation, Inc.
4
5;; This file is part of GCC.
6
7;; GCC is free software; you can redistribute it and/or modify
8;; it under the terms of the GNU General Public License as published by
9;; the Free Software Foundation; either version 3, or (at your option)
10;; any later version.
11
12;; GCC is distributed in the hope that it will be useful,
13;; but WITHOUT ANY WARRANTY; without even the implied warranty of
14;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15;; GNU General Public License for more details.
16
17;; You should have received a copy of the GNU General Public License
18;; along with GCC; see the file COPYING3.  If not see
19;; <http://www.gnu.org/licenses/>.
20
21
22;;- Instruction patterns.  When multiple patterns apply,
23;;- the first one in the file is chosen.
24;;-
25;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
26;;-
27;;- cpp macro #define NOTICE_UPDATE_CC in file tm.h handles condition code
28;;- updates for most instructions.
29
30;; UNSPEC_VOLATILE usage:
31
32(define_constants
33  [(VUNSPEC_BLOCKAGE 0)	    ; `blockage' insn to prevent scheduling across an
34			    ; insn in the code.
35   (VUNSPEC_SYNC_ISTREAM 1) ; sequence of insns to sync the I-stream
36   (VAX_AP_REGNUM 12)	    ; Register 12 contains the argument pointer
37   (VAX_FP_REGNUM 13)	    ; Register 13 contains the frame pointer
38   (VAX_SP_REGNUM 14)	    ; Register 14 contains the stack pointer
39   (VAX_PC_REGNUM 15)	    ; Register 15 contains the program counter
40  ]
41)
42
43;; Integer modes supported on VAX, with a mapping from machine mode
44;; to mnemonic suffix.  DImode is always a special case.
45(define_mode_iterator VAXint [QI HI SI])
46(define_mode_iterator VAXintQH [QI HI])
47(define_mode_iterator VAXintQHSD [QI HI SI DI])
48(define_mode_attr  isfx [(QI "b") (HI "w") (SI "l") (DI "q")])
49
50;; Similar for float modes supported on VAX.
51(define_mode_iterator VAXfp [SF DF])
52(define_mode_attr  fsfx [(SF "f") (DF "%#")])
53
54;; Some output patterns want integer immediates with a prefix...
55(define_mode_attr  iprefx [(QI "B") (HI "H") (SI "N")])
56
57;;
58(include "constraints.md")
59(include "predicates.md")
60
61(define_insn "*cmp<mode>"
62  [(set (cc0)
63	(compare (match_operand:VAXint 0 "nonimmediate_operand" "nrmT,nrmT")
64		 (match_operand:VAXint 1 "general_operand" "I,nrmT")))]
65  ""
66  "@
67   tst<VAXint:isfx> %0
68   cmp<VAXint:isfx> %0,%1")
69
70(define_insn "*cmp<mode>"
71  [(set (cc0)
72	(compare (match_operand:VAXfp 0 "general_operand" "gF,gF")
73		 (match_operand:VAXfp 1 "general_operand" "G,gF")))]
74  ""
75  "@
76   tst<VAXfp:fsfx> %0
77   cmp<VAXfp:fsfx> %0,%1")
78
79(define_insn "*bit<mode>"
80  [(set (cc0)
81	(compare (and:VAXint (match_operand:VAXint 0 "general_operand" "nrmT")
82			     (match_operand:VAXint 1 "general_operand" "nrmT"))
83		 (const_int 0)))]
84  ""
85  "bit<VAXint:isfx> %0,%1")
86
87;; The VAX has no sCOND insns.  It does have add/subtract with carry
88;; which could be used to implement the sltu and sgeu patterns.  However,
89;; to do this properly requires a complete rewrite of the compare insns
90;; to keep them together with the sltu/sgeu insns until after the
91;; reload pass is complete.  The previous implementation didn't do this
92;; and has been deleted.
93
94
95(define_insn "mov<mode>"
96  [(set (match_operand:VAXfp 0 "nonimmediate_operand" "=g,g")
97	(match_operand:VAXfp 1 "general_operand" "G,gF"))]
98  ""
99  "@
100   clr<VAXfp:fsfx> %0
101   mov<VAXfp:fsfx> %1,%0")
102
103;; Some VAXen don't support this instruction.
104;;(define_insn "movti"
105;;  [(set (match_operand:TI 0 "general_operand" "=g")
106;;	(match_operand:TI 1 "general_operand" "g"))]
107;;  ""
108;;  "movh %1,%0")
109
110(define_insn "movdi"
111  [(set (match_operand:DI 0 "nonimmediate_operand" "=g")
112	(match_operand:DI 1 "general_operand" "g"))]
113  ""
114  "* return vax_output_int_move (insn, operands, DImode);")
115
116;; The VAX move instructions have space-time tradeoffs.  On a MicroVAX
117;; register-register mov instructions take 3 bytes and 2 CPU cycles.  clrl
118;; takes 2 bytes and 3 cycles.  mov from constant to register takes 2 cycles
119;; if the constant is smaller than 4 bytes, 3 cycles for a longword
120;; constant.  movz, mneg, and mcom are as fast as mov, so movzwl is faster
121;; than movl for positive constants that fit in 16 bits but not 6 bits.  cvt
122;; instructions take 4 cycles.  inc takes 3 cycles.  The machine description
123;; is willing to trade 1 byte for 1 cycle (clrl instead of movl $0; cvtwl
124;; instead of movl).
125
126;; Cycle counts for other models may vary (on a VAX 750 they are similar,
127;; but on a VAX 9000 most move and add instructions with one constant
128;; operand take 1 cycle).
129
130;;  Loads of constants between 64 and 128 used to be done with
131;; "addl3 $63,#,dst" but this is slower than movzbl and takes as much space.
132
133(define_expand "movsi"
134  [(set (match_operand:SI 0 "nonimmediate_operand" "")
135	(match_operand:SI 1 "general_operand" ""))]
136  ""
137  "
138{
139#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
140  if (flag_pic
141      && GET_CODE (operands[1]) == CONST
142      && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
143      && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (operands[1], 0), 0)))
144    {
145      rtx symbol_ref = XEXP (XEXP (operands[1], 0), 0);
146      rtx const_int = XEXP (XEXP (operands[1], 0), 1);
147      rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
148      emit_move_insn (temp, symbol_ref);
149      emit_move_insn (operands[0], gen_rtx_PLUS (SImode, temp, const_int));
150      DONE;
151    }
152#endif
153}")
154
155(define_insn "movsi_2"
156  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
157	(match_operand:SI 1 "nonsymbolic_operand" "nrmT"))]
158  ""
159  "* return vax_output_int_move (insn, operands, SImode);")
160
161(define_insn "mov<mode>"
162  [(set (match_operand:VAXintQH 0 "nonimmediate_operand" "=g")
163	(match_operand:VAXintQH 1 "general_operand" "g"))]
164  ""
165  "* return vax_output_int_move (insn, operands, <MODE>mode);")
166
167(define_insn "movstricthi"
168  [(set (strict_low_part (match_operand:HI 0 "register_operand" "+g"))
169	(match_operand:HI 1 "general_operand" "g"))]
170  ""
171  "*
172{
173  if (CONST_INT_P (operands[1]))
174    {
175      int i = INTVAL (operands[1]);
176      if (i == 0)
177	return \"clrw %0\";
178      else if ((unsigned int)i < 64)
179	return \"movw %1,%0\";
180      else if ((unsigned int)~i < 64)
181	return \"mcomw %H1,%0\";
182      else if ((unsigned int)i < 256)
183	return \"movzbw %1,%0\";
184    }
185  return \"movw %1,%0\";
186}")
187
188(define_insn "movstrictqi"
189  [(set (strict_low_part (match_operand:QI 0 "register_operand" "+g"))
190	(match_operand:QI 1 "general_operand" "g"))]
191  ""
192  "*
193{
194  if (CONST_INT_P (operands[1]))
195    {
196      int i = INTVAL (operands[1]);
197      if (i == 0)
198	return \"clrb %0\";
199      else if ((unsigned int)~i < 64)
200	return \"mcomb %B1,%0\";
201    }
202  return \"movb %1,%0\";
203}")
204
205;; This is here to accept 4 arguments and pass the first 3 along
206;; to the movmemhi1 pattern that really does the work.
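;;
;; Illustrative sketch (assumed C source, not a verbatim expansion): a copy
;; with a constant length of at most 48 bytes goes through movmemsi1_2 and is
;; open coded by vax_output_movmemsi; anything else becomes a single movc3,
;; which clobbers r0-r5 as the movmemhi1 pattern below declares.
;;
;;	memcpy (dst, src, 16);	/* constant <= 48: open coded moves */
;;	memcpy (dst, src, n);	/* otherwise: movc3 n,src,dst */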
207(define_expand "movmemhi"
208  [(set (match_operand:BLK 0 "general_operand" "=g")
209	(match_operand:BLK 1 "general_operand" "g"))
210   (use (match_operand:HI 2 "general_operand" "g"))
211   (match_operand 3 "" "")]
212  ""
213  "
214{
215  if (CONST_INT_P (operands[2]) && INTVAL (operands[2]) <= 48)
216    {
217      emit_insn (gen_movmemsi1_2 (operands[0], operands[1], operands[2]));
218      DONE;
219    }
220  emit_insn (gen_movmemhi1 (operands[0], operands[1], operands[2]));
221  DONE;
222}")
223
;; The definition of this insn does not really explain what it does,
;; but it suffices that anything generated as this insn will be
;; recognized as one and that it will not successfully combine with
;; anything.
228
229(define_insn "movmemsi1_2"
230  [(set (match_operand:BLK 0 "memory_operand" "=B")
231	(match_operand:BLK 1 "memory_operand" "B"))
232   (use (match_operand:SI 2 "const_int_operand" "g"))]
233  "INTVAL (operands[2]) <= 48"
234  "* return vax_output_movmemsi (insn, operands);")
235
236(define_insn "movmemhi1"
237  [(set (match_operand:BLK 0 "memory_operand" "=o")
238	(match_operand:BLK 1 "memory_operand" "o"))
239   (use (match_operand:HI 2 "general_operand" "g"))
240   (clobber (reg:SI 0))
241   (clobber (reg:SI 1))
242   (clobber (reg:SI 2))
243   (clobber (reg:SI 3))
244   (clobber (reg:SI 4))
245   (clobber (reg:SI 5))]
246  ""
247  "movc3 %2,%1,%0")
248
249;; Extension and truncation insns.
250
251(define_insn "truncsiqi2"
252  [(set (match_operand:QI 0 "nonimmediate_operand" "=g")
253	(truncate:QI (match_operand:SI 1 "nonimmediate_operand" "nrmT")))]
254  ""
255  "cvtlb %1,%0")
256
257(define_insn "truncsihi2"
258  [(set (match_operand:HI 0 "nonimmediate_operand" "=g")
259	(truncate:HI (match_operand:SI 1 "nonimmediate_operand" "nrmT")))]
260  ""
261  "cvtlw %1,%0")
262
263(define_insn "trunchiqi2"
264  [(set (match_operand:QI 0 "nonimmediate_operand" "=g")
265	(truncate:QI (match_operand:HI 1 "nonimmediate_operand" "g")))]
266  ""
267  "cvtwb %1,%0")
268
269(define_insn "extendhisi2"
270  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
271	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "g")))]
272  ""
273  "cvtwl %1,%0")
274
275(define_insn "extendqihi2"
276  [(set (match_operand:HI 0 "nonimmediate_operand" "=g")
277	(sign_extend:HI (match_operand:QI 1 "nonimmediate_operand" "g")))]
278  ""
279  "cvtbw %1,%0")
280
281(define_insn "extendqisi2"
282  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
283	(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "g")))]
284  ""
285  "cvtbl %1,%0")
286
287(define_insn "extendsfdf2"
288  [(set (match_operand:DF 0 "nonimmediate_operand" "=g")
289	(float_extend:DF (match_operand:SF 1 "general_operand" "gF")))]
290  ""
291  "cvtf%# %1,%0")
292
293(define_insn "truncdfsf2"
294  [(set (match_operand:SF 0 "nonimmediate_operand" "=g")
295	(float_truncate:SF (match_operand:DF 1 "general_operand" "gF")))]
296  ""
297  "cvt%#f %1,%0")
298
299(define_insn "zero_extendhisi2"
300  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
301	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "g")))]
302  ""
303  "movzwl %1,%0")
304
305(define_insn "zero_extendqihi2"
306  [(set (match_operand:HI 0 "nonimmediate_operand" "=g")
307	(zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "g")))]
308  ""
309  "movzbw %1,%0")
310
311(define_insn "zero_extendqisi2"
312  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
313	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "g")))]
314  ""
315  "movzbl %1,%0")
316
317;; Fix-to-float conversion insns.
318
319(define_insn "float<VAXint:mode><VAXfp:mode>2"
320  [(set (match_operand:VAXfp 0 "nonimmediate_operand" "=g")
321	(float:VAXfp (match_operand:VAXint 1 "nonimmediate_operand" "g")))]
322  ""
323  "cvt<VAXint:isfx><VAXfp:fsfx> %1,%0")
324
325;; Float-to-fix conversion insns.
326
327(define_insn "fix_trunc<VAXfp:mode><VAXint:mode>2"
328  [(set (match_operand:VAXint 0 "nonimmediate_operand" "=g")
329	(fix:VAXint (fix:VAXfp (match_operand:VAXfp 1 "general_operand" "gF"))))]
330  ""
331  "cvt<VAXfp:fsfx><VAXint:isfx> %1,%0")
332
333;;- All kinds of add instructions.
334
335(define_insn "add<mode>3"
336  [(set (match_operand:VAXfp 0 "nonimmediate_operand" "=g,g,g")
337	(plus:VAXfp (match_operand:VAXfp 1 "general_operand" "0,gF,gF")
338		    (match_operand:VAXfp 2 "general_operand" "gF,0,gF")))]
339  ""
340  "@
341   add<VAXfp:fsfx>2 %2,%0
342   add<VAXfp:fsfx>2 %1,%0
343   add<VAXfp:fsfx>3 %1,%2,%0")
344
345(define_insn "pushlclsymreg"
346  [(set (match_operand:SI 0 "push_operand" "=g")
347	(plus:SI (match_operand:SI 1 "register_operand" "%r")
348		 (match_operand:SI 2 "local_symbolic_operand" "i")))]
349  "flag_pic"
350  "pushab %a2[%1]")
351
352(define_insn "pushextsymreg"
353  [(set (match_operand:SI 0 "push_operand" "=g")
354	(plus:SI (match_operand:SI 1 "register_operand" "%r")
355		 (match_operand:SI 2 "external_symbolic_operand" "i")))]
356  "flag_pic"
357  "pushab %a2[%1]")
358
359(define_insn "movlclsymreg"
360  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
361	(plus:SI (match_operand:SI 1 "register_operand" "%r")
362		 (match_operand:SI 2 "local_symbolic_operand" "i")))]
363  "flag_pic"
364  "movab %a2[%1],%0")
365
366(define_insn "movextsymreg"
367  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
368	(plus:SI (match_operand:SI 1 "register_operand" "%r")
369		 (match_operand:SI 2 "external_symbolic_operand" "i")))]
370  "flag_pic"
371  "movab %a2[%1],%0")
372
373(define_insn "add<mode>3"
374  [(set (match_operand:VAXint 0 "nonimmediate_operand" "=g")
375	(plus:VAXint (match_operand:VAXint 1 "general_operand" "nrmT")
376		    (match_operand:VAXint 2 "general_operand" "nrmT")))]
377  ""
378  "* return vax_output_int_add (insn, operands, <MODE>mode);")
379
380(define_expand "adddi3"
381  [(set (match_operand:DI 0 "nonimmediate_operand" "=g")
382	(plus:DI (match_operand:DI 1 "general_operand" "g")
383		 (match_operand:DI 2 "general_operand" "g")))]
384  "!reload_in_progress"
385  "vax_expand_addsub_di_operands (operands, PLUS); DONE;")
386
387(define_insn "adcdi3"
388  [(set (match_operand:DI 0 "nonimmediate_addsub_di_operand" "=Rr")
389	(plus:DI (match_operand:DI 1 "general_addsub_di_operand" "%0")
390		 (match_operand:DI 2 "general_addsub_di_operand" "nRr")))]
391  "TARGET_QMATH"
392  "* return vax_output_int_add (insn, operands, DImode);")
393
394;; The add-with-carry (adwc) instruction only accepts two operands.
395(define_insn "adddi3_old"
396  [(set (match_operand:DI 0 "nonimmediate_operand" "=ro>,ro>")
397	(plus:DI (match_operand:DI 1 "general_operand" "%0,ro>")
398		 (match_operand:DI 2 "general_operand" "Fsro,Fs")))]
399  "!TARGET_QMATH"
400  "* return vax_output_int_add (insn, operands, DImode);")
401
402;;- All kinds of subtract instructions.
403
404(define_insn "sub<mode>3"
405  [(set (match_operand:VAXfp 0 "nonimmediate_operand" "=g,g")
406	(minus:VAXfp (match_operand:VAXfp 1 "general_operand" "0,gF")
407		     (match_operand:VAXfp 2 "general_operand" "gF,gF")))]
408  ""
409  "@
410   sub<VAXfp:fsfx>2 %2,%0
411   sub<VAXfp:fsfx>3 %2,%1,%0")
412
413(define_insn "sub<mode>3"
414  [(set (match_operand:VAXint 0 "nonimmediate_operand" "=g,g")
415	(minus:VAXint (match_operand:VAXint 1 "general_operand" "0,nrmT")
416		     (match_operand:VAXint 2 "general_operand" "nrmT,nrmT")))]
417  ""
418  "@
419   sub<VAXint:isfx>2 %2,%0
420   sub<VAXint:isfx>3 %2,%1,%0")
421
422(define_expand "subdi3"
423  [(set (match_operand:DI 0 "nonimmediate_operand" "=g")
424	(minus:DI (match_operand:DI 1 "general_operand" "g")
425		  (match_operand:DI 2 "general_operand" "g")))]
426  "!reload_in_progress"
427  "vax_expand_addsub_di_operands (operands, MINUS); DONE;")
428
429(define_insn "sbcdi3"
430  [(set (match_operand:DI 0 "nonimmediate_addsub_di_operand" "=Rr,=Rr")
431	(minus:DI (match_operand:DI 1 "general_addsub_di_operand" "0,I")
432		  (match_operand:DI 2 "general_addsub_di_operand" "nRr,Rr")))]
433  "TARGET_QMATH"
434  "* return vax_output_int_subtract (insn, operands, DImode);")
435
436;; The subtract-with-carry (sbwc) instruction only takes two operands.
437(define_insn "subdi3_old"
438  [(set (match_operand:DI 0 "nonimmediate_operand" "=or>,or>")
439	(minus:DI (match_operand:DI 1 "general_operand" "0,or>")
440		  (match_operand:DI 2 "general_operand" "Fsor,Fs")))]
441  "!TARGET_QMATH"
442  "* return vax_output_int_subtract (insn, operands, DImode);")
443
444;;- Multiply instructions.
445
446(define_insn "mul<mode>3"
447  [(set (match_operand:VAXfp 0 "nonimmediate_operand" "=g,g,g")
448	(mult:VAXfp (match_operand:VAXfp 1 "general_operand" "0,gF,gF")
449		    (match_operand:VAXfp 2 "general_operand" "gF,0,gF")))]
450  ""
451  "@
452   mul<VAXfp:fsfx>2 %2,%0
453   mul<VAXfp:fsfx>2 %1,%0
454   mul<VAXfp:fsfx>3 %1,%2,%0")
455
456(define_insn "mul<mode>3"
457  [(set (match_operand:VAXint 0 "nonimmediate_operand" "=g,g,g")
458	(mult:VAXint (match_operand:VAXint 1 "general_operand" "0,nrmT,nrmT")
459		    (match_operand:VAXint 2 "general_operand" "nrmT,0,nrmT")))]
460  ""
461  "@
462   mul<VAXint:isfx>2 %2,%0
463   mul<VAXint:isfx>2 %1,%0
464   mul<VAXint:isfx>3 %1,%2,%0")
465
466(define_insn "mulsidi3"
467  [(set (match_operand:DI 0 "nonimmediate_operand" "=g")
468	(mult:DI (sign_extend:DI
469		  (match_operand:SI 1 "nonimmediate_operand" "nrmT"))
470		 (sign_extend:DI
471		  (match_operand:SI 2 "nonimmediate_operand" "nrmT"))))]
472  ""
473  "emul %1,%2,$0,%0")
474
475(define_insn ""
476  [(set (match_operand:DI 0 "nonimmediate_operand" "=g")
477	(plus:DI
478	 (mult:DI (sign_extend:DI
479		   (match_operand:SI 1 "nonimmediate_operand" "nrmT"))
480		  (sign_extend:DI
481		   (match_operand:SI 2 "nonimmediate_operand" "nrmT")))
482	 (sign_extend:DI (match_operand:SI 3 "nonimmediate_operand" "g"))))]
483  ""
484  "emul %1,%2,%3,%0")
485
486;; 'F' constraint means type CONST_DOUBLE
487(define_insn ""
488  [(set (match_operand:DI 0 "nonimmediate_operand" "=g")
489	(plus:DI
490	 (mult:DI (sign_extend:DI
491		   (match_operand:SI 1 "nonimmediate_operand" "nrmT"))
492		  (sign_extend:DI
493		   (match_operand:SI 2 "nonimmediate_operand" "nrmT")))
494	 (match_operand:DI 3 "immediate_operand" "F")))]
495  "GET_CODE (operands[3]) == CONST_DOUBLE
496    && CONST_DOUBLE_HIGH (operands[3]) == (CONST_DOUBLE_LOW (operands[3]) >> 31)"
497  "*
498{
499  if (CONST_DOUBLE_HIGH (operands[3]))
500    operands[3] = GEN_INT (CONST_DOUBLE_LOW (operands[3]));
501  return \"emul %1,%2,%3,%0\";
502}")
503
504;;- Divide instructions.
505
506(define_insn "div<mode>3"
507  [(set (match_operand:VAXfp 0 "nonimmediate_operand" "=g,g")
508	(div:VAXfp (match_operand:VAXfp 1 "general_operand" "0,gF")
509		   (match_operand:VAXfp 2 "general_operand" "gF,gF")))]
510  ""
511  "@
512   div<VAXfp:fsfx>2 %2,%0
513   div<VAXfp:fsfx>3 %2,%1,%0")
514
515(define_insn "div<mode>3"
516  [(set (match_operand:VAXint 0 "nonimmediate_operand" "=g,g")
517	(div:VAXint (match_operand:VAXint 1 "general_operand" "0,nrmT")
518		   (match_operand:VAXint 2 "general_operand" "nrmT,nrmT")))]
519  ""
520  "@
521   div<VAXint:isfx>2 %2,%0
522   div<VAXint:isfx>3 %2,%1,%0")
523
524;This is left out because it is very slow;
525;we are better off programming around the "lack" of this insn.
526;(define_insn "divmoddisi4"
527;  [(set (match_operand:SI 0 "general_operand" "=g")
528;	(div:SI (match_operand:DI 1 "general_operand" "g")
529;		(match_operand:SI 2 "general_operand" "g")))
530;   (set (match_operand:SI 3 "general_operand" "=g")
531;	(mod:SI (match_operand:DI 1 "general_operand" "g")
532;		(match_operand:SI 2 "general_operand" "g")))]
533;  ""
534;  "ediv %2,%1,%0,%3")
535
536;; Bit-and on the VAX is done with a clear-bits insn.
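;;
;; Illustrative sketch: with a constant mask the expander below complements
;; the constant, so for example
;;
;;	y = x & 0x0f;	/* -> bicl3 $0xfffffff0,x,y */
;;	x &= 0x0f;	/* -> bicl2 $0xfffffff0,x   */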
537(define_expand "and<mode>3"
538  [(set (match_operand:VAXint 0 "nonimmediate_operand" "")
539	(and:VAXint (not:VAXint (match_operand:VAXint 1 "general_operand" ""))
540		   (match_operand:VAXint 2 "general_operand" "")))]
541  ""
542  "
543{
544  rtx op1 = operands[1];
545
546  /* If there is a constant argument, complement that one.  */
547  if (CONST_INT_P (operands[2]) && ! CONST_INT_P (op1))
548    {
549      operands[1] = operands[2];
550      operands[2] = op1;
551      op1 = operands[1];
552    }
553
554  if (CONST_INT_P (op1))
555    operands[1] = GEN_INT (~INTVAL (op1));
556  else
557    operands[1] = expand_unop (<MODE>mode, one_cmpl_optab, op1, 0, 1);
558}")
559
560(define_insn "*and<mode>"
561  [(set (match_operand:VAXint 0 "nonimmediate_operand" "=g,g")
562	(and:VAXint (not:VAXint (match_operand:VAXint 1 "general_operand" "nrmT,nrmT"))
563		    (match_operand:VAXint 2 "general_operand" "0,nrmT")))]
564  ""
565  "@
566   bic<VAXint:isfx>2 %1,%0
567   bic<VAXint:isfx>3 %1,%2,%0")
568
569;; The following used to be needed because constant propagation can
570;; create them starting from the bic insn patterns above.  This is no
571;; longer a problem.  However, having these patterns allows optimization
572;; opportunities in combine.c.
573
574(define_insn "*and<mode>_const_int"
575  [(set (match_operand:VAXint 0 "nonimmediate_operand" "=g,g")
576	(and:VAXint (match_operand:VAXint 1 "general_operand" "0,nrmT")
577		   (match_operand:VAXint 2 "const_int_operand" "n,n")))]
578  ""
579  "@
580   bic<VAXint:isfx>2 %<VAXint:iprefx>2,%0
581   bic<VAXint:isfx>3 %<VAXint:iprefx>2,%1,%0")
582
583
584;;- Bit set instructions.
585
586(define_insn "ior<mode>3"
587  [(set (match_operand:VAXint 0 "nonimmediate_operand" "=g,g,g")
588	(ior:VAXint (match_operand:VAXint 1 "general_operand" "0,nrmT,nrmT")
589		   (match_operand:VAXint 2 "general_operand" "nrmT,0,nrmT")))]
590  ""
591  "@
592   bis<VAXint:isfx>2 %2,%0
593   bis<VAXint:isfx>2 %1,%0
594   bis<VAXint:isfx>3 %2,%1,%0")
595
596;;- xor instructions.
597
598(define_insn "xor<mode>3"
599  [(set (match_operand:VAXint 0 "nonimmediate_operand" "=g,g,g")
600	(xor:VAXint (match_operand:VAXint 1 "general_operand" "0,nrmT,nrmT")
601		   (match_operand:VAXint 2 "general_operand" "nrmT,0,nrmT")))]
602  ""
603  "@
604   xor<VAXint:isfx>2 %2,%0
605   xor<VAXint:isfx>2 %1,%0
606   xor<VAXint:isfx>3 %2,%1,%0")
607
608
609(define_insn "neg<mode>2"
610  [(set (match_operand:VAXfp 0 "nonimmediate_operand" "=g")
611	(neg:VAXfp (match_operand:VAXfp 1 "general_operand" "gF")))]
612  ""
613  "mneg<VAXfp:fsfx> %1,%0")
614
615(define_insn "neg<mode>2"
616  [(set (match_operand:VAXint 0 "nonimmediate_operand" "=g")
617	(neg:VAXint (match_operand:VAXint 1 "general_operand" "nrmT")))]
618  ""
619  "mneg<VAXint:isfx> %1,%0")
620
621(define_insn "one_cmpl<mode>2"
622  [(set (match_operand:VAXint 0 "nonimmediate_operand" "=g")
623	(not:VAXint (match_operand:VAXint 1 "general_operand" "nrmT")))]
624  ""
625  "mcom<VAXint:isfx> %1,%0")
626
627
;; Arithmetic right shift on the VAX works by negating the shift count
;; and then emitting the shift with the negated count; ashl shifts right
;; when the count is negative.  This means that all actual shift counts
;; in the RTL will be positive.  This prevents converting shifts to
;; ZERO_EXTRACTs with negative positions, which isn't valid.
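;;
;; Illustrative sketch: for a constant count the insn below prints the
;; count negated, e.g.
;;
;;	y = x >> 3;	/* signed x  ->  ashl $-3,x,y */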
633(define_expand "ashrsi3"
634  [(set (match_operand:SI 0 "general_operand" "=g")
635	(ashiftrt:SI (match_operand:SI 1 "general_operand" "g")
636		   (match_operand:QI 2 "general_operand" "g")))]
637  ""
638  "
639{
640  if (! CONST_INT_P (operands[2]))
641    operands[2] = gen_rtx_NEG (QImode, negate_rtx (QImode, operands[2]));
642}")
643
644(define_insn ""
645  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
646	(ashiftrt:SI (match_operand:SI 1 "general_operand" "nrmT")
647		     (match_operand:QI 2 "const_int_operand" "n")))]
648  ""
649  "ashl $%n2,%1,%0")
650
651(define_insn ""
652  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
653	(ashiftrt:SI (match_operand:SI 1 "general_operand" "nrmT")
654		     (neg:QI (match_operand:QI 2 "general_operand" "g"))))]
655  ""
656  "ashl %2,%1,%0")
657
658(define_insn "ashlsi3"
659  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
660	(ashift:SI (match_operand:SI 1 "general_operand" "nrmT")
661		   (match_operand:QI 2 "general_operand" "g")))]
662  ""
663  "*
664{
665  if (operands[2] == const1_rtx && rtx_equal_p (operands[0], operands[1]))
666    return \"addl2 %0,%0\";
667  if (REG_P (operands[1]) && CONST_INT_P (operands[2]))
668    {
669      int i = INTVAL (operands[2]);
670      if (i == 1)
671	return \"addl3 %1,%1,%0\";
672      if (i == 2 && !optimize_size)
673	{
674	  if (push_operand (operands[0], SImode))
675	    return \"pushal 0[%1]\";
676	  return \"moval 0[%1],%0\";
677	}
678      if (i == 3 && !optimize_size)
679	{
680	  if (push_operand (operands[0], SImode))
681	    return \"pushaq 0[%1]\";
682	  return \"movaq 0[%1],%0\";
683	}
684    }
685  return \"ashl %2,%1,%0\";
686}")
687
688;; Arithmetic right shift on the VAX works by negating the shift count.
689(define_expand "ashrdi3"
690  [(set (match_operand:DI 0 "general_operand" "=g")
691	(ashiftrt:DI (match_operand:DI 1 "general_operand" "g")
692		     (match_operand:QI 2 "general_operand" "g")))]
693  ""
694  "
695{
696  operands[2] = gen_rtx_NEG (QImode, negate_rtx (QImode, operands[2]));
697}")
698
699(define_insn "ashldi3"
700  [(set (match_operand:DI 0 "nonimmediate_operand" "=g")
701	(ashift:DI (match_operand:DI 1 "general_operand" "g")
702		   (match_operand:QI 2 "general_operand" "g")))]
703  ""
704  "ashq %2,%D1,%0")
705
706(define_insn ""
707  [(set (match_operand:DI 0 "nonimmediate_operand" "=g")
708	(ashiftrt:DI (match_operand:DI 1 "general_operand" "g")
709		     (neg:QI (match_operand:QI 2 "general_operand" "g"))))]
710  ""
711  "ashq %2,%D1,%0")
712
;; We used to have expand_shift handle logical right shifts by using extzv,
;; but this makes it very difficult to do lshrdi3.  Since the VAX is the
;; only machine with this kludge, it's better to just do this with a
;; define_expand and remove that case from expand_shift.
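;;
;; Illustrative sketch of the expansion below (temporary name rt assumed):
;;
;;	y = x >> c;	/* unsigned x, variable c */
;;	  ==>	subb3 c,$32,rt		(rt = 32 - c)
;;		extzv c,rt,x,y		(extract rt bits starting at bit c)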
717
718(define_expand "lshrsi3"
719  [(set (match_dup 3)
720	(minus:QI (const_int 32)
721		  (match_dup 4)))
722   (set (match_operand:SI 0 "nonimmediate_operand" "=g")
723	(zero_extract:SI (match_operand:SI 1 "register_operand" "r")
724			 (match_dup 3)
725			 (match_operand:SI 2 "register_operand" "g")))]
726  ""
727  "
728{
729  operands[3] = gen_reg_rtx (QImode);
730  operands[4] = gen_lowpart (QImode, operands[2]);
731}")
732
733;; Rotate right on the VAX works by negating the shift count.
734(define_expand "rotrsi3"
735  [(set (match_operand:SI 0 "general_operand" "=g")
736	(rotatert:SI (match_operand:SI 1 "general_operand" "g")
737		     (match_operand:QI 2 "general_operand" "g")))]
738  ""
739  "
740{
741  if (! CONST_INT_P (operands[2]))
742    operands[2] = gen_rtx_NEG (QImode, negate_rtx (QImode, operands[2]));
743}")
744
745(define_insn "rotlsi3"
746  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
747	(rotate:SI (match_operand:SI 1 "general_operand" "nrmT")
748		   (match_operand:QI 2 "general_operand" "g")))]
749  ""
750  "rotl %2,%1,%0")
751
752(define_insn ""
753  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
754	(rotatert:SI (match_operand:SI 1 "general_operand" "nrmT")
755		     (match_operand:QI 2 "const_int_operand" "n")))]
756  ""
757  "rotl %R2,%1,%0")
758
759(define_insn ""
760  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
761	(rotatert:SI (match_operand:SI 1 "general_operand" "nrmT")
762		     (neg:QI (match_operand:QI 2 "general_operand" "g"))))]
763  ""
764  "rotl %2,%1,%0")
765
766;This insn is probably slower than a multiply and an add.
767;(define_insn ""
768;  [(set (match_operand:SI 0 "general_operand" "=g")
769;	(mult:SI (plus:SI (match_operand:SI 1 "general_operand" "g")
770;			  (match_operand:SI 2 "general_operand" "g"))
771;		 (match_operand:SI 3 "general_operand" "g")))]
772;  ""
773;  "index %1,$0x80000000,$0x7fffffff,%3,%2,%0")
774
775;; Special cases of bit-field insns which we should
776;; recognize in preference to the general case.
777;; These handle aligned 8-bit and 16-bit fields,
778;; which can usually be done with move instructions.
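;;
;; Illustrative sketch (assumed C source, assuming the usual VAX bit-field
;; layout):
;;
;;	struct { unsigned a : 8; unsigned b : 8; } *p;
;;	... = p->b;	/* aligned 8-bit field at bit 8 -> movzbl 1(rp),dst */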
779
780(define_insn ""
781  [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+ro")
782			 (match_operand:QI 1 "const_int_operand" "n")
783			 (match_operand:SI 2 "const_int_operand" "n"))
784	(match_operand:SI 3 "general_operand" "g"))]
785   "(INTVAL (operands[1]) == 8 || INTVAL (operands[1]) == 16)
786   && INTVAL (operands[2]) % INTVAL (operands[1]) == 0
787   && (REG_P (operands[0])
788       || ! mode_dependent_address_p (XEXP (operands[0], 0)))"
789  "*
790{
791  if (REG_P (operands[0]))
792    {
793      if (INTVAL (operands[2]) != 0)
794	return \"insv %3,%2,%1,%0\";
795    }
796  else
797    operands[0]
798      = adjust_address (operands[0],
799			INTVAL (operands[1]) == 8 ? QImode : HImode,
800			INTVAL (operands[2]) / 8);
801
802  CC_STATUS_INIT;
803  if (INTVAL (operands[1]) == 8)
804    return \"movb %3,%0\";
805  return \"movw %3,%0\";
806}")
807
808(define_insn ""
809  [(set (match_operand:SI 0 "nonimmediate_operand" "=&g")
810	(zero_extract:SI (match_operand:SI 1 "register_operand" "ro")
811			 (match_operand:QI 2 "const_int_operand" "n")
812			 (match_operand:SI 3 "const_int_operand" "n")))]
813  "(INTVAL (operands[2]) == 8 || INTVAL (operands[2]) == 16)
814   && INTVAL (operands[3]) % INTVAL (operands[2]) == 0
815   && (REG_P (operands[1])
816       || ! mode_dependent_address_p (XEXP (operands[1], 0)))"
817  "*
818{
819  if (REG_P (operands[1]))
820    {
821      if (INTVAL (operands[3]) != 0)
822	return \"extzv %3,%2,%1,%0\";
823    }
824  else
825    operands[1]
826      = adjust_address (operands[1],
827			INTVAL (operands[2]) == 8 ? QImode : HImode,
828			INTVAL (operands[3]) / 8);
829
830  if (INTVAL (operands[2]) == 8)
831    return \"movzbl %1,%0\";
832  return \"movzwl %1,%0\";
833}")
834
835(define_insn ""
836  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
837	(sign_extract:SI (match_operand:SI 1 "register_operand" "ro")
838			 (match_operand:QI 2 "const_int_operand" "n")
839			 (match_operand:SI 3 "const_int_operand" "n")))]
840  "(INTVAL (operands[2]) == 8 || INTVAL (operands[2]) == 16)
841   && INTVAL (operands[3]) % INTVAL (operands[2]) == 0
842   && (REG_P (operands[1])
843       || ! mode_dependent_address_p (XEXP (operands[1], 0)))"
844  "*
845{
846  if (REG_P (operands[1]))
847    {
848      if (INTVAL (operands[3]) != 0)
849	return \"extv %3,%2,%1,%0\";
850    }
851  else
852    operands[1]
853      = adjust_address (operands[1],
854			INTVAL (operands[2]) == 8 ? QImode : HImode,
855			INTVAL (operands[3]) / 8);
856
857  if (INTVAL (operands[2]) == 8)
858    return \"cvtbl %1,%0\";
859  return \"cvtwl %1,%0\";
860}")
861
862;; Register-only SImode cases of bit-field insns.
863
864(define_insn ""
865  [(set (cc0)
866	(compare
867	 (sign_extract:SI (match_operand:SI 0 "register_operand" "r")
868			  (match_operand:QI 1 "general_operand" "g")
869			  (match_operand:SI 2 "general_operand" "nrmT"))
870	 (match_operand:SI 3 "general_operand" "nrmT")))]
871  ""
872  "cmpv %2,%1,%0,%3")
873
874(define_insn ""
875  [(set (cc0)
876	(compare
877	 (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
878			  (match_operand:QI 1 "general_operand" "g")
879			  (match_operand:SI 2 "general_operand" "nrmT"))
880	 (match_operand:SI 3 "general_operand" "nrmT")))]
881  ""
882  "cmpzv %2,%1,%0,%3")
883
884;; When the field position and size are constant and the destination
885;; is a register, extv and extzv are much slower than a rotate followed
886;; by a bicl or sign extension.  Because we might end up choosing ext[z]v
887;; anyway, we can't allow immediate values for the primary source operand.
888
889(define_insn ""
890  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
891	(sign_extract:SI (match_operand:SI 1 "register_operand" "ro")
892			 (match_operand:QI 2 "general_operand" "g")
893			 (match_operand:SI 3 "general_operand" "nrmT")))]
894  ""
895  "*
896{
897  if (! CONST_INT_P (operands[3]) || ! CONST_INT_P (operands[2])
898      || ! REG_P (operands[0])
899      || (INTVAL (operands[2]) != 8 && INTVAL (operands[2]) != 16))
900    return \"extv %3,%2,%1,%0\";
901  if (INTVAL (operands[2]) == 8)
902    return \"rotl %R3,%1,%0\;cvtbl %0,%0\";
903  return \"rotl %R3,%1,%0\;cvtwl %0,%0\";
904}")
905
906(define_insn ""
907  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
908	(zero_extract:SI (match_operand:SI 1 "register_operand" "ro")
909			 (match_operand:QI 2 "general_operand" "g")
910			 (match_operand:SI 3 "general_operand" "nrmT")))]
911  ""
912  "*
913{
914  if (! CONST_INT_P (operands[3]) || ! CONST_INT_P (operands[2])
915      || ! REG_P (operands[0]))
916    return \"extzv %3,%2,%1,%0\";
917  if (INTVAL (operands[2]) == 8)
918    return \"rotl %R3,%1,%0\;movzbl %0,%0\";
919  if (INTVAL (operands[2]) == 16)
920    return \"rotl %R3,%1,%0\;movzwl %0,%0\";
921  if (INTVAL (operands[3]) & 31)
922    return \"rotl %R3,%1,%0\;bicl2 %M2,%0\";
923  if (rtx_equal_p (operands[0], operands[1]))
924    return \"bicl2 %M2,%0\";
925  return \"bicl3 %M2,%1,%0\";
926}")
927
928;; Non-register cases.
929;; nonimmediate_operand is used to make sure that mode-ambiguous cases
930;; don't match these (and therefore match the cases above instead).
931
932(define_insn ""
933  [(set (cc0)
934	(compare
935	 (sign_extract:SI (match_operand:QI 0 "memory_operand" "m")
936			  (match_operand:QI 1 "general_operand" "g")
937			  (match_operand:SI 2 "general_operand" "nrmT"))
938	 (match_operand:SI 3 "general_operand" "nrmT")))]
939  ""
940  "cmpv %2,%1,%0,%3")
941
942(define_insn ""
943  [(set (cc0)
944	(compare
945	 (zero_extract:SI (match_operand:QI 0 "nonimmediate_operand" "rm")
946			  (match_operand:QI 1 "general_operand" "g")
947			  (match_operand:SI 2 "general_operand" "nrmT"))
948	 (match_operand:SI 3 "general_operand" "nrmT")))]
949  ""
950  "cmpzv %2,%1,%0,%3")
951
952(define_insn "extv"
953  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
954	(sign_extract:SI (match_operand:QI 1 "memory_operand" "m")
955			 (match_operand:QI 2 "general_operand" "g")
956			 (match_operand:SI 3 "general_operand" "nrmT")))]
957  ""
958  "*
959{
960  if (! REG_P (operands[0]) || ! CONST_INT_P (operands[2])
961      || ! CONST_INT_P (operands[3])
962      || (INTVAL (operands[2]) != 8 && INTVAL (operands[2]) != 16)
963      || INTVAL (operands[2]) + INTVAL (operands[3]) > 32
964      || side_effects_p (operands[1])
965      || (MEM_P (operands[1])
966	  && mode_dependent_address_p (XEXP (operands[1], 0))))
967    return \"extv %3,%2,%1,%0\";
968  if (INTVAL (operands[2]) == 8)
969    return \"rotl %R3,%1,%0\;cvtbl %0,%0\";
970  return \"rotl %R3,%1,%0\;cvtwl %0,%0\";
971}")
972
973(define_expand "extzv"
974  [(set (match_operand:SI 0 "general_operand" "")
975	(zero_extract:SI (match_operand:SI 1 "general_operand" "")
976			 (match_operand:QI 2 "general_operand" "")
977			 (match_operand:SI 3 "general_operand" "")))]
978  ""
979  "")
980
981(define_insn ""
982  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
983	(zero_extract:SI (match_operand:QI 1 "memory_operand" "m")
984			 (match_operand:QI 2 "general_operand" "g")
985			 (match_operand:SI 3 "general_operand" "nrmT")))]
986  ""
987  "*
988{
989  if (! REG_P (operands[0]) || ! CONST_INT_P (operands[2])
990      || ! CONST_INT_P (operands[3])
991      || INTVAL (operands[2]) + INTVAL (operands[3]) > 32
992      || side_effects_p (operands[1])
993      || (MEM_P (operands[1])
994	  && mode_dependent_address_p (XEXP (operands[1], 0))))
995    return \"extzv %3,%2,%1,%0\";
996  if (INTVAL (operands[2]) == 8)
997    return \"rotl %R3,%1,%0\;movzbl %0,%0\";
998  if (INTVAL (operands[2]) == 16)
999    return \"rotl %R3,%1,%0\;movzwl %0,%0\";
1000  if (MEM_P (operands[1])
1001      && GET_CODE (XEXP (operands[1], 0)) == PLUS
1002      && REG_P (XEXP (XEXP (operands[1], 0), 0))
1003      && CONST_INT_P (XEXP (XEXP (operands[1], 0), 1))
1004      && CONST_INT_P (operands[2])
1005      && CONST_INT_P (operands[3]))
1006    {
1007      HOST_WIDE_INT o = INTVAL (XEXP (XEXP (operands[1], 0), 1));
1008      HOST_WIDE_INT l = INTVAL (operands[2]);
1009      HOST_WIDE_INT v = INTVAL (operands[3]);
1010      if ((o & 3) && (o & 3) * 8 + v + l <= 32)
1011	{
1012	  rtx tmp;
1013	  tmp = XEXP (XEXP (operands[1], 0), 0);
1014	  if (o & ~3)
1015	    tmp = gen_rtx_PLUS (SImode, tmp, GEN_INT (o & ~3));
1016	  operands[1] = gen_rtx_MEM (QImode, tmp);
1017	  operands[3] = GEN_INT (v + (o & 3) * 8);
1018	}
1019      if (optimize_size)
1020	return \"extzv %3,%2,%1,%0\";
1021    }
1022  return \"rotl %R3,%1,%0\;bicl2 %M2,%0\";
1023}")
1024
1025(define_expand "insv"
1026  [(set (zero_extract:SI (match_operand:SI 0 "general_operand" "")
1027			 (match_operand:QI 1 "general_operand" "")
1028			 (match_operand:SI 2 "general_operand" ""))
1029	(match_operand:SI 3 "general_operand" ""))]
1030  ""
1031  "")
1032
1033(define_insn ""
1034  [(set (zero_extract:SI (match_operand:QI 0 "memory_operand" "+g")
1035			 (match_operand:QI 1 "general_operand" "g")
1036			 (match_operand:SI 2 "general_operand" "nrmT"))
1037	(match_operand:SI 3 "general_operand" "nrmT"))]
1038  ""
1039  "*
1040{
1041  if (MEM_P (operands[0])
1042      && GET_CODE (XEXP (operands[0], 0)) == PLUS
1043      && REG_P (XEXP (XEXP (operands[0], 0), 0))
1044      && CONST_INT_P (XEXP (XEXP (operands[0], 0), 1))
1045      && CONST_INT_P (operands[1])
1046      && CONST_INT_P (operands[2]))
1047    {
1048      HOST_WIDE_INT o = INTVAL (XEXP (XEXP (operands[0], 0), 1));
1049      HOST_WIDE_INT v = INTVAL (operands[2]);
1050      HOST_WIDE_INT l = INTVAL (operands[1]);
1051      if ((o & 3) && (o & 3) * 8 + v + l <= 32)
1052	{
1053	  rtx tmp;
1054	  tmp = XEXP (XEXP (operands[0], 0), 0);
1055	  if (o & ~3)
1056	    tmp = gen_rtx_PLUS (SImode, tmp, GEN_INT (o & ~3));
1057	  operands[0] = gen_rtx_MEM (QImode, tmp);
1058	  operands[2] = GEN_INT (v + (o & 3) * 8);
1059	}
1060    }
1061  return \"insv %3,%2,%1,%0\";
1062}")
1063
1064(define_insn ""
1065  [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r")
1066			 (match_operand:QI 1 "general_operand" "g")
1067			 (match_operand:SI 2 "general_operand" "nrmT"))
1068	(match_operand:SI 3 "general_operand" "nrmT"))]
1069  ""
1070  "insv %3,%2,%1,%0")
1071
1072;; Unconditional jump
1073(define_insn "jump"
1074  [(set (pc)
1075	(label_ref (match_operand 0 "" "")))]
1076  ""
1077  "jbr %l0")
1078
1079;; Conditional jumps
1080
1081(define_expand "cbranch<mode>4"
1082  [(set (cc0)
1083        (compare (match_operand:VAXint 1 "nonimmediate_operand" "")
1084                 (match_operand:VAXint 2 "general_operand" "")))
1085   (set (pc)
1086        (if_then_else
1087              (match_operator 0 "ordered_comparison_operator" [(cc0)
1088                                                               (const_int 0)])
1089              (label_ref (match_operand 3 "" ""))
1090              (pc)))]
1091 "")
1092
1093(define_expand "cbranch<mode>4"
1094  [(set (cc0)
1095        (compare (match_operand:VAXfp 1 "general_operand" "")
1096                 (match_operand:VAXfp 2 "general_operand" "")))
1097   (set (pc)
1098        (if_then_else
1099              (match_operator 0 "ordered_comparison_operator" [(cc0)
1100                                                               (const_int 0)])
1101              (label_ref (match_operand 3 "" ""))
1102              (pc)))]
1103 "")
1104
1105(define_insn "*branch"
1106  [(set (pc)
1107	(if_then_else (match_operator 0 "ordered_comparison_operator"
1108				      [(cc0)
1109				       (const_int 0)])
1110		      (label_ref (match_operand 1 "" ""))
1111		      (pc)))]
1112  ""
1113  "j%c0 %l1")
1114
1115;; Recognize reversed jumps.
1116(define_insn "*branch_reversed"
1117  [(set (pc)
1118	(if_then_else (match_operator 0 "ordered_comparison_operator"
1119				      [(cc0)
1120				       (const_int 0)])
1121		      (pc)
1122		      (label_ref (match_operand 1 "" ""))))]
1123  ""
1124  "j%C0 %l1") ; %C0 negates condition
1125
;; Recognize jbs, jlbs, jbc and jlbc instructions.  Note that the operand
;; of the jlbs and jlbc insns is SImode in the hardware.  However, if it is
;; memory, we use QImode in the insn.  So we can't use those instructions
;; for mode-dependent addresses.
1130
1131(define_insn ""
1132  [(set (pc)
1133	(if_then_else
1134	 (ne (zero_extract:SI (match_operand:QI 0 "memory_operand" "Q,g")
1135			      (const_int 1)
1136			      (match_operand:SI 1 "general_operand" "I,nrmT"))
1137	     (const_int 0))
1138	 (label_ref (match_operand 2 "" ""))
1139	 (pc)))]
1140  ""
1141  "@
1142   jlbs %0,%l2
1143   jbs %1,%0,%l2")
1144
1145(define_insn ""
1146  [(set (pc)
1147	(if_then_else
1148	 (eq (zero_extract:SI (match_operand:QI 0 "memory_operand" "Q,g")
1149			      (const_int 1)
1150			      (match_operand:SI 1 "general_operand" "I,nrmT"))
1151	     (const_int 0))
1152	 (label_ref (match_operand 2 "" ""))
1153	 (pc)))]
1154  ""
1155  "@
1156   jlbc %0,%l2
1157   jbc %1,%0,%l2")
1158
1159(define_insn ""
1160  [(set (pc)
1161	(if_then_else
1162	 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r,r")
1163			      (const_int 1)
1164			      (match_operand:SI 1 "general_operand" "I,nrmT"))
1165	     (const_int 0))
1166	 (label_ref (match_operand 2 "" ""))
1167	 (pc)))]
1168  ""
1169  "@
1170   jlbs %0,%l2
1171   jbs %1,%0,%l2")
1172
1173(define_insn ""
1174  [(set (pc)
1175	(if_then_else
1176	 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r,r")
1177			      (const_int 1)
1178			      (match_operand:SI 1 "general_operand" "I,nrmT"))
1179	     (const_int 0))
1180	 (label_ref (match_operand 2 "" ""))
1181	 (pc)))]
1182  ""
1183  "@
1184   jlbc %0,%l2
1185   jbc %1,%0,%l2")
1186
1187;; Subtract-and-jump and Add-and-jump insns.
1188;; These are not used when output is for the Unix assembler
1189;; because it does not know how to modify them to reach far.
1190
1191;; Normal sob insns.
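;;
;; Illustrative sketch (assumed C source):
;;
;;	do { ... } while (--n > 0);	/* -> jsobgtr n,top */
;;	do { ... } while (--n >= 0);	/* -> jsobgeq n,top */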
1192
1193(define_insn ""
1194  [(set (pc)
1195	(if_then_else
1196	 (gt (plus:SI (match_operand:SI 0 "nonimmediate_operand" "+g")
1197		      (const_int -1))
1198	     (const_int 0))
1199	 (label_ref (match_operand 1 "" ""))
1200	 (pc)))
1201   (set (match_dup 0)
1202	(plus:SI (match_dup 0)
1203		 (const_int -1)))]
1204  "!TARGET_UNIX_ASM"
1205  "jsobgtr %0,%l1")
1206
1207(define_insn ""
1208  [(set (pc)
1209	(if_then_else
1210	 (ge (plus:SI (match_operand:SI 0 "nonimmediate_operand" "+g")
1211		      (const_int -1))
1212	     (const_int 0))
1213	 (label_ref (match_operand 1 "" ""))
1214	 (pc)))
1215   (set (match_dup 0)
1216	(plus:SI (match_dup 0)
1217		 (const_int -1)))]
1218  "!TARGET_UNIX_ASM"
1219  "jsobgeq %0,%l1")
1220
1221;; Normal aob insns.  Define a version for when operands[1] is a constant.
1222(define_insn ""
1223  [(set (pc)
1224	(if_then_else
1225	 (lt (plus:SI (match_operand:SI 0 "nonimmediate_operand" "+g")
1226		      (const_int 1))
1227	     (match_operand:SI 1 "general_operand" "nrmT"))
1228	 (label_ref (match_operand 2 "" ""))
1229	 (pc)))
1230   (set (match_dup 0)
1231	(plus:SI (match_dup 0)
1232		 (const_int 1)))]
1233  "!TARGET_UNIX_ASM"
1234  "jaoblss %1,%0,%l2")
1235
1236(define_insn ""
1237  [(set (pc)
1238	(if_then_else
1239	 (lt (match_operand:SI 0 "nonimmediate_operand" "+g")
1240	     (match_operand:SI 1 "general_operand" "nrmT"))
1241	 (label_ref (match_operand 2 "" ""))
1242	 (pc)))
1243   (set (match_dup 0)
1244	(plus:SI (match_dup 0)
1245		 (const_int 1)))]
1246  "!TARGET_UNIX_ASM && CONST_INT_P (operands[1])"
1247  "jaoblss %P1,%0,%l2")
1248
1249(define_insn ""
1250  [(set (pc)
1251	(if_then_else
1252	 (le (plus:SI (match_operand:SI 0 "nonimmediate_operand" "+g")
1253		      (const_int 1))
1254	     (match_operand:SI 1 "general_operand" "nrmT"))
1255	 (label_ref (match_operand 2 "" ""))
1256	 (pc)))
1257   (set (match_dup 0)
1258	(plus:SI (match_dup 0)
1259		 (const_int 1)))]
1260  "!TARGET_UNIX_ASM"
1261  "jaobleq %1,%0,%l2")
1262
1263(define_insn ""
1264  [(set (pc)
1265	(if_then_else
1266	 (le (match_operand:SI 0 "nonimmediate_operand" "+g")
1267	     (match_operand:SI 1 "general_operand" "nrmT"))
1268	 (label_ref (match_operand 2 "" ""))
1269	 (pc)))
1270   (set (match_dup 0)
1271	(plus:SI (match_dup 0)
1272		 (const_int 1)))]
1273  "!TARGET_UNIX_ASM && CONST_INT_P (operands[1])"
1274  "jaobleq %P1,%0,%l2")
1275
1276;; Something like a sob insn, but compares against -1.
1277;; This finds `while (foo--)' which was changed to `while (--foo != -1)'.
1278
1279(define_insn ""
1280  [(set (pc)
1281	(if_then_else
1282	 (ne (match_operand:SI 0 "nonimmediate_operand" "+g")
1283	     (const_int 0))
1284	 (label_ref (match_operand 1 "" ""))
1285	 (pc)))
1286   (set (match_dup 0)
1287	(plus:SI (match_dup 0)
1288		 (const_int -1)))]
1289  ""
1290  "decl %0\;jgequ %l1")
1291
1292(define_expand "call_pop"
1293  [(parallel [(call (match_operand:QI 0 "memory_operand" "")
1294		    (match_operand:SI 1 "const_int_operand" ""))
1295	      (set (reg:SI VAX_SP_REGNUM)
1296		   (plus:SI (reg:SI VAX_SP_REGNUM)
1297			    (match_operand:SI 3 "immediate_operand" "")))])]
1298  ""
1299{
1300  gcc_assert (INTVAL (operands[3]) <= 255 * 4 && INTVAL (operands[3]) % 4 == 0);
1301
1302  /* Operand 1 is the number of bytes to be popped by DW_CFA_GNU_args_size
1303     during EH unwinding.  We must include the argument count pushed by
1304     the calls instruction.  */
1305  operands[1] = GEN_INT (INTVAL (operands[3]) + 4);
1306})
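
;; Worked example (illustrative): for a call that pushed two longword
;; arguments, operands[3] is 8, so operands[1] becomes 12 (8 bytes of
;; arguments plus the 4-byte count that calls itself pushes and ret pops),
;; and the *call_pop pattern below emits "calls $2,f" for an assumed
;; function f.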
1307
1308(define_insn "*call_pop"
1309  [(call (match_operand:QI 0 "memory_operand" "m")
1310	 (match_operand:SI 1 "const_int_operand" "n"))
1311   (set (reg:SI VAX_SP_REGNUM) (plus:SI (reg:SI VAX_SP_REGNUM)
1312					(match_operand:SI 2 "immediate_operand" "i")))]
1313  ""
1314{
1315  operands[1] = GEN_INT ((INTVAL (operands[1]) - 4) / 4);
1316  return "calls %1,%0";
1317})
1318
1319(define_expand "call_value_pop"
1320  [(parallel [(set (match_operand 0 "" "")
1321		   (call (match_operand:QI 1 "memory_operand" "")
1322			 (match_operand:SI 2 "const_int_operand" "")))
1323	      (set (reg:SI VAX_SP_REGNUM)
1324		   (plus:SI (reg:SI VAX_SP_REGNUM)
1325			    (match_operand:SI 4 "immediate_operand" "")))])]
1326  ""
1327{
1328  gcc_assert (INTVAL (operands[4]) <= 255 * 4 && INTVAL (operands[4]) % 4 == 0);
1329
1330  /* Operand 2 is the number of bytes to be popped by DW_CFA_GNU_args_size
1331     during EH unwinding.  We must include the argument count pushed by
1332     the calls instruction.  */
1333  operands[2] = GEN_INT (INTVAL (operands[4]) + 4);
1334})
1335
1336(define_insn "*call_value_pop"
1337  [(set (match_operand 0 "" "")
1338	(call (match_operand:QI 1 "memory_operand" "m")
1339	      (match_operand:SI 2 "const_int_operand" "n")))
1340   (set (reg:SI VAX_SP_REGNUM) (plus:SI (reg:SI VAX_SP_REGNUM)
1341					(match_operand:SI 3 "immediate_operand" "i")))]
1342  ""
1343  "*
1344{
1345  operands[2] = GEN_INT ((INTVAL (operands[2]) - 4) / 4);
1346  return \"calls %2,%1\";
1347}")
1348
1349(define_expand "call"
1350  [(call (match_operand:QI 0 "memory_operand" "")
1351      (match_operand:SI 1 "const_int_operand" ""))]
1352  ""
1353  "
1354{
1355  /* Operand 1 is the number of bytes to be popped by DW_CFA_GNU_args_size
1356     during EH unwinding.  We must include the argument count pushed by
1357     the calls instruction.  */
1358  operands[1] = GEN_INT (INTVAL (operands[1]) + 4);
1359}")
1360
1361(define_insn "*call"
1362   [(call (match_operand:QI 0 "memory_operand" "m")
1363       (match_operand:SI 1 "const_int_operand" ""))]
1364  ""
1365  "calls $0,%0")
1366
1367(define_expand "call_value"
1368  [(set (match_operand 0 "" "")
1369      (call (match_operand:QI 1 "memory_operand" "")
1370	    (match_operand:SI 2 "const_int_operand" "")))]
1371  ""
1372  "
1373{
1374  /* Operand 2 is the number of bytes to be popped by DW_CFA_GNU_args_size
1375     during EH unwinding.  We must include the argument count pushed by
1376     the calls instruction.  */
1377  operands[2] = GEN_INT (INTVAL (operands[2]) + 4);
1378}")
1379
1380(define_insn "*call_value"
1381  [(set (match_operand 0 "" "")
1382	(call (match_operand:QI 1 "memory_operand" "m")
1383	      (match_operand:SI 2 "const_int_operand" "")))]
1384  ""
1385  "calls $0,%1")
1386
1387;; Call subroutine returning any type.
1388
1389(define_expand "untyped_call"
1390  [(parallel [(call (match_operand 0 "" "")
1391	      (const_int 0))
1392	      (match_operand 1 "" "")
1393	      (match_operand 2 "" "")])]
1394  ""
1395  "
1396{
1397  int i;
1398
1399  emit_call_insn (gen_call_pop (operands[0], const0_rtx, NULL, const0_rtx));
1400
1401  for (i = 0; i < XVECLEN (operands[2], 0); i++)
1402    {
1403      rtx set = XVECEXP (operands[2], 0, i);
1404      emit_move_insn (SET_DEST (set), SET_SRC (set));
1405    }
1406
1407  /* The optimizer does not know that the call sets the function value
1408     registers we stored in the result block.  We avoid problems by
1409     claiming that all hard registers are used and clobbered at this
1410     point.  */
1411  emit_insn (gen_blockage ());
1412
1413  DONE;
1414}")
1415
1416;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
1417;; all of memory.  This blocks insns from being moved across this point.
1418
1419(define_insn "blockage"
1420  [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
1421  ""
1422  "")
1423
1424(define_insn "return"
1425  [(return)]
1426  ""
1427  "ret")
1428
1429(define_expand "epilogue"
1430  [(return)]
1431  ""
1432  "
1433{
1434  emit_jump_insn (gen_return ());
1435  DONE;
1436}")
1437
1438(define_insn "nop"
1439  [(const_int 0)]
1440  ""
1441  "nop")
1442
1443;; This had a wider constraint once, and it had trouble.
1444;; If you are tempted to try `g', please don't--it's not worth
1445;; the risk we will reopen the same bug.
1446(define_insn "indirect_jump"
1447  [(set (pc) (match_operand:SI 0 "register_operand" "r"))]
1448  ""
1449  "jmp (%0)")
1450
1451;; This is here to accept 5 arguments (as passed by expand_end_case)
1452;; and pass the first 4 along to the casesi1 pattern that really does
1453;; the actual casesi work.  We emit a jump here to the default label
1454;; _before_ the casesi so that we can be sure that the casesi never
1455;; drops through.
1456;; This is suboptimal perhaps, but so is much of the rest of this
1457;; machine description.  For what it's worth, HPPA uses the same trick.
1458;;
1459;; operand 0 is index
1460;; operand 1 is the minimum bound (a const_int)
1461;; operand 2 is the maximum bound - minimum bound + 1 (also a const_int)
1462;; operand 3 is CODE_LABEL for the table;
1463;; operand 4 is the CODE_LABEL to go to if index out of range (ie. default).
1464;;
1465;; We emit:
1466;;	i = index - minimum_bound
1467;;	if (i > (maximum_bound - minimum_bound + 1) goto default;
1468;;	casesi (i, 0, table);
1469;;
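;; Illustrative sketch (assumed C source):
;;
;;	switch (i) { case 1: ... case 4: ... }
;;
;; becomes a compare-and-branch to the default label followed by
;; "casel %0,$0,%1" and a table of 16-bit displacements, as modelled by the
;; casesi1 pattern below (mem:HI indexed by 2 * index relative to pc).
;;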
1470(define_expand "casesi"
1471  [(match_operand:SI 0 "general_operand" "")
1472   (match_operand:SI 1 "general_operand" "")
1473   (match_operand:SI 2 "general_operand" "")
1474   (match_operand 3 "" "")
1475   (match_operand 4 "" "")]
1476  ""
1477{
1478  rtx test;
1479
1480  /* i = index - minimum_bound;
1481     But only if the lower bound is not already zero.  */
1482  if (operands[1] != const0_rtx)
1483    {
1484      rtx index = gen_reg_rtx (SImode);
1485      emit_insn (gen_addsi3 (index,
1486			     operands[0],
1487			     GEN_INT (-INTVAL (operands[1]))));
1488      operands[0] = index;
1489    }
1490
1491  /* if (i > (maximum_bound - minimum_bound + 1)) goto default;  */
1492  test = gen_rtx_fmt_ee (GTU, VOIDmode, operands[0], operands[2]);
1493  emit_jump_insn (gen_cbranchsi4 (test, operands[0], operands[2], operands[4]));
1494
1495  /* casesi (i, 0, table);  */
1496  emit_jump_insn (gen_casesi1 (operands[0], operands[2], operands[3]));
1497  DONE;
1498})
1499
;; This insn is a bit of a liar.  It actually falls through if no case
;; matches, but we prevent that from ever happening by emitting a jump
;; before this; see the define_expand above.
1503(define_insn "casesi1"
1504  [(match_operand:SI 1 "const_int_operand" "n")
1505   (set (pc)
1506	(plus:SI (sign_extend:SI
1507		  (mem:HI (plus:SI (mult:SI (match_operand:SI 0 "general_operand" "nrmT")
1508					    (const_int 2))
1509			  (pc))))
1510		 (label_ref:SI (match_operand 2 "" ""))))]
1511  ""
1512  "casel %0,$0,%1")
1513
1514(define_insn "pushextsym"
1515  [(set (match_operand:SI 0 "push_operand" "=g")
1516	(match_operand:SI 1 "external_symbolic_operand" "i"))]
1517  ""
1518  "pushab %a1")
1519
1520(define_insn "movextsym"
1521  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
1522	(match_operand:SI 1 "external_symbolic_operand" "i"))]
1523  ""
1524  "movab %a1,%0")
1525
1526(define_insn "pushlclsym"
1527  [(set (match_operand:SI 0 "push_operand" "=g")
1528	(match_operand:SI 1 "local_symbolic_operand" "i"))]
1529  ""
1530  "pushab %a1")
1531
1532(define_insn "movlclsym"
1533  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
1534	(match_operand:SI 1 "local_symbolic_operand" "i"))]
1535  ""
1536  "movab %a1,%0")
1537
1538;;- load or push effective address
1539;; These come after the move and add/sub patterns
;; because we don't want pushl $1 turned into pushal 1,
;; or addl3 r1,r2,r3 turned into movab 0(r1)[r2],r3.
1542
1543;; It does not work to use constraints to distinguish pushes from moves,
1544;; because < matches any autodecrement, not just a push.
1545
1546(define_insn "pushaddr<mode>"
1547  [(set (match_operand:SI 0 "push_operand" "=g")
1548	(match_operand:VAXintQHSD 1 "address_operand" "p"))]
1549  ""
1550  "pusha<VAXintQHSD:isfx> %a1")
1551
1552(define_insn "movaddr<mode>"
1553  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
1554	(match_operand:VAXintQHSD 1 "address_operand" "p"))]
1555  ""
1556  "mova<VAXintQHSD:isfx> %a1,%0")
1557
1558(define_insn "pushaddr<mode>"
1559  [(set (match_operand:SI 0 "push_operand" "=g")
1560	(match_operand:VAXfp 1 "address_operand" "p"))]
1561  ""
1562  "pusha<VAXfp:fsfx> %a1")
1563
1564(define_insn "movaddr<mode>"
1565  [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
1566	(match_operand:VAXfp 1 "address_operand" "p"))]
1567  ""
1568  "mova<VAXfp:fsfx> %a1,%0")
1569
1570;; These used to be peepholes, but it is more straightforward to do them
1571;; as single insns.  However, we must force the output to be a register
1572;; if it is not an offsettable address so that we know that we can assign
1573;; to it twice.
1574
1575;; If we had a good way of evaluating the relative costs, these could be
1576;; machine-independent.
1577
1578;; Optimize   extzv ...,z;    andl2 ...,z
1579;; or	      ashl ...,z;     andl2 ...,z
1580;; with other operands constant.  This is what the combiner converts the
1581;; above sequences to before attempting to recognize the new insn.
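;;
;; Illustrative sketch with a constant shift and mask:
;;
;;	y = (x >> 24) & 0xff;	/* -> rotl $8,x,y ; bicl2 $0xffffff00,y */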
1582
1583(define_insn ""
1584  [(set (match_operand:SI 0 "nonimmediate_operand" "=ro")
1585	(and:SI (ashiftrt:SI (match_operand:SI 1 "general_operand" "nrmT")
1586			     (match_operand:QI 2 "const_int_operand" "n"))
1587		(match_operand:SI 3 "const_int_operand" "n")))]
1588  "(INTVAL (operands[3]) & ~((1 << (32 - INTVAL (operands[2]))) - 1)) == 0"
1589  "*
1590{
1591  unsigned long mask1 = INTVAL (operands[3]);
1592  unsigned long mask2 = (1 << (32 - INTVAL (operands[2]))) - 1;
1593
1594  if ((mask1 & mask2) != mask1)
1595    operands[3] = GEN_INT (mask1 & mask2);
1596
1597  return \"rotl %R2,%1,%0\;bicl2 %N3,%0\";
1598}")
1599
1600;; left-shift and mask
;; The only case where `ashl' is better is if the mask only turns off
;; bits that the ashl would have cleared anyway, in which case it should
;; have been optimized away.
1604
1605(define_insn ""
1606  [(set (match_operand:SI 0 "nonimmediate_operand" "=ro")
1607	(and:SI (ashift:SI (match_operand:SI 1 "general_operand" "nrmT")
1608			   (match_operand:QI 2 "const_int_operand" "n"))
1609		(match_operand:SI 3 "const_int_operand" "n")))]
1610  ""
1611  "*
1612{
1613  operands[3]
1614    = GEN_INT (INTVAL (operands[3]) & ~((1 << INTVAL (operands[2])) - 1));
1615  return \"rotl %2,%1,%0\;bicl2 %N3,%0\";
1616}")
1617
1618;; Instruction sequence to sync the VAX instruction stream.
1619(define_insn "sync_istream"
1620  [(unspec_volatile [(const_int 0)] VUNSPEC_SYNC_ISTREAM)]
1621  ""
1622  "movpsl -(%|sp)\;pushal 1(%|pc)\;rei")
1623
1624(define_expand "nonlocal_goto"
1625  [(use (match_operand 0 "general_operand" ""))
1626   (use (match_operand 1 "general_operand" ""))
1627   (use (match_operand 2 "general_operand" ""))
1628   (use (match_operand 3 "general_operand" ""))]
1629  ""
1630{
1631  rtx lab = operands[1];
1632  rtx stack = operands[2];
1633  rtx fp = operands[3];
1634
1635  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1636  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1637
1638  emit_move_insn (hard_frame_pointer_rtx, fp);
1639  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
1640
1641  emit_use (hard_frame_pointer_rtx);
1642  emit_use (stack_pointer_rtx);
1643
1644  /* We'll convert this to direct jump via a peephole optimization.  */
1645  emit_indirect_jump (copy_to_reg (lab));
1646  emit_barrier ();
1647  DONE;
1648})
1649
1650(include "builtins.md")
1651