xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/config/vax/vax.c (revision c38e7cc395b1472a774ff828e46123de44c628e9)
1 /* Subroutines for insn-output.c for VAX.
2    Copyright (C) 1987-2015 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "dominance.h"
26 #include "cfg.h"
27 #include "cfgrtl.h"
28 #include "cfganal.h"
29 #include "lcm.h"
30 #include "cfgbuild.h"
31 #include "cfgcleanup.h"
32 #include "predict.h"
33 #include "basic-block.h"
34 #include "df.h"
35 #include "hash-set.h"
36 #include "machmode.h"
37 #include "vec.h"
38 #include "double-int.h"
39 #include "input.h"
40 #include "alias.h"
41 #include "symtab.h"
42 #include "wide-int.h"
43 #include "inchash.h"
44 #include "tree.h"
45 #include "calls.h"
46 #include "varasm.h"
47 #include "regs.h"
48 #include "hard-reg-set.h"
49 #include "insn-config.h"
50 #include "conditions.h"
51 #include "function.h"
52 #include "output.h"
53 #include "insn-attr.h"
54 #include "recog.h"
55 #include "hashtab.h"
56 #include "flags.h"
57 #include "statistics.h"
58 #include "real.h"
59 #include "fixed-value.h"
60 #include "expmed.h"
61 #include "dojump.h"
62 #include "explow.h"
63 #include "emit-rtl.h"
64 #include "stmt.h"
65 #include "expr.h"
66 #include "insn-codes.h"
67 #include "optabs.h"
68 #include "debug.h"
69 #include "diagnostic-core.h"
70 #include "reload.h"
71 #include "tm-preds.h"
72 #include "tm-constrs.h"
73 #include "tm_p.h"
74 #include "target.h"
75 #include "target-def.h"
76 #include "builtins.h"
77 
/* Forward declarations for the static target hook implementations
   defined later in this file.  */
static void vax_option_override (void);
static bool vax_legitimate_address_p (machine_mode, rtx, bool);
static void vax_file_start (void);
static void vax_init_libfuncs (void);
static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
				 HOST_WIDE_INT, tree);
static int vax_address_cost_1 (rtx);
static int vax_address_cost (rtx, machine_mode, addr_space_t, bool);
static bool vax_rtx_costs (rtx, int, int, int, int *, bool);
static rtx vax_function_arg (cumulative_args_t, machine_mode,
			     const_tree, bool);
static void vax_function_arg_advance (cumulative_args_t, machine_mode,
				      const_tree, bool);
static rtx vax_struct_value_rtx (tree, int);
static rtx vax_builtin_setjmp_frame_value (void);
static void vax_asm_trampoline_template (FILE *);
static void vax_trampoline_init (rtx, tree, rtx);
static int vax_return_pops_args (tree, tree, int);
static bool vax_mode_dependent_address_p (const_rtx, addr_space_t);

/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START vax_file_start
#undef TARGET_ASM_FILE_START_APP_OFF
#define TARGET_ASM_FILE_START_APP_OFF true

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS vax_init_libfuncs

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS vax_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST vax_address_cost

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG vax_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p

/* The VAX frame layout produced by CALLS always needs the frame
   pointer.  */
#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT vax_trampoline_init
#undef TARGET_RETURN_POPS_ARGS
#define TARGET_RETURN_POPS_ARGS vax_return_pops_args

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE vax_option_override

/* The one global target vtable, built from the macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
153 
/* Set global variables as needed for the options enabled.
   Implements TARGET_OPTION_OVERRIDE.  */

static void
vax_option_override (void)
{
  /* We're VAX floating point, not IEEE floating point.  */
  if (TARGET_G_FLOAT)
    REAL_MODE_FORMAT (DFmode) = &vax_g_format;

  /* NOTE(review): presumably the CFI assembler directives cannot
     describe the CALLS-generated frame, so the prologue records the
     unwind information by hand via REG_CFA_* notes instead (see
     vax_expand_prologue); confirm against the original change log.  */
  flag_dwarf2_cfi_asm = 0;

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
}
169 
170 static void
171 vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
172 {
173   rtx x;
174 
175   x = plus_constant (Pmode, frame_pointer_rtx, offset);
176   x = gen_rtx_MEM (SImode, x);
177   x = gen_rtx_SET (VOIDmode, x, src);
178   add_reg_note (insn, REG_CFA_OFFSET, x);
179 }
180 
/* Generate the assembly code for function entry.  FILE is a stdio
   stream to output the code to.  SIZE is an int: how many units of
   temporary storage to allocate.

   Refer to the array `regs_ever_live' to determine which registers to
   save; `regs_ever_live[I]' is nonzero if register number I is ever
   used in the function.  This function is responsible for knowing
   which registers should not be saved even if used.  */

void
vax_expand_prologue (void)
{
  int regno, offset;
  int mask = 0;
  HOST_WIDE_INT size;
  rtx insn;

  /* Build the procedure entry mask: one bit per call-saved register
     that is live in this function.  Each saved register occupies 4
     bytes on top of the 20-byte fixed frame part (see the diagram
     below), so OFFSET ends up as the FP-to-CFA distance.  */
  offset = 20;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
      {
        mask |= 1 << regno;
        offset += 4;
      }

  insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* The layout of the CALLG/S stack frame is as follows:

		<- CFA, AP
	r11
	r10
	...	Registers saved as specified by MASK
	r3
	r2
	return-addr
	old fp
	old ap
	old psw
	zero
		<- FP, SP

     The rest of the prologue will adjust the SP for the local frame.  */

  add_reg_note (insn, REG_CFA_DEF_CFA,
                plus_constant (Pmode, frame_pointer_rtx, offset));
  insn = emit_insn (gen_blockage ());
  RTX_FRAME_RELATED_P (insn) = 1;

#ifdef notyet
  /*
   * We can't do this, the dwarf code asserts and we don't have yet a
   * way to get to the psw
   */
  vax_add_reg_cfa_offset (insn, 4, gen_rtx_REG (Pmode, PSW_REGNUM));
#endif
  /* Record where CALLS saved the old AP, FP and the return address,
     so the unwinder can restore them.  */
  vax_add_reg_cfa_offset (insn, 8, arg_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 12, frame_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 16, pc_rtx);

  /* Likewise for each register saved via the entry mask.  */
  offset = 20;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (mask & (1 << regno))
      {
	vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
	offset += 4;
      }

  /* Allocate the local stack frame.  */
  size = get_frame_size ();
  size -= STARTING_FRAME_OFFSET;
  emit_insn (gen_addsi3 (stack_pointer_rtx,
			 stack_pointer_rtx, GEN_INT (-size)));

  /* Do not allow instructions referencing local stack memory to be
     scheduled before the frame is allocated.  This is more pedantic
     than anything else, given that VAX does not currently have a
     scheduling description.  */
  emit_insn (gen_blockage ());
}
262 
/* When debugging with stabs, we want to output an extra dummy label
   so that gas can distinguish between D_float and G_float prior to
   processing the .stabs directive identifying type double.
   Implements TARGET_ASM_FILE_START.  */
static void
vax_file_start (void)
{
  default_file_start ();

  /* The label name embeds ASM_DOUBLE_CHAR — presumably 'd' or 'g'
     depending on the float format in use; see vax.h.  */
  if (write_symbols == DBX_DEBUG)
    fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
}
274 
275 /* We can use the BSD C library routines for the libgcc calls that are
276    still generated, since that's what they boil down to anyways.  When
277    ELF, avoid the user's namespace.  */
278 
279 static void
280 vax_init_libfuncs (void)
281 {
282   if (TARGET_BSD_DIVMOD)
283     {
284       set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
285       set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
286     }
287 }
288 
/* Split the N quadword (DImode) OPERANDS of an operation CODE taken
   from INSN into their SImode word halves: on return OPERANDS[i]
   refers to word 1 of the original operand and LOW[i] to word 0.
   (The previous comment here, about nonimmediate_operand, described a
   different function.)  */

static void
split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
			 rtx * low, int n)
{
  int i;

  for (i = 0; i < n; i++)
    low[i] = 0;

  for (i = 0; i < n; i++)
    {
      if (MEM_P (operands[i])
	  && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
	      || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
	{
	  /* Autoincrement addressing: the same SImode MEM serves both
	     halves, the side effect advancing between the accesses.  */
	  rtx addr = XEXP (operands[i], 0);
	  operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
	}
      else if (optimize_size && MEM_P (operands[i])
	       && REG_P (XEXP (operands[i], 0))
	       && (code != MINUS || operands[1] != const0_rtx)
	       && find_regno_note (insn, REG_DEAD,
				   REGNO (XEXP (operands[i], 0))))
	{
	  /* The base register dies in INSN, so it may be clobbered:
	     let the LOW access post-increment it onto the other word.
	     NOTE(review): this assumes the low word is accessed before
	     the high word — confirm against the insn output templates
	     that call this function.  */
	  low[i] = gen_rtx_MEM (SImode,
				gen_rtx_POST_INC (Pmode,
						  XEXP (operands[i], 0)));
	  operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
	}
      else
	{
	  /* Plain case: take the two subwords (word 0 / word 1 of the
	     DImode value).  */
	  low[i] = operand_subword (operands[i], 0, 0, DImode);
	  operands[i] = operand_subword (operands[i], 1, 0, DImode);
	}
    }
}
327 
/* Output to FILE the VAX assembler syntax for the memory address ADDR:
   indirection (*), register indirect, autodecrement/autoincrement, and
   the general displacement(base)[index] forms.  Combinations that the
   VAX cannot encode are diagnosed via output_operand_lossage.  */
void
print_operand_address (FILE * file, rtx addr)
{
  rtx orig = addr;
  rtx reg1, breg, ireg;
  rtx offset;

 retry:
  switch (GET_CODE (addr))
    {
    case MEM:
      fprintf (file, "*");
      addr = XEXP (addr, 0);
      goto retry;

    case REG:
      fprintf (file, "(%s)", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:
      /* There can be either two or three things added here.  One must be a
	 REG.  One can be either a REG or a MULT of a REG and an appropriate
	 constant, and the third can only be a constant or a MEM.

	 We get these two or three things and put the constant or MEM in
	 OFFSET, the MULT or REG in IREG, and the REG in BREG.  If we have
	 a register and can't tell yet if it is a base or index register,
	 put it into REG1.  */

      reg1 = 0; ireg = 0; breg = 0; offset = 0;

      /* First operand of the outer PLUS: classify it and keep the
	 other operand in ADDR for further analysis.  */
      if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	  || MEM_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	       || MEM_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
	{
	  ireg = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
	{
	  ireg = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (REG_P (XEXP (addr, 1)))
	{
	  reg1 = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (REG_P (XEXP (addr, 0)))
	{
	  reg1 = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else
	{
	   debug_rtx (orig);
	   gcc_unreachable ();
	}

      /* ADDR now holds the remaining operand: a REG, a MULT, or a
	 nested PLUS carrying the final two terms.  */
      if (REG_P (addr))
	{
	  if (reg1)
	    ireg = addr;
	  else
	    reg1 = addr;
	}
      else if (GET_CODE (addr) == MULT)
	ireg = addr;
      else if (GET_CODE (addr) == PLUS)
	{
	  if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	      || MEM_P (XEXP (addr, 0)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 0),
		                            INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 0)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 0)));
		    }
		}
	      /* NOTE(review): the combined constant computed just above
		 is immediately discarded by this assignment — either the
		 plus_constant calls are dead defensive code or this line
		 belongs in an else.  Confirm before changing.  */
	      offset = XEXP (addr, 0);
	    }
	  else if (REG_P (XEXP (addr, 0)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
	      else
		reg1 = XEXP (addr, 0);
	    }
	  else if (GET_CODE (XEXP (addr, 0)) == MULT && !ireg)
	    {
	      ireg = XEXP (addr, 0);
	    }
	  else
	    {
	      debug_rtx (orig);
	      gcc_unreachable ();
	    }

	  if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	      || MEM_P (XEXP (addr, 1)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 1),
					    INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 1)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 1)));
		    }
		}
	      /* NOTE(review): same dead store as above — the combined
		 constant is discarded here too.  */
	      offset = XEXP (addr, 1);
	    }
	  else if (REG_P (XEXP (addr, 1)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
	      else
		reg1 = XEXP (addr, 1);
	    }
	  else if (GET_CODE (XEXP (addr, 1)) == MULT && !ireg)
	    {
	      ireg = XEXP (addr, 1);
	    }
	  else
	    {
	      debug_rtx (orig);
	      gcc_unreachable ();
	    }
	}
      else
	{
	  debug_rtx (orig);
	  gcc_unreachable ();
	}

      /* If REG1 is nonzero, figure out if it is a base or index register.  */
      if (reg1)
	{
	  if (breg
	      || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
	      || (offset
		  && (MEM_P (offset)
		      || (flag_pic && symbolic_operand (offset, SImode)))))
	    {
	      if (ireg)
		{
		  debug_rtx (orig);
		  gcc_unreachable ();
		}
	      ireg = reg1;
	    }
	  else
	    breg = reg1;
	}

      /* Emit the pieces: displacement, then (base), then [index].  */
      if (offset != 0)
	{
	  if (flag_pic && symbolic_operand (offset, SImode))
	    {
	      if (breg && ireg)
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol used with both base and indexed registers");
		}

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
	      if (flag_pic > 1 && GET_CODE (offset) == CONST
		  && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
		  && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol with offset used in PIC mode");
		}
#endif

	      /* symbol(reg) isn't PIC, but symbol[reg] is.  */
	      if (breg)
		{
		  ireg = breg;
		  breg = 0;
		}

	    }

	  output_address (offset);
	}

      if (breg != 0)
	fprintf (file, "(%s)", reg_names[REGNO (breg)]);

      if (ireg != 0)
	{
	  if (GET_CODE (ireg) == MULT)
	    ireg = XEXP (ireg, 0);
	  if (! REG_P (ireg))
	    {
	      debug_rtx (orig);
	      output_operand_lossage ("non-register index expression");
	    }
	  fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
	}
      break;

    default:
      /* A bare constant or symbolic address.  */
      gcc_assert (! REG_P(addr));
      output_addr_const (file, addr);
    }
}
563 
/* Output to FILE the VAX assembler syntax for operand X, modified by
   CODE:
     '#'  the double-precision suffix character (ASM_DOUBLE_CHAR)
     '|'  the register prefix (REGISTER_PREFIX)
     'c'  condition name for comparison X; 'C' the reversed condition
     'D'  negative CONST_INT, printed in hex via NEG_HWI_PRINT_HEX16
     'P'  CONST_INT plus one;  'N'  bitwise complement
     'R'  32 minus the value (rotl cannot take negative counts)
     'H'  low 16 bits of the complement; 'h'  16-bit negation
     'B'  low 8 bits of the complement;  'b'  low 8 bits of negation
     'M'  mask with the low INTVAL(X) bits set; 'x'  value in hex
   With no code, registers, memory references, float constants and
   plain immediates are printed in their natural assembly forms.  */
void
print_operand (FILE *file, rtx x, int code)
{
  if (code == '#')
    fputc (ASM_DOUBLE_CHAR, file);
  else if (code == '|')
    fputs (REGISTER_PREFIX, file);
  else if (code == 'c')
    fputs (cond_name (x), file);
  else if (code == 'C')
    fputs (rev_cond_name (x), file);
  else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
    fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
  else if (code == 'P' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
  else if (code == 'N' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
  /* rotl instruction cannot deal with negative arguments.  */
  else if (code == 'R' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
  else if (code == 'H' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
  else if (code == 'h' && CONST_INT_P (x))
    fprintf (file, "$%d", (short) - INTVAL (x));
  else if (code == 'B' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
  else if (code == 'b' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
  else if (code == 'M' && CONST_INT_P (x))
    fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
  else if (code == 'x' && CONST_INT_P (x))
    fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
  else if (REG_P (x))
    fprintf (file, "%s", reg_names[REGNO (x)]);
  else if (MEM_P (x))
    output_address (XEXP (x, 0));
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0f%s", dstr);
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
    }
  else if (GET_CODE (x) == SUBREG)
    {
      debug_rtx (x);
      output_operand_lossage ("SUBREG operand");
    }
  else
    {
      if (flag_pic > 1 && symbolic_operand (x, SImode))
	{
	  debug_rtx (x);
	  output_operand_lossage ("symbol used as immediate operand");
	}
      putc ('$', file);
      output_addr_const (file, x);
    }
}
630 
631 const char *
632 cond_name (rtx op)
633 {
634   switch (GET_CODE (op))
635     {
636     case NE:
637       return "neq";
638     case EQ:
639       return "eql";
640     case GE:
641       return "geq";
642     case GT:
643       return "gtr";
644     case LE:
645       return "leq";
646     case LT:
647       return "lss";
648     case GEU:
649       return "gequ";
650     case GTU:
651       return "gtru";
652     case LEU:
653       return "lequ";
654     case LTU:
655       return "lssu";
656 
657     default:
658       gcc_unreachable ();
659     }
660 }
661 
662 const char *
663 rev_cond_name (rtx op)
664 {
665   switch (GET_CODE (op))
666     {
667     case EQ:
668       return "neq";
669     case NE:
670       return "eql";
671     case LT:
672       return "geq";
673     case LE:
674       return "gtr";
675     case GT:
676       return "leq";
677     case GE:
678       return "lss";
679     case LTU:
680       return "gequ";
681     case LEU:
682       return "gtru";
683     case GTU:
684       return "lequ";
685     case GEU:
686       return "lssu";
687 
688     default:
689       gcc_unreachable ();
690     }
691 }
692 
693 static bool
694 vax_float_literal (rtx c)
695 {
696   machine_mode mode;
697   REAL_VALUE_TYPE r, s;
698   int i;
699 
700   if (GET_CODE (c) != CONST_DOUBLE)
701     return false;
702 
703   mode = GET_MODE (c);
704 
705   if (c == const_tiny_rtx[(int) mode][0]
706       || c == const_tiny_rtx[(int) mode][1]
707       || c == const_tiny_rtx[(int) mode][2])
708     return true;
709 
710   REAL_VALUE_FROM_CONST_DOUBLE (r, c);
711 
712   for (i = 0; i < 7; i++)
713     {
714       int x = 1 << i;
715       bool ok;
716       real_from_integer (&s, mode, x, SIGNED);
717 
718       if (REAL_VALUES_EQUAL (r, s))
719 	return true;
720       ok = exact_real_inverse (mode, &s);
721       gcc_assert (ok);
722       if (REAL_VALUES_EQUAL (r, s))
723 	return true;
724     }
725   return false;
726 }
727 
728 
/* Return the cost in cycles of a memory address, relative to register
   indirect.

   Each of the following adds the indicated number of cycles:

   1 - symbolic address
   1 - pre-decrement
   1 - indexing and/or offset(register)
   2 - indirect */


static int
vax_address_cost_1 (rtx addr)
{
  int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
  rtx plus_op0 = 0, plus_op1 = 0;
 restart:
  switch (GET_CODE (addr))
    {
    case PRE_DEC:
      predec = 1;
      /* FALLTHRU — pre-decrement also implies a register operand.  */
    case REG:
    case SUBREG:
    case POST_INC:
      reg = 1;
      break;
    case MULT:
      indexed = 1;	/* 2 on VAX 2 */
      break;
    case CONST_INT:
      /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
      if (offset == 0)
	offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
      break;
    case CONST:
    case SYMBOL_REF:
      offset = 1;	/* 2 on VAX 2 */
      break;
    case LABEL_REF:	/* this is probably a byte offset from the pc */
      if (offset == 0)
	offset = 1;
      break;
    case PLUS:
      /* Stash one operand of the PLUS and keep scanning the other;
	 the stashed operands are picked up again below.  */
      if (plus_op0)
	plus_op1 = XEXP (addr, 0);
      else
	plus_op0 = XEXP (addr, 0);
      addr = XEXP (addr, 1);
      goto restart;
    case MEM:
      indir = 2;	/* 3 on VAX 2 */
      addr = XEXP (addr, 0);
      goto restart;
    default:
      break;
    }

  /* Up to 3 things can be added in an address.  They are stored in
     plus_op0, plus_op1, and addr.  */

  if (plus_op0)
    {
      addr = plus_op0;
      plus_op0 = 0;
      goto restart;
    }
  if (plus_op1)
    {
      addr = plus_op1;
      plus_op1 = 0;
      goto restart;
    }
  /* Indexing and register+offset can both be used (except on a VAX 2)
     without increasing execution time over either one alone.  */
  if (reg && indexed && offset)
    return reg + indir + offset + predec;
  return reg + indexed + indir + offset + predec;
}
807 
808 static int
809 vax_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
810 		  addr_space_t as ATTRIBUTE_UNUSED,
811 		  bool speed ATTRIBUTE_UNUSED)
812 {
813   return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
814 }
815 
/* Cost of an expression on a VAX.  This version has costs tuned for the
   CVAX chip (found in the VAX 3 series) with comments for variations on
   other models.

   FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
   and FLOAT_TRUNCATE.  We need a -mcpu option to allow provision of
   costs on a per cpu basis.  */

static bool
vax_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
	       int *total, bool speed ATTRIBUTE_UNUSED)
{
  machine_mode mode = GET_MODE (x);
  int i = 0;				   /* may be modified in switch */
  const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */

  /* First charge for the operation itself.  Cases that `return true'
     are complete; cases that `break' additionally have their operands
     costed by the loop after the switch.  */
  switch (code)
    {
      /* On a VAX, constants from 0..63 are cheap because they can use the
	 1 byte literal constant format.  Compare to -1 should be made cheap
	 so that decrement-and-branch insns can be formed more easily (if
	 the value -1 is copied to a register some decrement-and-branch
	 patterns will not match).  */
    case CONST_INT:
      if (INTVAL (x) == 0)
	{
	  *total = 0;
	  return true;
	}
      if (outer_code == AND)
	{
	  *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
	  return true;
	}
      if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
	  || (outer_code == COMPARE
	      && INTVAL (x) == -1)
	  || ((outer_code == PLUS || outer_code == MINUS)
	      && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
	{
	  *total = 1;
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 3;
      return true;

    case CONST_DOUBLE:
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	*total = vax_float_literal (x) ? 5 : 8;
      else
	/* Integer CONST_DOUBLE: cheap if it fits the 6-bit literal
	   form (possibly negated in a PLUS).  */
	*total = ((CONST_DOUBLE_HIGH (x) == 0
		   && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
		  || (outer_code == PLUS
		      && CONST_DOUBLE_HIGH (x) == -1
		      && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
		 ? 2 : 5;
      return true;

    case POST_INC:
      *total = 2;
      return true;		/* Implies register operand.  */

    case PRE_DEC:
      *total = 3;
      return true;		/* Implies register operand.  */

    case MULT:
      switch (mode)
	{
	case DFmode:
	  *total = 16;		/* 4 on VAX 9000 */
	  break;
	case SFmode:
	  *total = 9;		/* 4 on VAX 9000, 12 on VAX 2 */
	  break;
	case DImode:
	  *total = 16;		/* 6 on VAX 9000, 28 on VAX 2 */
	  break;
	case SImode:
	case HImode:
	case QImode:
	  *total = 10;		/* 3-4 on VAX 9000, 20-28 on VAX 2 */
	  break;
	default:
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      break;

    case UDIV:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = 17;
      break;

    case DIV:
      if (mode == DImode)
	*total = 30;		/* Highly variable.  */
      else if (mode == DFmode)
	/* divide takes 28 cycles if the result is not zero, 13 otherwise */
	*total = 24;
      else
	*total = 11;		/* 25 on VAX 2 */
      break;

    case MOD:
      *total = 23;
      break;

    case UMOD:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = 29;
      break;

    case FLOAT:
      *total = (6		/* 4 on VAX 9000 */
		+ (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
      break;

    case FIX:
      *total = 7;		/* 17 on VAX 2 */
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
      if (mode == DImode)
	*total = 12;
      else
	*total = 10;		/* 6 on VAX 9000 */
      break;

    case ROTATE:
    case ROTATERT:
      *total = 6;		/* 5 on VAX 2, 4 on VAX 9000 */
      if (CONST_INT_P (XEXP (x, 1)))
	fmt = "e"; 		/* all constant rotate counts are short */
      break;

    case PLUS:
    case MINUS:
      *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
      /* Small integer operands can use subl2 and addl2.  */
      if ((CONST_INT_P (XEXP (x, 1)))
	  && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
	fmt = "e";
      break;

    case IOR:
    case XOR:
      *total = 3;
      break;

    case AND:
      /* AND is special because the first operand is complemented.  */
      *total = 3;
      if (CONST_INT_P (XEXP (x, 0)))
	{
	  if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
	    *total = 4;
	  fmt = "e";
	  i = 1;	/* Skip the already-costed constant operand.  */
	}
      break;

    case NEG:
      if (mode == DFmode)
	*total = 9;
      else if (mode == SFmode)
	*total = 6;
      else if (mode == DImode)
	*total = 4;
      else
	*total = 2;
      break;

    case NOT:
      *total = 2;
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      *total = 15;
      break;

    case MEM:
      if (mode == DImode || mode == DFmode)
	*total = 5;		/* 7 on VAX 2 */
      else
	*total = 3;		/* 4 on VAX 2 */
      x = XEXP (x, 0);
      if (!REG_P (x) && GET_CODE (x) != POST_INC)
	*total += vax_address_cost_1 (x);
      return true;

    case FLOAT_EXTEND:
    case FLOAT_TRUNCATE:
    case TRUNCATE:
      *total = 3;		/* FIXME: Costs need to be checked  */
      break;

    default:
      return false;
    }

  /* Now look inside the expression.  Operands which are not registers or
     short constants add to the cost.

     FMT and I may have been adjusted in the switch above for instructions
     which require special handling.  */

  while (*fmt++ == 'e')
    {
      rtx op = XEXP (x, i);

      i += 1;
      code = GET_CODE (op);

      /* A NOT is likely to be found as the first operand of an AND
	 (in which case the relevant cost is of the operand inside
	 the not) and not likely to be found anywhere else.  */
      if (code == NOT)
	op = XEXP (op, 0), code = GET_CODE (op);

      switch (code)
	{
	case CONST_INT:
	  if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
	      && GET_MODE (x) != QImode)
	    *total += 1;	/* 2 on VAX 2 */
	  break;
	case CONST:
	case LABEL_REF:
	case SYMBOL_REF:
	  *total += 1;		/* 2 on VAX 2 */
	  break;
	case CONST_DOUBLE:
	  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
	    {
	      /* Registers are faster than floating point constants -- even
		 those constants which can be encoded in a single byte.  */
	      if (vax_float_literal (op))
		*total += 1;
	      else
		*total += (GET_MODE (x) == DFmode) ? 3 : 2;
	    }
	  else
	    {
	      if (CONST_DOUBLE_HIGH (op) != 0
		  || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
		*total += 2;
	    }
	  break;
	case MEM:
	  *total += 1;		/* 2 on VAX 2 */
	  if (!REG_P (XEXP (op, 0)))
	    *total += vax_address_cost_1 (XEXP (op, 0));
	  break;
	case REG:
	case SUBREG:
	  break;
	default:
	  *total += 1;
	  break;
	}
    }
  return true;
}
1096 
/* Output code to add DELTA to the first argument, and then jump to FUNCTION.
   Used for C++ multiple inheritance.
	.mask	^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11>  #conservative entry mask
	addl2	$DELTA, 4(ap)	#adjust first argument
	jmp	FUNCTION+2	#jump beyond FUNCTION's entry mask
*/

static void
vax_output_mi_thunk (FILE * file,
		     tree thunk ATTRIBUTE_UNUSED,
		     HOST_WIDE_INT delta,
		     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
		     tree function)
{
  /* 0x0ffc is the conservative ^m<r2..r11> entry mask shown above.  */
  fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
  asm_fprintf (file, ",4(%Rap)\n");
  fprintf (file, "\tjmp ");
  assemble_name (file,  XSTR (XEXP (DECL_RTL (function), 0), 0));
  /* The "+2" skips FUNCTION's own 2-byte entry mask word.  */
  fprintf (file, "+2\n");
}
1117 
/* Return the register rtx (VAX_STRUCT_VALUE_REGNUM) through which the
   address of an aggregate return value is passed.  FNTYPE and INCOMING
   are ignored: the same register is used in both directions.  */

static rtx
vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
}
1124 
/* Return the frame-pointer value __builtin_setjmp should save: the
   hard frame pointer rtx itself.  */

static rtx
vax_builtin_setjmp_frame_value (void)
{
  return hard_frame_pointer_rtx;
}
1130 
/* Worker function for NOTICE_UPDATE_CC: record in cc_status how the
   condition codes are affected by EXP, the body of INSN.  */

void
vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
{
  if (GET_CODE (exp) == SET)
    {
      /* A call clobbers the condition codes entirely.  */
      if (GET_CODE (SET_SRC (exp)) == CALL)
	CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
	       && GET_CODE (SET_DEST (exp)) != PC)
	{
	  cc_status.flags = 0;
	  /* The integer operations below don't set carry or
	     set it in an incompatible way.  That's ok though
	     as the Z bit is all we need when doing unsigned
	     comparisons on the result of these insns (since
	     they're always with 0).  Set CC_NO_OVERFLOW to
	     generate the correct unsigned branches.  */
	  switch (GET_CODE (SET_SRC (exp)))
	    {
	    case NEG:
	      if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
		break;
	      /* FALLTHRU: integer NEG behaves like the cases below.  */
	    case AND:
	    case IOR:
	    case XOR:
	    case NOT:
	    case CTZ:
	    case MEM:
	    case REG:
	      cc_status.flags = CC_NO_OVERFLOW;
	      break;
	    default:
	      break;
	    }
	  cc_status.value1 = SET_DEST (exp);
	  cc_status.value2 = SET_SRC (exp);
	}
    }
  else if (GET_CODE (exp) == PARALLEL
	   && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
    {
      if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
	CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
	{
	  cc_status.flags = 0;
	  cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
	  cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
	}
      else
	/* PARALLELs whose first element sets the PC are aob,
	   sob insns.  They do change the cc's.  */
	CC_STATUS_INIT;
    }
  else
    CC_STATUS_INIT;
  /* If the destination register is mentioned in the source, the saved
     comparison against the source is no longer valid.  */
  if (cc_status.value1 && REG_P (cc_status.value1)
      && cc_status.value2
      && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
    cc_status.value2 = 0;
  if (cc_status.value1 && MEM_P (cc_status.value1)
      && cc_status.value2
      && MEM_P (cc_status.value2))
    cc_status.value2 = 0;
  /* Actual condition, one line up, should be that value2's address
     depends on value1, but that is too much of a pain.  */
}
1200 
/* Output integer move instructions.

   Return the assembler template for moving OPERANDS[1] into
   OPERANDS[0] in MODE.  May emit preparatory instructions directly
   (DImode split case) and may rewrite OPERANDS[1]/OPERANDS[2] for use
   by the returned template.  */

const char *
vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
		     machine_mode mode)
{
  rtx hi[3], lo[3];
  const char *pattern_hi, *pattern_lo;

  switch (mode)
    {
    case DImode:
      if (operands[1] == const0_rtx)
	return "clrq %0";
      if (TARGET_QMATH && optimize_size
	  && (CONST_INT_P (operands[1])
	      || GET_CODE (operands[1]) == CONST_DOUBLE))
	{
	  unsigned HOST_WIDE_INT hval, lval;
	  int n;

	  if (GET_CODE (operands[1]) == CONST_DOUBLE)
	    {
	      gcc_assert (HOST_BITS_PER_WIDE_INT != 64);

	      /* Make sure only the low 32 bits are valid.  */
	      lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
	      hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
	    }
	  else
	    {
	      lval = INTVAL (operands[1]);
	      hval = 0;
	    }

	  /* Here we see if we are trying to see if the 64bit value is really
	     a 6bit shifted some arbitrary amount.  If so, we can use ashq to
	     shift it to the correct value saving 7 bytes (1 addr-mode-byte +
	     8 bytes - 1 shift byte - 1 short literal byte.  */
	  if (lval != 0
	      && (n = exact_log2 (lval & (- lval))) != -1
	      && (lval >> n) < 64)
	    {
	      lval >>= n;

	      /* On 32bit platforms, if the 6bits didn't overflow into the
		 upper 32bit value that value better be 0.  If we have
		 overflowed, make sure it wasn't too much.  */
	      if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
		{
		  if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
		    n = 0;	/* failure */
		  else
		    lval |= hval << (32 - n);
		}
	      /*  If n is 0, then ashq is not the best way to emit this.  */
	      if (n > 0)
		{
		  operands[1] = GEN_INT (lval);
		  operands[2] = GEN_INT (n);
		  return "ashq %2,%D1,%0";
		}
#if HOST_BITS_PER_WIDE_INT == 32
	    }
	  /* On 32bit platforms, if the low 32bit value is 0, checkout the
	     upper 32bit value.  */
	  /* NOTE(review): the `- 1' below looks suspicious: if exact_log2
	     fails it returns -1, so `n' becomes -2 and slips past the
	     != -1 check.  Only reachable on 32-bit-HWI hosts -- verify.  */
	  else if (hval != 0
		   && (n = exact_log2 (hval & (- hval)) - 1) != -1
		   && (hval >> n) < 64)
	    {
	      operands[1] = GEN_INT (hval >> n);
	      operands[2] = GEN_INT (n + 32);
	      return "ashq %2,%D1,%0";
#endif
	    }
	}

      if (TARGET_QMATH
	  && (!MEM_P (operands[0])
	      || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
	      || GET_CODE (XEXP (operands[0], 0)) == POST_INC
	      || !illegal_addsub_di_memory_operand (operands[0], DImode))
	  && ((CONST_INT_P (operands[1])
	       && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
	      || GET_CODE (operands[1]) == CONST_DOUBLE))
	{
	  hi[0] = operands[0];
	  hi[1] = operands[1];

	  split_quadword_operands (insn, SET, hi, lo, 2);

	  pattern_lo = vax_output_int_move (NULL, lo, SImode);
	  pattern_hi = vax_output_int_move (NULL, hi, SImode);

	  /* The patterns are just movl/movl or pushl/pushl then a movq will
	     be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
	     bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate
	     value bytes.  */
	  if ((!strncmp (pattern_lo, "movl", 4)
	      && !strncmp (pattern_hi, "movl", 4))
	      || (!strncmp (pattern_lo, "pushl", 5)
		  && !strncmp (pattern_hi, "pushl", 5)))
	    return "movq %1,%0";

	  /* With a predecrement destination emit the high half first,
	     otherwise the low half first.  */
	  if (MEM_P (operands[0])
	      && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
	    {
	      output_asm_insn (pattern_hi, hi);
	      operands[0] = lo[0];
	      operands[1] = lo[1];
	      operands[2] = lo[2];
	      return pattern_lo;
	    }
	  else
	    {
	      output_asm_insn (pattern_lo, lo);
	      operands[0] = hi[0];
	      operands[1] = hi[1];
	      operands[2] = hi[2];
	      return pattern_hi;
	    }
	}
      return "movq %1,%0";

    case SImode:
      if (symbolic_operand (operands[1], SImode))
	{
	  if (push_operand (operands[0], SImode))
	    return "pushab %a1";
	  return "movab %a1,%0";
	}

      if (operands[1] == const0_rtx)
	{
	  if (push_operand (operands[0], SImode))
	    return "pushl %1";
	  return "clrl %0";
	}

      /* Constants outside the 6-bit short-literal range: try shorter
	 encodings (complement, zero/sign extend, shifted literal).  */
      if (CONST_INT_P (operands[1])
	  && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
	{
	  HOST_WIDE_INT i = INTVAL (operands[1]);
	  int n;
	  if ((unsigned HOST_WIDE_INT)(~i) < 64)
	    return "mcoml %N1,%0";
	  if ((unsigned HOST_WIDE_INT)i < 0x100)
	    return "movzbl %1,%0";
	  if (i >= -0x80 && i < 0)
	    return "cvtbl %1,%0";
	  if (optimize_size
	      && (n = exact_log2 (i & (-i))) != -1
	      && ((unsigned HOST_WIDE_INT)i >> n) < 64)
	    {
	      operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
	      operands[2] = GEN_INT (n);
	      return "ashl %2,%1,%0";
	    }
	  if ((unsigned HOST_WIDE_INT)i < 0x10000)
	    return "movzwl %1,%0";
	  if (i >= -0x8000 && i < 0)
	    return "cvtwl %1,%0";
	}
      if (push_operand (operands[0], SImode))
	return "pushl %1";
      return "movl %1,%0";

    case HImode:
      if (CONST_INT_P (operands[1]))
	{
	  HOST_WIDE_INT i = INTVAL (operands[1]);
	  if (i == 0)
	    return "clrw %0";
	  else if ((unsigned HOST_WIDE_INT)i < 64)
	    return "movw %1,%0";
	  else if ((unsigned HOST_WIDE_INT)~i < 64)
	    return "mcomw %H1,%0";
	  else if ((unsigned HOST_WIDE_INT)i < 256)
	    return "movzbw %1,%0";
	  else if (i >= -0x80 && i < 0)
	    return "cvtbw %1,%0";
	}
      return "movw %1,%0";

    case QImode:
      if (CONST_INT_P (operands[1]))
	{
	  HOST_WIDE_INT i = INTVAL (operands[1]);
	  if (i == 0)
	    return "clrb %0";
	  else if ((unsigned HOST_WIDE_INT)~i < 64)
	    return "mcomb %B1,%0";
	}
      return "movb %1,%0";

    default:
      gcc_unreachable ();
    }
}
1400 
1401 /* Output integer add instructions.
1402 
1403    The space-time-opcode tradeoffs for addition vary by model of VAX.
1404 
1405    On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
   but it is not faster on other models.
1407 
1408    "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1409    faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1410    a register is used in an address too soon after it is set.
1411    Compromise by using movab only when it is shorter than the add
1412    or the base register in the address is one of sp, ap, and fp,
1413    which are not modified very often.  */
1414 
1415 const char *
1416 vax_output_int_add (rtx insn, rtx *operands, machine_mode mode)
1417 {
1418   switch (mode)
1419     {
1420     case DImode:
1421       {
1422 	rtx low[3];
1423 	const char *pattern;
1424 	int carry = 1;
1425 	bool sub;
1426 
1427 	if (TARGET_QMATH && 0)
1428 	  debug_rtx (insn);
1429 
1430 	split_quadword_operands (insn, PLUS, operands, low, 3);
1431 
1432 	if (TARGET_QMATH)
1433 	  {
1434 	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
1435 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1436 	    gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1437 	    gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1438 #endif
1439 
1440 	    /* No reason to add a 0 to the low part and thus no carry, so just
1441 	       emit the appropriate add/sub instruction.  */
1442 	    if (low[2] == const0_rtx)
1443 	      return vax_output_int_add (NULL, operands, SImode);
1444 
1445 	    /* Are we doing addition or subtraction?  */
1446 	    sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1447 
1448 	    /* We can't use vax_output_int_add since some the patterns don't
1449 	       modify the carry bit.  */
1450 	    if (sub)
1451 	      {
1452 		if (low[2] == constm1_rtx)
1453 		  pattern = "decl %0";
1454 		else
1455 		  pattern = "subl2 $%n2,%0";
1456 	      }
1457 	    else
1458 	      {
1459 		if (low[2] == const1_rtx)
1460 		  pattern = "incl %0";
1461 		else
1462 		  pattern = "addl2 %2,%0";
1463 	      }
1464 	    output_asm_insn (pattern, low);
1465 
1466 	    /* In 2's complement, -n = ~n + 1.  Since we are dealing with
1467 	       two 32bit parts, we complement each and then add one to
1468 	       low part.  We know that the low part can't overflow since
1469 	       it's value can never be 0.  */
1470 	    if (sub)
1471 		return "sbwc %N2,%0";
1472 	    return "adwc %2,%0";
1473 	  }
1474 
1475 	/* Add low parts.  */
1476 	if (rtx_equal_p (operands[0], operands[1]))
1477 	  {
1478 	    if (low[2] == const0_rtx)
1479 	/* Should examine operand, punt if not POST_INC.  */
1480 	      pattern = "tstl %0", carry = 0;
1481 	    else if (low[2] == const1_rtx)
1482 	      pattern = "incl %0";
1483 	    else
1484 	      pattern = "addl2 %2,%0";
1485 	  }
1486 	else
1487 	  {
1488 	    if (low[2] == const0_rtx)
1489 	      pattern = "movl %1,%0", carry = 0;
1490 	    else
1491 	      pattern = "addl3 %2,%1,%0";
1492 	  }
1493 	if (pattern)
1494 	  output_asm_insn (pattern, low);
1495 	if (!carry)
1496 	  /* If CARRY is 0, we don't have any carry value to worry about.  */
1497 	  return get_insn_template (CODE_FOR_addsi3, insn);
1498 	/* %0 = C + %1 + %2 */
1499 	if (!rtx_equal_p (operands[0], operands[1]))
1500 	  output_asm_insn ((operands[1] == const0_rtx
1501 			    ? "clrl %0"
1502 			    : "movl %1,%0"), operands);
1503 	return "adwc %2,%0";
1504       }
1505 
1506     case SImode:
1507       if (rtx_equal_p (operands[0], operands[1]))
1508 	{
1509 	  if (operands[2] == const1_rtx)
1510 	    return "incl %0";
1511 	  if (operands[2] == constm1_rtx)
1512 	    return "decl %0";
1513 	  if (CONST_INT_P (operands[2])
1514 	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1515 	    return "subl2 $%n2,%0";
1516 	  if (CONST_INT_P (operands[2])
1517 	      && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1518 	      && REG_P (operands[1])
1519 	      && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1520 		   || REGNO (operands[1]) > 11))
1521 	    return "movab %c2(%1),%0";
1522 	  if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1523 	    return "movab %a2[%0],%0";
1524 	  return "addl2 %2,%0";
1525 	}
1526 
1527       if (rtx_equal_p (operands[0], operands[2]))
1528 	{
1529 	  if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1530 	    return "movab %a1[%0],%0";
1531 	  return "addl2 %1,%0";
1532 	}
1533 
1534       if (CONST_INT_P (operands[2])
1535 	  && INTVAL (operands[2]) < 32767
1536 	  && INTVAL (operands[2]) > -32768
1537 	  && REG_P (operands[1])
1538 	  && push_operand (operands[0], SImode))
1539 	return "pushab %c2(%1)";
1540 
1541       if (CONST_INT_P (operands[2])
1542 	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1543 	return "subl3 $%n2,%1,%0";
1544 
1545       if (CONST_INT_P (operands[2])
1546 	  && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1547 	  && REG_P (operands[1])
1548 	  && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1549 	       || REGNO (operands[1]) > 11))
1550 	return "movab %c2(%1),%0";
1551 
1552       /* Add this if using gcc on a VAX 3xxx:
1553       if (REG_P (operands[1]) && REG_P (operands[2]))
1554 	return "movab (%1)[%2],%0";
1555       */
1556 
1557       if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1558 	{
1559 	  if (push_operand (operands[0], SImode))
1560 	    return "pushab %a2[%1]";
1561 	  return "movab %a2[%1],%0";
1562 	}
1563 
1564       if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1565 	{
1566 	  if (push_operand (operands[0], SImode))
1567 	    return "pushab %a1[%2]";
1568 	  return "movab %a1[%2],%0";
1569 	}
1570 
1571       if (flag_pic && REG_P (operands[0])
1572 	  && symbolic_operand (operands[2], SImode))
1573 	return "movab %a2,%0;addl2 %1,%0";
1574 
1575       if (flag_pic
1576 	  && (symbolic_operand (operands[1], SImode)
1577 	      || symbolic_operand (operands[1], SImode)))
1578 	debug_rtx (insn);
1579 
1580       return "addl3 %1,%2,%0";
1581 
1582     case HImode:
1583       if (rtx_equal_p (operands[0], operands[1]))
1584 	{
1585 	  if (operands[2] == const1_rtx)
1586 	    return "incw %0";
1587 	  if (operands[2] == constm1_rtx)
1588 	    return "decw %0";
1589 	  if (CONST_INT_P (operands[2])
1590 	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1591 	    return "subw2 $%n2,%0";
1592 	  return "addw2 %2,%0";
1593 	}
1594       if (rtx_equal_p (operands[0], operands[2]))
1595 	return "addw2 %1,%0";
1596       if (CONST_INT_P (operands[2])
1597 	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1598 	return "subw3 $%n2,%1,%0";
1599       return "addw3 %1,%2,%0";
1600 
1601     case QImode:
1602       if (rtx_equal_p (operands[0], operands[1]))
1603 	{
1604 	  if (operands[2] == const1_rtx)
1605 	    return "incb %0";
1606 	  if (operands[2] == constm1_rtx)
1607 	    return "decb %0";
1608 	  if (CONST_INT_P (operands[2])
1609 	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1610 	    return "subb2 $%n2,%0";
1611 	  return "addb2 %2,%0";
1612 	}
1613       if (rtx_equal_p (operands[0], operands[2]))
1614 	return "addb2 %1,%0";
1615       if (CONST_INT_P (operands[2])
1616 	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1617 	return "subb3 $%n2,%1,%0";
1618       return "addb3 %1,%2,%0";
1619 
1620     default:
1621       gcc_unreachable ();
1622     }
1623 }
1624 
/* Return the assembler template for OPERANDS[0] = OPERANDS[1] -
   OPERANDS[2] in MODE (DImode only).  The low-part instruction is
   emitted directly; the returned template handles the high part with
   borrow.  */

const char *
vax_output_int_subtract (rtx insn, rtx *operands, machine_mode mode)
{
  switch (mode)
    {
    case DImode:
      {
	rtx low[3];
	const char *pattern;
	int carry = 1;

	/* Disabled debugging aid.  */
	if (TARGET_QMATH && 0)
	  debug_rtx (insn);

	split_quadword_operands (insn, MINUS, operands, low, 3);

	if (TARGET_QMATH)
	  {
	    if (operands[1] == const0_rtx && low[1] == const0_rtx)
	      {
		/* Negation is tricky.  It's basically complement and increment.
		   Negate hi, then lo, and subtract the carry back.  */
		if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
		    || (MEM_P (operands[0])
			&& GET_CODE (XEXP (operands[0], 0)) == POST_INC))
		  fatal_insn ("illegal operand detected", insn);
		output_asm_insn ("mnegl %2,%0", operands);
		output_asm_insn ("mnegl %2,%0", low);
		return "sbwc $0,%0";
	      }
	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
	    gcc_assert (rtx_equal_p (low[0], low[1]));
	    if (low[2] == const1_rtx)
	      output_asm_insn ("decl %0", low);
	    else
	      output_asm_insn ("subl2 %2,%0", low);
	    return "sbwc %2,%0";
	  }

	/* Subtract low parts.  */
	if (rtx_equal_p (operands[0], operands[1]))
	  {
	    if (low[2] == const0_rtx)
	      pattern = 0, carry = 0;
	    else if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else
	      pattern = "subl2 %2,%0";
	  }
	else
	  {
	    if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else if (low[2] == const0_rtx)
	      pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
	    else
	      pattern = "subl3 %2,%1,%0";
	  }
	if (pattern)
	  output_asm_insn (pattern, low);
	if (carry)
	  {
	    if (!rtx_equal_p (operands[0], operands[1]))
	      return "movl %1,%0;sbwc %2,%0";
	    return "sbwc %2,%0";
	    /* %0 = %2 - %1 - C */
	  }
	/* No borrow from the low part: a plain SImode subtract suffices.  */
	return get_insn_template (CODE_FOR_subsi3, insn);
      }

    default:
      gcc_unreachable ();
  }
}
1699 
/* Build a MEM of MODE referencing BASE displaced by OFF, used by
   vax_output_movmemsi below.  CODE selects the addressing form:
   POST_INC for (reg)+, REG for a plain register deref, otherwise
   base-plus-displacement.  Constant parts found inside BASE are folded
   into the displacement.  */

static rtx
mkrtx(enum rtx_code code, enum machine_mode mode, rtx base, HOST_WIDE_INT off)
{
  rtx tmp;

  /* Strip a CONST wrapper so we see the PLUS inside, if any.  */
  if (GET_CODE (base) == CONST)
    base = XEXP (base, 0);

  if (GET_CODE (base) == PLUS)
    {
      rtx a = XEXP (base, 0);
      rtx b = XEXP (base, 1);
      if (GET_CODE (b) == CONST)
	b = XEXP (b, 0);
      if (CONST_INT_P (b))
	{
	  /* (plus base N) -- fold N into the running offset.  */
          off += INTVAL (b);
          base = a;
	}
      else if (REG_P (a) && GET_CODE (b) == SYMBOL_REF)
	{
	  /* (plus reg sym) -- attach the offset to the symbol.  */
	  if (off != 0)
	    {
	      base = gen_rtx_PLUS (Pmode, a, plus_constant(Pmode, b, off));
	      off = 0;
	    }
	}
      else if (REG_P (a) && GET_CODE (b) == PLUS)
	{
	  /* (plus reg (plus sym N)) -- fold N, attach total offset.  */
          off += INTVAL (XEXP (b, 1));
	  base = gen_rtx_PLUS (Pmode, a, plus_constant(Pmode, XEXP (b, 0), off));
	  off = 0;
	}
      else
        {
	  debug_rtx(base);
	  gcc_unreachable ();
	}
    }
  if (code == POST_INC)
    tmp = gen_rtx_POST_INC (SImode, base);
  else if (off == 0 || (REG_P (base) && code == REG))
    tmp = base;
  else
    tmp = plus_constant (Pmode, base, off);
  return gen_rtx_MEM (mode, tmp);
}
1747 
/* Output a block copy (movmemsi) of INTVAL (OPERANDS[2]) bytes from
   OPERANDS[1] to OPERANDS[0], as a sequence of movq/movl/movw/movb,
   widest pieces first.  All but the final move are emitted directly;
   the template for the final move is returned.  When a source/dest
   address register dies in INSN, post-increment addressing is used
   instead of growing displacements.  */

const char *
vax_output_movmemsi (rtx insn, rtx *operands)
{
  HOST_WIDE_INT n = INTVAL (operands[2]);
  HOST_WIDE_INT off;
  rtx src, dest;
  const char *pat = NULL;
  const enum rtx_code *src_codes;
  const enum rtx_code *dest_codes;
  int code_idx = 0;
  int mode_idx;

  static const enum machine_mode xmodes[4] =
    {
      QImode, HImode, SImode, DImode
    };
  static const char * const pats[4] =
    {
      "movb %1,%0", "movw %1,%0", "movl %1,%0", "movq %1,%0",
    };
  /* codes[0]: base register survives -- use displacement addressing.
     codes[1]: base register dies -- use (reg)+, and a plain (reg)
     deref for the final move (index 2).  */
  static const enum rtx_code codes[2][3] =
    {
      { PLUS, PLUS, PLUS },
      { POST_INC, POST_INC, REG },
    };

  src = XEXP (operands[1], 0);

  src_codes =
    codes[REG_P (src) && find_regno_note (insn, REG_DEAD, REGNO(src))];

  dest = XEXP (operands[0], 0);

  dest_codes =
    codes[REG_P (dest) && find_regno_note (insn, REG_DEAD, REGNO(dest))];

  /* Walk modes from DImode down to QImode, emitting as many moves of
     each size as still fit; PAT always lags one move behind so the
     last one can be returned instead of emitted.  */
  for (off = 0, code_idx = 0, mode_idx = 3; mode_idx >= 0; mode_idx--)
    {
      const enum machine_mode mode = xmodes[mode_idx];
      const HOST_WIDE_INT mode_len = GET_MODE_SIZE (mode);
      for (; n >= mode_len; n -= mode_len, off += mode_len)
	{
	  if (pat != NULL)
	    output_asm_insn (pat, operands);
	  if (n == mode_len)
	    code_idx = 2;
	  operands[0] = mkrtx(dest_codes[code_idx], mode, dest, off);
	  operands[1] = mkrtx(src_codes[code_idx], mode, src, off);
	  if (pat == NULL)
	    code_idx = 1;
	  pat = pats[mode_idx];
	}
    }

  return pat;
}
1804 
1805 /* True if X is an rtx for a constant that is a valid address.  */
1806 
1807 bool
1808 legitimate_constant_address_p (rtx x)
1809 {
1810   if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1811 	  || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1812     return true;
1813   if (GET_CODE (x) != CONST)
1814     return false;
1815 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1816   if (flag_pic
1817       && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1818       && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1819     return false;
1820 #endif
1821    gcc_assert (! REG_P (x));
1822    return true;
1823 }
1824 
1825 bool
1826 legitimate_pic_operand_p (rtx x)
1827 {
1828 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1829   if (GET_CODE (x) != CONST)
1830     return true;
1831   if (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1832       && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1833     return false;
1834 #endif
1835   return true;
1836 }
1837 
/* The other macros defined here are used only in legitimate_address_p ().  */

/* Nonzero if X is a hard reg that can be used as an index
   or, if not strict, if it is a pseudo reg.
   Note: both macros evaluate X more than once, so pass only simple
   expressions.  */
#define	INDEX_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))

/* Nonzero if X is a hard reg that can be used as a base reg
   or, if not strict, if it is a pseudo reg.  */
#define	BASE_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1849 
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS

/* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
   are no SYMBOL_REFs for external symbols present.  INDIRECT is true
   when the address will itself be fetched from memory (one more level
   of indirection), which is never allowed for non-local symbols under
   PIC.  */

static bool
indirectable_constant_address_p (rtx x, bool indirect)
{
  if (GET_CODE (x) == SYMBOL_REF)
    return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;

  if (GET_CODE (x) == CONST)
    return !flag_pic
	   || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
	   || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));

  return CONSTANT_ADDRESS_P (x);
}

#else /* not NO_EXTERNAL_INDIRECT_ADDRESS */

/* Without the external-indirect restriction this is just
   CONSTANT_ADDRESS_P.  */

static bool
indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
{
  return CONSTANT_ADDRESS_P (x);
}

#endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1878 
1879 /* True if X is an address which can be indirected.  External symbols
1880    could be in a sharable image library, so we disallow those.  */
1881 
1882 static bool
1883 indirectable_address_p (rtx x, bool strict, bool indirect)
1884 {
1885   if (indirectable_constant_address_p (x, indirect)
1886       || BASE_REGISTER_P (x, strict))
1887     return true;
1888   if (GET_CODE (x) != PLUS
1889       || !BASE_REGISTER_P (XEXP (x, 0), strict)
1890       || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1891     return false;
1892   return indirectable_constant_address_p (XEXP (x, 1), indirect);
1893 }
1894 
/* Return true if x is a valid address not using indexing.
   (This much is the easy part.)  */
static bool
nonindexed_address_p (rtx x, bool strict)
{
  rtx xfoo0;
  if (REG_P (x))
    {
      /* During reload a pseudo may stand for a memory location; check
	 that the equivalent memory address is itself indirectable.  */
      if (! reload_in_progress
	  || reg_equiv_mem (REGNO (x)) == 0
	  || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
	return true;
    }
  if (indirectable_constant_address_p (x, false))
    return true;
  if (indirectable_address_p (x, strict, false))
    return true;
  /* XEXP (x, 0) is only meaningful for the MEM / PRE_DEC / POST_INC
     forms tested below; it is merely fetched here.  */
  xfoo0 = XEXP (x, 0);
  if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
    return true;
  if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
      && BASE_REGISTER_P (xfoo0, strict))
    return true;
  return false;
}
1920 
1921 /* True if PROD is either a reg times size of mode MODE and MODE is less
1922    than or equal 8 bytes, or just a reg if MODE is one byte.  */
1923 
1924 static bool
1925 index_term_p (rtx prod, machine_mode mode, bool strict)
1926 {
1927   rtx xfoo0, xfoo1;
1928 
1929   if (GET_MODE_SIZE (mode) == 1)
1930     return BASE_REGISTER_P (prod, strict);
1931 
1932   if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1933     return false;
1934 
1935   xfoo0 = XEXP (prod, 0);
1936   xfoo1 = XEXP (prod, 1);
1937 
1938   if (CONST_INT_P (xfoo0)
1939       && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1940       && INDEX_REGISTER_P (xfoo1, strict))
1941     return true;
1942 
1943   if (CONST_INT_P (xfoo1)
1944       && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1945       && INDEX_REGISTER_P (xfoo0, strict))
1946     return true;
1947 
1948   return false;
1949 }
1950 
1951 /* Return true if X is the sum of a register
1952    and a valid index term for mode MODE.  */
1953 static bool
1954 reg_plus_index_p (rtx x, machine_mode mode, bool strict)
1955 {
1956   rtx xfoo0, xfoo1;
1957 
1958   if (GET_CODE (x) != PLUS)
1959     return false;
1960 
1961   xfoo0 = XEXP (x, 0);
1962   xfoo1 = XEXP (x, 1);
1963 
1964   if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1965     return true;
1966 
1967   if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1968     return true;
1969 
1970   return false;
1971 }
1972 
1973 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address.  */
1974 static bool
1975 indexable_address_p (rtx xfoo0, rtx xfoo1, machine_mode mode, bool strict)
1976 {
1977   if (!CONSTANT_ADDRESS_P (xfoo0))
1978     return false;
1979   if (BASE_REGISTER_P (xfoo1, strict))
1980     return !flag_pic || mode == QImode;
1981   if (flag_pic && symbolic_operand (xfoo0, SImode))
1982     return false;
1983   return reg_plus_index_p (xfoo1, mode, strict);
1984 }
1985 
1986 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1987    that is a valid memory address for an instruction.
1988    The MODE argument is the machine mode for the MEM expression
1989    that wants to use this address.  */
1990 bool
1991 vax_legitimate_address_p (machine_mode mode, rtx x, bool strict)
1992 {
1993   rtx xfoo0, xfoo1;
1994 
1995   if (nonindexed_address_p (x, strict))
1996     return true;
1997 
1998   if (GET_CODE (x) != PLUS)
1999     return false;
2000 
2001   /* Handle <address>[index] represented with index-sum outermost */
2002 
2003   xfoo0 = XEXP (x, 0);
2004   xfoo1 = XEXP (x, 1);
2005 
2006   if (index_term_p (xfoo0, mode, strict)
2007       && nonindexed_address_p (xfoo1, strict))
2008     return true;
2009 
2010   if (index_term_p (xfoo1, mode, strict)
2011       && nonindexed_address_p (xfoo0, strict))
2012     return true;
2013 
2014   /* Handle offset(reg)[index] with offset added outermost */
2015 
2016   if (indexable_address_p (xfoo0, xfoo1, mode, strict)
2017       || indexable_address_p (xfoo1, xfoo0, mode, strict))
2018     return true;
2019 
2020   return false;
2021 }
2022 
2023 /* Return true if x (a legitimate address expression) has an effect that
2024    depends on the machine mode it is used for.  On the VAX, the predecrement
2025    and postincrement address depend thus (the amount of decrement or
2026    increment being the length of the operand) and all indexed address depend
2027    thus (because the index scale factor is the length of the operand).  */
2028 
2029 static bool
2030 vax_mode_dependent_address_p (const_rtx x, addr_space_t as ATTRIBUTE_UNUSED)
2031 {
2032   rtx xfoo0, xfoo1;
2033 
2034   /* Auto-increment cases are now dealt with generically in recog.c.  */
2035   if (GET_CODE (x) != PLUS)
2036     return false;
2037 
2038   xfoo0 = XEXP (x, 0);
2039   xfoo1 = XEXP (x, 1);
2040 
2041   if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
2042     return false;
2043   if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
2044     return false;
2045   if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
2046     return false;
2047   if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
2048     return false;
2049 
2050   return true;
2051 }
2052 
/* If X is a memory operand whose address is illegal for DImode add/sub
   (per illegal_addsub_di_memory_operand), load its address into a new
   pseudo and return a MEM through that register instead; otherwise
   return X unchanged.  */

static rtx
fixup_mathdi_operand (rtx x, machine_mode mode)
{
  if (illegal_addsub_di_memory_operand (x, mode))
    {
      rtx addr = XEXP (x, 0);
      rtx temp = gen_reg_rtx (Pmode);
      rtx offset = 0;
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
      /* Under PIC, split (const (plus sym off)) so only the symbol is
	 loaded into the register; the offset is re-added below.  */
      if (GET_CODE (addr) == CONST && flag_pic)
	{
	  offset = XEXP (XEXP (addr, 0), 1);
	  addr = XEXP (XEXP (addr, 0), 0);
	}
#endif
      emit_move_insn (temp, addr);
      if (offset)
	temp = gen_rtx_PLUS (Pmode, temp, offset);
      x = gen_rtx_MEM (DImode, temp);
    }
  return x;
}
2075 
2076 void
2077 vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
2078 {
2079   int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
2080   rtx temp;
2081 
2082   rtx (*gen_old_insn)(rtx, rtx, rtx);
2083   rtx (*gen_si_insn)(rtx, rtx, rtx);
2084   rtx (*gen_insn)(rtx, rtx, rtx);
2085 
2086   if (code == PLUS)
2087     {
2088       gen_old_insn = gen_adddi3_old;
2089       gen_si_insn = gen_addsi3;
2090       gen_insn = gen_adcdi3;
2091     }
2092   else if (code == MINUS)
2093     {
2094       gen_old_insn = gen_subdi3_old;
2095       gen_si_insn = gen_subsi3;
2096       gen_insn = gen_sbcdi3;
2097     }
2098   else
2099     gcc_unreachable ();
2100 
2101   /* If this is addition (thus operands are commutative) and if there is one
2102      addend that duplicates the desination, we want that addend to be the
2103      first addend.  */
2104   if (code == PLUS
2105       && rtx_equal_p (operands[0], operands[2])
2106       && !rtx_equal_p (operands[1], operands[2]))
2107     {
2108       temp = operands[2];
2109       operands[2] = operands[1];
2110       operands[1] = temp;
2111     }
2112 
2113   if (!TARGET_QMATH)
2114     {
2115       emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
2116     }
2117   else if (hi_only)
2118     {
2119       if (!rtx_equal_p (operands[0], operands[1])
2120 	  && (REG_P (operands[0]) && MEM_P (operands[1])))
2121 	{
2122 	  emit_move_insn (operands[0], operands[1]);
2123 	  operands[1] = operands[0];
2124 	}
2125 
2126       operands[0] = fixup_mathdi_operand (operands[0], DImode);
2127       operands[1] = fixup_mathdi_operand (operands[1], DImode);
2128       operands[2] = fixup_mathdi_operand (operands[2], DImode);
2129 
2130       if (!rtx_equal_p (operands[0], operands[1]))
2131 	emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
2132 			  operand_subword (operands[1], 0, 0, DImode));
2133 
2134       emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
2135 				 operand_subword (operands[1], 1, 0, DImode),
2136 				 operand_subword (operands[2], 1, 0, DImode)));
2137     }
2138   else
2139     {
2140       /* If are adding the same value together, that's really a multiply by 2,
2141 	 and that's just a left shift of 1.  */
2142       if (rtx_equal_p (operands[1], operands[2]))
2143 	{
2144 	  if (code == MINUS)
2145 	    emit_insn (gen_movdi (operands[0], const0_rtx));
2146 	  else
2147 	    emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
2148 	  return;
2149 	}
2150 
2151       operands[0] = fixup_mathdi_operand (operands[0], DImode);
2152 
2153       /* If an operand is the same as operand[0], use the operand[0] rtx
2154 	 because fixup will an equivalent rtx but not an equal one. */
2155 
2156       if (rtx_equal_p (operands[0], operands[1]))
2157 	operands[1] = operands[0];
2158       else
2159 	operands[1] = fixup_mathdi_operand (operands[1], DImode);
2160 
2161       if (rtx_equal_p (operands[0], operands[2]))
2162 	operands[2] = operands[0];
2163       else
2164 	operands[2] = fixup_mathdi_operand (operands[2], DImode);
2165 
2166       /* If we are subtracting not from ourselves [d = a - b], and because the
2167 	 carry ops are two operand only, we would need to do a move prior to
2168 	 the subtract.  And if d == b, we would need a temp otherwise
2169 	 [d = a, d -= d] and we end up with 0.  Instead we rewrite d = a - b
2170 	 into d = -b, d += a.  Since -b can never overflow, even if b == d,
2171 	 no temp is needed.
2172 
2173 	 If we are doing addition, since the carry ops are two operand, if
2174 	 we aren't adding to ourselves, move the first addend to the
2175 	 destination first.  */
2176 
2177       gcc_assert (operands[1] != const0_rtx || code == MINUS);
2178       if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
2179 	{
2180 	  if (code == MINUS && CONSTANT_P (operands[1]))
2181 	    {
2182 	      temp = gen_reg_rtx (DImode);
2183 	      emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
2184 	      code = PLUS;
2185 	      gen_insn = gen_adcdi3;
2186 	      operands[2] = operands[1];
2187 	      operands[1] = operands[0];
2188 	    }
2189 	  else
2190 	    emit_move_insn (operands[0], operands[1]);
2191 	}
2192 
2193       /* Subtracting a constant will have been rewritten to an addition of the
2194 	 negative of that constant before we get here.  */
2195       gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
2196       emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
2197     }
2198 }
2199 
2200 bool
2201 adjacent_operands_p (rtx lo, rtx hi, machine_mode mode)
2202 {
2203   HOST_WIDE_INT lo_offset;
2204   HOST_WIDE_INT hi_offset;
2205 
2206   if (GET_CODE (lo) != GET_CODE (hi))
2207     return false;
2208 
2209   if (REG_P (lo))
2210     return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
2211   if (CONST_INT_P (lo))
2212     return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
2213   if (CONST_INT_P (lo))
2214     return mode != SImode;
2215 
2216   if (!MEM_P (lo))
2217     return false;
2218 
2219   if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
2220     return false;
2221 
2222   lo = XEXP (lo, 0);
2223   hi = XEXP (hi, 0);
2224 
2225   if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
2226     return rtx_equal_p (lo, hi);
2227 
2228   switch (GET_CODE (lo))
2229     {
2230     case REG:
2231     case SYMBOL_REF:
2232       lo_offset = 0;
2233       break;
2234     case CONST:
2235       lo = XEXP (lo, 0);
2236       /* FALLTHROUGH */
2237     case PLUS:
2238       if (!CONST_INT_P (XEXP (lo, 1)))
2239 	return false;
2240       lo_offset = INTVAL (XEXP (lo, 1));
2241       lo = XEXP (lo, 0);
2242       break;
2243     default:
2244       return false;
2245     }
2246 
2247   switch (GET_CODE (hi))
2248     {
2249     case REG:
2250     case SYMBOL_REF:
2251       hi_offset = 0;
2252       break;
2253     case CONST:
2254       hi = XEXP (hi, 0);
2255       /* FALLTHROUGH */
2256     case PLUS:
2257       if (!CONST_INT_P (XEXP (hi, 1)))
2258 	return false;
2259       hi_offset = INTVAL (XEXP (hi, 1));
2260       hi = XEXP (hi, 0);
2261       break;
2262     default:
2263       return false;
2264     }
2265 
2266   if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
2267     return false;
2268 
2269   return rtx_equal_p (lo, hi)
2270 	 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
2271 }
2272 
2273 /* Output assembler code for a block containing the constant parts
2274    of a trampoline, leaving space for the variable parts.  */
2275 
2276 /* On the VAX, the trampoline contains an entry mask and two instructions:
2277      .word NN
2278      movl $STATIC,r0   (store the functions static chain)
2279      jmp  *$FUNCTION   (jump to function code at address FUNCTION)  */
2280 
static void
vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
{
  /* Offset 0: the entry mask (.word NN); filled in by
     vax_trampoline_init from the target function's code.  */
  assemble_aligned_integer (2, const0_rtx);
  /* Offset 2: the two leading bytes of "movl $STATIC,reg"
     (immediate-operand form).  */
  assemble_aligned_integer (2, GEN_INT (0x8fd0));
  /* Offset 4: placeholder for the movl immediate -- the static chain
     value, patched in at runtime.  */
  assemble_aligned_integer (4, const0_rtx);
  /* Offset 8: the movl destination operand; 0x50 + regno selects the
     static-chain register.  */
  assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
  /* Offset 9: the two leading bytes of "jmp *$FUNCTION"
     (absolute-address form).  */
  assemble_aligned_integer (2, GEN_INT (0x9f17));
  /* Offset 11: placeholder for the jump target address, patched in at
     runtime.  */
  assemble_aligned_integer (4, const0_rtx);
}
2291 
2292 /* We copy the register-mask from the function's pure code
2293    to the start of the trampoline.  */
2294 
static void
vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx mem;

  /* Copy the fixed template (entry mask, movl, jmp skeleton) into the
     trampoline block M_TRAMP.  */
  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  /* Offset 0: copy the register-save entry mask from the first word of
     the target function's code.  */
  mem = adjust_address (m_tramp, HImode, 0);
  emit_move_insn (mem, gen_const_mem (HImode, fnaddr));

  /* Offset 4: the static chain value CXT -- the immediate operand of
     the template's movl.  */
  mem = adjust_address (m_tramp, SImode, 4);
  emit_move_insn (mem, cxt);
  /* Offset 11: the jmp target -- the function's code just past its
     2-byte entry mask.  */
  mem = adjust_address (m_tramp, SImode, 11);
  emit_move_insn (mem, plus_constant (Pmode, fnaddr, 2));
  /* Ensure the instruction stream observes the freshly written code.  */
  emit_insn (gen_sync_istream ());
}
2313 
2314 /* Value is the number of bytes of arguments automatically
2315    popped when returning from a subroutine call.
2316    FUNDECL is the declaration node of the function (as a tree),
2317    FUNTYPE is the data type of the function (as a tree),
2318    or for a library call it is an identifier node for the subroutine name.
2319    SIZE is the number of bytes of arguments passed on the stack.
2320 
2321    On the VAX, the RET insn pops a maximum of 255 args for any function.  */
2322 
2323 static int
2324 vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2325 		      tree funtype ATTRIBUTE_UNUSED, int size)
2326 {
2327   return size > 255 * 4 ? 0 : size;
2328 }
2329 
2330 /* Define where to put the arguments to a function.
2331    Value is zero to push the argument on the stack,
2332    or a hard register in which to store the argument.
2333 
2334    MODE is the argument's machine mode.
2335    TYPE is the data type of the argument (as a tree).
2336     This is null for libcalls where that information may
2337     not be available.
2338    CUM is a variable of type CUMULATIVE_ARGS which gives info about
2339     the preceding args and about the function being called.
2340    NAMED is nonzero if this argument is a named parameter
2341     (otherwise it is an extra parameter matching an ellipsis).  */
2342 
2343 /* On the VAX all args are pushed.  */
2344 
static rtx
vax_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
		  machine_mode mode ATTRIBUTE_UNUSED,
		  const_tree type ATTRIBUTE_UNUSED,
		  bool named ATTRIBUTE_UNUSED)
{
  /* Returning NULL_RTX directs the middle end to pass every argument
     on the stack; no argument is ever passed in a register.  */
  return NULL_RTX;
}
2353 
2354 /* Update the data in CUM to advance over an argument of mode MODE and
2355    data type TYPE.  (TYPE is null for libcalls where that information
2356    may not be available.)  */
2357 
2358 static void
2359 vax_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
2360 			  const_tree type, bool named ATTRIBUTE_UNUSED)
2361 {
2362   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2363 
2364   *cum += (mode != BLKmode
2365 	   ? (GET_MODE_SIZE (mode) + 3) & ~3
2366 	   : (int_size_in_bytes (type) + 3) & ~3);
2367 }
2368 
2369 bool
2370 vax_decomposed_dimode_operand_p (rtx lo, rtx hi)
2371 {
2372   HOST_WIDE_INT lo_offset = 0;
2373   HOST_WIDE_INT hi_offset = 0;
2374 
2375   /* If the codes aren't the same, can't be a DImode operand.  */
2376   if (GET_CODE (lo) != GET_CODE (hi))
2377     return false;
2378 
2379   /* If a register, hi regno must be one more than the lo regno.  */
2380   if (REG_P (lo))
2381     return REGNO (lo) + 1 == REGNO (hi);
2382 
2383   /* If not memory, can't be a DImode operand.  */
2384   if (!MEM_P (lo))
2385     return false;
2386 
2387   /* Get addresses of memory operands.  */
2388   lo = XEXP(lo, 0);
2389   hi = XEXP(hi, 0);
2390 
2391   /* If POST_INC, regno must match.  */
2392   if (GET_CODE (lo) == POST_INC && GET_CODE (hi) == POST_INC)
2393     return REGNO (XEXP (lo, 0)) == REGNO (XEXP (hi, 0));
2394 
2395   if (GET_CODE (lo) == PLUS)
2396     {
2397       /* If PLUS or MULT, this must an indexed address so fail.  */
2398       if (GET_CODE (XEXP (lo, 0)) == PLUS
2399 	  || GET_CODE (XEXP (lo, 0)) == MULT
2400 	  || !CONST_INT_P (XEXP (lo, 1)))
2401 	return false;
2402       lo_offset = INTVAL (XEXP (lo, 1));
2403       lo = XEXP(lo, 0);
2404     }
2405 
2406   if (GET_CODE (hi) == PLUS)
2407     {
2408       /* If PLUS or MULT, this must an indexed address so fail.  */
2409       if (GET_CODE (XEXP (hi, 0)) == PLUS
2410 	  || GET_CODE (XEXP (hi, 0)) == MULT
2411 	  || !CONST_INT_P (XEXP (hi, 1)))
2412 	return false;
2413       hi_offset = INTVAL (XEXP (hi, 1));
2414       hi = XEXP(hi, 0);
2415     }
2416 
2417   return rtx_equal_p(lo, hi) && lo_offset + 4 == hi_offset;
2418 }
2419