gcc/
[official-gcc.git] / gcc / config / vax / vax.c
blob8880d6493fa1df583b38f7bddbe8af1291fa7e74
1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "dominance.h"
26 #include "cfg.h"
27 #include "cfgrtl.h"
28 #include "cfganal.h"
29 #include "lcm.h"
30 #include "cfgbuild.h"
31 #include "cfgcleanup.h"
32 #include "predict.h"
33 #include "basic-block.h"
34 #include "df.h"
35 #include "alias.h"
36 #include "symtab.h"
37 #include "tree.h"
38 #include "calls.h"
39 #include "varasm.h"
40 #include "regs.h"
41 #include "hard-reg-set.h"
42 #include "insn-config.h"
43 #include "conditions.h"
44 #include "function.h"
45 #include "output.h"
46 #include "insn-attr.h"
47 #include "recog.h"
48 #include "flags.h"
49 #include "expmed.h"
50 #include "dojump.h"
51 #include "explow.h"
52 #include "emit-rtl.h"
53 #include "stmt.h"
54 #include "expr.h"
55 #include "insn-codes.h"
56 #include "optabs.h"
57 #include "debug.h"
58 #include "diagnostic-core.h"
59 #include "reload.h"
60 #include "tm-preds.h"
61 #include "tm-constrs.h"
62 #include "tm_p.h"
63 #include "target.h"
64 #include "builtins.h"
66 #include "target-def.h"
/* Forward declarations for the static functions that implement the
   target hooks installed in the targetm vector below.  Functions not
   defined in this chunk (vax_legitimate_address_p, vax_function_arg,
   vax_trampoline_init, ...) are defined later in this file.  */
static void vax_option_override (void);
static bool vax_legitimate_address_p (machine_mode, rtx, bool);
static void vax_file_start (void);
static void vax_init_libfuncs (void);
static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
				 HOST_WIDE_INT, tree);
static int vax_address_cost_1 (rtx);
static int vax_address_cost (rtx, machine_mode, addr_space_t, bool);
static bool vax_rtx_costs (rtx, int, int, int, int *, bool);
static rtx vax_function_arg (cumulative_args_t, machine_mode,
			     const_tree, bool);
static void vax_function_arg_advance (cumulative_args_t, machine_mode,
				      const_tree, bool);
static rtx vax_struct_value_rtx (tree, int);
static rtx vax_builtin_setjmp_frame_value (void);
static void vax_asm_trampoline_template (FILE *);
static void vax_trampoline_init (rtx, tree, rtx);
static int vax_return_pops_args (tree, tree, int);
static bool vax_mode_dependent_address_p (const_rtx, addr_space_t);
/* Initialize the GCC target structure.  Each #undef/#define pair
   overrides one hook's default with the VAX implementation; the
   targetm definition at the bottom snapshots the result.  */

/* Assembler output.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START vax_file_start
#undef TARGET_ASM_FILE_START_APP_OFF
#define TARGET_ASM_FILE_START_APP_OFF true

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS vax_init_libfuncs

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

/* Costs used by the RTL optimizers.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS vax_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST vax_address_cost

/* Calling conventions.  */
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG vax_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value

/* Addressing.  */
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p

/* The VAX calling convention always needs a frame pointer.  */
#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT vax_trampoline_init
#undef TARGET_RETURN_POPS_ARGS
#define TARGET_RETURN_POPS_ARGS vax_return_pops_args

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE vax_option_override

struct gcc_target targetm = TARGET_INITIALIZER;
/* Set global variables as needed for the options enabled.
   Implements TARGET_OPTION_OVERRIDE.  */

static void
vax_option_override (void)
{
  /* We're VAX floating point, not IEEE floating point.  With -mg,
     DFmode uses the VAX G_floating format instead of D_floating.  */
  if (TARGET_G_FLOAT)
    REAL_MODE_FORMAT (DFmode) = &vax_g_format;

  /* Give the subtarget (e.g. NetBSD, VMS) a chance to adjust options.  */
#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
}
/* Attach a REG_CFA_OFFSET note to INSN recording that SRC has been
   saved in the frame slot at OFFSET bytes from the frame pointer.
   Used by the prologue to describe register saves for unwind info.  */

static void
vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
{
  rtx x;

  x = plus_constant (Pmode, frame_pointer_rtx, offset);
  x = gen_rtx_MEM (SImode, x);
  x = gen_rtx_SET (x, src);
  add_reg_note (insn, REG_CFA_OFFSET, x);
}
/* Emit the RTL for function entry: the procedure entry mask that makes
   the CALLS/CALLG instruction save the call-saved registers, the
   REG_CFA_* notes describing the resulting frame for the unwinder, and
   the stack adjustment for the local frame.

   Refers to `df_regs_ever_live_p' to determine which registers to
   save; a register is saved iff it is ever live in the function and
   not call-used.  */

void
vax_expand_prologue (void)
{
  int regno, offset;
  int mask = 0;
  HOST_WIDE_INT size;
  rtx insn;

  /* Build the entry mask: one bit per call-saved register used.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
      mask |= 1 << regno;

  insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* The layout of the CALLG/S stack frame is follows:

		<- CFA, AP
	...	Registers saved as specified by MASK
	return-addr
	old fp
	old ap
	old psw
	zero
		<- FP, SP

     The rest of the prologue will adjust the SP for the local frame.  */

  /* Fixed frame slots saved by the hardware CALLS/CALLG sequence.  */
  vax_add_reg_cfa_offset (insn, 4, arg_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 8, frame_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 12, pc_rtx);

  /* Registers saved via the entry mask follow, 4 bytes apiece.  */
  offset = 16;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (mask & (1 << regno))
      {
	vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
	offset += 4;
      }

  /* Because add_reg_note pushes the notes, adding this last means that
     it will be processed first.  This is required to allow the other
     notes be interpreted properly.  */
  add_reg_note (insn, REG_CFA_DEF_CFA,
		plus_constant (Pmode, frame_pointer_rtx, offset));

  /* Allocate the local stack frame.  */
  size = get_frame_size ();
  size -= STARTING_FRAME_OFFSET;
  emit_insn (gen_addsi3 (stack_pointer_rtx,
			 stack_pointer_rtx, GEN_INT (-size)));

  /* Do not allow instructions referencing local stack memory to be
     scheduled before the frame is allocated.  This is more pedantic
     than anything else, given that VAX does not currently have a
     scheduling description.  */
  emit_insn (gen_blockage ());
}
/* Implements TARGET_ASM_FILE_START.  When debugging with stabs, we
   want to output an extra dummy label so that gas can distinguish
   between D_float and G_float prior to processing the .stabs
   directive identifying type double.  */

static void
vax_file_start (void)
{
  default_file_start ();

  if (write_symbols == DBX_DEBUG)
    fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
}
/* Implements TARGET_INIT_LIBFUNCS.  We can use the BSD C library
   routines for the libgcc calls that are still generated, since
   that's what they boil down to anyways.  When ELF, avoid the user's
   namespace by using the reserved "__"-prefixed names.  */

static void
vax_init_libfuncs (void)
{
  if (TARGET_BSD_DIVMOD)
    {
      set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
      set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
    }
}
/* Split each of the N DImode OPERANDS of INSN (an insn of rtx code
   CODE) into a pair of SImode operands: the high word stays in
   OPERANDS[i], the low word is stored in LOW[i].  For autoincrement/
   autodecrement memory operands both halves share the same side-effect
   address; for a dying register-indirect memory operand (when
   optimizing for size) the low half is rewritten as post-increment so
   the register can serve both halves.  */

static void
split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
			 rtx * low, int n)
{
  int i;

  for (i = 0; i < n; i++)
    low[i] = 0;

  for (i = 0; i < n; i++)
    {
      if (MEM_P (operands[i])
	  && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
	      || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
	{
	  /* Autoinc/autodec address: both halves reference it as-is;
	     the side effect supplies the second word.  */
	  rtx addr = XEXP (operands[i], 0);
	  operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
	}
      else if (optimize_size && MEM_P (operands[i])
	       && REG_P (XEXP (operands[i], 0))
	       && (code != MINUS || operands[1] != const0_rtx)
	       && find_regno_note (insn, REG_DEAD,
				   REGNO (XEXP (operands[i], 0))))
	{
	  /* The base register dies here, so it may be reused as a
	     post-increment pointer for the low word.  */
	  low[i] = gen_rtx_MEM (SImode,
				gen_rtx_POST_INC (Pmode,
						  XEXP (operands[i], 0)));
	  operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
	}
      else
	{
	  /* Ordinary operand: take the two subwords directly.  */
	  low[i] = operand_subword (operands[i], 0, 0, DImode);
	  operands[i] = operand_subword (operands[i], 1, 0, DImode);
	}
    }
}
/* Output to FILE the VAX assembler syntax for the address rtx ADDR.
   Handles indirection (`*'), register deferred `(rN)', autodecrement
   `-(rN)', autoincrement `(rN)+', and the displacement / base /
   index forms built from a PLUS.  Diagnoses address forms that are
   not representable in PIC code via output_operand_lossage.  */

void
print_operand_address (FILE * file, rtx addr)
{
  rtx orig = addr;
  rtx reg1, breg, ireg;
  rtx offset;

 retry:
  switch (GET_CODE (addr))
    {
    case MEM:
      fprintf (file, "*");
      addr = XEXP (addr, 0);
      goto retry;

    case REG:
      fprintf (file, "(%s)", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:
      /* There can be either two or three things added here.  One must be a
	 REG.  One can be either a REG or a MULT of a REG and an appropriate
	 constant, and the third can only be a constant or a MEM.

	 We get these two or three things and put the constant or MEM in
	 OFFSET, the MULT or REG in IREG, and the REG in BREG.  If we have
	 a register and can't tell yet if it is a base or index register,
	 put it into REG1.  */

      reg1 = 0; ireg = 0; breg = 0; offset = 0;

      if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	  || MEM_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	       || MEM_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 1)) == MULT)
	{
	  ireg = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 0)) == MULT)
	{
	  ireg = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (REG_P (XEXP (addr, 1)))
	{
	  reg1 = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (REG_P (XEXP (addr, 0)))
	{
	  reg1 = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else
	gcc_unreachable ();

      if (REG_P (addr))
	{
	  if (reg1)
	    ireg = addr;
	  else
	    reg1 = addr;
	}
      else if (GET_CODE (addr) == MULT)
	ireg = addr;
      else
	{
	  /* Remaining summand is itself a PLUS: classify both of its
	     operands the same way as above.  */
	  gcc_assert (GET_CODE (addr) == PLUS);
	  if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	      || MEM_P (XEXP (addr, 0)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 0),
					    INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 0)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 0)));
		    }
		}
	      /* NOTE(review): this unconditional assignment discards the
		 merged value computed just above when OFFSET was already
		 set — looks suspicious; confirm against upstream before
		 changing.  */
	      offset = XEXP (addr, 0);
	    }
	  else if (REG_P (XEXP (addr, 0)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
	      else
		reg1 = XEXP (addr, 0);
	    }
	  else
	    {
	      gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
	      gcc_assert (!ireg);
	      ireg = XEXP (addr, 0);
	    }

	  if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	      || MEM_P (XEXP (addr, 1)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 1),
					    INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 1)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 1)));
		    }
		}
	      /* NOTE(review): same unconditional overwrite as above.  */
	      offset = XEXP (addr, 1);
	    }
	  else if (REG_P (XEXP (addr, 1)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
	      else
		reg1 = XEXP (addr, 1);
	    }
	  else
	    {
	      gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
	      gcc_assert (!ireg);
	      ireg = XEXP (addr, 1);
	    }
	}

      /* If REG1 is nonzero, figure out if it is a base or index register.  */
      if (reg1)
	{
	  if (breg
	      || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
	      || (offset
		  && (MEM_P (offset)
		      || (flag_pic && symbolic_operand (offset, SImode)))))
	    {
	      gcc_assert (!ireg);
	      ireg = reg1;
	    }
	  else
	    breg = reg1;
	}

      if (offset != 0)
	{
	  if (flag_pic && symbolic_operand (offset, SImode))
	    {
	      if (breg && ireg)
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol used with both base and indexed registers");
		}

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
	      if (flag_pic > 1 && GET_CODE (offset) == CONST
		  && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
		  && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol with offset used in PIC mode");
		}
#endif

	      /* symbol(reg) isn't PIC, but symbol[reg] is.  */
	      if (breg)
		{
		  ireg = breg;
		  breg = 0;
		}
	    }

	  output_address (offset);
	}

      if (breg != 0)
	fprintf (file, "(%s)", reg_names[REGNO (breg)]);

      if (ireg != 0)
	{
	  if (GET_CODE (ireg) == MULT)
	    ireg = XEXP (ireg, 0);
	  gcc_assert (REG_P (ireg));
	  fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
	}
      break;

    default:
      output_addr_const (file, addr);
    }
}
/* Output to FILE the assembler text for operand X, modified by the
   output-template letter CODE:
     '#'  D/G float letter    '|'  register prefix
     'c'  condition name      'C'  reversed condition name
     'D'  negative constant in hex
     'P'  constant plus 1     'N'  one's complement of constant
     'R'  32 minus constant (rotl cannot take negative counts)
     'H'/'B'  16/8-bit complement of constant
     'h'/'b'  16/8-bit negation of constant
     'M'  mask of the low INTVAL(x) bits, complemented
     'x'  constant in hex
   With no code letter, registers, memory, float constants and
   immediates are printed in their default VAX syntax.  */

void
print_operand (FILE *file, rtx x, int code)
{
  if (code == '#')
    fputc (ASM_DOUBLE_CHAR, file);
  else if (code == '|')
    fputs (REGISTER_PREFIX, file);
  else if (code == 'c')
    fputs (cond_name (x), file);
  else if (code == 'C')
    fputs (rev_cond_name (x), file);
  else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
    fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
  else if (code == 'P' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
  else if (code == 'N' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
  /* rotl instruction cannot deal with negative arguments.  */
  else if (code == 'R' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
  else if (code == 'H' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
  else if (code == 'h' && CONST_INT_P (x))
    fprintf (file, "$%d", (short) - INTVAL (x));
  else if (code == 'B' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
  else if (code == 'b' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
  else if (code == 'M' && CONST_INT_P (x))
    fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
  else if (code == 'x' && CONST_INT_P (x))
    fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
  else if (REG_P (x))
    fprintf (file, "%s", reg_names[REGNO (x)]);
  else if (MEM_P (x))
    output_address (XEXP (x, 0));
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* F_floating immediate.  */
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0f%s", dstr);
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      /* D_floating or G_floating immediate, per ASM_DOUBLE_CHAR.  */
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
    }
  else
    {
      if (flag_pic > 1 && symbolic_operand (x, SImode))
	{
	  debug_rtx (x);
	  output_operand_lossage ("symbol used as immediate operand");
	}
      putc ('$', file);
      output_addr_const (file, x);
    }
}
582 const char *
583 cond_name (rtx op)
585 switch (GET_CODE (op))
587 case NE:
588 return "neq";
589 case EQ:
590 return "eql";
591 case GE:
592 return "geq";
593 case GT:
594 return "gtr";
595 case LE:
596 return "leq";
597 case LT:
598 return "lss";
599 case GEU:
600 return "gequ";
601 case GTU:
602 return "gtru";
603 case LEU:
604 return "lequ";
605 case LTU:
606 return "lssu";
608 default:
609 gcc_unreachable ();
613 const char *
614 rev_cond_name (rtx op)
616 switch (GET_CODE (op))
618 case EQ:
619 return "neq";
620 case NE:
621 return "eql";
622 case LT:
623 return "geq";
624 case LE:
625 return "gtr";
626 case GT:
627 return "leq";
628 case GE:
629 return "lss";
630 case LTU:
631 return "gequ";
632 case LEU:
633 return "gtru";
634 case GTU:
635 return "lequ";
636 case GEU:
637 return "lssu";
639 default:
640 gcc_unreachable ();
644 static bool
645 vax_float_literal (rtx c)
647 machine_mode mode;
648 REAL_VALUE_TYPE r, s;
649 int i;
651 if (GET_CODE (c) != CONST_DOUBLE)
652 return false;
654 mode = GET_MODE (c);
656 if (c == const_tiny_rtx[(int) mode][0]
657 || c == const_tiny_rtx[(int) mode][1]
658 || c == const_tiny_rtx[(int) mode][2])
659 return true;
661 REAL_VALUE_FROM_CONST_DOUBLE (r, c);
663 for (i = 0; i < 7; i++)
665 int x = 1 << i;
666 bool ok;
667 real_from_integer (&s, mode, x, SIGNED);
669 if (REAL_VALUES_EQUAL (r, s))
670 return true;
671 ok = exact_real_inverse (mode, &s);
672 gcc_assert (ok);
673 if (REAL_VALUES_EQUAL (r, s))
674 return true;
676 return false;
/* Return the cost in cycles of a memory address, relative to register
   indirect.

   Each of the following adds the indicated number of cycles:

     1 - symbolic address
     1 - pre-decrement
     1 - indexing and/or offset(register)
     2 - indirect  */

static int
vax_address_cost_1 (rtx addr)
{
  int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
  rtx plus_op0 = 0, plus_op1 = 0;
 restart:
  switch (GET_CODE (addr))
    {
    case PRE_DEC:
      predec = 1;
      /* FALLTHRU */
    case REG:
    case SUBREG:
    case POST_INC:
      reg = 1;
      break;
    case MULT:
      indexed = 1;	/* 2 on VAX 2 */
      break;
    case CONST_INT:
      /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
      if (offset == 0)
	offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
      break;
    case CONST:
    case SYMBOL_REF:
      offset = 1;	/* 2 on VAX 2 */
      break;
    case LABEL_REF:	/* this is probably a byte offset from the pc */
      if (offset == 0)
	offset = 1;
      break;
    case PLUS:
      /* Stash the operands and iterate; processed below.  */
      if (plus_op0)
	plus_op1 = XEXP (addr, 0);
      else
	plus_op0 = XEXP (addr, 0);
      addr = XEXP (addr, 1);
      goto restart;
    case MEM:
      indir = 2;	/* 3 on VAX 2 */
      addr = XEXP (addr, 0);
      goto restart;
    default:
      break;
    }

  /* Up to 3 things can be added in an address.  They are stored in
     plus_op0, plus_op1, and addr.  */

  if (plus_op0)
    {
      addr = plus_op0;
      plus_op0 = 0;
      goto restart;
    }
  if (plus_op1)
    {
      addr = plus_op1;
      plus_op1 = 0;
      goto restart;
    }
  /* Indexing and register+offset can both be used (except on a VAX 2)
     without increasing execution time over either one alone.  */
  if (reg && indexed && offset)
    return reg + indir + offset + predec;
  return reg + indexed + indir + offset + predec;
}
759 static int
760 vax_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
761 addr_space_t as ATTRIBUTE_UNUSED,
762 bool speed ATTRIBUTE_UNUSED)
764 return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
/* Cost of an expression on a VAX.  This version has costs tuned for the
   CVAX chip (found in the VAX 3 series) with comments for variations on
   other models.  Implements TARGET_RTX_COSTS.

   FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
   and FLOAT_TRUNCATE.  We need a -mcpu option to allow provision of
   costs on a per cpu basis.  */

static bool
vax_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
	       int *total, bool speed ATTRIBUTE_UNUSED)
{
  machine_mode mode = GET_MODE (x);
  int i = 0;				   /* may be modified in switch */
  const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */

  switch (code)
    {
      /* On a VAX, constants from 0..63 are cheap because they can use the
	 1 byte literal constant format.  Compare to -1 should be made cheap
	 so that decrement-and-branch insns can be formed more easily (if
	 the value -1 is copied to a register some decrement-and-branch
	 patterns will not match).  */
    case CONST_INT:
      if (INTVAL (x) == 0)
	{
	  *total = 0;
	  return true;
	}
      if (outer_code == AND)
	{
	  *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
	  return true;
	}
      if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
	  || (outer_code == COMPARE
	      && INTVAL (x) == -1)
	  || ((outer_code == PLUS || outer_code == MINUS)
	      && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
	{
	  *total = 1;
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 3;
      return true;

    case CONST_DOUBLE:
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	*total = vax_float_literal (x) ? 5 : 8;
      else
	*total = ((CONST_DOUBLE_HIGH (x) == 0
		   && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
		  || (outer_code == PLUS
		      && CONST_DOUBLE_HIGH (x) == -1
		      && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
		 ? 2 : 5;
      return true;

    case POST_INC:
      *total = 2;
      return true;		/* Implies register operand.  */

    case PRE_DEC:
      *total = 3;
      return true;		/* Implies register operand.  */

    case MULT:
      switch (mode)
	{
	case DFmode:
	  *total = 16;		/* 4 on VAX 9000 */
	  break;
	case SFmode:
	  *total = 9;		/* 4 on VAX 9000, 12 on VAX 2 */
	  break;
	case DImode:
	  *total = 16;		/* 6 on VAX 9000, 28 on VAX 2 */
	  break;
	case SImode:
	case HImode:
	case QImode:
	  *total = 10;		/* 3-4 on VAX 9000, 20-28 on VAX 2 */
	  break;
	default:
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      break;

    case UDIV:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = 17;
      break;

    case DIV:
      if (mode == DImode)
	*total = 30;		/* Highly variable.  */
      else if (mode == DFmode)
	/* divide takes 28 cycles if the result is not zero, 13 otherwise */
	*total = 24;
      else
	*total = 11;		/* 25 on VAX 2 */
      break;

    case MOD:
      *total = 23;
      break;

    case UMOD:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = 29;
      break;

    case FLOAT:
      *total = (6		/* 4 on VAX 9000 */
		+ (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
      break;

    case FIX:
      *total = 7;		/* 17 on VAX 2 */
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
      if (mode == DImode)
	*total = 12;
      else
	*total = 10;		/* 6 on VAX 9000 */
      break;

    case ROTATE:
    case ROTATERT:
      *total = 6;		/* 5 on VAX 2, 4 on VAX 9000 */
      if (CONST_INT_P (XEXP (x, 1)))
	fmt = "e";		/* all constant rotate counts are short */
      break;

    case PLUS:
    case MINUS:
      *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
      /* Small integer operands can use subl2 and addl2.  */
      if ((CONST_INT_P (XEXP (x, 1)))
	  && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
	fmt = "e";
      break;

    case IOR:
    case XOR:
      *total = 3;
      break;

    case AND:
      /* AND is special because the first operand is complemented.  */
      *total = 3;
      if (CONST_INT_P (XEXP (x, 0)))
	{
	  if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
	    *total = 4;
	  fmt = "e";		/* skip the constant, cost operand 1 only */
	  i = 1;
	}
      break;

    case NEG:
      if (mode == DFmode)
	*total = 9;
      else if (mode == SFmode)
	*total = 6;
      else if (mode == DImode)
	*total = 4;
      else
	*total = 2;
      break;

    case NOT:
      *total = 2;
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      *total = 15;
      break;

    case MEM:
      if (mode == DImode || mode == DFmode)
	*total = 5;		/* 7 on VAX 2 */
      else
	*total = 3;		/* 4 on VAX 2 */
      x = XEXP (x, 0);
      if (!REG_P (x) && GET_CODE (x) != POST_INC)
	*total += vax_address_cost_1 (x);
      return true;

    case FLOAT_EXTEND:
    case FLOAT_TRUNCATE:
    case TRUNCATE:
      *total = 3;		/* FIXME: Costs need to be checked  */
      break;

    default:
      return false;
    }

  /* Now look inside the expression.  Operands which are not registers or
     short constants add to the cost.

     FMT and I may have been adjusted in the switch above for instructions
     which require special handling.  */

  while (*fmt++ == 'e')
    {
      rtx op = XEXP (x, i);

      i += 1;
      code = GET_CODE (op);

      /* A NOT is likely to be found as the first operand of an AND
	 (in which case the relevant cost is of the operand inside
	 the not) and not likely to be found anywhere else.  */
      if (code == NOT)
	op = XEXP (op, 0), code = GET_CODE (op);

      switch (code)
	{
	case CONST_INT:
	  if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
	      && GET_MODE (x) != QImode)
	    *total += 1;	/* 2 on VAX 2 */
	  break;
	case CONST:
	case LABEL_REF:
	case SYMBOL_REF:
	  *total += 1;		/* 2 on VAX 2 */
	  break;
	case CONST_DOUBLE:
	  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
	    {
	      /* Registers are faster than floating point constants -- even
		 those constants which can be encoded in a single byte.  */
	      if (vax_float_literal (op))
		*total += 1;
	      else
		*total += (GET_MODE (x) == DFmode) ? 3 : 2;
	    }
	  else
	    {
	      if (CONST_DOUBLE_HIGH (op) != 0
		  || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
		*total += 2;
	    }
	  break;
	case MEM:
	  *total += 1;		/* 2 on VAX 2 */
	  if (!REG_P (XEXP (op, 0)))
	    *total += vax_address_cost_1 (XEXP (op, 0));
	  break;
	case REG:
	case SUBREG:
	  break;
	default:
	  *total += 1;
	  break;
	}
    }
  return true;
}
/* Output code to add DELTA to the first argument, and then jump to FUNCTION.
   Used for C++ multiple inheritance.
	.mask	^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11>  #conservative entry mask
	addl2	$DELTA, 4(ap)	#adjust first argument
	jmp	FUNCTION+2	#jump beyond FUNCTION's entry mask
   Implements TARGET_ASM_OUTPUT_MI_THUNK.  */

static void
vax_output_mi_thunk (FILE * file,
		     tree thunk ATTRIBUTE_UNUSED,
		     HOST_WIDE_INT delta,
		     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
		     tree function)
{
  /* 0x0ffc is the conservative ^m<r2..r11> entry mask shown above.  */
  fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
  asm_fprintf (file, ",4(%Rap)\n");
  fprintf (file, "\tjmp ");
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  /* +2 skips FUNCTION's own 2-byte entry mask.  */
  fprintf (file, "+2\n");
}
/* Implements TARGET_STRUCT_VALUE_RTX: the register used to pass the
   address of a returned aggregate.  */

static rtx
vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
}
/* Implements TARGET_BUILTIN_SETJMP_FRAME_VALUE: the value stored by
   __builtin_setjmp as the frame address.  */

static rtx
vax_builtin_setjmp_frame_value (void)
{
  return hard_frame_pointer_rtx;
}
/* Worker function for NOTICE_UPDATE_CC.  Records in the global
   cc_status how INSN's body EXP affects the condition codes, so
   that redundant compare instructions can be elided.  */

void
vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
{
  if (GET_CODE (exp) == SET)
    {
      if (GET_CODE (SET_SRC (exp)) == CALL)
	CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
	       && GET_CODE (SET_DEST (exp)) != PC)
	{
	  cc_status.flags = 0;
	  /* The integer operations below don't set carry or
	     set it in an incompatible way.  That's ok though
	     as the Z bit is all we need when doing unsigned
	     comparisons on the result of these insns (since
	     they're always with 0).  Set CC_NO_OVERFLOW to
	     generate the correct unsigned branches.  */
	  switch (GET_CODE (SET_SRC (exp)))
	    {
	    case NEG:
	      if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
		break;
	      /* FALLTHRU */
	    case AND:
	    case IOR:
	    case XOR:
	    case NOT:
	    case MEM:
	    case REG:
	      cc_status.flags = CC_NO_OVERFLOW;
	      break;
	    default:
	      break;
	    }
	  cc_status.value1 = SET_DEST (exp);
	  cc_status.value2 = SET_SRC (exp);
	}
    }
  else if (GET_CODE (exp) == PARALLEL
	   && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
    {
      if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
	CC_STATUS_INIT;
      else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
	{
	  cc_status.flags = 0;
	  cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
	  cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
	}
      else
	/* PARALLELs whose first element sets the PC are aob,
	   sob insns.  They do change the cc's.  */
	CC_STATUS_INIT;
    }
  else
    CC_STATUS_INIT;
  /* Drop value2 when it may be invalidated by a store to value1.  */
  if (cc_status.value1 && REG_P (cc_status.value1)
      && cc_status.value2
      && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
    cc_status.value2 = 0;
  if (cc_status.value1 && MEM_P (cc_status.value1)
      && cc_status.value2
      && MEM_P (cc_status.value2))
    cc_status.value2 = 0;
  /* Actual condition, one line up, should be that value2's address
     depends on value1, but that is too much of a pain.  */
}
1151 /* Output integer move instructions. */
1153 const char *
1154 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1155 machine_mode mode)
1157 rtx hi[3], lo[3];
1158 const char *pattern_hi, *pattern_lo;
1160 switch (mode)
1162 case DImode:
1163 if (operands[1] == const0_rtx)
1164 return "clrq %0";
1165 if (TARGET_QMATH && optimize_size
1166 && (CONST_INT_P (operands[1])
1167 || GET_CODE (operands[1]) == CONST_DOUBLE))
1169 unsigned HOST_WIDE_INT hval, lval;
1170 int n;
1172 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1174 gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1176 /* Make sure only the low 32 bits are valid. */
1177 lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1178 hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1180 else
1182 lval = INTVAL (operands[1]);
1183 hval = 0;
1186 /* Here we see if we are trying to see if the 64bit value is really
1187 a 6bit shifted some arbitrary amount. If so, we can use ashq to
1188 shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1189 8 bytes - 1 shift byte - 1 short literal byte. */
1190 if (lval != 0
1191 && (n = exact_log2 (lval & (- lval))) != -1
1192 && (lval >> n) < 64)
1194 lval >>= n;
1196 /* On 32bit platforms, if the 6bits didn't overflow into the
1197 upper 32bit value that value better be 0. If we have
1198 overflowed, make sure it wasn't too much. */
1199 if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
1201 if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1202 n = 0; /* failure */
1203 else
1204 lval |= hval << (32 - n);
1206 /* If n is 0, then ashq is not the best way to emit this. */
1207 if (n > 0)
1209 operands[1] = GEN_INT (lval);
1210 operands[2] = GEN_INT (n);
1211 return "ashq %2,%D1,%0";
1213 #if HOST_BITS_PER_WIDE_INT == 32
1215 /* On 32bit platforms, if the low 32bit value is 0, checkout the
1216 upper 32bit value. */
1217 else if (hval != 0
1218 && (n = exact_log2 (hval & (- hval)) - 1) != -1
1219 && (hval >> n) < 64)
1221 operands[1] = GEN_INT (hval >> n);
1222 operands[2] = GEN_INT (n + 32);
1223 return "ashq %2,%D1,%0";
1224 #endif
1228 if (TARGET_QMATH
1229 && (!MEM_P (operands[0])
1230 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1231 || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1232 || !illegal_addsub_di_memory_operand (operands[0], DImode))
1233 && ((CONST_INT_P (operands[1])
1234 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1235 || GET_CODE (operands[1]) == CONST_DOUBLE))
1237 hi[0] = operands[0];
1238 hi[1] = operands[1];
1240 split_quadword_operands (insn, SET, hi, lo, 2);
1242 pattern_lo = vax_output_int_move (NULL, lo, SImode);
1243 pattern_hi = vax_output_int_move (NULL, hi, SImode);
1245 /* The patterns are just movl/movl or pushl/pushl then a movq will
1246 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1247 bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1248 value bytes. */
1249 if ((!strncmp (pattern_lo, "movl", 4)
1250 && !strncmp (pattern_hi, "movl", 4))
1251 || (!strncmp (pattern_lo, "pushl", 5)
1252 && !strncmp (pattern_hi, "pushl", 5)))
1253 return "movq %1,%0";
1255 if (MEM_P (operands[0])
1256 && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1258 output_asm_insn (pattern_hi, hi);
1259 operands[0] = lo[0];
1260 operands[1] = lo[1];
1261 operands[2] = lo[2];
1262 return pattern_lo;
1264 else
1266 output_asm_insn (pattern_lo, lo);
1267 operands[0] = hi[0];
1268 operands[1] = hi[1];
1269 operands[2] = hi[2];
1270 return pattern_hi;
1273 return "movq %1,%0";
1275 case SImode:
1276 if (symbolic_operand (operands[1], SImode))
1278 if (push_operand (operands[0], SImode))
1279 return "pushab %a1";
1280 return "movab %a1,%0";
1283 if (operands[1] == const0_rtx)
1285 if (push_operand (operands[1], SImode))
1286 return "pushl %1";
1287 return "clrl %0";
1290 if (CONST_INT_P (operands[1])
1291 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1293 HOST_WIDE_INT i = INTVAL (operands[1]);
1294 int n;
1295 if ((unsigned HOST_WIDE_INT)(~i) < 64)
1296 return "mcoml %N1,%0";
1297 if ((unsigned HOST_WIDE_INT)i < 0x100)
1298 return "movzbl %1,%0";
1299 if (i >= -0x80 && i < 0)
1300 return "cvtbl %1,%0";
1301 if (optimize_size
1302 && (n = exact_log2 (i & (-i))) != -1
1303 && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1305 operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1306 operands[2] = GEN_INT (n);
1307 return "ashl %2,%1,%0";
1309 if ((unsigned HOST_WIDE_INT)i < 0x10000)
1310 return "movzwl %1,%0";
1311 if (i >= -0x8000 && i < 0)
1312 return "cvtwl %1,%0";
1314 if (push_operand (operands[0], SImode))
1315 return "pushl %1";
1316 return "movl %1,%0";
1318 case HImode:
1319 if (CONST_INT_P (operands[1]))
1321 HOST_WIDE_INT i = INTVAL (operands[1]);
1322 if (i == 0)
1323 return "clrw %0";
1324 else if ((unsigned HOST_WIDE_INT)i < 64)
1325 return "movw %1,%0";
1326 else if ((unsigned HOST_WIDE_INT)~i < 64)
1327 return "mcomw %H1,%0";
1328 else if ((unsigned HOST_WIDE_INT)i < 256)
1329 return "movzbw %1,%0";
1330 else if (i >= -0x80 && i < 0)
1331 return "cvtbw %1,%0";
1333 return "movw %1,%0";
1335 case QImode:
1336 if (CONST_INT_P (operands[1]))
1338 HOST_WIDE_INT i = INTVAL (operands[1]);
1339 if (i == 0)
1340 return "clrb %0";
1341 else if ((unsigned HOST_WIDE_INT)~i < 64)
1342 return "mcomb %B1,%0";
1344 return "movb %1,%0";
1346 default:
1347 gcc_unreachable ();
1351 /* Output integer add instructions.
1353 The space-time-opcode tradeoffs for addition vary by model of VAX.
1355 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1356 but it is not faster on other models.
1358 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1359 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1360 a register is used in an address too soon after it is set.
1361 Compromise by using movab only when it is shorter than the add
1362 or the base register in the address is one of sp, ap, and fp,
1363 which are not modified very often. */
1365 const char *
1366 vax_output_int_add (rtx insn, rtx *operands, machine_mode mode)
1368 switch (mode)
1370 case DImode:
1372 rtx low[3];
1373 const char *pattern;
1374 int carry = 1;
1375 bool sub;
1377 if (TARGET_QMATH && 0)
1378 debug_rtx (insn);
1380 split_quadword_operands (insn, PLUS, operands, low, 3);
1382 if (TARGET_QMATH)
1384 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1385 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1386 gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1387 gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1388 #endif
1390 /* No reason to add a 0 to the low part and thus no carry, so just
1391 emit the appropriate add/sub instruction. */
1392 if (low[2] == const0_rtx)
1393 return vax_output_int_add (NULL, operands, SImode);
1395 /* Are we doing addition or subtraction? */
1396 sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1398 /* We can't use vax_output_int_add since some the patterns don't
1399 modify the carry bit. */
1400 if (sub)
1402 if (low[2] == constm1_rtx)
1403 pattern = "decl %0";
1404 else
1405 pattern = "subl2 $%n2,%0";
1407 else
1409 if (low[2] == const1_rtx)
1410 pattern = "incl %0";
1411 else
1412 pattern = "addl2 %2,%0";
1414 output_asm_insn (pattern, low);
1416 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1417 two 32bit parts, we complement each and then add one to
1418 low part. We know that the low part can't overflow since
1419 it's value can never be 0. */
1420 if (sub)
1421 return "sbwc %N2,%0";
1422 return "adwc %2,%0";
1425 /* Add low parts. */
1426 if (rtx_equal_p (operands[0], operands[1]))
1428 if (low[2] == const0_rtx)
1429 /* Should examine operand, punt if not POST_INC. */
1430 pattern = "tstl %0", carry = 0;
1431 else if (low[2] == const1_rtx)
1432 pattern = "incl %0";
1433 else
1434 pattern = "addl2 %2,%0";
1436 else
1438 if (low[2] == const0_rtx)
1439 pattern = "movl %1,%0", carry = 0;
1440 else
1441 pattern = "addl3 %2,%1,%0";
1443 if (pattern)
1444 output_asm_insn (pattern, low);
1445 if (!carry)
1446 /* If CARRY is 0, we don't have any carry value to worry about. */
1447 return get_insn_template (CODE_FOR_addsi3, insn);
1448 /* %0 = C + %1 + %2 */
1449 if (!rtx_equal_p (operands[0], operands[1]))
1450 output_asm_insn ((operands[1] == const0_rtx
1451 ? "clrl %0"
1452 : "movl %1,%0"), operands);
1453 return "adwc %2,%0";
1456 case SImode:
1457 if (rtx_equal_p (operands[0], operands[1]))
1459 if (operands[2] == const1_rtx)
1460 return "incl %0";
1461 if (operands[2] == constm1_rtx)
1462 return "decl %0";
1463 if (CONST_INT_P (operands[2])
1464 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1465 return "subl2 $%n2,%0";
1466 if (CONST_INT_P (operands[2])
1467 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1468 && REG_P (operands[1])
1469 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1470 || REGNO (operands[1]) > 11))
1471 return "movab %c2(%1),%0";
1472 if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1473 return "movab %a2[%0],%0";
1474 return "addl2 %2,%0";
1477 if (rtx_equal_p (operands[0], operands[2]))
1479 if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1480 return "movab %a1[%0],%0";
1481 return "addl2 %1,%0";
1484 if (CONST_INT_P (operands[2])
1485 && INTVAL (operands[2]) < 32767
1486 && INTVAL (operands[2]) > -32768
1487 && REG_P (operands[1])
1488 && push_operand (operands[0], SImode))
1489 return "pushab %c2(%1)";
1491 if (CONST_INT_P (operands[2])
1492 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1493 return "subl3 $%n2,%1,%0";
1495 if (CONST_INT_P (operands[2])
1496 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1497 && REG_P (operands[1])
1498 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1499 || REGNO (operands[1]) > 11))
1500 return "movab %c2(%1),%0";
1502 /* Add this if using gcc on a VAX 3xxx:
1503 if (REG_P (operands[1]) && REG_P (operands[2]))
1504 return "movab (%1)[%2],%0";
1507 if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1509 if (push_operand (operands[0], SImode))
1510 return "pushab %a2[%1]";
1511 return "movab %a2[%1],%0";
1514 if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1516 if (push_operand (operands[0], SImode))
1517 return "pushab %a1[%2]";
1518 return "movab %a1[%2],%0";
1521 if (flag_pic && REG_P (operands[0])
1522 && symbolic_operand (operands[2], SImode))
1523 return "movab %a2,%0;addl2 %1,%0";
1525 if (flag_pic
1526 && (symbolic_operand (operands[1], SImode)
1527 || symbolic_operand (operands[1], SImode)))
1528 debug_rtx (insn);
1530 return "addl3 %1,%2,%0";
1532 case HImode:
1533 if (rtx_equal_p (operands[0], operands[1]))
1535 if (operands[2] == const1_rtx)
1536 return "incw %0";
1537 if (operands[2] == constm1_rtx)
1538 return "decw %0";
1539 if (CONST_INT_P (operands[2])
1540 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1541 return "subw2 $%n2,%0";
1542 return "addw2 %2,%0";
1544 if (rtx_equal_p (operands[0], operands[2]))
1545 return "addw2 %1,%0";
1546 if (CONST_INT_P (operands[2])
1547 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1548 return "subw3 $%n2,%1,%0";
1549 return "addw3 %1,%2,%0";
1551 case QImode:
1552 if (rtx_equal_p (operands[0], operands[1]))
1554 if (operands[2] == const1_rtx)
1555 return "incb %0";
1556 if (operands[2] == constm1_rtx)
1557 return "decb %0";
1558 if (CONST_INT_P (operands[2])
1559 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1560 return "subb2 $%n2,%0";
1561 return "addb2 %2,%0";
1563 if (rtx_equal_p (operands[0], operands[2]))
1564 return "addb2 %1,%0";
1565 if (CONST_INT_P (operands[2])
1566 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1567 return "subb3 $%n2,%1,%0";
1568 return "addb3 %1,%2,%0";
1570 default:
1571 gcc_unreachable ();
1575 const char *
1576 vax_output_int_subtract (rtx insn, rtx *operands, machine_mode mode)
1578 switch (mode)
1580 case DImode:
1582 rtx low[3];
1583 const char *pattern;
1584 int carry = 1;
1586 if (TARGET_QMATH && 0)
1587 debug_rtx (insn);
1589 split_quadword_operands (insn, MINUS, operands, low, 3);
1591 if (TARGET_QMATH)
1593 if (operands[1] == const0_rtx && low[1] == const0_rtx)
1595 /* Negation is tricky. It's basically complement and increment.
1596 Negate hi, then lo, and subtract the carry back. */
1597 if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
1598 || (MEM_P (operands[0])
1599 && GET_CODE (XEXP (operands[0], 0)) == POST_INC))
1600 fatal_insn ("illegal operand detected", insn);
1601 output_asm_insn ("mnegl %2,%0", operands);
1602 output_asm_insn ("mnegl %2,%0", low);
1603 return "sbwc $0,%0";
1605 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1606 gcc_assert (rtx_equal_p (low[0], low[1]));
1607 if (low[2] == const1_rtx)
1608 output_asm_insn ("decl %0", low);
1609 else
1610 output_asm_insn ("subl2 %2,%0", low);
1611 return "sbwc %2,%0";
1614 /* Subtract low parts. */
1615 if (rtx_equal_p (operands[0], operands[1]))
1617 if (low[2] == const0_rtx)
1618 pattern = 0, carry = 0;
1619 else if (low[2] == constm1_rtx)
1620 pattern = "decl %0";
1621 else
1622 pattern = "subl2 %2,%0";
1624 else
1626 if (low[2] == constm1_rtx)
1627 pattern = "decl %0";
1628 else if (low[2] == const0_rtx)
1629 pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
1630 else
1631 pattern = "subl3 %2,%1,%0";
1633 if (pattern)
1634 output_asm_insn (pattern, low);
1635 if (carry)
1637 if (!rtx_equal_p (operands[0], operands[1]))
1638 return "movl %1,%0;sbwc %2,%0";
1639 return "sbwc %2,%0";
1640 /* %0 = %2 - %1 - C */
1642 return get_insn_template (CODE_FOR_subsi3, insn);
1645 default:
1646 gcc_unreachable ();
1650 /* True if X is an rtx for a constant that is a valid address. */
1652 bool
1653 legitimate_constant_address_p (rtx x)
1655 if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1656 || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1657 return true;
1658 if (GET_CODE (x) != CONST)
1659 return false;
1660 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1661 if (flag_pic
1662 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1663 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1664 return false;
1665 #endif
1666 return true;
/* The remaining helper macros here are used only by the
   address-legitimization predicates below.  */

/* Nonzero if X is a hard reg that can be used as an index,
   or, when not STRICT, any pseudo reg.  */
#define INDEX_REGISTER_P(X, STRICT) \
  (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))

/* Nonzero if X is a hard reg that can be used as a base reg,
   or, when not STRICT, any pseudo reg.  */
#define BASE_REGISTER_P(X, STRICT) \
  (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1681 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1683 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1684 are no SYMBOL_REFs for external symbols present. */
1686 static bool
1687 indirectable_constant_address_p (rtx x, bool indirect)
1689 if (GET_CODE (x) == SYMBOL_REF)
1690 return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;
1692 if (GET_CODE (x) == CONST)
1693 return !flag_pic
1694 || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1695 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));
1697 return CONSTANT_ADDRESS_P (x);
1700 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1702 static bool
1703 indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
1705 return CONSTANT_ADDRESS_P (x);
1708 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1710 /* True if X is an address which can be indirected. External symbols
1711 could be in a sharable image library, so we disallow those. */
1713 static bool
1714 indirectable_address_p (rtx x, bool strict, bool indirect)
1716 if (indirectable_constant_address_p (x, indirect)
1717 || BASE_REGISTER_P (x, strict))
1718 return true;
1719 if (GET_CODE (x) != PLUS
1720 || !BASE_REGISTER_P (XEXP (x, 0), strict)
1721 || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1722 return false;
1723 return indirectable_constant_address_p (XEXP (x, 1), indirect);
1726 /* Return true if x is a valid address not using indexing.
1727 (This much is the easy part.) */
1728 static bool
1729 nonindexed_address_p (rtx x, bool strict)
1731 rtx xfoo0;
1732 if (REG_P (x))
1734 if (! reload_in_progress
1735 || reg_equiv_mem (REGNO (x)) == 0
1736 || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
1737 return true;
1739 if (indirectable_constant_address_p (x, false))
1740 return true;
1741 if (indirectable_address_p (x, strict, false))
1742 return true;
1743 xfoo0 = XEXP (x, 0);
1744 if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
1745 return true;
1746 if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1747 && BASE_REGISTER_P (xfoo0, strict))
1748 return true;
1749 return false;
1752 /* True if PROD is either a reg times size of mode MODE and MODE is less
1753 than or equal 8 bytes, or just a reg if MODE is one byte. */
1755 static bool
1756 index_term_p (rtx prod, machine_mode mode, bool strict)
1758 rtx xfoo0, xfoo1;
1760 if (GET_MODE_SIZE (mode) == 1)
1761 return BASE_REGISTER_P (prod, strict);
1763 if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1764 return false;
1766 xfoo0 = XEXP (prod, 0);
1767 xfoo1 = XEXP (prod, 1);
1769 if (CONST_INT_P (xfoo0)
1770 && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1771 && INDEX_REGISTER_P (xfoo1, strict))
1772 return true;
1774 if (CONST_INT_P (xfoo1)
1775 && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1776 && INDEX_REGISTER_P (xfoo0, strict))
1777 return true;
1779 return false;
1782 /* Return true if X is the sum of a register
1783 and a valid index term for mode MODE. */
1784 static bool
1785 reg_plus_index_p (rtx x, machine_mode mode, bool strict)
1787 rtx xfoo0, xfoo1;
1789 if (GET_CODE (x) != PLUS)
1790 return false;
1792 xfoo0 = XEXP (x, 0);
1793 xfoo1 = XEXP (x, 1);
1795 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1796 return true;
1798 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1799 return true;
1801 return false;
1804 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1805 static bool
1806 indexable_address_p (rtx xfoo0, rtx xfoo1, machine_mode mode, bool strict)
1808 if (!CONSTANT_ADDRESS_P (xfoo0))
1809 return false;
1810 if (BASE_REGISTER_P (xfoo1, strict))
1811 return !flag_pic || mode == QImode;
1812 if (flag_pic && symbolic_operand (xfoo0, SImode))
1813 return false;
1814 return reg_plus_index_p (xfoo1, mode, strict);
1817 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1818 that is a valid memory address for an instruction.
1819 The MODE argument is the machine mode for the MEM expression
1820 that wants to use this address. */
1821 bool
1822 vax_legitimate_address_p (machine_mode mode, rtx x, bool strict)
1824 rtx xfoo0, xfoo1;
1826 if (nonindexed_address_p (x, strict))
1827 return true;
1829 if (GET_CODE (x) != PLUS)
1830 return false;
1832 /* Handle <address>[index] represented with index-sum outermost */
1834 xfoo0 = XEXP (x, 0);
1835 xfoo1 = XEXP (x, 1);
1837 if (index_term_p (xfoo0, mode, strict)
1838 && nonindexed_address_p (xfoo1, strict))
1839 return true;
1841 if (index_term_p (xfoo1, mode, strict)
1842 && nonindexed_address_p (xfoo0, strict))
1843 return true;
1845 /* Handle offset(reg)[index] with offset added outermost */
1847 if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1848 || indexable_address_p (xfoo1, xfoo0, mode, strict))
1849 return true;
1851 return false;
1854 /* Return true if x (a legitimate address expression) has an effect that
1855 depends on the machine mode it is used for. On the VAX, the predecrement
1856 and postincrement address depend thus (the amount of decrement or
1857 increment being the length of the operand) and all indexed address depend
1858 thus (because the index scale factor is the length of the operand). */
1860 static bool
1861 vax_mode_dependent_address_p (const_rtx x, addr_space_t as ATTRIBUTE_UNUSED)
1863 rtx xfoo0, xfoo1;
1865 /* Auto-increment cases are now dealt with generically in recog.c. */
1866 if (GET_CODE (x) != PLUS)
1867 return false;
1869 xfoo0 = XEXP (x, 0);
1870 xfoo1 = XEXP (x, 1);
1872 if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1873 return false;
1874 if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1875 return false;
1876 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1877 return false;
1878 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
1879 return false;
1881 return true;
1884 static rtx
1885 fixup_mathdi_operand (rtx x, machine_mode mode)
1887 if (illegal_addsub_di_memory_operand (x, mode))
1889 rtx addr = XEXP (x, 0);
1890 rtx temp = gen_reg_rtx (Pmode);
1891 rtx offset = 0;
1892 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1893 if (GET_CODE (addr) == CONST && flag_pic)
1895 offset = XEXP (XEXP (addr, 0), 1);
1896 addr = XEXP (XEXP (addr, 0), 0);
1898 #endif
1899 emit_move_insn (temp, addr);
1900 if (offset)
1901 temp = gen_rtx_PLUS (Pmode, temp, offset);
1902 x = gen_rtx_MEM (DImode, temp);
1904 return x;
1907 void
1908 vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
1910 int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
1911 rtx temp;
1913 rtx (*gen_old_insn)(rtx, rtx, rtx);
1914 rtx (*gen_si_insn)(rtx, rtx, rtx);
1915 rtx (*gen_insn)(rtx, rtx, rtx);
1917 if (code == PLUS)
1919 gen_old_insn = gen_adddi3_old;
1920 gen_si_insn = gen_addsi3;
1921 gen_insn = gen_adcdi3;
1923 else if (code == MINUS)
1925 gen_old_insn = gen_subdi3_old;
1926 gen_si_insn = gen_subsi3;
1927 gen_insn = gen_sbcdi3;
1929 else
1930 gcc_unreachable ();
1932 /* If this is addition (thus operands are commutative) and if there is one
1933 addend that duplicates the desination, we want that addend to be the
1934 first addend. */
1935 if (code == PLUS
1936 && rtx_equal_p (operands[0], operands[2])
1937 && !rtx_equal_p (operands[1], operands[2]))
1939 temp = operands[2];
1940 operands[2] = operands[1];
1941 operands[1] = temp;
1944 if (!TARGET_QMATH)
1946 emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
1948 else if (hi_only)
1950 if (!rtx_equal_p (operands[0], operands[1])
1951 && (REG_P (operands[0]) && MEM_P (operands[1])))
1953 emit_move_insn (operands[0], operands[1]);
1954 operands[1] = operands[0];
1957 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1958 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1959 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1961 if (!rtx_equal_p (operands[0], operands[1]))
1962 emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
1963 operand_subword (operands[1], 0, 0, DImode));
1965 emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
1966 operand_subword (operands[1], 1, 0, DImode),
1967 operand_subword (operands[2], 1, 0, DImode)));
1969 else
1971 /* If are adding the same value together, that's really a multiply by 2,
1972 and that's just a left shift of 1. */
1973 if (rtx_equal_p (operands[1], operands[2]))
1975 gcc_assert (code != MINUS);
1976 emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
1977 return;
1980 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1982 /* If an operand is the same as operand[0], use the operand[0] rtx
1983 because fixup will an equivalent rtx but not an equal one. */
1985 if (rtx_equal_p (operands[0], operands[1]))
1986 operands[1] = operands[0];
1987 else
1988 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1990 if (rtx_equal_p (operands[0], operands[2]))
1991 operands[2] = operands[0];
1992 else
1993 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1995 /* If we are subtracting not from ourselves [d = a - b], and because the
1996 carry ops are two operand only, we would need to do a move prior to
1997 the subtract. And if d == b, we would need a temp otherwise
1998 [d = a, d -= d] and we end up with 0. Instead we rewrite d = a - b
1999 into d = -b, d += a. Since -b can never overflow, even if b == d,
2000 no temp is needed.
2002 If we are doing addition, since the carry ops are two operand, if
2003 we aren't adding to ourselves, move the first addend to the
2004 destination first. */
2006 gcc_assert (operands[1] != const0_rtx || code == MINUS);
2007 if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
2009 if (code == MINUS && CONSTANT_P (operands[1]))
2011 temp = gen_reg_rtx (DImode);
2012 emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
2013 code = PLUS;
2014 gen_insn = gen_adcdi3;
2015 operands[2] = operands[1];
2016 operands[1] = operands[0];
2018 else
2019 emit_move_insn (operands[0], operands[1]);
2022 /* Subtracting a constant will have been rewritten to an addition of the
2023 negative of that constant before we get here. */
2024 gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
2025 emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
2029 bool
2030 adjacent_operands_p (rtx lo, rtx hi, machine_mode mode)
2032 HOST_WIDE_INT lo_offset;
2033 HOST_WIDE_INT hi_offset;
2035 if (GET_CODE (lo) != GET_CODE (hi))
2036 return false;
2038 if (REG_P (lo))
2039 return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
2040 if (CONST_INT_P (lo))
2041 return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
2042 if (CONST_INT_P (lo))
2043 return mode != SImode;
2045 if (!MEM_P (lo))
2046 return false;
2048 if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
2049 return false;
2051 lo = XEXP (lo, 0);
2052 hi = XEXP (hi, 0);
2054 if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
2055 return rtx_equal_p (lo, hi);
2057 switch (GET_CODE (lo))
2059 case REG:
2060 case SYMBOL_REF:
2061 lo_offset = 0;
2062 break;
2063 case CONST:
2064 lo = XEXP (lo, 0);
2065 /* FALLTHROUGH */
2066 case PLUS:
2067 if (!CONST_INT_P (XEXP (lo, 1)))
2068 return false;
2069 lo_offset = INTVAL (XEXP (lo, 1));
2070 lo = XEXP (lo, 0);
2071 break;
2072 default:
2073 return false;
2076 switch (GET_CODE (hi))
2078 case REG:
2079 case SYMBOL_REF:
2080 hi_offset = 0;
2081 break;
2082 case CONST:
2083 hi = XEXP (hi, 0);
2084 /* FALLTHROUGH */
2085 case PLUS:
2086 if (!CONST_INT_P (XEXP (hi, 1)))
2087 return false;
2088 hi_offset = INTVAL (XEXP (hi, 1));
2089 hi = XEXP (hi, 0);
2090 break;
2091 default:
2092 return false;
2095 if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
2096 return false;
2098 return rtx_equal_p (lo, hi)
2099 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
2102 /* Output assembler code for a block containing the constant parts
2103 of a trampoline, leaving space for the variable parts. */
2105 /* On the VAX, the trampoline contains an entry mask and two instructions:
2106 .word NN
2107 movl $STATIC,r0 (store the functions static chain)
2108 jmp *$FUNCTION (jump to function code at address FUNCTION) */
2110 static void
2111 vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
2113 assemble_aligned_integer (2, const0_rtx);
2114 assemble_aligned_integer (2, GEN_INT (0x8fd0));
2115 assemble_aligned_integer (4, const0_rtx);
2116 assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
2117 assemble_aligned_integer (2, GEN_INT (0x9f17));
2118 assemble_aligned_integer (4, const0_rtx);
2121 /* We copy the register-mask from the function's pure code
2122 to the start of the trampoline. */
2124 static void
2125 vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
2127 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2128 rtx mem;
2130 emit_block_move (m_tramp, assemble_trampoline_template (),
2131 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2133 mem = adjust_address (m_tramp, HImode, 0);
2134 emit_move_insn (mem, gen_const_mem (HImode, fnaddr));
2136 mem = adjust_address (m_tramp, SImode, 4);
2137 emit_move_insn (mem, cxt);
2138 mem = adjust_address (m_tramp, SImode, 11);
2139 emit_move_insn (mem, plus_constant (Pmode, fnaddr, 2));
2140 emit_insn (gen_sync_istream ());
2143 /* Value is the number of bytes of arguments automatically
2144 popped when returning from a subroutine call.
2145 FUNDECL is the declaration node of the function (as a tree),
2146 FUNTYPE is the data type of the function (as a tree),
2147 or for a library call it is an identifier node for the subroutine name.
2148 SIZE is the number of bytes of arguments passed on the stack.
2150 On the VAX, the RET insn pops a maximum of 255 args for any function. */
2152 static int
2153 vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2154 tree funtype ATTRIBUTE_UNUSED, int size)
2156 return size > 255 * 4 ? 0 : size;
2159 /* Define where to put the arguments to a function.
2160 Value is zero to push the argument on the stack,
2161 or a hard register in which to store the argument.
2163 MODE is the argument's machine mode.
2164 TYPE is the data type of the argument (as a tree).
2165 This is null for libcalls where that information may
2166 not be available.
2167 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2168 the preceding args and about the function being called.
2169 NAMED is nonzero if this argument is a named parameter
2170 (otherwise it is an extra parameter matching an ellipsis). */
2172 /* On the VAX all args are pushed. */
2174 static rtx
2175 vax_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
2176 machine_mode mode ATTRIBUTE_UNUSED,
2177 const_tree type ATTRIBUTE_UNUSED,
2178 bool named ATTRIBUTE_UNUSED)
2180 return NULL_RTX;
2183 /* Update the data in CUM to advance over an argument of mode MODE and
2184 data type TYPE. (TYPE is null for libcalls where that information
2185 may not be available.) */
2187 static void
2188 vax_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
2189 const_tree type, bool named ATTRIBUTE_UNUSED)
2191 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2193 *cum += (mode != BLKmode
2194 ? (GET_MODE_SIZE (mode) + 3) & ~3
2195 : (int_size_in_bytes (type) + 3) & ~3);