* i386.c (has_dispatch): Disable for Ryzen.
[official-gcc.git] / gcc / config / vax / vax.c
blob5989607be75bee7149c72a34165e5eebaa61f445
1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "attribs.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "optabs.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "calls.h"
36 #include "varasm.h"
37 #include "conditions.h"
38 #include "output.h"
39 #include "expr.h"
40 #include "reload.h"
41 #include "builtins.h"
43 /* This file should be included last. */
44 #include "target-def.h"
/* Forward declarations for the static target-hook implementations
   defined later in this file and wired up via the TARGET_* macros
   below.  */
46 static void vax_option_override (void);
47 static bool vax_legitimate_address_p (machine_mode, rtx, bool);
48 static void vax_file_start (void);
49 static void vax_init_libfuncs (void);
50 static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
51 HOST_WIDE_INT, tree);
52 static int vax_address_cost_1 (rtx);
53 static int vax_address_cost (rtx, machine_mode, addr_space_t, bool);
54 static bool vax_rtx_costs (rtx, machine_mode, int, int, int *, bool);
55 static rtx vax_function_arg (cumulative_args_t, machine_mode,
56 const_tree, bool);
57 static void vax_function_arg_advance (cumulative_args_t, machine_mode,
58 const_tree, bool);
59 static rtx vax_struct_value_rtx (tree, int);
60 static rtx vax_builtin_setjmp_frame_value (void);
61 static void vax_asm_trampoline_template (FILE *);
62 static void vax_trampoline_init (rtx, tree, rtx);
63 static int vax_return_pops_args (tree, tree, int);
64 static bool vax_mode_dependent_address_p (const_rtx, addr_space_t);
66 /* Initialize the GCC target structure.  */
67 #undef TARGET_ASM_ALIGNED_HI_OP
68 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
70 #undef TARGET_ASM_FILE_START
71 #define TARGET_ASM_FILE_START vax_file_start
72 #undef TARGET_ASM_FILE_START_APP_OFF
73 #define TARGET_ASM_FILE_START_APP_OFF true
75 #undef TARGET_INIT_LIBFUNCS
76 #define TARGET_INIT_LIBFUNCS vax_init_libfuncs
78 #undef TARGET_ASM_OUTPUT_MI_THUNK
79 #define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
80 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
81 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
83 #undef TARGET_RTX_COSTS
84 #define TARGET_RTX_COSTS vax_rtx_costs
85 #undef TARGET_ADDRESS_COST
86 #define TARGET_ADDRESS_COST vax_address_cost
88 #undef TARGET_PROMOTE_PROTOTYPES
89 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
91 #undef TARGET_FUNCTION_ARG
92 #define TARGET_FUNCTION_ARG vax_function_arg
93 #undef TARGET_FUNCTION_ARG_ADVANCE
94 #define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance
96 #undef TARGET_STRUCT_VALUE_RTX
97 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
99 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
100 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
102 #undef TARGET_LRA_P
103 #define TARGET_LRA_P hook_bool_void_false
105 #undef TARGET_LEGITIMATE_ADDRESS_P
106 #define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
107 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
108 #define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p
110 #undef TARGET_FRAME_POINTER_REQUIRED
111 #define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true
113 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
114 #define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
115 #undef TARGET_TRAMPOLINE_INIT
116 #define TARGET_TRAMPOLINE_INIT vax_trampoline_init
117 #undef TARGET_RETURN_POPS_ARGS
118 #define TARGET_RETURN_POPS_ARGS vax_return_pops_args
120 #undef TARGET_OPTION_OVERRIDE
121 #define TARGET_OPTION_OVERRIDE vax_option_override
/* The single global hook vtable through which the rest of GCC invokes
   the target callbacks overridden above.  */
123 struct gcc_target targetm = TARGET_INITIALIZER;
125 /* Set global variables as needed for the options enabled.  */
/* Implement TARGET_OPTION_OVERRIDE: when -mg (TARGET_G_FLOAT) is in
   effect, DFmode uses the VAX "G" floating-point format instead of
   the default; also run any subtarget-specific option handling.  */
127 static void
128 vax_option_override (void)
130 /* We're VAX floating point, not IEEE floating point.  */
131 if (TARGET_G_FLOAT)
132 REAL_MODE_FORMAT (DFmode) = &vax_g_format;
134 #ifdef SUBTARGET_OVERRIDE_OPTIONS
135 SUBTARGET_OVERRIDE_OPTIONS;
136 #endif
/* Attach a REG_CFA_OFFSET note to INSN recording, for the unwinder,
   that SRC was stored at OFFSET bytes from the frame pointer.  */
139 static void
140 vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
142 rtx x;
144 x = plus_constant (Pmode, frame_pointer_rtx, offset);
145 x = gen_rtx_MEM (SImode, x);
146 x = gen_rtx_SET (x, src);
147 add_reg_note (insn, REG_CFA_OFFSET, x);
150 /* Generate the assembly code for function entry.  FILE is a stdio
151 stream to output the code to.  SIZE is an int: how many units of
152 temporary storage to allocate.
154 Refer to the array `regs_ever_live' to determine which registers to
155 save; `regs_ever_live[I]' is nonzero if register number I is ever
156 used in the function.  This function is responsible for knowing
157 which registers should not be saved even if used.  */
159 void
160 vax_expand_prologue (void)
162 int regno, offset;
163 int mask = 0;
164 HOST_WIDE_INT size;
165 rtx insn;
/* Build the procedure entry mask: one bit per call-saved register
   that is live somewhere in this function.  */
167 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
168 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
169 mask |= 1 << regno;
171 insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
172 RTX_FRAME_RELATED_P (insn) = 1;
174 /* The layout of the CALLG/S stack frame is follows:
176 <- CFA, AP
179 ... Registers saved as specified by MASK
182 return-addr
183 old fp
184 old ap
185 old psw
186 zero
187 <- FP, SP
189 The rest of the prologue will adjust the SP for the local frame.  */
/* Record CFA offsets for the registers the CALLS instruction itself
   saves (AP, FP, PC), then for each register in MASK.  */
191 vax_add_reg_cfa_offset (insn, 4, arg_pointer_rtx);
192 vax_add_reg_cfa_offset (insn, 8, frame_pointer_rtx);
193 vax_add_reg_cfa_offset (insn, 12, pc_rtx);
195 offset = 16;
196 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
197 if (mask & (1 << regno))
199 vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
200 offset += 4;
203 /* Because add_reg_note pushes the notes, adding this last means that
204 it will be processed first.  This is required to allow the other
205 notes be interpreted properly.  */
206 add_reg_note (insn, REG_CFA_DEF_CFA,
207 plus_constant (Pmode, frame_pointer_rtx, offset));
209 /* Allocate the local stack frame.  */
210 size = get_frame_size ();
211 size -= STARTING_FRAME_OFFSET;
212 emit_insn (gen_addsi3 (stack_pointer_rtx,
213 stack_pointer_rtx, GEN_INT (-size)));
215 /* Do not allow instructions referencing local stack memory to be
216 scheduled before the frame is allocated.  This is more pedantic
217 than anything else, given that VAX does not currently have a
218 scheduling description.  */
219 emit_insn (gen_blockage ());
222 /* When debugging with stabs, we want to output an extra dummy label
223 so that gas can distinguish between D_float and G_float prior to
224 processing the .stabs directive identifying type double.  */
/* Implement TARGET_ASM_FILE_START.  */
225 static void
226 vax_file_start (void)
228 default_file_start ();
230 if (write_symbols == DBX_DEBUG)
231 fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
234 /* We can use the BSD C library routines for the libgcc calls that are
235 still generated, since that's what they boil down to anyways.  When
236 ELF, avoid the user's namespace.  */
/* Implement TARGET_INIT_LIBFUNCS: route SImode unsigned div/mod to
   the BSD library entry points (underscore-prefixed for ELF).  */
238 static void
239 vax_init_libfuncs (void)
241 if (TARGET_BSD_DIVMOD)
243 set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
244 set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
248 /* This is like nonimmediate_operand with a restriction on the type of MEM.  */
/* NOTE(review): the comment above looks stale -- it does not describe
   this routine.  What the code below actually does is split each of the
   N quadword (DImode) OPERANDS of INSN into an SImode pair: the low
   word goes into LOW[i] and OPERANDS[i] is replaced by the high word,
   with special handling for auto-inc/dec memory addresses and a
   size-optimization that turns a dying base register into a POST_INC.
   Verify the intended comment against upstream history.  */
250 static void
251 split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
252 rtx * low, int n)
254 int i;
256 for (i = 0; i < n; i++)
257 low[i] = 0;
259 for (i = 0; i < n; i++)
261 if (MEM_P (operands[i])
262 && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
263 || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
265 rtx addr = XEXP (operands[i], 0);
266 operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
268 else if (optimize_size && MEM_P (operands[i])
269 && REG_P (XEXP (operands[i], 0))
270 && (code != MINUS || operands[1] != const0_rtx)
271 && find_regno_note (insn, REG_DEAD,
272 REGNO (XEXP (operands[i], 0))))
274 low[i] = gen_rtx_MEM (SImode,
275 gen_rtx_POST_INC (Pmode,
276 XEXP (operands[i], 0)));
277 operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
279 else
281 low[i] = operand_subword (operands[i], 0, 0, DImode);
282 operands[i] = operand_subword (operands[i], 1, 0, DImode);
/* Print the VAX assembler syntax for memory address ADDR to FILE,
   decomposing a PLUS address into its displacement (OFFSET), base
   register (BREG) and index register (IREG) parts, e.g.
   "disp(breg)[ireg]".  Emits output_operand_lossage diagnostics for
   combinations that cannot be encoded under PIC.  */
287 void
288 print_operand_address (FILE * file, rtx addr)
290 rtx orig = addr;
291 rtx reg1, breg, ireg;
292 rtx offset;
294 retry:
295 switch (GET_CODE (addr))
297 case MEM:
298 fprintf (file, "*");
299 addr = XEXP (addr, 0);
300 goto retry;
302 case REG:
303 fprintf (file, "(%s)", reg_names[REGNO (addr)]);
304 break;
306 case PRE_DEC:
307 fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
308 break;
310 case POST_INC:
311 fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
312 break;
314 case PLUS:
315 /* There can be either two or three things added here.  One must be a
316 REG.  One can be either a REG or a MULT of a REG and an appropriate
317 constant, and the third can only be a constant or a MEM.
319 We get these two or three things and put the constant or MEM in
320 OFFSET, the MULT or REG in IREG, and the REG in BREG.  If we have
321 a register and can't tell yet if it is a base or index register,
322 put it into REG1.  */
324 reg1 = 0; ireg = 0; breg = 0; offset = 0;
/* First pass: peel one addend off the PLUS into the appropriate
   slot, leaving the remainder in ADDR.  */
326 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
327 || MEM_P (XEXP (addr, 0)))
329 offset = XEXP (addr, 0);
330 addr = XEXP (addr, 1);
332 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
333 || MEM_P (XEXP (addr, 1)))
335 offset = XEXP (addr, 1);
336 addr = XEXP (addr, 0);
338 else if (GET_CODE (XEXP (addr, 1)) == MULT)
340 ireg = XEXP (addr, 1);
341 addr = XEXP (addr, 0);
343 else if (GET_CODE (XEXP (addr, 0)) == MULT)
345 ireg = XEXP (addr, 0);
346 addr = XEXP (addr, 1);
348 else if (REG_P (XEXP (addr, 1)))
350 reg1 = XEXP (addr, 1);
351 addr = XEXP (addr, 0);
353 else if (REG_P (XEXP (addr, 0)))
355 reg1 = XEXP (addr, 0);
356 addr = XEXP (addr, 1);
358 else
359 gcc_unreachable ();
/* Second pass: classify what remains -- a lone REG or MULT, or a
   nested PLUS contributing up to two more parts.  */
361 if (REG_P (addr))
363 if (reg1)
364 ireg = addr;
365 else
366 reg1 = addr;
368 else if (GET_CODE (addr) == MULT)
369 ireg = addr;
370 else
372 gcc_assert (GET_CODE (addr) == PLUS);
373 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
374 || MEM_P (XEXP (addr, 0)))
376 if (offset)
378 if (CONST_INT_P (offset))
379 offset = plus_constant (Pmode, XEXP (addr, 0),
380 INTVAL (offset));
381 else
383 gcc_assert (CONST_INT_P (XEXP (addr, 0)));
384 offset = plus_constant (Pmode, offset,
385 INTVAL (XEXP (addr, 0)));
388 offset = XEXP (addr, 0);
390 else if (REG_P (XEXP (addr, 0)))
392 if (reg1)
393 ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
394 else
395 reg1 = XEXP (addr, 0);
397 else
399 gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
400 gcc_assert (!ireg);
401 ireg = XEXP (addr, 0);
404 if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
405 || MEM_P (XEXP (addr, 1)))
407 if (offset)
409 if (CONST_INT_P (offset))
410 offset = plus_constant (Pmode, XEXP (addr, 1),
411 INTVAL (offset));
412 else
414 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
415 offset = plus_constant (Pmode, offset,
416 INTVAL (XEXP (addr, 1)));
419 offset = XEXP (addr, 1);
421 else if (REG_P (XEXP (addr, 1)))
423 if (reg1)
424 ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
425 else
426 reg1 = XEXP (addr, 1);
428 else
430 gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
431 gcc_assert (!ireg);
432 ireg = XEXP (addr, 1);
436 /* If REG1 is nonzero, figure out if it is a base or index register.  */
437 if (reg1)
439 if (breg
440 || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
441 || (offset
442 && (MEM_P (offset)
443 || (flag_pic && symbolic_operand (offset, SImode)))))
445 gcc_assert (!ireg);
446 ireg = reg1;
448 else
449 breg = reg1;
452 if (offset != 0)
454 if (flag_pic && symbolic_operand (offset, SImode))
456 if (breg && ireg)
458 debug_rtx (orig);
459 output_operand_lossage ("symbol used with both base and indexed registers");
462 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
463 if (flag_pic > 1 && GET_CODE (offset) == CONST
464 && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
465 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
467 debug_rtx (orig);
468 output_operand_lossage ("symbol with offset used in PIC mode");
470 #endif
472 /* symbol(reg) isn't PIC, but symbol[reg] is.  */
473 if (breg)
475 ireg = breg;
476 breg = 0;
481 output_address (VOIDmode, offset);
/* Finally emit the base as "(reg)" and the index as "[reg]".  */
484 if (breg != 0)
485 fprintf (file, "(%s)", reg_names[REGNO (breg)]);
487 if (ireg != 0)
489 if (GET_CODE (ireg) == MULT)
490 ireg = XEXP (ireg, 0);
491 gcc_assert (REG_P (ireg));
492 fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
494 break;
496 default:
497 output_addr_const (file, addr);
/* Print operand X to FILE according to the template modifier CODE:
   '#' and '|' emit the double-precision suffix char and register
   prefix; 'c'/'C' emit the (reversed) condition name; 'D', 'P', 'N',
   'R', 'H', 'h', 'B', 'b', 'M' and 'x' emit transformed forms of a
   CONST_INT; otherwise registers, memory, float constants and
   symbolic immediates are printed directly.  */
501 void
502 print_operand (FILE *file, rtx x, int code)
504 if (code == '#')
505 fputc (ASM_DOUBLE_CHAR, file);
506 else if (code == '|')
507 fputs (REGISTER_PREFIX, file);
508 else if (code == 'c')
509 fputs (cond_name (x), file);
510 else if (code == 'C')
511 fputs (rev_cond_name (x), file);
512 else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
513 fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
514 else if (code == 'P' && CONST_INT_P (x))
515 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
516 else if (code == 'N' && CONST_INT_P (x))
517 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
518 /* rotl instruction cannot deal with negative arguments.  */
519 else if (code == 'R' && CONST_INT_P (x))
520 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
521 else if (code == 'H' && CONST_INT_P (x))
522 fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
523 else if (code == 'h' && CONST_INT_P (x))
524 fprintf (file, "$%d", (short) - INTVAL (x));
525 else if (code == 'B' && CONST_INT_P (x))
526 fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
527 else if (code == 'b' && CONST_INT_P (x))
528 fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
529 else if (code == 'M' && CONST_INT_P (x))
530 fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
531 else if (code == 'x' && CONST_INT_P (x))
532 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
533 else if (REG_P (x))
534 fprintf (file, "%s", reg_names[REGNO (x)]);
535 else if (MEM_P (x))
536 output_address (GET_MODE (x), XEXP (x, 0));
537 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
539 char dstr[30];
540 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
541 sizeof (dstr), 0, 1);
542 fprintf (file, "$0f%s", dstr);
544 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
546 char dstr[30];
547 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
548 sizeof (dstr), 0, 1);
549 fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
551 else
553 if (flag_pic > 1 && symbolic_operand (x, SImode))
555 debug_rtx (x);
556 output_operand_lossage ("symbol used as immediate operand");
558 putc ('$', file);
559 output_addr_const (file, x);
/* Return the VAX branch-condition mnemonic suffix for comparison
   code OP (e.g. EQ -> "eql", LTU -> "lssu").  */
563 const char *
564 cond_name (rtx op)
566 switch (GET_CODE (op))
568 case NE:
569 return "neq";
570 case EQ:
571 return "eql";
572 case GE:
573 return "geq";
574 case GT:
575 return "gtr";
576 case LE:
577 return "leq";
578 case LT:
579 return "lss";
580 case GEU:
581 return "gequ";
582 case GTU:
583 return "gtru";
584 case LEU:
585 return "lequ";
586 case LTU:
587 return "lssu";
589 default:
590 gcc_unreachable ();
/* Return the VAX mnemonic suffix for the LOGICAL REVERSE of
   comparison code OP (e.g. EQ -> "neq"), used by the 'C' operand
   modifier to emit the complementary branch.  */
594 const char *
595 rev_cond_name (rtx op)
597 switch (GET_CODE (op))
599 case EQ:
600 return "neq";
601 case NE:
602 return "eql";
603 case LT:
604 return "geq";
605 case LE:
606 return "gtr";
607 case GT:
608 return "leq";
609 case GE:
610 return "lss";
611 case LTU:
612 return "gequ";
613 case LEU:
614 return "gtru";
615 case GTU:
616 return "lequ";
617 case GEU:
618 return "lssu";
620 default:
621 gcc_unreachable ();
/* Return true if C is a floating-point constant that the VAX can
   encode cheaply as a short literal: 0, 1 or 2, or any of 2^0..2^6
   or their exact reciprocals.  */
625 static bool
626 vax_float_literal (rtx c)
628 machine_mode mode;
629 const REAL_VALUE_TYPE *r;
630 REAL_VALUE_TYPE s;
631 int i;
633 if (GET_CODE (c) != CONST_DOUBLE)
634 return false;
636 mode = GET_MODE (c);
638 if (c == const_tiny_rtx[(int) mode][0]
639 || c == const_tiny_rtx[(int) mode][1]
640 || c == const_tiny_rtx[(int) mode][2])
641 return true;
643 r = CONST_DOUBLE_REAL_VALUE (c);
/* Test each power of two 1,2,...,64 and its exact inverse.  */
645 for (i = 0; i < 7; i++)
647 int x = 1 << i;
648 bool ok;
649 real_from_integer (&s, mode, x, SIGNED);
651 if (real_equal (r, &s))
652 return true;
653 ok = exact_real_inverse (mode, &s);
654 gcc_assert (ok);
655 if (real_equal (r, &s))
656 return true;
658 return false;
662 /* Return the cost in cycles of a memory address, relative to register
663 indirect.
665 Each of the following adds the indicated number of cycles:
667 1 - symbolic address
668 1 - pre-decrement
669 1 - indexing and/or offset(register)
670 2 - indirect */
/* Helper shared by vax_address_cost and vax_rtx_costs: walks ADDR
   (flattening up to a three-way PLUS) and sums the cycle penalties
   listed above.  */
673 static int
674 vax_address_cost_1 (rtx addr)
676 int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
677 rtx plus_op0 = 0, plus_op1 = 0;
678 restart:
679 switch (GET_CODE (addr))
681 case PRE_DEC:
682 predec = 1;
683 /* FALLTHRU */
684 case REG:
685 case SUBREG:
686 case POST_INC:
687 reg = 1;
688 break;
689 case MULT:
690 indexed = 1;	/* 2 on VAX 2 */
691 break;
692 case CONST_INT:
693 /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
694 if (offset == 0)
695 offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
696 break;
697 case CONST:
698 case SYMBOL_REF:
699 offset = 1;	/* 2 on VAX 2 */
700 break;
701 case LABEL_REF:	/* this is probably a byte offset from the pc */
702 if (offset == 0)
703 offset = 1;
704 break;
705 case PLUS:
706 if (plus_op0)
707 plus_op1 = XEXP (addr, 0);
708 else
709 plus_op0 = XEXP (addr, 0);
710 addr = XEXP (addr, 1);
711 goto restart;
712 case MEM:
713 indir = 2;	/* 3 on VAX 2 */
714 addr = XEXP (addr, 0);
715 goto restart;
716 default:
717 break;
720 /* Up to 3 things can be added in an address.  They are stored in
721 plus_op0, plus_op1, and addr.  */
723 if (plus_op0)
725 addr = plus_op0;
726 plus_op0 = 0;
727 goto restart;
729 if (plus_op1)
731 addr = plus_op1;
732 plus_op1 = 0;
733 goto restart;
735 /* Indexing and register+offset can both be used (except on a VAX 2)
736 without increasing execution time over either one alone.  */
737 if (reg && indexed && offset)
738 return reg + indir + offset + predec;
739 return reg + indexed + indir + offset + predec;
/* Implement TARGET_ADDRESS_COST: 1 for plain register indirect,
   otherwise 1 plus the extra cycles computed by vax_address_cost_1.  */
742 static int
743 vax_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
744 addr_space_t as ATTRIBUTE_UNUSED,
745 bool speed ATTRIBUTE_UNUSED)
747 return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
750 /* Cost of an expression on a VAX.  This version has costs tuned for the
751 CVAX chip (found in the VAX 3 series) with comments for variations on
752 other models.
754 FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
755 and FLOAT_TRUNCATE.  We need a -mcpu option to allow provision of
756 costs on a per cpu basis.  */
/* Implement TARGET_RTX_COSTS.  The switch sets *TOTAL for the top
   operation (returning true to stop recursion where the cost is
   final); the trailing loop then adds a surcharge for each operand
   that is not a register or short constant.  */
758 static bool
759 vax_rtx_costs (rtx x, machine_mode mode, int outer_code,
760 int opno ATTRIBUTE_UNUSED,
761 int *total, bool speed ATTRIBUTE_UNUSED)
763 int code = GET_CODE (x);
764 int i = 0;				   /* may be modified in switch */
765 const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */
767 switch (code)
769 /* On a VAX, constants from 0..63 are cheap because they can use the
770 1 byte literal constant format.  Compare to -1 should be made cheap
771 so that decrement-and-branch insns can be formed more easily (if
772 the value -1 is copied to a register some decrement-and-branch
773 patterns will not match).  */
774 case CONST_INT:
775 if (INTVAL (x) == 0)
777 *total = 0;
778 return true;
780 if (outer_code == AND)
782 *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
783 return true;
785 if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
786 || (outer_code == COMPARE
787 && INTVAL (x) == -1)
788 || ((outer_code == PLUS || outer_code == MINUS)
789 && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
791 *total = 1;
792 return true;
794 /* FALLTHRU */
796 case CONST:
797 case LABEL_REF:
798 case SYMBOL_REF:
799 *total = 3;
800 return true;
802 case CONST_DOUBLE:
803 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
804 *total = vax_float_literal (x) ? 5 : 8;
805 else
806 *total = ((CONST_DOUBLE_HIGH (x) == 0
807 && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
808 || (outer_code == PLUS
809 && CONST_DOUBLE_HIGH (x) == -1
810 && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
811 ? 2 : 5;
812 return true;
814 case POST_INC:
815 *total = 2;
816 return true;		/* Implies register operand.  */
818 case PRE_DEC:
819 *total = 3;
820 return true;		/* Implies register operand.  */
822 case MULT:
823 switch (mode)
825 case E_DFmode:
826 *total = 16;		/* 4 on VAX 9000 */
827 break;
828 case E_SFmode:
829 *total = 9;		/* 4 on VAX 9000, 12 on VAX 2 */
830 break;
831 case E_DImode:
832 *total = 16;		/* 6 on VAX 9000, 28 on VAX 2 */
833 break;
834 case E_SImode:
835 case E_HImode:
836 case E_QImode:
837 *total = 10;		/* 3-4 on VAX 9000, 20-28 on VAX 2 */
838 break;
839 default:
840 *total = MAX_COST;	/* Mode is not supported.  */
841 return true;
843 break;
845 case UDIV:
846 if (mode != SImode)
848 *total = MAX_COST;	/* Mode is not supported.  */
849 return true;
851 *total = 17;
852 break;
854 case DIV:
855 if (mode == DImode)
856 *total = 30;		/* Highly variable.  */
857 else if (mode == DFmode)
858 /* divide takes 28 cycles if the result is not zero, 13 otherwise */
859 *total = 24;
860 else
861 *total = 11;		/* 25 on VAX 2 */
862 break;
864 case MOD:
865 *total = 23;
866 break;
868 case UMOD:
869 if (mode != SImode)
871 *total = MAX_COST;	/* Mode is not supported.  */
872 return true;
874 *total = 29;
875 break;
877 case FLOAT:
878 *total = (6		/* 4 on VAX 9000 */
879 + (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
880 break;
882 case FIX:
883 *total = 7;		/* 17 on VAX 2 */
884 break;
886 case ASHIFT:
887 case LSHIFTRT:
888 case ASHIFTRT:
889 if (mode == DImode)
890 *total = 12;
891 else
892 *total = 10;		/* 6 on VAX 9000 */
893 break;
895 case ROTATE:
896 case ROTATERT:
897 *total = 6;		/* 5 on VAX 2, 4 on VAX 9000 */
898 if (CONST_INT_P (XEXP (x, 1)))
899 fmt = "e";		/* all constant rotate counts are short */
900 break;
902 case PLUS:
903 case MINUS:
904 *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
905 /* Small integer operands can use subl2 and addl2.  */
906 if ((CONST_INT_P (XEXP (x, 1)))
907 && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
908 fmt = "e";
909 break;
911 case IOR:
912 case XOR:
913 *total = 3;
914 break;
916 case AND:
917 /* AND is special because the first operand is complemented.  */
918 *total = 3;
919 if (CONST_INT_P (XEXP (x, 0)))
921 if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
922 *total = 4;
923 fmt = "e";
924 i = 1;
926 break;
928 case NEG:
929 if (mode == DFmode)
930 *total = 9;
931 else if (mode == SFmode)
932 *total = 6;
933 else if (mode == DImode)
934 *total = 4;
935 else
936 *total = 2;
937 break;
939 case NOT:
940 *total = 2;
941 break;
943 case ZERO_EXTRACT:
944 case SIGN_EXTRACT:
945 *total = 15;
946 break;
948 case MEM:
949 if (mode == DImode || mode == DFmode)
950 *total = 5;		/* 7 on VAX 2 */
951 else
952 *total = 3;		/* 4 on VAX 2 */
953 x = XEXP (x, 0);
954 if (!REG_P (x) && GET_CODE (x) != POST_INC)
955 *total += vax_address_cost_1 (x);
956 return true;
958 case FLOAT_EXTEND:
959 case FLOAT_TRUNCATE:
960 case TRUNCATE:
961 *total = 3;		/* FIXME: Costs need to be checked  */
962 break;
964 default:
965 return false;
968 /* Now look inside the expression.  Operands which are not registers or
969 short constants add to the cost.
971 FMT and I may have been adjusted in the switch above for instructions
972 which require special handling.  */
974 while (*fmt++ == 'e')
976 rtx op = XEXP (x, i);
978 i += 1;
979 code = GET_CODE (op);
981 /* A NOT is likely to be found as the first operand of an AND
982 (in which case the relevant cost is of the operand inside
983 the not) and not likely to be found anywhere else.  */
984 if (code == NOT)
985 op = XEXP (op, 0), code = GET_CODE (op);
987 switch (code)
989 case CONST_INT:
990 if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
991 && mode != QImode)
992 *total += 1;		/* 2 on VAX 2 */
993 break;
994 case CONST:
995 case LABEL_REF:
996 case SYMBOL_REF:
997 *total += 1;		/* 2 on VAX 2 */
998 break;
999 case CONST_DOUBLE:
1000 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
1002 /* Registers are faster than floating point constants -- even
1003 those constants which can be encoded in a single byte.  */
1004 if (vax_float_literal (op))
1005 *total += 1;
1006 else
1007 *total += (GET_MODE (x) == DFmode) ? 3 : 2;
1009 else
1011 if (CONST_DOUBLE_HIGH (op) != 0
1012 || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
1013 *total += 2;
1015 break;
1016 case MEM:
1017 *total += 1;		/* 2 on VAX 2 */
1018 if (!REG_P (XEXP (op, 0)))
1019 *total += vax_address_cost_1 (XEXP (op, 0));
1020 break;
1021 case REG:
1022 case SUBREG:
1023 break;
1024 default:
1025 *total += 1;
1026 break;
1029 return true;
1032 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
1033 Used for C++ multiple inheritance.
1034 .mask	^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11>  #conservative entry mask
1035 addl2	$DELTA, 4(ap)	#adjust first argument
1036 jmp	FUNCTION+2	#jump beyond FUNCTION's entry mask
/* Implement TARGET_ASM_OUTPUT_MI_THUNK.  */
1039 static void
1040 vax_output_mi_thunk (FILE * file,
1041 tree thunk ATTRIBUTE_UNUSED,
1042 HOST_WIDE_INT delta,
1043 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1044 tree function)
1046 fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
1047 asm_fprintf (file, ",4(%Rap)\n");
1048 fprintf (file, "\tjmp ");
1049 assemble_name (file,  XSTR (XEXP (DECL_RTL (function), 0), 0));
1050 fprintf (file, "+2\n");
/* Implement TARGET_STRUCT_VALUE_RTX: the hidden pointer to a
   structure return value is passed in VAX_STRUCT_VALUE_REGNUM.  */
1053 static rtx
1054 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1055 int incoming ATTRIBUTE_UNUSED)
1057 return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
/* Implement TARGET_BUILTIN_SETJMP_FRAME_VALUE: builtin setjmp saves
   the hard frame pointer.  */
1060 static rtx
1061 vax_builtin_setjmp_frame_value (void)
1063 return hard_frame_pointer_rtx;
1066 /* Worker function for NOTICE_UPDATE_CC.  */
/* Record in the global cc_status what the just-emitted pattern EXP
   did to the condition codes, so later comparisons against 0 can be
   elided.  */
1068 void
1069 vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
1071 if (GET_CODE (exp) == SET)
1073 if (GET_CODE (SET_SRC (exp)) == CALL)
1074 CC_STATUS_INIT;
1075 else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
1076 && GET_CODE (SET_DEST (exp)) != PC)
1078 cc_status.flags = 0;
1079 /* The integer operations below don't set carry or
1080 set it in an incompatible way.  That's ok though
1081 as the Z bit is all we need when doing unsigned
1082 comparisons on the result of these insns (since
1083 they're always with 0).  Set CC_NO_OVERFLOW to
1084 generate the correct unsigned branches.  */
1085 switch (GET_CODE (SET_SRC (exp)))
1087 case NEG:
1088 if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
1089 break;
1090 /* FALLTHRU */
1091 case AND:
1092 case IOR:
1093 case XOR:
1094 case NOT:
1095 case MEM:
1096 case REG:
1097 cc_status.flags = CC_NO_OVERFLOW;
1098 break;
1099 default:
1100 break;
1102 cc_status.value1 = SET_DEST (exp);
1103 cc_status.value2 = SET_SRC (exp);
1106 else if (GET_CODE (exp) == PARALLEL
1107 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
1109 if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
1110 CC_STATUS_INIT;
1111 else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
1113 cc_status.flags = 0;
1114 cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
1115 cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
1117 else
1118 /* PARALLELs whose first element sets the PC are aob,
1119 sob insns.  They do change the cc's.  */
1120 CC_STATUS_INIT;
1122 else
1123 CC_STATUS_INIT;
/* Invalidate value2 when it could be stale: it overlaps a register
   just set in value1, or both tracked values are memory.  */
1124 if (cc_status.value1 && REG_P (cc_status.value1)
1125 && cc_status.value2
1126 && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
1127 cc_status.value2 = 0;
1128 if (cc_status.value1 && MEM_P (cc_status.value1)
1129 && cc_status.value2
1130 && MEM_P (cc_status.value2))
1131 cc_status.value2 = 0;
1132 /* Actual condition, one line up, should be that value2's address
1133 depends on value1, but that is too much of a pain.  */
1136 /* Output integer move instructions. */
1138 const char *
1139 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1140 machine_mode mode)
1142 rtx hi[3], lo[3];
1143 const char *pattern_hi, *pattern_lo;
1145 switch (mode)
1147 case E_DImode:
1148 if (operands[1] == const0_rtx)
1149 return "clrq %0";
1150 if (TARGET_QMATH && optimize_size
1151 && (CONST_INT_P (operands[1])
1152 || GET_CODE (operands[1]) == CONST_DOUBLE))
1154 unsigned HOST_WIDE_INT hval, lval;
1155 int n;
1157 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1159 gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1161 /* Make sure only the low 32 bits are valid. */
1162 lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1163 hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1165 else
1167 lval = INTVAL (operands[1]);
1168 hval = 0;
1171 /* Here we see if we are trying to see if the 64bit value is really
1172 a 6bit shifted some arbitrary amount. If so, we can use ashq to
1173 shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1174 8 bytes - 1 shift byte - 1 short literal byte. */
1175 if (lval != 0
1176 && (n = exact_log2 (lval & (- lval))) != -1
1177 && (lval >> n) < 64)
1179 lval >>= n;
1181 /* On 32bit platforms, if the 6bits didn't overflow into the
1182 upper 32bit value that value better be 0. If we have
1183 overflowed, make sure it wasn't too much. */
1184 if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
1186 if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1187 n = 0; /* failure */
1188 else
1189 lval |= hval << (32 - n);
1191 /* If n is 0, then ashq is not the best way to emit this. */
1192 if (n > 0)
1194 operands[1] = GEN_INT (lval);
1195 operands[2] = GEN_INT (n);
1196 return "ashq %2,%D1,%0";
1198 #if HOST_BITS_PER_WIDE_INT == 32
1200 /* On 32bit platforms, if the low 32bit value is 0, checkout the
1201 upper 32bit value. */
1202 else if (hval != 0
1203 && (n = exact_log2 (hval & (- hval)) - 1) != -1
1204 && (hval >> n) < 64)
1206 operands[1] = GEN_INT (hval >> n);
1207 operands[2] = GEN_INT (n + 32);
1208 return "ashq %2,%D1,%0";
1209 #endif
1213 if (TARGET_QMATH
1214 && (!MEM_P (operands[0])
1215 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1216 || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1217 || !illegal_addsub_di_memory_operand (operands[0], DImode))
1218 && ((CONST_INT_P (operands[1])
1219 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1220 || GET_CODE (operands[1]) == CONST_DOUBLE))
1222 hi[0] = operands[0];
1223 hi[1] = operands[1];
1225 split_quadword_operands (insn, SET, hi, lo, 2);
1227 pattern_lo = vax_output_int_move (NULL, lo, SImode);
1228 pattern_hi = vax_output_int_move (NULL, hi, SImode);
1230 /* The patterns are just movl/movl or pushl/pushl then a movq will
1231 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1232 bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1233 value bytes. */
1234 if ((!strncmp (pattern_lo, "movl", 4)
1235 && !strncmp (pattern_hi, "movl", 4))
1236 || (!strncmp (pattern_lo, "pushl", 5)
1237 && !strncmp (pattern_hi, "pushl", 5)))
1238 return "movq %1,%0";
1240 if (MEM_P (operands[0])
1241 && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1243 output_asm_insn (pattern_hi, hi);
1244 operands[0] = lo[0];
1245 operands[1] = lo[1];
1246 operands[2] = lo[2];
1247 return pattern_lo;
1249 else
1251 output_asm_insn (pattern_lo, lo);
1252 operands[0] = hi[0];
1253 operands[1] = hi[1];
1254 operands[2] = hi[2];
1255 return pattern_hi;
1258 return "movq %1,%0";
1260 case E_SImode:
1261 if (symbolic_operand (operands[1], SImode))
1263 if (push_operand (operands[0], SImode))
1264 return "pushab %a1";
1265 return "movab %a1,%0";
1268 if (operands[1] == const0_rtx)
1270 if (push_operand (operands[1], SImode))
1271 return "pushl %1";
1272 return "clrl %0";
1275 if (CONST_INT_P (operands[1])
1276 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1278 HOST_WIDE_INT i = INTVAL (operands[1]);
1279 int n;
1280 if ((unsigned HOST_WIDE_INT)(~i) < 64)
1281 return "mcoml %N1,%0";
1282 if ((unsigned HOST_WIDE_INT)i < 0x100)
1283 return "movzbl %1,%0";
1284 if (i >= -0x80 && i < 0)
1285 return "cvtbl %1,%0";
1286 if (optimize_size
1287 && (n = exact_log2 (i & (-i))) != -1
1288 && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1290 operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1291 operands[2] = GEN_INT (n);
1292 return "ashl %2,%1,%0";
1294 if ((unsigned HOST_WIDE_INT)i < 0x10000)
1295 return "movzwl %1,%0";
1296 if (i >= -0x8000 && i < 0)
1297 return "cvtwl %1,%0";
1299 if (push_operand (operands[0], SImode))
1300 return "pushl %1";
1301 return "movl %1,%0";
1303 case E_HImode:
1304 if (CONST_INT_P (operands[1]))
1306 HOST_WIDE_INT i = INTVAL (operands[1]);
1307 if (i == 0)
1308 return "clrw %0";
1309 else if ((unsigned HOST_WIDE_INT)i < 64)
1310 return "movw %1,%0";
1311 else if ((unsigned HOST_WIDE_INT)~i < 64)
1312 return "mcomw %H1,%0";
1313 else if ((unsigned HOST_WIDE_INT)i < 256)
1314 return "movzbw %1,%0";
1315 else if (i >= -0x80 && i < 0)
1316 return "cvtbw %1,%0";
1318 return "movw %1,%0";
1320 case E_QImode:
1321 if (CONST_INT_P (operands[1]))
1323 HOST_WIDE_INT i = INTVAL (operands[1]);
1324 if (i == 0)
1325 return "clrb %0";
1326 else if ((unsigned HOST_WIDE_INT)~i < 64)
1327 return "mcomb %B1,%0";
1329 return "movb %1,%0";
1331 default:
1332 gcc_unreachable ();
1336 /* Output integer add instructions.
1338 The space-time-opcode tradeoffs for addition vary by model of VAX.
1340 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1341 but it not faster on other models.
1343 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1344 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1345 a register is used in an address too soon after it is set.
1346 Compromise by using movab only when it is shorter than the add
1347 or the base register in the address is one of sp, ap, and fp,
1348 which are not modified very often. */
1350 const char *
1351 vax_output_int_add (rtx insn, rtx *operands, machine_mode mode)
1353 switch (mode)
1355 case E_DImode:
1357 rtx low[3];
1358 const char *pattern;
1359 int carry = 1;
1360 bool sub;
1362 if (TARGET_QMATH && 0)
1363 debug_rtx (insn);
1365 split_quadword_operands (insn, PLUS, operands, low, 3);
1367 if (TARGET_QMATH)
1369 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1370 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1371 gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1372 gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1373 #endif
1375 /* No reason to add a 0 to the low part and thus no carry, so just
1376 emit the appropriate add/sub instruction. */
1377 if (low[2] == const0_rtx)
1378 return vax_output_int_add (NULL, operands, SImode);
1380 /* Are we doing addition or subtraction? */
1381 sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1383 /* We can't use vax_output_int_add since some the patterns don't
1384 modify the carry bit. */
1385 if (sub)
1387 if (low[2] == constm1_rtx)
1388 pattern = "decl %0";
1389 else
1390 pattern = "subl2 $%n2,%0";
1392 else
1394 if (low[2] == const1_rtx)
1395 pattern = "incl %0";
1396 else
1397 pattern = "addl2 %2,%0";
1399 output_asm_insn (pattern, low);
1401 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1402 two 32bit parts, we complement each and then add one to
1403 low part. We know that the low part can't overflow since
1404 it's value can never be 0. */
1405 if (sub)
1406 return "sbwc %N2,%0";
1407 return "adwc %2,%0";
1410 /* Add low parts. */
1411 if (rtx_equal_p (operands[0], operands[1]))
1413 if (low[2] == const0_rtx)
1414 /* Should examine operand, punt if not POST_INC. */
1415 pattern = "tstl %0", carry = 0;
1416 else if (low[2] == const1_rtx)
1417 pattern = "incl %0";
1418 else
1419 pattern = "addl2 %2,%0";
1421 else
1423 if (low[2] == const0_rtx)
1424 pattern = "movl %1,%0", carry = 0;
1425 else
1426 pattern = "addl3 %2,%1,%0";
1428 if (pattern)
1429 output_asm_insn (pattern, low);
1430 if (!carry)
1431 /* If CARRY is 0, we don't have any carry value to worry about. */
1432 return get_insn_template (CODE_FOR_addsi3, insn);
1433 /* %0 = C + %1 + %2 */
1434 if (!rtx_equal_p (operands[0], operands[1]))
1435 output_asm_insn ((operands[1] == const0_rtx
1436 ? "clrl %0"
1437 : "movl %1,%0"), operands);
1438 return "adwc %2,%0";
1441 case E_SImode:
1442 if (rtx_equal_p (operands[0], operands[1]))
1444 if (operands[2] == const1_rtx)
1445 return "incl %0";
1446 if (operands[2] == constm1_rtx)
1447 return "decl %0";
1448 if (CONST_INT_P (operands[2])
1449 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1450 return "subl2 $%n2,%0";
1451 if (CONST_INT_P (operands[2])
1452 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1453 && REG_P (operands[1])
1454 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1455 || REGNO (operands[1]) > 11))
1456 return "movab %c2(%1),%0";
1457 if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1458 return "movab %a2[%0],%0";
1459 return "addl2 %2,%0";
1462 if (rtx_equal_p (operands[0], operands[2]))
1464 if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1465 return "movab %a1[%0],%0";
1466 return "addl2 %1,%0";
1469 if (CONST_INT_P (operands[2])
1470 && INTVAL (operands[2]) < 32767
1471 && INTVAL (operands[2]) > -32768
1472 && REG_P (operands[1])
1473 && push_operand (operands[0], SImode))
1474 return "pushab %c2(%1)";
1476 if (CONST_INT_P (operands[2])
1477 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1478 return "subl3 $%n2,%1,%0";
1480 if (CONST_INT_P (operands[2])
1481 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1482 && REG_P (operands[1])
1483 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1484 || REGNO (operands[1]) > 11))
1485 return "movab %c2(%1),%0";
1487 /* Add this if using gcc on a VAX 3xxx:
1488 if (REG_P (operands[1]) && REG_P (operands[2]))
1489 return "movab (%1)[%2],%0";
1492 if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1494 if (push_operand (operands[0], SImode))
1495 return "pushab %a2[%1]";
1496 return "movab %a2[%1],%0";
1499 if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1501 if (push_operand (operands[0], SImode))
1502 return "pushab %a1[%2]";
1503 return "movab %a1[%2],%0";
1506 if (flag_pic && REG_P (operands[0])
1507 && symbolic_operand (operands[2], SImode))
1508 return "movab %a2,%0;addl2 %1,%0";
1510 if (flag_pic
1511 && (symbolic_operand (operands[1], SImode)
1512 || symbolic_operand (operands[1], SImode)))
1513 debug_rtx (insn);
1515 return "addl3 %1,%2,%0";
1517 case E_HImode:
1518 if (rtx_equal_p (operands[0], operands[1]))
1520 if (operands[2] == const1_rtx)
1521 return "incw %0";
1522 if (operands[2] == constm1_rtx)
1523 return "decw %0";
1524 if (CONST_INT_P (operands[2])
1525 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1526 return "subw2 $%n2,%0";
1527 return "addw2 %2,%0";
1529 if (rtx_equal_p (operands[0], operands[2]))
1530 return "addw2 %1,%0";
1531 if (CONST_INT_P (operands[2])
1532 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1533 return "subw3 $%n2,%1,%0";
1534 return "addw3 %1,%2,%0";
1536 case E_QImode:
1537 if (rtx_equal_p (operands[0], operands[1]))
1539 if (operands[2] == const1_rtx)
1540 return "incb %0";
1541 if (operands[2] == constm1_rtx)
1542 return "decb %0";
1543 if (CONST_INT_P (operands[2])
1544 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1545 return "subb2 $%n2,%0";
1546 return "addb2 %2,%0";
1548 if (rtx_equal_p (operands[0], operands[2]))
1549 return "addb2 %1,%0";
1550 if (CONST_INT_P (operands[2])
1551 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1552 return "subb3 $%n2,%1,%0";
1553 return "addb3 %1,%2,%0";
1555 default:
1556 gcc_unreachable ();
/* Output integer subtract instructions for MODE.  Only DImode is handled
   here; for that mode the low words are subtracted first and the borrow
   is propagated into the high words with sbwc.  */

const char *
vax_output_int_subtract (rtx insn, rtx *operands, machine_mode mode)
{
  switch (mode)
    {
    case E_DImode:
      {
	rtx low[3];
	const char *pattern;
	int carry = 1;	/* Nonzero while the high half still needs the borrow.  */

	if (TARGET_QMATH && 0)
	  debug_rtx (insn);

	split_quadword_operands (insn, MINUS, operands, low, 3);

	if (TARGET_QMATH)
	  {
	    if (operands[1] == const0_rtx && low[1] == const0_rtx)
	      {
		/* Negation is tricky.  It's basically complement and increment.
		   Negate hi, then lo, and subtract the carry back.  */
		if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
		    || (MEM_P (operands[0])
			&& GET_CODE (XEXP (operands[0], 0)) == POST_INC))
		  fatal_insn ("illegal operand detected", insn);
		output_asm_insn ("mnegl %2,%0", operands);
		output_asm_insn ("mnegl %2,%0", low);
		return "sbwc $0,%0";
	      }
	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
	    gcc_assert (rtx_equal_p (low[0], low[1]));
	    if (low[2] == const1_rtx)
	      output_asm_insn ("decl %0", low);
	    else
	      output_asm_insn ("subl2 %2,%0", low);
	    return "sbwc %2,%0";
	  }

	/* Subtract low parts.  */
	if (rtx_equal_p (operands[0], operands[1]))
	  {
	    if (low[2] == const0_rtx)
	      /* Subtracting 0 generates no borrow.  */
	      pattern = 0, carry = 0;
	    else if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else
	      pattern = "subl2 %2,%0";
	  }
	else
	  {
	    if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else if (low[2] == const0_rtx)
	      pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
	    else
	      pattern = "subl3 %2,%1,%0";
	  }
	if (pattern)
	  output_asm_insn (pattern, low);
	if (carry)
	  {
	    if (!rtx_equal_p (operands[0], operands[1]))
	      return "movl %1,%0;sbwc %2,%0";
	    return "sbwc %2,%0";
	    /* %0 = %2 - %1 - C */
	  }
	/* No borrow: the high half is a plain SImode subtract.  */
	return get_insn_template (CODE_FOR_subsi3, insn);
      }

    default:
      gcc_unreachable ();
    }
}
1635 /* True if X is an rtx for a constant that is a valid address. */
1637 bool
1638 legitimate_constant_address_p (rtx x)
1640 if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1641 || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1642 return true;
1643 if (GET_CODE (x) != CONST)
1644 return false;
1645 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1646 if (flag_pic
1647 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1648 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1649 return false;
1650 #endif
1651 return true;
/* The other macros defined here are used only in legitimate_address_p ().  */

/* Nonzero if X is a hard reg that can be used as an index
   or, if not strict, if it is a pseudo reg.  (With STRICT false any
   REG rtx is accepted, so not-yet-allocated pseudos pass.)  */

#define INDEX_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))

/* Nonzero if X is a hard reg that can be used as a base reg
   or, if not strict, if it is a pseudo reg.  */

#define BASE_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS

/* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
   are no SYMBOL_REFs for external symbols present.  INDIRECT is true
   when the address will be used through an extra level of indirection,
   which is what external symbols cannot survive here.  */

static bool
indirectable_constant_address_p (rtx x, bool indirect)
{
  if (GET_CODE (x) == SYMBOL_REF)
    /* A bare symbol is fine unless it is non-local, PIC, and indirected.  */
    return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;

  if (GET_CODE (x) == CONST)
    /* (const (plus sym off)): only the symbol's locality matters.  */
    return !flag_pic
	   || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
	   || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));

  return CONSTANT_ADDRESS_P (x);
}

#else /* not NO_EXTERNAL_INDIRECT_ADDRESS */

/* Without external-indirect restrictions this degenerates to the
   generic constant-address test.  */

static bool
indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
{
  return CONSTANT_ADDRESS_P (x);
}

#endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1695 /* True if X is an address which can be indirected. External symbols
1696 could be in a sharable image library, so we disallow those. */
1698 static bool
1699 indirectable_address_p (rtx x, bool strict, bool indirect)
1701 if (indirectable_constant_address_p (x, indirect)
1702 || BASE_REGISTER_P (x, strict))
1703 return true;
1704 if (GET_CODE (x) != PLUS
1705 || !BASE_REGISTER_P (XEXP (x, 0), strict)
1706 || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1707 return false;
1708 return indirectable_constant_address_p (XEXP (x, 1), indirect);
/* Return true if x is a valid address not using indexing.
   (This much is the easy part.)  */
static bool
nonindexed_address_p (rtx x, bool strict)
{
  rtx xfoo0;

  if (REG_P (x))
    {
      /* During reload a pseudo may stand for its memory equivalent.  */
      if (! reload_in_progress
	  || reg_equiv_mem (REGNO (x)) == 0
	  || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
	return true;
    }
  if (indirectable_constant_address_p (x, false))
    return true;
  if (indirectable_address_p (x, strict, false))
    return true;
  /* NOTE(review): XEXP (x, 0) is fetched before we know x is a MEM or
     autoincrement; presumably safe for every rtx code reaching here,
     but confirm.  */
  xfoo0 = XEXP (x, 0);
  if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
    return true;
  if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
      && BASE_REGISTER_P (xfoo0, strict))
    return true;
  return false;
}
1737 /* True if PROD is either a reg times size of mode MODE and MODE is less
1738 than or equal 8 bytes, or just a reg if MODE is one byte. */
1740 static bool
1741 index_term_p (rtx prod, machine_mode mode, bool strict)
1743 rtx xfoo0, xfoo1;
1745 if (GET_MODE_SIZE (mode) == 1)
1746 return BASE_REGISTER_P (prod, strict);
1748 if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1749 return false;
1751 xfoo0 = XEXP (prod, 0);
1752 xfoo1 = XEXP (prod, 1);
1754 if (CONST_INT_P (xfoo0)
1755 && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1756 && INDEX_REGISTER_P (xfoo1, strict))
1757 return true;
1759 if (CONST_INT_P (xfoo1)
1760 && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1761 && INDEX_REGISTER_P (xfoo0, strict))
1762 return true;
1764 return false;
1767 /* Return true if X is the sum of a register
1768 and a valid index term for mode MODE. */
1769 static bool
1770 reg_plus_index_p (rtx x, machine_mode mode, bool strict)
1772 rtx xfoo0, xfoo1;
1774 if (GET_CODE (x) != PLUS)
1775 return false;
1777 xfoo0 = XEXP (x, 0);
1778 xfoo1 = XEXP (x, 1);
1780 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1781 return true;
1783 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1784 return true;
1786 return false;
1789 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1790 static bool
1791 indexable_address_p (rtx xfoo0, rtx xfoo1, machine_mode mode, bool strict)
1793 if (!CONSTANT_ADDRESS_P (xfoo0))
1794 return false;
1795 if (BASE_REGISTER_P (xfoo1, strict))
1796 return !flag_pic || mode == QImode;
1797 if (flag_pic && symbolic_operand (xfoo0, SImode))
1798 return false;
1799 return reg_plus_index_p (xfoo1, mode, strict);
1802 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1803 that is a valid memory address for an instruction.
1804 The MODE argument is the machine mode for the MEM expression
1805 that wants to use this address. */
1806 bool
1807 vax_legitimate_address_p (machine_mode mode, rtx x, bool strict)
1809 rtx xfoo0, xfoo1;
1811 if (nonindexed_address_p (x, strict))
1812 return true;
1814 if (GET_CODE (x) != PLUS)
1815 return false;
1817 /* Handle <address>[index] represented with index-sum outermost */
1819 xfoo0 = XEXP (x, 0);
1820 xfoo1 = XEXP (x, 1);
1822 if (index_term_p (xfoo0, mode, strict)
1823 && nonindexed_address_p (xfoo1, strict))
1824 return true;
1826 if (index_term_p (xfoo1, mode, strict)
1827 && nonindexed_address_p (xfoo0, strict))
1828 return true;
1830 /* Handle offset(reg)[index] with offset added outermost */
1832 if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1833 || indexable_address_p (xfoo1, xfoo0, mode, strict))
1834 return true;
1836 return false;
1839 /* Return true if x (a legitimate address expression) has an effect that
1840 depends on the machine mode it is used for. On the VAX, the predecrement
1841 and postincrement address depend thus (the amount of decrement or
1842 increment being the length of the operand) and all indexed address depend
1843 thus (because the index scale factor is the length of the operand). */
1845 static bool
1846 vax_mode_dependent_address_p (const_rtx x, addr_space_t as ATTRIBUTE_UNUSED)
1848 rtx xfoo0, xfoo1;
1850 /* Auto-increment cases are now dealt with generically in recog.c. */
1851 if (GET_CODE (x) != PLUS)
1852 return false;
1854 xfoo0 = XEXP (x, 0);
1855 xfoo1 = XEXP (x, 1);
1857 if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1858 return false;
1859 if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1860 return false;
1861 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1862 return false;
1863 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
1864 return false;
1866 return true;
/* If X is a memory operand that is illegal for DImode add/sub, load its
   address into a fresh pseudo and return a DImode MEM through that
   register instead; otherwise return X unchanged.  MODE is only used
   for the legality test.  */

static rtx
fixup_mathdi_operand (rtx x, machine_mode mode)
{
  if (illegal_addsub_di_memory_operand (x, mode))
    {
      rtx addr = XEXP (x, 0);
      rtx temp = gen_reg_rtx (Pmode);
      rtx offset = 0;
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
      /* Under PIC, split (const (plus sym off)) so that only the symbol
	 is loaded into the register; the offset is re-added below.  */
      if (GET_CODE (addr) == CONST && flag_pic)
	{
	  offset = XEXP (XEXP (addr, 0), 1);
	  addr = XEXP (XEXP (addr, 0), 0);
	}
#endif
      emit_move_insn (temp, addr);
      if (offset)
	temp = gen_rtx_PLUS (Pmode, temp, offset);
      x = gen_rtx_MEM (DImode, temp);
    }
  return x;
}
/* Expand a DImode add or subtract: operands[0] = operands[1] CODE
   operands[2], where CODE is PLUS or MINUS.  Emits either the old
   combined DImode pattern (!TARGET_QMATH) or an SImode-plus-carry
   sequence.  */

void
vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
{
  /* Nonzero when the low word of operand 2 is zero, so only the high
     word needs a real add/sub and no carry can occur.  */
  int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
  rtx temp;

  rtx (*gen_old_insn)(rtx, rtx, rtx);	/* combined non-QMATH pattern */
  rtx (*gen_si_insn)(rtx, rtx, rtx);	/* SImode add/sub */
  rtx (*gen_insn)(rtx, rtx, rtx);	/* carry-propagating DImode op */

  if (code == PLUS)
    {
      gen_old_insn = gen_adddi3_old;
      gen_si_insn = gen_addsi3;
      gen_insn = gen_adcdi3;
    }
  else if (code == MINUS)
    {
      gen_old_insn = gen_subdi3_old;
      gen_si_insn = gen_subsi3;
      gen_insn = gen_sbcdi3;
    }
  else
    gcc_unreachable ();

  /* If this is addition (thus operands are commutative) and if there is one
     addend that duplicates the destination, we want that addend to be the
     first addend.  */
  if (code == PLUS
      && rtx_equal_p (operands[0], operands[2])
      && !rtx_equal_p (operands[1], operands[2]))
    {
      temp = operands[2];
      operands[2] = operands[1];
      operands[1] = temp;
    }

  if (!TARGET_QMATH)
    {
      emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
    }
  else if (hi_only)
    {
      /* Stage a memory source into the destination register first so the
	 SImode op can operate in place.  */
      if (!rtx_equal_p (operands[0], operands[1])
	  && (REG_P (operands[0]) && MEM_P (operands[1])))
	{
	  emit_move_insn (operands[0], operands[1]);
	  operands[1] = operands[0];
	}

      operands[0] = fixup_mathdi_operand (operands[0], DImode);
      operands[1] = fixup_mathdi_operand (operands[1], DImode);
      operands[2] = fixup_mathdi_operand (operands[2], DImode);

      if (!rtx_equal_p (operands[0], operands[1]))
	emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
			operand_subword (operands[1], 0, 0, DImode));

      /* Only the high word needs the actual operation.  */
      emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
				 operand_subword (operands[1], 1, 0, DImode),
				 operand_subword (operands[2], 1, 0, DImode)));
    }
  else
    {
      /* If are adding the same value together, that's really a multiply by 2,
	 and that's just a left shift of 1.  */
      if (rtx_equal_p (operands[1], operands[2]))
	{
	  gcc_assert (code != MINUS);
	  emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
	  return;
	}

      operands[0] = fixup_mathdi_operand (operands[0], DImode);

      /* If an operand is the same as operand[0], use the operand[0] rtx
	 because fixup will return an equivalent rtx but not an equal one.  */

      if (rtx_equal_p (operands[0], operands[1]))
	operands[1] = operands[0];
      else
	operands[1] = fixup_mathdi_operand (operands[1], DImode);

      if (rtx_equal_p (operands[0], operands[2]))
	operands[2] = operands[0];
      else
	operands[2] = fixup_mathdi_operand (operands[2], DImode);

      /* If we are subtracting not from ourselves [d = a - b], and because the
	 carry ops are two operand only, we would need to do a move prior to
	 the subtract.  And if d == b, we would need a temp otherwise
	 [d = a, d -= d] and we end up with 0.  Instead we rewrite d = a - b
	 into d = -b, d += a.  Since -b can never overflow, even if b == d,
	 no temp is needed.

	 If we are doing addition, since the carry ops are two operand, if
	 we aren't adding to ourselves, move the first addend to the
	 destination first.  */

      gcc_assert (operands[1] != const0_rtx || code == MINUS);
      if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
	{
	  if (code == MINUS && CONSTANT_P (operands[1]))
	    {
	      /* NOTE(review): TEMP is assigned here but never used --
		 looks like leftover code; confirm before removing.  */
	      temp = gen_reg_rtx (DImode);
	      emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
	      code = PLUS;
	      gen_insn = gen_adcdi3;
	      operands[2] = operands[1];
	      operands[1] = operands[0];
	    }
	  else
	    emit_move_insn (operands[0], operands[1]);
	}

      /* Subtracting a constant will have been rewritten to an addition of the
	 negative of that constant before we get here.  */
      gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
      emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
    }
}
/* Return true if LO and HI form an adjacent operand pair for MODE:
   for registers, consecutive regnos; for memory, HI must address the
   location GET_MODE_SIZE (MODE) bytes past LO's.  */

bool
adjacent_operands_p (rtx lo, rtx hi, machine_mode mode)
{
  HOST_WIDE_INT lo_offset;
  HOST_WIDE_INT hi_offset;

  if (GET_CODE (lo) != GET_CODE (hi))
    return false;

  if (REG_P (lo))
    return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
  if (CONST_INT_P (lo))
    return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
  /* NOTE(review): this branch is unreachable -- its condition duplicates
     the one directly above; possibly it was meant to test CONST_DOUBLE.
     Confirm intent before changing.  */
  if (CONST_INT_P (lo))
    return mode != SImode;

  if (!MEM_P (lo))
    return false;

  /* Volatile accesses must not be merged.  */
  if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
    return false;

  /* From here on compare the two addresses.  */
  lo = XEXP (lo, 0);
  hi = XEXP (hi, 0);

  if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
    return rtx_equal_p (lo, hi);

  /* Decompose LO into base + lo_offset.  */
  switch (GET_CODE (lo))
    {
    case REG:
    case SYMBOL_REF:
      lo_offset = 0;
      break;
    case CONST:
      lo = XEXP (lo, 0);
      /* FALLTHROUGH */
    case PLUS:
      if (!CONST_INT_P (XEXP (lo, 1)))
	return false;
      lo_offset = INTVAL (XEXP (lo, 1));
      lo = XEXP (lo, 0);
      break;
    default:
      return false;
    }

  /* Decompose HI into base + hi_offset.  */
  switch (GET_CODE (hi))
    {
    case REG:
    case SYMBOL_REF:
      hi_offset = 0;
      break;
    case CONST:
      hi = XEXP (hi, 0);
      /* FALLTHROUGH */
    case PLUS:
      if (!CONST_INT_P (XEXP (hi, 1)))
	return false;
      hi_offset = INTVAL (XEXP (hi, 1));
      hi = XEXP (hi, 0);
      break;
    default:
      return false;
    }

  /* Indexed or doubly-offset bases do not qualify.  */
  if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
    return false;

  return rtx_equal_p (lo, hi)
	 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
}
/* Output assembler code for a block containing the constant parts
   of a trampoline, leaving space for the variable parts.  */

/* On the VAX, the trampoline contains an entry mask and two instructions:
     .word NN
     movl $STATIC,r0 (store the functions static chain)
     jmp *$FUNCTION (jump to function code at address FUNCTION)  */

static void
vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
{
  /* Entry mask placeholder; vax_trampoline_init copies the real mask
     from the target function.  */
  assemble_aligned_integer (2, const0_rtx);
  /* Presumably the movl opcode with immediate addressing mode -- confirm
     against the VAX instruction encoding.  */
  assemble_aligned_integer (2, GEN_INT (0x8fd0));
  /* Placeholder for the static chain value, patched at init time.  */
  assemble_aligned_integer (4, const0_rtx);
  /* Register-mode operand byte selecting the static chain register.  */
  assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
  /* Presumably jmp with absolute addressing -- confirm encoding.  */
  assemble_aligned_integer (2, GEN_INT (0x9f17));
  /* Placeholder for the target function address, patched at init time.  */
  assemble_aligned_integer (4, const0_rtx);
}
/* We copy the register-mask from the function's pure code
   to the start of the trampoline.  */

static void
vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx mem;

  /* Lay down the constant template first.  */
  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  /* Copy the target function's entry mask (the first two bytes of its
     code) into the trampoline's mask slot.  */
  mem = adjust_address (m_tramp, HImode, 0);
  emit_move_insn (mem, gen_const_mem (HImode, fnaddr));

  /* Static chain immediate at offset 4.  */
  mem = adjust_address (m_tramp, SImode, 4);
  emit_move_insn (mem, cxt);
  /* Jump target at offset 11: function address + 2 skips its own mask.  */
  mem = adjust_address (m_tramp, SImode, 11);
  emit_move_insn (mem, plus_constant (Pmode, fnaddr, 2));
  /* The trampoline was written as data; synchronize the i-stream.  */
  emit_insn (gen_sync_istream ());
}
2128 /* Value is the number of bytes of arguments automatically
2129 popped when returning from a subroutine call.
2130 FUNDECL is the declaration node of the function (as a tree),
2131 FUNTYPE is the data type of the function (as a tree),
2132 or for a library call it is an identifier node for the subroutine name.
2133 SIZE is the number of bytes of arguments passed on the stack.
2135 On the VAX, the RET insn pops a maximum of 255 args for any function. */
2137 static int
2138 vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2139 tree funtype ATTRIBUTE_UNUSED, int size)
2141 return size > 255 * 4 ? 0 : size;
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

/* On the VAX all args are pushed.  */

static rtx
vax_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
		  machine_mode mode ATTRIBUTE_UNUSED,
		  const_tree type ATTRIBUTE_UNUSED,
		  bool named ATTRIBUTE_UNUSED)
{
  /* NULL_RTX means "pass this argument on the stack".  */
  return NULL_RTX;
}
/* Update the data in CUM to advance over an argument of mode MODE and
   data type TYPE.  (TYPE is null for libcalls where that information
   may not be available.)  */

static void
vax_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
			  const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  /* Advance by the argument size rounded up to a longword (4-byte)
     boundary; BLKmode arguments take their size from TYPE.  */
  *cum += (mode != BLKmode
	   ? (GET_MODE_SIZE (mode) + 3) & ~3
	   : (int_size_in_bytes (type) + 3) & ~3);
}