gcc/config/vax/vax.c
1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "df.h"
27 #include "cfgrtl.h"
28 #include "cfganal.h"
29 #include "lcm.h"
30 #include "cfgbuild.h"
31 #include "cfgcleanup.h"
32 #include "alias.h"
33 #include "calls.h"
34 #include "varasm.h"
35 #include "regs.h"
36 #include "insn-config.h"
37 #include "conditions.h"
38 #include "output.h"
39 #include "insn-attr.h"
40 #include "recog.h"
41 #include "flags.h"
42 #include "expmed.h"
43 #include "dojump.h"
44 #include "explow.h"
45 #include "emit-rtl.h"
46 #include "stmt.h"
47 #include "expr.h"
48 #include "insn-codes.h"
49 #include "optabs.h"
50 #include "debug.h"
51 #include "diagnostic-core.h"
52 #include "reload.h"
53 #include "tm-preds.h"
54 #include "tm-constrs.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "builtins.h"
59 /* This file should be included last. */
60 #include "target-def.h"
62 static void vax_option_override (void);
63 static bool vax_legitimate_address_p (machine_mode, rtx, bool);
64 static void vax_file_start (void);
65 static void vax_init_libfuncs (void);
66 static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
67 HOST_WIDE_INT, tree);
68 static int vax_address_cost_1 (rtx);
69 static int vax_address_cost (rtx, machine_mode, addr_space_t, bool);
70 static bool vax_rtx_costs (rtx, int, int, int, int *, bool);
71 static rtx vax_function_arg (cumulative_args_t, machine_mode,
72 const_tree, bool);
73 static void vax_function_arg_advance (cumulative_args_t, machine_mode,
74 const_tree, bool);
75 static rtx vax_struct_value_rtx (tree, int);
76 static rtx vax_builtin_setjmp_frame_value (void);
77 static void vax_asm_trampoline_template (FILE *);
78 static void vax_trampoline_init (rtx, tree, rtx);
79 static int vax_return_pops_args (tree, tree, int);
80 static bool vax_mode_dependent_address_p (const_rtx, addr_space_t);
82 /* Initialize the GCC target structure. */
83 #undef TARGET_ASM_ALIGNED_HI_OP
84 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
86 #undef TARGET_ASM_FILE_START
87 #define TARGET_ASM_FILE_START vax_file_start
88 #undef TARGET_ASM_FILE_START_APP_OFF
89 #define TARGET_ASM_FILE_START_APP_OFF true
91 #undef TARGET_INIT_LIBFUNCS
92 #define TARGET_INIT_LIBFUNCS vax_init_libfuncs
94 #undef TARGET_ASM_OUTPUT_MI_THUNK
95 #define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
96 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
97 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
99 #undef TARGET_RTX_COSTS
100 #define TARGET_RTX_COSTS vax_rtx_costs
101 #undef TARGET_ADDRESS_COST
102 #define TARGET_ADDRESS_COST vax_address_cost
104 #undef TARGET_PROMOTE_PROTOTYPES
105 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
107 #undef TARGET_FUNCTION_ARG
108 #define TARGET_FUNCTION_ARG vax_function_arg
109 #undef TARGET_FUNCTION_ARG_ADVANCE
110 #define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance
112 #undef TARGET_STRUCT_VALUE_RTX
113 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
115 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
116 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
118 #undef TARGET_LEGITIMATE_ADDRESS_P
119 #define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
120 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
121 #define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p
123 #undef TARGET_FRAME_POINTER_REQUIRED
124 #define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true
126 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
127 #define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
128 #undef TARGET_TRAMPOLINE_INIT
129 #define TARGET_TRAMPOLINE_INIT vax_trampoline_init
130 #undef TARGET_RETURN_POPS_ARGS
131 #define TARGET_RETURN_POPS_ARGS vax_return_pops_args
133 #undef TARGET_OPTION_OVERRIDE
134 #define TARGET_OPTION_OVERRIDE vax_option_override
136 struct gcc_target targetm = TARGET_INITIALIZER;
138 /* Set global variables as needed for the options enabled. */
140 static void
141 vax_option_override (void)
143 /* We use VAX floating point, not IEEE floating point. */
144 if (TARGET_G_FLOAT)
145 REAL_MODE_FORMAT (DFmode) = &vax_g_format;
147 #ifdef SUBTARGET_OVERRIDE_OPTIONS
148 SUBTARGET_OVERRIDE_OPTIONS;
149 #endif
152 static void
153 vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
155 rtx x;
157 x = plus_constant (Pmode, frame_pointer_rtx, offset);
158 x = gen_rtx_MEM (SImode, x);
159 x = gen_rtx_SET (x, src);
160 add_reg_note (insn, REG_CFA_OFFSET, x);
163 /* Generate RTL for the function prologue: emit the procedure entry
164 mask, record the call-frame information notes, and allocate the
165 local stack frame.
167 Refer to df_regs_ever_live_p to determine which registers to
168 save; it is true for register number I if that register is ever
169 used in the function. This function is responsible for knowing
170 which registers should not be saved even if used. */
172 void
173 vax_expand_prologue (void)
175 int regno, offset;
176 int mask = 0;
177 HOST_WIDE_INT size;
178 rtx insn;
180 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
181 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
182 mask |= 1 << regno;
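/* For example, a function that uses only the call-saved registers r6 and
   r7 gets MASK == (1 << 6) | (1 << 7) == 0xc0, which is emitted below as
   its procedure entry mask word.  */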
184 insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
185 RTX_FRAME_RELATED_P (insn) = 1;
187 /* The layout of the CALLG/S stack frame is as follows:
189 <- CFA, AP
192 ... Registers saved as specified by MASK
195 return-addr
196 old fp
197 old ap
198 old psw
199 zero
200 <- FP, SP
202 The rest of the prologue will adjust the SP for the local frame. */
204 vax_add_reg_cfa_offset (insn, 4, arg_pointer_rtx);
205 vax_add_reg_cfa_offset (insn, 8, frame_pointer_rtx);
206 vax_add_reg_cfa_offset (insn, 12, pc_rtx);
208 offset = 16;
209 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
210 if (mask & (1 << regno))
212 vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
213 offset += 4;
216 /* Because add_reg_note pushes the notes, adding this last means that
217 it will be processed first. This is required to allow the other
218 notes to be interpreted properly. */
219 add_reg_note (insn, REG_CFA_DEF_CFA,
220 plus_constant (Pmode, frame_pointer_rtx, offset));
222 /* Allocate the local stack frame. */
223 size = get_frame_size ();
224 size -= STARTING_FRAME_OFFSET;
225 emit_insn (gen_addsi3 (stack_pointer_rtx,
226 stack_pointer_rtx, GEN_INT (-size)));
228 /* Do not allow instructions referencing local stack memory to be
229 scheduled before the frame is allocated. This is more pedantic
230 than anything else, given that VAX does not currently have a
231 scheduling description. */
232 emit_insn (gen_blockage ());
235 /* When debugging with stabs, we want to output an extra dummy label
236 so that gas can distinguish between D_float and G_float prior to
237 processing the .stabs directive identifying type double. */
238 static void
239 vax_file_start (void)
241 default_file_start ();
243 if (write_symbols == DBX_DEBUG)
244 fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
247 /* We can use the BSD C library routines for the libgcc calls that are
248 still generated, since that's what they boil down to anyway. When
249 targeting ELF, avoid the user's namespace. */
251 static void
252 vax_init_libfuncs (void)
254 if (TARGET_BSD_DIVMOD)
256 set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
257 set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
261 /* Split the N quadword OPERANDS of INSN into longword halves: the low halves are stored in LOW and OPERANDS is replaced by the high halves. */
263 static void
264 split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
265 rtx * low, int n)
267 int i;
269 for (i = 0; i < n; i++)
270 low[i] = 0;
272 for (i = 0; i < n; i++)
274 if (MEM_P (operands[i])
275 && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
276 || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
278 rtx addr = XEXP (operands[i], 0);
279 operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
281 else if (optimize_size && MEM_P (operands[i])
282 && REG_P (XEXP (operands[i], 0))
283 && (code != MINUS || operands[1] != const0_rtx)
284 && find_regno_note (insn, REG_DEAD,
285 REGNO (XEXP (operands[i], 0))))
287 low[i] = gen_rtx_MEM (SImode,
288 gen_rtx_POST_INC (Pmode,
289 XEXP (operands[i], 0)));
290 operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
292 else
294 low[i] = operand_subword (operands[i], 0, 0, DImode);
295 operands[i] = operand_subword (operands[i], 1, 0, DImode);
300 void
301 print_operand_address (FILE * file, rtx addr)
303 rtx orig = addr;
304 rtx reg1, breg, ireg;
305 rtx offset;
307 retry:
308 switch (GET_CODE (addr))
310 case MEM:
311 fprintf (file, "*");
312 addr = XEXP (addr, 0);
313 goto retry;
315 case REG:
316 fprintf (file, "(%s)", reg_names[REGNO (addr)]);
317 break;
319 case PRE_DEC:
320 fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
321 break;
323 case POST_INC:
324 fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
325 break;
327 case PLUS:
328 /* There can be either two or three things added here. One must be a
329 REG. One can be either a REG or a MULT of a REG and an appropriate
330 constant, and the third can only be a constant or a MEM.
332 We get these two or three things and put the constant or MEM in
333 OFFSET, the MULT or REG in IREG, and the REG in BREG. If we have
334 a register and can't tell yet if it is a base or index register,
335 put it into REG1. */
337 reg1 = 0; ireg = 0; breg = 0; offset = 0;
339 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
340 || MEM_P (XEXP (addr, 0)))
342 offset = XEXP (addr, 0);
343 addr = XEXP (addr, 1);
345 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
346 || MEM_P (XEXP (addr, 1)))
348 offset = XEXP (addr, 1);
349 addr = XEXP (addr, 0);
351 else if (GET_CODE (XEXP (addr, 1)) == MULT)
353 ireg = XEXP (addr, 1);
354 addr = XEXP (addr, 0);
356 else if (GET_CODE (XEXP (addr, 0)) == MULT)
358 ireg = XEXP (addr, 0);
359 addr = XEXP (addr, 1);
361 else if (REG_P (XEXP (addr, 1)))
363 reg1 = XEXP (addr, 1);
364 addr = XEXP (addr, 0);
366 else if (REG_P (XEXP (addr, 0)))
368 reg1 = XEXP (addr, 0);
369 addr = XEXP (addr, 1);
371 else
372 gcc_unreachable ();
374 if (REG_P (addr))
376 if (reg1)
377 ireg = addr;
378 else
379 reg1 = addr;
381 else if (GET_CODE (addr) == MULT)
382 ireg = addr;
383 else
385 gcc_assert (GET_CODE (addr) == PLUS);
386 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
387 || MEM_P (XEXP (addr, 0)))
389 if (offset)
391 if (CONST_INT_P (offset))
392 offset = plus_constant (Pmode, XEXP (addr, 0),
393 INTVAL (offset));
394 else
396 gcc_assert (CONST_INT_P (XEXP (addr, 0)));
397 offset = plus_constant (Pmode, offset,
398 INTVAL (XEXP (addr, 0)));
401 offset = XEXP (addr, 0);
403 else if (REG_P (XEXP (addr, 0)))
405 if (reg1)
406 ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
407 else
408 reg1 = XEXP (addr, 0);
410 else
412 gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
413 gcc_assert (!ireg);
414 ireg = XEXP (addr, 0);
417 if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
418 || MEM_P (XEXP (addr, 1)))
420 if (offset)
422 if (CONST_INT_P (offset))
423 offset = plus_constant (Pmode, XEXP (addr, 1),
424 INTVAL (offset));
425 else
427 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
428 offset = plus_constant (Pmode, offset,
429 INTVAL (XEXP (addr, 1)));
432 offset = XEXP (addr, 1);
434 else if (REG_P (XEXP (addr, 1)))
436 if (reg1)
437 ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
438 else
439 reg1 = XEXP (addr, 1);
441 else
443 gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
444 gcc_assert (!ireg);
445 ireg = XEXP (addr, 1);
449 /* If REG1 is nonzero, figure out if it is a base or index register. */
450 if (reg1)
452 if (breg
453 || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
454 || (offset
455 && (MEM_P (offset)
456 || (flag_pic && symbolic_operand (offset, SImode)))))
458 gcc_assert (!ireg);
459 ireg = reg1;
461 else
462 breg = reg1;
465 if (offset != 0)
467 if (flag_pic && symbolic_operand (offset, SImode))
469 if (breg && ireg)
471 debug_rtx (orig);
472 output_operand_lossage ("symbol used with both base and indexed registers");
475 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
476 if (flag_pic > 1 && GET_CODE (offset) == CONST
477 && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
478 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
480 debug_rtx (orig);
481 output_operand_lossage ("symbol with offset used in PIC mode");
483 #endif
485 /* symbol(reg) isn't PIC, but symbol[reg] is. */
486 if (breg)
488 ireg = breg;
489 breg = 0;
494 output_address (offset);
497 if (breg != 0)
498 fprintf (file, "(%s)", reg_names[REGNO (breg)]);
500 if (ireg != 0)
502 if (GET_CODE (ireg) == MULT)
503 ireg = XEXP (ireg, 0);
504 gcc_assert (REG_P (ireg));
505 fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
507 break;
509 default:
510 output_addr_const (file, addr);
514 void
515 print_operand (FILE *file, rtx x, int code)
517 if (code == '#')
518 fputc (ASM_DOUBLE_CHAR, file);
519 else if (code == '|')
520 fputs (REGISTER_PREFIX, file);
521 else if (code == 'c')
522 fputs (cond_name (x), file);
523 else if (code == 'C')
524 fputs (rev_cond_name (x), file);
525 else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
526 fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
527 else if (code == 'P' && CONST_INT_P (x))
528 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
529 else if (code == 'N' && CONST_INT_P (x))
530 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
531 /* rotl instruction cannot deal with negative arguments. */
532 else if (code == 'R' && CONST_INT_P (x))
533 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
534 else if (code == 'H' && CONST_INT_P (x))
535 fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
536 else if (code == 'h' && CONST_INT_P (x))
537 fprintf (file, "$%d", (short) - INTVAL (x));
538 else if (code == 'B' && CONST_INT_P (x))
539 fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
540 else if (code == 'b' && CONST_INT_P (x))
541 fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
542 else if (code == 'M' && CONST_INT_P (x))
543 fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
544 else if (code == 'x' && CONST_INT_P (x))
545 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
546 else if (REG_P (x))
547 fprintf (file, "%s", reg_names[REGNO (x)]);
548 else if (MEM_P (x))
549 output_address (XEXP (x, 0));
550 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
552 char dstr[30];
553 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
554 sizeof (dstr), 0, 1);
555 fprintf (file, "$0f%s", dstr);
557 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
559 char dstr[30];
560 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
561 sizeof (dstr), 0, 1);
562 fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
564 else
566 if (flag_pic > 1 && symbolic_operand (x, SImode))
568 debug_rtx (x);
569 output_operand_lossage ("symbol used as immediate operand");
571 putc ('$', file);
572 output_addr_const (file, x);
576 const char *
577 cond_name (rtx op)
579 switch (GET_CODE (op))
581 case NE:
582 return "neq";
583 case EQ:
584 return "eql";
585 case GE:
586 return "geq";
587 case GT:
588 return "gtr";
589 case LE:
590 return "leq";
591 case LT:
592 return "lss";
593 case GEU:
594 return "gequ";
595 case GTU:
596 return "gtru";
597 case LEU:
598 return "lequ";
599 case LTU:
600 return "lssu";
602 default:
603 gcc_unreachable ();
607 const char *
608 rev_cond_name (rtx op)
610 switch (GET_CODE (op))
612 case EQ:
613 return "neq";
614 case NE:
615 return "eql";
616 case LT:
617 return "geq";
618 case LE:
619 return "gtr";
620 case GT:
621 return "leq";
622 case GE:
623 return "lss";
624 case LTU:
625 return "gequ";
626 case LEU:
627 return "gtru";
628 case GTU:
629 return "lequ";
630 case GEU:
631 return "lssu";
633 default:
634 gcc_unreachable ();
638 static bool
639 vax_float_literal (rtx c)
641 machine_mode mode;
642 REAL_VALUE_TYPE r, s;
643 int i;
645 if (GET_CODE (c) != CONST_DOUBLE)
646 return false;
648 mode = GET_MODE (c);
650 if (c == const_tiny_rtx[(int) mode][0]
651 || c == const_tiny_rtx[(int) mode][1]
652 || c == const_tiny_rtx[(int) mode][2])
653 return true;
655 REAL_VALUE_FROM_CONST_DOUBLE (r, c);
657 for (i = 0; i < 7; i++)
659 int x = 1 << i;
660 bool ok;
661 real_from_integer (&s, mode, x, SIGNED);
663 if (REAL_VALUES_EQUAL (r, s))
664 return true;
665 ok = exact_real_inverse (mode, &s);
666 gcc_assert (ok);
667 if (REAL_VALUES_EQUAL (r, s))
668 return true;
670 return false;
674 /* Return the cost in cycles of a memory address, relative to register
675 indirect.
677 Each of the following adds the indicated number of cycles:
679 1 - symbolic address
680 1 - pre-decrement
681 1 - indexing and/or offset(register)
682 2 - indirect */
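/* For example, "-(r5)" costs one cycle more than plain "(r5)", and an
   indirect operand such as "@(r5)" costs two cycles more.  */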
685 static int
686 vax_address_cost_1 (rtx addr)
688 int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
689 rtx plus_op0 = 0, plus_op1 = 0;
690 restart:
691 switch (GET_CODE (addr))
693 case PRE_DEC:
694 predec = 1;
695 case REG:
696 case SUBREG:
697 case POST_INC:
698 reg = 1;
699 break;
700 case MULT:
701 indexed = 1; /* 2 on VAX 2 */
702 break;
703 case CONST_INT:
704 /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
705 if (offset == 0)
706 offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
707 break;
708 case CONST:
709 case SYMBOL_REF:
710 offset = 1; /* 2 on VAX 2 */
711 break;
712 case LABEL_REF: /* this is probably a byte offset from the pc */
713 if (offset == 0)
714 offset = 1;
715 break;
716 case PLUS:
717 if (plus_op0)
718 plus_op1 = XEXP (addr, 0);
719 else
720 plus_op0 = XEXP (addr, 0);
721 addr = XEXP (addr, 1);
722 goto restart;
723 case MEM:
724 indir = 2; /* 3 on VAX 2 */
725 addr = XEXP (addr, 0);
726 goto restart;
727 default:
728 break;
731 /* Up to 3 things can be added in an address. They are stored in
732 plus_op0, plus_op1, and addr. */
734 if (plus_op0)
736 addr = plus_op0;
737 plus_op0 = 0;
738 goto restart;
740 if (plus_op1)
742 addr = plus_op1;
743 plus_op1 = 0;
744 goto restart;
746 /* Indexing and register+offset can both be used (except on a VAX 2)
747 without increasing execution time over either one alone. */
748 if (reg && indexed && offset)
749 return reg + indir + offset + predec;
750 return reg + indexed + indir + offset + predec;
753 static int
754 vax_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
755 addr_space_t as ATTRIBUTE_UNUSED,
756 bool speed ATTRIBUTE_UNUSED)
758 return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
761 /* Cost of an expression on a VAX. This version has costs tuned for the
762 CVAX chip (found in the VAX 3 series) with comments for variations on
763 other models.
765 FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
766 and FLOAT_TRUNCATE. We need a -mcpu option to allow provision of
767 costs on a per cpu basis. */
769 static bool
770 vax_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
771 int *total, bool speed ATTRIBUTE_UNUSED)
773 machine_mode mode = GET_MODE (x);
774 int i = 0; /* may be modified in switch */
775 const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */
777 switch (code)
779 /* On a VAX, constants from 0..63 are cheap because they can use the
780 1 byte literal constant format. A comparison with -1 should be made cheap
781 so that decrement-and-branch insns can be formed more easily (if
782 the value -1 is copied to a register some decrement-and-branch
783 patterns will not match). */
784 case CONST_INT:
785 if (INTVAL (x) == 0)
787 *total = 0;
788 return true;
790 if (outer_code == AND)
792 *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
793 return true;
795 if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
796 || (outer_code == COMPARE
797 && INTVAL (x) == -1)
798 || ((outer_code == PLUS || outer_code == MINUS)
799 && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
801 *total = 1;
802 return true;
804 /* FALLTHRU */
806 case CONST:
807 case LABEL_REF:
808 case SYMBOL_REF:
809 *total = 3;
810 return true;
812 case CONST_DOUBLE:
813 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
814 *total = vax_float_literal (x) ? 5 : 8;
815 else
816 *total = ((CONST_DOUBLE_HIGH (x) == 0
817 && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
818 || (outer_code == PLUS
819 && CONST_DOUBLE_HIGH (x) == -1
820 && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
821 ? 2 : 5;
822 return true;
824 case POST_INC:
825 *total = 2;
826 return true; /* Implies register operand. */
828 case PRE_DEC:
829 *total = 3;
830 return true; /* Implies register operand. */
832 case MULT:
833 switch (mode)
835 case DFmode:
836 *total = 16; /* 4 on VAX 9000 */
837 break;
838 case SFmode:
839 *total = 9; /* 4 on VAX 9000, 12 on VAX 2 */
840 break;
841 case DImode:
842 *total = 16; /* 6 on VAX 9000, 28 on VAX 2 */
843 break;
844 case SImode:
845 case HImode:
846 case QImode:
847 *total = 10; /* 3-4 on VAX 9000, 20-28 on VAX 2 */
848 break;
849 default:
850 *total = MAX_COST; /* Mode is not supported. */
851 return true;
853 break;
855 case UDIV:
856 if (mode != SImode)
858 *total = MAX_COST; /* Mode is not supported. */
859 return true;
861 *total = 17;
862 break;
864 case DIV:
865 if (mode == DImode)
866 *total = 30; /* Highly variable. */
867 else if (mode == DFmode)
868 /* divide takes 28 cycles if the result is not zero, 13 otherwise */
869 *total = 24;
870 else
871 *total = 11; /* 25 on VAX 2 */
872 break;
874 case MOD:
875 *total = 23;
876 break;
878 case UMOD:
879 if (mode != SImode)
881 *total = MAX_COST; /* Mode is not supported. */
882 return true;
884 *total = 29;
885 break;
887 case FLOAT:
888 *total = (6 /* 4 on VAX 9000 */
889 + (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
890 break;
892 case FIX:
893 *total = 7; /* 17 on VAX 2 */
894 break;
896 case ASHIFT:
897 case LSHIFTRT:
898 case ASHIFTRT:
899 if (mode == DImode)
900 *total = 12;
901 else
902 *total = 10; /* 6 on VAX 9000 */
903 break;
905 case ROTATE:
906 case ROTATERT:
907 *total = 6; /* 5 on VAX 2, 4 on VAX 9000 */
908 if (CONST_INT_P (XEXP (x, 1)))
909 fmt = "e"; /* all constant rotate counts are short */
910 break;
912 case PLUS:
913 case MINUS:
914 *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
915 /* Small integer operands can use subl2 and addl2. */
916 if ((CONST_INT_P (XEXP (x, 1)))
917 && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
918 fmt = "e";
919 break;
921 case IOR:
922 case XOR:
923 *total = 3;
924 break;
926 case AND:
927 /* AND is special because the first operand is complemented. */
928 *total = 3;
929 if (CONST_INT_P (XEXP (x, 0)))
931 if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
932 *total = 4;
933 fmt = "e";
934 i = 1;
936 break;
938 case NEG:
939 if (mode == DFmode)
940 *total = 9;
941 else if (mode == SFmode)
942 *total = 6;
943 else if (mode == DImode)
944 *total = 4;
945 else
946 *total = 2;
947 break;
949 case NOT:
950 *total = 2;
951 break;
953 case ZERO_EXTRACT:
954 case SIGN_EXTRACT:
955 *total = 15;
956 break;
958 case MEM:
959 if (mode == DImode || mode == DFmode)
960 *total = 5; /* 7 on VAX 2 */
961 else
962 *total = 3; /* 4 on VAX 2 */
963 x = XEXP (x, 0);
964 if (!REG_P (x) && GET_CODE (x) != POST_INC)
965 *total += vax_address_cost_1 (x);
966 return true;
968 case FLOAT_EXTEND:
969 case FLOAT_TRUNCATE:
970 case TRUNCATE:
971 *total = 3; /* FIXME: Costs need to be checked */
972 break;
974 default:
975 return false;
978 /* Now look inside the expression. Operands which are not registers or
979 short constants add to the cost.
981 FMT and I may have been adjusted in the switch above for instructions
982 which require special handling. */
984 while (*fmt++ == 'e')
986 rtx op = XEXP (x, i);
988 i += 1;
989 code = GET_CODE (op);
991 /* A NOT is likely to be found as the first operand of an AND
992 (in which case the relevant cost is of the operand inside
993 the not) and not likely to be found anywhere else. */
994 if (code == NOT)
995 op = XEXP (op, 0), code = GET_CODE (op);
997 switch (code)
999 case CONST_INT:
1000 if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
1001 && GET_MODE (x) != QImode)
1002 *total += 1; /* 2 on VAX 2 */
1003 break;
1004 case CONST:
1005 case LABEL_REF:
1006 case SYMBOL_REF:
1007 *total += 1; /* 2 on VAX 2 */
1008 break;
1009 case CONST_DOUBLE:
1010 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
1012 /* Registers are faster than floating point constants -- even
1013 those constants which can be encoded in a single byte. */
1014 if (vax_float_literal (op))
1015 *total += 1;
1016 else
1017 *total += (GET_MODE (x) == DFmode) ? 3 : 2;
1019 else
1021 if (CONST_DOUBLE_HIGH (op) != 0
1022 || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
1023 *total += 2;
1025 break;
1026 case MEM:
1027 *total += 1; /* 2 on VAX 2 */
1028 if (!REG_P (XEXP (op, 0)))
1029 *total += vax_address_cost_1 (XEXP (op, 0));
1030 break;
1031 case REG:
1032 case SUBREG:
1033 break;
1034 default:
1035 *total += 1;
1036 break;
1039 return true;
1042 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
1043 Used for C++ multiple inheritance.
1044 .mask ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11> #conservative entry mask
1045 addl2 $DELTA, 4(ap) #adjust first argument
1046 jmp FUNCTION+2 #jump beyond FUNCTION's entry mask
1049 static void
1050 vax_output_mi_thunk (FILE * file,
1051 tree thunk ATTRIBUTE_UNUSED,
1052 HOST_WIDE_INT delta,
1053 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1054 tree function)
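/* 0x0ffc is the conservative entry mask from the comment above: bits 2
   through 11 are set, so r2 through r11 are saved on entry.  */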
1056 fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
1057 asm_fprintf (file, ",4(%Rap)\n");
1058 fprintf (file, "\tjmp ");
1059 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1060 fprintf (file, "+2\n");
1063 static rtx
1064 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1065 int incoming ATTRIBUTE_UNUSED)
1067 return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
1070 static rtx
1071 vax_builtin_setjmp_frame_value (void)
1073 return hard_frame_pointer_rtx;
1076 /* Worker function for NOTICE_UPDATE_CC. */
1078 void
1079 vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
1081 if (GET_CODE (exp) == SET)
1083 if (GET_CODE (SET_SRC (exp)) == CALL)
1084 CC_STATUS_INIT;
1085 else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
1086 && GET_CODE (SET_DEST (exp)) != PC)
1088 cc_status.flags = 0;
1089 /* The integer operations below don't set carry or
1090 set it in an incompatible way. That's ok though
1091 as the Z bit is all we need when doing unsigned
1092 comparisons on the result of these insns (since
1093 they're always with 0). Set CC_NO_OVERFLOW to
1094 generate the correct unsigned branches. */
1095 switch (GET_CODE (SET_SRC (exp)))
1097 case NEG:
1098 if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
1099 break;
1100 case AND:
1101 case IOR:
1102 case XOR:
1103 case NOT:
1104 case MEM:
1105 case REG:
1106 cc_status.flags = CC_NO_OVERFLOW;
1107 break;
1108 default:
1109 break;
1111 cc_status.value1 = SET_DEST (exp);
1112 cc_status.value2 = SET_SRC (exp);
1115 else if (GET_CODE (exp) == PARALLEL
1116 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
1118 if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
1119 CC_STATUS_INIT;
1120 else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
1122 cc_status.flags = 0;
1123 cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
1124 cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
1126 else
1127 /* PARALLELs whose first element sets the PC are aob,
1128 sob insns. They do change the cc's. */
1129 CC_STATUS_INIT;
1131 else
1132 CC_STATUS_INIT;
1133 if (cc_status.value1 && REG_P (cc_status.value1)
1134 && cc_status.value2
1135 && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
1136 cc_status.value2 = 0;
1137 if (cc_status.value1 && MEM_P (cc_status.value1)
1138 && cc_status.value2
1139 && MEM_P (cc_status.value2))
1140 cc_status.value2 = 0;
1141 /* Actual condition, one line up, should be that value2's address
1142 depends on value1, but that is too much of a pain. */
1145 /* Output integer move instructions. */
1147 const char *
1148 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1149 machine_mode mode)
1151 rtx hi[3], lo[3];
1152 const char *pattern_hi, *pattern_lo;
1154 switch (mode)
1156 case DImode:
1157 if (operands[1] == const0_rtx)
1158 return "clrq %0";
1159 if (TARGET_QMATH && optimize_size
1160 && (CONST_INT_P (operands[1])
1161 || GET_CODE (operands[1]) == CONST_DOUBLE))
1163 unsigned HOST_WIDE_INT hval, lval;
1164 int n;
1166 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1168 gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1170 /* Make sure only the low 32 bits are valid. */
1171 lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1172 hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1174 else
1176 lval = INTVAL (operands[1]);
1177 hval = 0;
1180 /* Here we check whether the 64-bit value is really a 6-bit value
1181 shifted left by some arbitrary amount. If so, we can use ashq to
1182 shift it to the correct value, saving 7 bytes (1 addr-mode byte +
1183 8 bytes - 1 shift byte - 1 short literal byte). */
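/* For example, on a 64-bit host the CONST_INT 1 << 36 gives lval == 1
   and n == 36 here, so the move is emitted as "ashq $36,$1,<dst>",
   where <dst> stands for operand 0.  */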
1184 if (lval != 0
1185 && (n = exact_log2 (lval & (- lval))) != -1
1186 && (lval >> n) < 64)
1188 lval >>= n;
1190 /* On 32-bit hosts, if the 6 bits didn't overflow into the
1191 upper 32-bit value, that value had better be 0. If we have
1192 overflowed, make sure it wasn't by too much. */
1193 if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
1195 if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1196 n = 0; /* failure */
1197 else
1198 lval |= hval << (32 - n);
1200 /* If n is 0, then ashq is not the best way to emit this. */
1201 if (n > 0)
1203 operands[1] = GEN_INT (lval);
1204 operands[2] = GEN_INT (n);
1205 return "ashq %2,%D1,%0";
1207 #if HOST_BITS_PER_WIDE_INT == 32
1209 /* On 32-bit hosts, if the low 32-bit value is 0, check the
1210 upper 32-bit value instead. */
1211 else if (hval != 0
1212 && (n = exact_log2 (hval & (- hval)) - 1) != -1
1213 && (hval >> n) < 64)
1215 operands[1] = GEN_INT (hval >> n);
1216 operands[2] = GEN_INT (n + 32);
1217 return "ashq %2,%D1,%0";
1218 #endif
1222 if (TARGET_QMATH
1223 && (!MEM_P (operands[0])
1224 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1225 || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1226 || !illegal_addsub_di_memory_operand (operands[0], DImode))
1227 && ((CONST_INT_P (operands[1])
1228 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1229 || GET_CODE (operands[1]) == CONST_DOUBLE))
1231 hi[0] = operands[0];
1232 hi[1] = operands[1];
1234 split_quadword_operands (insn, SET, hi, lo, 2);
1236 pattern_lo = vax_output_int_move (NULL, lo, SImode);
1237 pattern_hi = vax_output_int_move (NULL, hi, SImode);
1239 /* If the patterns are just movl/movl or pushl/pushl, then a movq will
1240 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate-value
1241 bytes vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate-value
1242 bytes). */
1243 if ((!strncmp (pattern_lo, "movl", 4)
1244 && !strncmp (pattern_hi, "movl", 4))
1245 || (!strncmp (pattern_lo, "pushl", 5)
1246 && !strncmp (pattern_hi, "pushl", 5)))
1247 return "movq %1,%0";
1249 if (MEM_P (operands[0])
1250 && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1252 output_asm_insn (pattern_hi, hi);
1253 operands[0] = lo[0];
1254 operands[1] = lo[1];
1255 operands[2] = lo[2];
1256 return pattern_lo;
1258 else
1260 output_asm_insn (pattern_lo, lo);
1261 operands[0] = hi[0];
1262 operands[1] = hi[1];
1263 operands[2] = hi[2];
1264 return pattern_hi;
1267 return "movq %1,%0";
1269 case SImode:
1270 if (symbolic_operand (operands[1], SImode))
1272 if (push_operand (operands[0], SImode))
1273 return "pushab %a1";
1274 return "movab %a1,%0";
1277 if (operands[1] == const0_rtx)
1279 if (push_operand (operands[1], SImode))
1280 return "pushl %1";
1281 return "clrl %0";
1284 if (CONST_INT_P (operands[1])
1285 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1287 HOST_WIDE_INT i = INTVAL (operands[1]);
1288 int n;
1289 if ((unsigned HOST_WIDE_INT)(~i) < 64)
1290 return "mcoml %N1,%0";
1291 if ((unsigned HOST_WIDE_INT)i < 0x100)
1292 return "movzbl %1,%0";
1293 if (i >= -0x80 && i < 0)
1294 return "cvtbl %1,%0";
1295 if (optimize_size
1296 && (n = exact_log2 (i & (-i))) != -1
1297 && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1299 operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1300 operands[2] = GEN_INT (n);
1301 return "ashl %2,%1,%0";
1303 if ((unsigned HOST_WIDE_INT)i < 0x10000)
1304 return "movzwl %1,%0";
1305 if (i >= -0x8000 && i < 0)
1306 return "cvtwl %1,%0";
1308 if (push_operand (operands[0], SImode))
1309 return "pushl %1";
1310 return "movl %1,%0";
1312 case HImode:
1313 if (CONST_INT_P (operands[1]))
1315 HOST_WIDE_INT i = INTVAL (operands[1]);
1316 if (i == 0)
1317 return "clrw %0";
1318 else if ((unsigned HOST_WIDE_INT)i < 64)
1319 return "movw %1,%0";
1320 else if ((unsigned HOST_WIDE_INT)~i < 64)
1321 return "mcomw %H1,%0";
1322 else if ((unsigned HOST_WIDE_INT)i < 256)
1323 return "movzbw %1,%0";
1324 else if (i >= -0x80 && i < 0)
1325 return "cvtbw %1,%0";
1327 return "movw %1,%0";
1329 case QImode:
1330 if (CONST_INT_P (operands[1]))
1332 HOST_WIDE_INT i = INTVAL (operands[1]);
1333 if (i == 0)
1334 return "clrb %0";
1335 else if ((unsigned HOST_WIDE_INT)~i < 64)
1336 return "mcomb %B1,%0";
1338 return "movb %1,%0";
1340 default:
1341 gcc_unreachable ();
1345 /* Output integer add instructions.
1347 The space-time-opcode tradeoffs for addition vary by model of VAX.
1349 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1350 but it is not faster on other models.
1352 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1353 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1354 a register is used in an address too soon after it is set.
1355 Compromise by using movab only when it is shorter than the add
1356 or the base register in the address is one of sp, ap, and fp,
1357 which are not modified very often. */
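/* In the REGNO tests below, hard registers 12, 13 and 14 are ap, fp and
   sp, so "REGNO (operands[1]) > 11" accepts those rarely-modified base
   registers.  */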
1359 const char *
1360 vax_output_int_add (rtx insn, rtx *operands, machine_mode mode)
1362 switch (mode)
1364 case DImode:
1366 rtx low[3];
1367 const char *pattern;
1368 int carry = 1;
1369 bool sub;
1371 if (TARGET_QMATH && 0)
1372 debug_rtx (insn);
1374 split_quadword_operands (insn, PLUS, operands, low, 3);
1376 if (TARGET_QMATH)
1378 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1379 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1380 gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1381 gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1382 #endif
1384 /* No reason to add a 0 to the low part and thus no carry, so just
1385 emit the appropriate add/sub instruction. */
1386 if (low[2] == const0_rtx)
1387 return vax_output_int_add (NULL, operands, SImode);
1389 /* Are we doing addition or subtraction? */
1390 sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1392 /* We can't use vax_output_int_add since some of the patterns don't
1393 modify the carry bit. */
1394 if (sub)
1396 if (low[2] == constm1_rtx)
1397 pattern = "decl %0";
1398 else
1399 pattern = "subl2 $%n2,%0";
1401 else
1403 if (low[2] == const1_rtx)
1404 pattern = "incl %0";
1405 else
1406 pattern = "addl2 %2,%0";
1408 output_asm_insn (pattern, low);
1410 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1411 two 32-bit parts, we complement each and then add one to the
1412 low part. We know that the low part can't overflow since
1413 its value can never be 0. */
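/* For instance, subtracting the constant 5 from a quadword comes out as
   "subl2 $5,<lo>" followed by "sbwc $0,<hi>", where <lo> and <hi> stand
   for the two halves of operand 0.  */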
1414 if (sub)
1415 return "sbwc %N2,%0";
1416 return "adwc %2,%0";
1419 /* Add low parts. */
1420 if (rtx_equal_p (operands[0], operands[1]))
1422 if (low[2] == const0_rtx)
1423 /* Should examine operand, punt if not POST_INC. */
1424 pattern = "tstl %0", carry = 0;
1425 else if (low[2] == const1_rtx)
1426 pattern = "incl %0";
1427 else
1428 pattern = "addl2 %2,%0";
1430 else
1432 if (low[2] == const0_rtx)
1433 pattern = "movl %1,%0", carry = 0;
1434 else
1435 pattern = "addl3 %2,%1,%0";
1437 if (pattern)
1438 output_asm_insn (pattern, low);
1439 if (!carry)
1440 /* If CARRY is 0, we don't have any carry value to worry about. */
1441 return get_insn_template (CODE_FOR_addsi3, insn);
1442 /* %0 = C + %1 + %2 */
1443 if (!rtx_equal_p (operands[0], operands[1]))
1444 output_asm_insn ((operands[1] == const0_rtx
1445 ? "clrl %0"
1446 : "movl %1,%0"), operands);
1447 return "adwc %2,%0";
1450 case SImode:
1451 if (rtx_equal_p (operands[0], operands[1]))
1453 if (operands[2] == const1_rtx)
1454 return "incl %0";
1455 if (operands[2] == constm1_rtx)
1456 return "decl %0";
1457 if (CONST_INT_P (operands[2])
1458 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1459 return "subl2 $%n2,%0";
1460 if (CONST_INT_P (operands[2])
1461 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1462 && REG_P (operands[1])
1463 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1464 || REGNO (operands[1]) > 11))
1465 return "movab %c2(%1),%0";
1466 if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1467 return "movab %a2[%0],%0";
1468 return "addl2 %2,%0";
1471 if (rtx_equal_p (operands[0], operands[2]))
1473 if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1474 return "movab %a1[%0],%0";
1475 return "addl2 %1,%0";
1478 if (CONST_INT_P (operands[2])
1479 && INTVAL (operands[2]) < 32767
1480 && INTVAL (operands[2]) > -32768
1481 && REG_P (operands[1])
1482 && push_operand (operands[0], SImode))
1483 return "pushab %c2(%1)";
1485 if (CONST_INT_P (operands[2])
1486 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1487 return "subl3 $%n2,%1,%0";
1489 if (CONST_INT_P (operands[2])
1490 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1491 && REG_P (operands[1])
1492 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1493 || REGNO (operands[1]) > 11))
1494 return "movab %c2(%1),%0";
1496 /* Add this if using gcc on a VAX 3xxx:
1497 if (REG_P (operands[1]) && REG_P (operands[2]))
1498 return "movab (%1)[%2],%0";
1501 if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1503 if (push_operand (operands[0], SImode))
1504 return "pushab %a2[%1]";
1505 return "movab %a2[%1],%0";
1508 if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1510 if (push_operand (operands[0], SImode))
1511 return "pushab %a1[%2]";
1512 return "movab %a1[%2],%0";
1515 if (flag_pic && REG_P (operands[0])
1516 && symbolic_operand (operands[2], SImode))
1517 return "movab %a2,%0;addl2 %1,%0";
1519 if (flag_pic
1520 && (symbolic_operand (operands[1], SImode)
1521 || symbolic_operand (operands[1], SImode)))
1522 debug_rtx (insn);
1524 return "addl3 %1,%2,%0";
1526 case HImode:
1527 if (rtx_equal_p (operands[0], operands[1]))
1529 if (operands[2] == const1_rtx)
1530 return "incw %0";
1531 if (operands[2] == constm1_rtx)
1532 return "decw %0";
1533 if (CONST_INT_P (operands[2])
1534 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1535 return "subw2 $%n2,%0";
1536 return "addw2 %2,%0";
1538 if (rtx_equal_p (operands[0], operands[2]))
1539 return "addw2 %1,%0";
1540 if (CONST_INT_P (operands[2])
1541 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1542 return "subw3 $%n2,%1,%0";
1543 return "addw3 %1,%2,%0";
1545 case QImode:
1546 if (rtx_equal_p (operands[0], operands[1]))
1548 if (operands[2] == const1_rtx)
1549 return "incb %0";
1550 if (operands[2] == constm1_rtx)
1551 return "decb %0";
1552 if (CONST_INT_P (operands[2])
1553 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1554 return "subb2 $%n2,%0";
1555 return "addb2 %2,%0";
1557 if (rtx_equal_p (operands[0], operands[2]))
1558 return "addb2 %1,%0";
1559 if (CONST_INT_P (operands[2])
1560 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1561 return "subb3 $%n2,%1,%0";
1562 return "addb3 %1,%2,%0";
1564 default:
1565 gcc_unreachable ();
1569 const char *
1570 vax_output_int_subtract (rtx insn, rtx *operands, machine_mode mode)
1572 switch (mode)
1574 case DImode:
1576 rtx low[3];
1577 const char *pattern;
1578 int carry = 1;
1580 if (TARGET_QMATH && 0)
1581 debug_rtx (insn);
1583 split_quadword_operands (insn, MINUS, operands, low, 3);
1585 if (TARGET_QMATH)
1587 if (operands[1] == const0_rtx && low[1] == const0_rtx)
1589 /* Negation is tricky. It's basically complement and increment.
1590 Negate hi, then lo, and subtract the carry back. */
1591 if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
1592 || (MEM_P (operands[0])
1593 && GET_CODE (XEXP (operands[0], 0)) == POST_INC))
1594 fatal_insn ("illegal operand detected", insn);
1595 output_asm_insn ("mnegl %2,%0", operands);
1596 output_asm_insn ("mnegl %2,%0", low);
1597 return "sbwc $0,%0";
1599 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1600 gcc_assert (rtx_equal_p (low[0], low[1]));
1601 if (low[2] == const1_rtx)
1602 output_asm_insn ("decl %0", low);
1603 else
1604 output_asm_insn ("subl2 %2,%0", low);
1605 return "sbwc %2,%0";
1608 /* Subtract low parts. */
1609 if (rtx_equal_p (operands[0], operands[1]))
1611 if (low[2] == const0_rtx)
1612 pattern = 0, carry = 0;
1613 else if (low[2] == constm1_rtx)
1614 pattern = "decl %0";
1615 else
1616 pattern = "subl2 %2,%0";
1618 else
1620 if (low[2] == constm1_rtx)
1621 pattern = "decl %0";
1622 else if (low[2] == const0_rtx)
1623 pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
1624 else
1625 pattern = "subl3 %2,%1,%0";
1627 if (pattern)
1628 output_asm_insn (pattern, low);
1629 if (carry)
1631 if (!rtx_equal_p (operands[0], operands[1]))
1632 return "movl %1,%0;sbwc %2,%0";
1633 return "sbwc %2,%0";
1634 /* %0 = %2 - %1 - C */
1636 return get_insn_template (CODE_FOR_subsi3, insn);
1639 default:
1640 gcc_unreachable ();
1644 /* True if X is an rtx for a constant that is a valid address. */
1646 bool
1647 legitimate_constant_address_p (rtx x)
1649 if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1650 || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1651 return true;
1652 if (GET_CODE (x) != CONST)
1653 return false;
1654 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1655 if (flag_pic
1656 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1657 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1658 return false;
1659 #endif
1660 return true;
1663 /* The other macros defined here are used only in legitimate_address_p (). */
1665 /* Nonzero if X is a hard reg that can be used as an index
1666 or, if not strict, if it is a pseudo reg. */
1667 #define INDEX_REGISTER_P(X, STRICT) \
1668 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))
1670 /* Nonzero if X is a hard reg that can be used as a base reg
1671 or, if not strict, if it is a pseudo reg. */
1672 #define BASE_REGISTER_P(X, STRICT) \
1673 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1675 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1677 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1678 are no SYMBOL_REFs for external symbols present. */
1680 static bool
1681 indirectable_constant_address_p (rtx x, bool indirect)
1683 if (GET_CODE (x) == SYMBOL_REF)
1684 return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;
1686 if (GET_CODE (x) == CONST)
1687 return !flag_pic
1688 || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1689 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));
1691 return CONSTANT_ADDRESS_P (x);
1694 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1696 static bool
1697 indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
1699 return CONSTANT_ADDRESS_P (x);
1702 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1704 /* True if X is an address which can be indirected. External symbols
1705 could be in a sharable image library, so we disallow those. */
1707 static bool
1708 indirectable_address_p (rtx x, bool strict, bool indirect)
1710 if (indirectable_constant_address_p (x, indirect)
1711 || BASE_REGISTER_P (x, strict))
1712 return true;
1713 if (GET_CODE (x) != PLUS
1714 || !BASE_REGISTER_P (XEXP (x, 0), strict)
1715 || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1716 return false;
1717 return indirectable_constant_address_p (XEXP (x, 1), indirect);
1720 /* Return true if x is a valid address not using indexing.
1721 (This much is the easy part.) */
1722 static bool
1723 nonindexed_address_p (rtx x, bool strict)
1725 rtx xfoo0;
1726 if (REG_P (x))
1728 if (! reload_in_progress
1729 || reg_equiv_mem (REGNO (x)) == 0
1730 || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
1731 return true;
1733 if (indirectable_constant_address_p (x, false))
1734 return true;
1735 if (indirectable_address_p (x, strict, false))
1736 return true;
1737 xfoo0 = XEXP (x, 0);
1738 if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
1739 return true;
1740 if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1741 && BASE_REGISTER_P (xfoo0, strict))
1742 return true;
1743 return false;
1746 /* True if PROD is either a reg times the size of mode MODE (with MODE
1747 at most 8 bytes), or just a reg if MODE is one byte. */
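/* E.g. for an SImode reference a valid index term is
   (mult (reg) (const_int 4)), the register scaled by the operand size.  */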
1749 static bool
1750 index_term_p (rtx prod, machine_mode mode, bool strict)
1752 rtx xfoo0, xfoo1;
1754 if (GET_MODE_SIZE (mode) == 1)
1755 return BASE_REGISTER_P (prod, strict);
1757 if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1758 return false;
1760 xfoo0 = XEXP (prod, 0);
1761 xfoo1 = XEXP (prod, 1);
1763 if (CONST_INT_P (xfoo0)
1764 && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1765 && INDEX_REGISTER_P (xfoo1, strict))
1766 return true;
1768 if (CONST_INT_P (xfoo1)
1769 && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1770 && INDEX_REGISTER_P (xfoo0, strict))
1771 return true;
1773 return false;
1776 /* Return true if X is the sum of a register
1777 and a valid index term for mode MODE. */
1778 static bool
1779 reg_plus_index_p (rtx x, machine_mode mode, bool strict)
1781 rtx xfoo0, xfoo1;
1783 if (GET_CODE (x) != PLUS)
1784 return false;
1786 xfoo0 = XEXP (x, 0);
1787 xfoo1 = XEXP (x, 1);
1789 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1790 return true;
1792 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1793 return true;
1795 return false;
1798 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1799 static bool
1800 indexable_address_p (rtx xfoo0, rtx xfoo1, machine_mode mode, bool strict)
1802 if (!CONSTANT_ADDRESS_P (xfoo0))
1803 return false;
1804 if (BASE_REGISTER_P (xfoo1, strict))
1805 return !flag_pic || mode == QImode;
1806 if (flag_pic && symbolic_operand (xfoo0, SImode))
1807 return false;
1808 return reg_plus_index_p (xfoo1, mode, strict);
1811 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1812 that is a valid memory address for an instruction.
1813 The MODE argument is the machine mode for the MEM expression
1814 that wants to use this address. */
1815 bool
1816 vax_legitimate_address_p (machine_mode mode, rtx x, bool strict)
1818 rtx xfoo0, xfoo1;
1820 if (nonindexed_address_p (x, strict))
1821 return true;
1823 if (GET_CODE (x) != PLUS)
1824 return false;
1826 /* Handle <address>[index] represented with index-sum outermost */
1828 xfoo0 = XEXP (x, 0);
1829 xfoo1 = XEXP (x, 1);
1831 if (index_term_p (xfoo0, mode, strict)
1832 && nonindexed_address_p (xfoo1, strict))
1833 return true;
1835 if (index_term_p (xfoo1, mode, strict)
1836 && nonindexed_address_p (xfoo0, strict))
1837 return true;
1839 /* Handle offset(reg)[index] with offset added outermost */
1841 if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1842 || indexable_address_p (xfoo1, xfoo0, mode, strict))
1843 return true;
1845 return false;
1848 /* Return true if x (a legitimate address expression) has an effect that
1849 depends on the machine mode it is used for. On the VAX, predecrement
1850 and postincrement addresses depend thus (the amount of decrement or
1851 increment being the length of the operand) and all indexed addresses
1852 depend thus (because the index scale factor is the length of the operand). */
1854 static bool
1855 vax_mode_dependent_address_p (const_rtx x, addr_space_t as ATTRIBUTE_UNUSED)
1857 rtx xfoo0, xfoo1;
1859 /* Auto-increment cases are now dealt with generically in recog.c. */
1860 if (GET_CODE (x) != PLUS)
1861 return false;
1863 xfoo0 = XEXP (x, 0);
1864 xfoo1 = XEXP (x, 1);
1866 if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1867 return false;
1868 if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1869 return false;
1870 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1871 return false;
1872 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
1873 return false;
1875 return true;
1878 static rtx
1879 fixup_mathdi_operand (rtx x, machine_mode mode)
1881 if (illegal_addsub_di_memory_operand (x, mode))
1883 rtx addr = XEXP (x, 0);
1884 rtx temp = gen_reg_rtx (Pmode);
1885 rtx offset = 0;
1886 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1887 if (GET_CODE (addr) == CONST && flag_pic)
1889 offset = XEXP (XEXP (addr, 0), 1);
1890 addr = XEXP (XEXP (addr, 0), 0);
1892 #endif
1893 emit_move_insn (temp, addr);
1894 if (offset)
1895 temp = gen_rtx_PLUS (Pmode, temp, offset);
1896 x = gen_rtx_MEM (DImode, temp);
1898 return x;
1901 void
1902 vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
1904 int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
1905 rtx temp;
1907 rtx (*gen_old_insn)(rtx, rtx, rtx);
1908 rtx (*gen_si_insn)(rtx, rtx, rtx);
1909 rtx (*gen_insn)(rtx, rtx, rtx);
1911 if (code == PLUS)
1913 gen_old_insn = gen_adddi3_old;
1914 gen_si_insn = gen_addsi3;
1915 gen_insn = gen_adcdi3;
1917 else if (code == MINUS)
1919 gen_old_insn = gen_subdi3_old;
1920 gen_si_insn = gen_subsi3;
1921 gen_insn = gen_sbcdi3;
1923 else
1924 gcc_unreachable ();
1926 /* If this is addition (thus operands are commutative) and if there is one
1927 addend that duplicates the destination, we want that addend to be the
1928 first addend. */
1929 if (code == PLUS
1930 && rtx_equal_p (operands[0], operands[2])
1931 && !rtx_equal_p (operands[1], operands[2]))
1933 temp = operands[2];
1934 operands[2] = operands[1];
1935 operands[1] = temp;
1938 if (!TARGET_QMATH)
1940 emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
1942 else if (hi_only)
1944 if (!rtx_equal_p (operands[0], operands[1])
1945 && (REG_P (operands[0]) && MEM_P (operands[1])))
1947 emit_move_insn (operands[0], operands[1]);
1948 operands[1] = operands[0];
1951 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1952 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1953 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1955 if (!rtx_equal_p (operands[0], operands[1]))
1956 emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
1957 operand_subword (operands[1], 0, 0, DImode));
1959 emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
1960 operand_subword (operands[1], 1, 0, DImode),
1961 operand_subword (operands[2], 1, 0, DImode)));
1963 else
1965 /* If we are adding the same value to itself, that's really a multiply by 2,
1966 and that's just a left shift by 1. */
1967 if (rtx_equal_p (operands[1], operands[2]))
1969 gcc_assert (code != MINUS);
1970 emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
1971 return;
1974 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1976 /* If an operand is the same as operand[0], use the operand[0] rtx
1977 because fixup will return an equivalent rtx but not an equal one. */
1979 if (rtx_equal_p (operands[0], operands[1]))
1980 operands[1] = operands[0];
1981 else
1982 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1984 if (rtx_equal_p (operands[0], operands[2]))
1985 operands[2] = operands[0];
1986 else
1987 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1989 /* If we are subtracting and the destination differs from the first
1990 operand [d = a - b], then because the carry ops take only two
1991 operands we would need a move before the subtract. And if d == b,
1992 we would need a temp, since otherwise [d = a, d -= d] ends up as 0.
1993 Instead we rewrite d = a - b into d = -b, d += a. Since -b can
1994 never overflow, no temp is needed even if b == d.
1996 If we are doing addition, since the carry ops are two operand, if
1997 we aren't adding to ourselves, move the first addend to the
1998 destination first. */
2000 gcc_assert (operands[1] != const0_rtx || code == MINUS);
2001 if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
2003 if (code == MINUS && CONSTANT_P (operands[1]))
2005 temp = gen_reg_rtx (DImode);
2006 emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
2007 code = PLUS;
2008 gen_insn = gen_adcdi3;
2009 operands[2] = operands[1];
2010 operands[1] = operands[0];
2012 else
2013 emit_move_insn (operands[0], operands[1]);
2016 /* Subtracting a constant will have been rewritten to an addition of the
2017 negative of that constant before we get here. */
2018 gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
2019 emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
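/* Return true if LO and HI together form one adjacent operand pair of
   the given MODE, e.g. the register pair r2/r3 or the SImode memory
   references 4(r1) and 8(r1).  */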
2023 bool
2024 adjacent_operands_p (rtx lo, rtx hi, machine_mode mode)
2026 HOST_WIDE_INT lo_offset;
2027 HOST_WIDE_INT hi_offset;
2029 if (GET_CODE (lo) != GET_CODE (hi))
2030 return false;
2032 if (REG_P (lo))
2033 return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
2034 if (CONST_INT_P (lo))
2035 return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
2036 if (CONST_INT_P (lo))
2037 return mode != SImode;
2039 if (!MEM_P (lo))
2040 return false;
2042 if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
2043 return false;
2045 lo = XEXP (lo, 0);
2046 hi = XEXP (hi, 0);
2048 if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
2049 return rtx_equal_p (lo, hi);
2051 switch (GET_CODE (lo))
2053 case REG:
2054 case SYMBOL_REF:
2055 lo_offset = 0;
2056 break;
2057 case CONST:
2058 lo = XEXP (lo, 0);
2059 /* FALLTHROUGH */
2060 case PLUS:
2061 if (!CONST_INT_P (XEXP (lo, 1)))
2062 return false;
2063 lo_offset = INTVAL (XEXP (lo, 1));
2064 lo = XEXP (lo, 0);
2065 break;
2066 default:
2067 return false;
2070 switch (GET_CODE (hi))
2072 case REG:
2073 case SYMBOL_REF:
2074 hi_offset = 0;
2075 break;
2076 case CONST:
2077 hi = XEXP (hi, 0);
2078 /* FALLTHROUGH */
2079 case PLUS:
2080 if (!CONST_INT_P (XEXP (hi, 1)))
2081 return false;
2082 hi_offset = INTVAL (XEXP (hi, 1));
2083 hi = XEXP (hi, 0);
2084 break;
2085 default:
2086 return false;
2089 if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
2090 return false;
2092 return rtx_equal_p (lo, hi)
2093 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
2096 /* Output assembler code for a block containing the constant parts
2097 of a trampoline, leaving space for the variable parts. */
2099 /* On the VAX, the trampoline contains an entry mask and two instructions:
2100 .word NN
2101 movl $STATIC,r0 (store the function's static chain)
2102 jmp *$FUNCTION (jump to function code at address FUNCTION) */
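/* In the template below, 0x8fd0 is the little-endian pairing of the movl
   opcode (0xd0) with the immediate addressing-mode byte (0x8f), 0x50 +
   STATIC_CHAIN_REGNUM is the register-mode operand specifier for the
   static chain register, and 0x9f17 pairs the jmp opcode (0x17) with the
   absolute-deferred addressing-mode byte (0x9f).  */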
2104 static void
2105 vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
2107 assemble_aligned_integer (2, const0_rtx);
2108 assemble_aligned_integer (2, GEN_INT (0x8fd0));
2109 assemble_aligned_integer (4, const0_rtx);
2110 assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
2111 assemble_aligned_integer (2, GEN_INT (0x9f17));
2112 assemble_aligned_integer (4, const0_rtx);
2115 /* We copy the register-mask from the function's pure code
2116 to the start of the trampoline. */
2118 static void
2119 vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
2121 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2122 rtx mem;
2124 emit_block_move (m_tramp, assemble_trampoline_template (),
2125 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2127 mem = adjust_address (m_tramp, HImode, 0);
2128 emit_move_insn (mem, gen_const_mem (HImode, fnaddr));
2130 mem = adjust_address (m_tramp, SImode, 4);
2131 emit_move_insn (mem, cxt);
2132 mem = adjust_address (m_tramp, SImode, 11);
2133 emit_move_insn (mem, plus_constant (Pmode, fnaddr, 2));
2134 emit_insn (gen_sync_istream ());
2137 /* Value is the number of bytes of arguments automatically
2138 popped when returning from a subroutine call.
2139 FUNDECL is the declaration node of the function (as a tree),
2140 FUNTYPE is the data type of the function (as a tree),
2141 or for a library call it is an identifier node for the subroutine name.
2142 SIZE is the number of bytes of arguments passed on the stack.
2144 On the VAX, the RET insn pops a maximum of 255 args for any function. */
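/* For example, with 12 bytes of stack arguments this returns 12 and the
   callee's RET pops them; with more than 1020 bytes it returns 0 and the
   caller must pop the arguments itself.  */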
2146 static int
2147 vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2148 tree funtype ATTRIBUTE_UNUSED, int size)
2150 return size > 255 * 4 ? 0 : size;
2153 /* Define where to put the arguments to a function.
2154 Value is zero to push the argument on the stack,
2155 or a hard register in which to store the argument.
2157 MODE is the argument's machine mode.
2158 TYPE is the data type of the argument (as a tree).
2159 This is null for libcalls where that information may
2160 not be available.
2161 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2162 the preceding args and about the function being called.
2163 NAMED is nonzero if this argument is a named parameter
2164 (otherwise it is an extra parameter matching an ellipsis). */
2166 /* On the VAX all args are pushed. */
2168 static rtx
2169 vax_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
2170 machine_mode mode ATTRIBUTE_UNUSED,
2171 const_tree type ATTRIBUTE_UNUSED,
2172 bool named ATTRIBUTE_UNUSED)
2174 return NULL_RTX;
2177 /* Update the data in CUM to advance over an argument of mode MODE and
2178 data type TYPE. (TYPE is null for libcalls where that information
2179 may not be available.) */
2181 static void
2182 vax_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
2183 const_tree type, bool named ATTRIBUTE_UNUSED)
2185 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2187 *cum += (mode != BLKmode
2188 ? (GET_MODE_SIZE (mode) + 3) & ~3
2189 : (int_size_in_bytes (type) + 3) & ~3);