1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987-2018 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #define IN_TARGET_CODE 1
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "stringpool.h"
30 #include "attribs.h"
31 #include "df.h"
32 #include "memmodel.h"
33 #include "tm_p.h"
34 #include "optabs.h"
35 #include "regs.h"
36 #include "emit-rtl.h"
37 #include "calls.h"
38 #include "varasm.h"
39 #include "conditions.h"
40 #include "output.h"
41 #include "expr.h"
42 #include "reload.h"
43 #include "builtins.h"
45 /* This file should be included last. */
46 #include "target-def.h"
48 static void vax_option_override (void);
49 static bool vax_legitimate_address_p (machine_mode, rtx, bool);
50 static void vax_file_start (void);
51 static void vax_init_libfuncs (void);
52 static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
53 HOST_WIDE_INT, tree);
54 static int vax_address_cost_1 (rtx);
55 static int vax_address_cost (rtx, machine_mode, addr_space_t, bool);
56 static bool vax_rtx_costs (rtx, machine_mode, int, int, int *, bool);
57 static rtx vax_function_arg (cumulative_args_t, machine_mode,
58 const_tree, bool);
59 static void vax_function_arg_advance (cumulative_args_t, machine_mode,
60 const_tree, bool);
61 static rtx vax_struct_value_rtx (tree, int);
62 static rtx vax_builtin_setjmp_frame_value (void);
63 static void vax_asm_trampoline_template (FILE *);
64 static void vax_trampoline_init (rtx, tree, rtx);
65 static poly_int64 vax_return_pops_args (tree, tree, poly_int64);
66 static bool vax_mode_dependent_address_p (const_rtx, addr_space_t);
67 static HOST_WIDE_INT vax_starting_frame_offset (void);
69 /* Initialize the GCC target structure. */
70 #undef TARGET_ASM_ALIGNED_HI_OP
71 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
73 #undef TARGET_ASM_FILE_START
74 #define TARGET_ASM_FILE_START vax_file_start
75 #undef TARGET_ASM_FILE_START_APP_OFF
76 #define TARGET_ASM_FILE_START_APP_OFF true
78 #undef TARGET_INIT_LIBFUNCS
79 #define TARGET_INIT_LIBFUNCS vax_init_libfuncs
81 #undef TARGET_ASM_OUTPUT_MI_THUNK
82 #define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
83 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
84 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
86 #undef TARGET_RTX_COSTS
87 #define TARGET_RTX_COSTS vax_rtx_costs
88 #undef TARGET_ADDRESS_COST
89 #define TARGET_ADDRESS_COST vax_address_cost
91 #undef TARGET_PROMOTE_PROTOTYPES
92 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
94 #undef TARGET_FUNCTION_ARG
95 #define TARGET_FUNCTION_ARG vax_function_arg
96 #undef TARGET_FUNCTION_ARG_ADVANCE
97 #define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance
99 #undef TARGET_STRUCT_VALUE_RTX
100 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
102 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
103 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
105 #undef TARGET_LRA_P
106 #define TARGET_LRA_P hook_bool_void_false
108 #undef TARGET_LEGITIMATE_ADDRESS_P
109 #define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
110 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
111 #define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p
113 #undef TARGET_FRAME_POINTER_REQUIRED
114 #define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true
116 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
117 #define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
118 #undef TARGET_TRAMPOLINE_INIT
119 #define TARGET_TRAMPOLINE_INIT vax_trampoline_init
120 #undef TARGET_RETURN_POPS_ARGS
121 #define TARGET_RETURN_POPS_ARGS vax_return_pops_args
123 #undef TARGET_OPTION_OVERRIDE
124 #define TARGET_OPTION_OVERRIDE vax_option_override
126 #undef TARGET_STARTING_FRAME_OFFSET
127 #define TARGET_STARTING_FRAME_OFFSET vax_starting_frame_offset
129 struct gcc_target targetm = TARGET_INITIALIZER;
131 /* Set global variables as needed for the options enabled. */
133 static void
134 vax_option_override (void)
136 /* We're VAX floating point, not IEEE floating point. */
137 if (TARGET_G_FLOAT)
138 REAL_MODE_FORMAT (DFmode) = &vax_g_format;
140 #ifdef SUBTARGET_OVERRIDE_OPTIONS
141 SUBTARGET_OVERRIDE_OPTIONS;
142 #endif
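/* Record in a REG_CFA_OFFSET note on INSN that SRC has been saved at
   OFFSET bytes from the frame pointer.  */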
145 static void
146 vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
148 rtx x;
150 x = plus_constant (Pmode, frame_pointer_rtx, offset);
151 x = gen_rtx_MEM (SImode, x);
152 x = gen_rtx_SET (x, src);
153 add_reg_note (insn, REG_CFA_OFFSET, x);
/* Generate RTL for the prologue of the current function: emit the
   procedure entry mask, record CFA notes for the registers saved by the
   CALLS/CALLG frame, and allocate the local stack frame.

   Refer to the array `regs_ever_live' to determine which registers to
   save; `regs_ever_live[I]' is nonzero if register number I is ever
   used in the function.  This function is responsible for knowing
   which registers should not be saved even if used.  */
165 void
166 vax_expand_prologue (void)
168 int regno, offset;
169 int mask = 0;
170 HOST_WIDE_INT size;
171 rtx insn;
173 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
174 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
175 mask |= 1 << regno;
177 insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
178 RTX_FRAME_RELATED_P (insn) = 1;
/* The layout of the CALLG/S stack frame is as follows:

		<- CFA, AP
	...	Registers saved as specified by MASK
	return-addr
	old fp
	old ap
	old psw
	zero
		<- FP, SP

   The rest of the prologue will adjust the SP for the local frame.  */
197 vax_add_reg_cfa_offset (insn, 4, arg_pointer_rtx);
198 vax_add_reg_cfa_offset (insn, 8, frame_pointer_rtx);
199 vax_add_reg_cfa_offset (insn, 12, pc_rtx);
201 offset = 16;
202 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
203 if (mask & (1 << regno))
205 vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
206 offset += 4;
209 /* Because add_reg_note pushes the notes, adding this last means that
210 it will be processed first. This is required to allow the other
notes to be interpreted properly.  */
212 add_reg_note (insn, REG_CFA_DEF_CFA,
213 plus_constant (Pmode, frame_pointer_rtx, offset));
215 /* Allocate the local stack frame. */
216 size = get_frame_size ();
217 size -= vax_starting_frame_offset ();
218 emit_insn (gen_addsi3 (stack_pointer_rtx,
219 stack_pointer_rtx, GEN_INT (-size)));
221 /* Do not allow instructions referencing local stack memory to be
222 scheduled before the frame is allocated. This is more pedantic
223 than anything else, given that VAX does not currently have a
224 scheduling description. */
225 emit_insn (gen_blockage ());
228 /* When debugging with stabs, we want to output an extra dummy label
229 so that gas can distinguish between D_float and G_float prior to
230 processing the .stabs directive identifying type double. */
231 static void
232 vax_file_start (void)
234 default_file_start ();
236 if (write_symbols == DBX_DEBUG)
237 fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
240 /* We can use the BSD C library routines for the libgcc calls that are
still generated, since that's what they boil down to anyway.  When
targeting ELF, avoid the user's namespace.  */
244 static void
245 vax_init_libfuncs (void)
247 if (TARGET_BSD_DIVMOD)
249 set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
250 set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
/* Split the DImode OPERANDS of INSN (an operation of rtx code CODE) into
   SImode halves: on return LOW[i] refers to the low word and OPERANDS[i]
   to the high word of each original operand.  Auto-increment/decrement
   memory operands are rewritten so the address adjusts itself between
   the two word accesses.  N is the number of operands.  */
256 static void
257 split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
258 rtx * low, int n)
260 int i;
262 for (i = 0; i < n; i++)
263 low[i] = 0;
265 for (i = 0; i < n; i++)
267 if (MEM_P (operands[i])
268 && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
269 || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
271 rtx addr = XEXP (operands[i], 0);
272 operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
274 else if (optimize_size && MEM_P (operands[i])
275 && REG_P (XEXP (operands[i], 0))
276 && (code != MINUS || operands[1] != const0_rtx)
277 && find_regno_note (insn, REG_DEAD,
278 REGNO (XEXP (operands[i], 0))))
280 low[i] = gen_rtx_MEM (SImode,
281 gen_rtx_POST_INC (Pmode,
282 XEXP (operands[i], 0)));
283 operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
285 else
287 low[i] = operand_subword (operands[i], 0, 0, DImode);
288 operands[i] = operand_subword (operands[i], 1, 0, DImode);
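/* Output to FILE the VAX assembler syntax for the address ADDR: handles
   indirection (*), register deferred, autodecrement/autoincrement, and
   the base/index/offset combinations of the PLUS case.  */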
293 void
294 print_operand_address (FILE * file, rtx addr)
296 rtx orig = addr;
297 rtx reg1, breg, ireg;
298 rtx offset;
300 retry:
301 switch (GET_CODE (addr))
303 case MEM:
304 fprintf (file, "*");
305 addr = XEXP (addr, 0);
306 goto retry;
308 case REG:
309 fprintf (file, "(%s)", reg_names[REGNO (addr)]);
310 break;
312 case PRE_DEC:
313 fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
314 break;
316 case POST_INC:
317 fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
318 break;
320 case PLUS:
321 /* There can be either two or three things added here. One must be a
322 REG. One can be either a REG or a MULT of a REG and an appropriate
323 constant, and the third can only be a constant or a MEM.
325 We get these two or three things and put the constant or MEM in
326 OFFSET, the MULT or REG in IREG, and the REG in BREG. If we have
327 a register and can't tell yet if it is a base or index register,
328 put it into REG1. */
330 reg1 = 0; ireg = 0; breg = 0; offset = 0;
332 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
333 || MEM_P (XEXP (addr, 0)))
335 offset = XEXP (addr, 0);
336 addr = XEXP (addr, 1);
338 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
339 || MEM_P (XEXP (addr, 1)))
341 offset = XEXP (addr, 1);
342 addr = XEXP (addr, 0);
344 else if (GET_CODE (XEXP (addr, 1)) == MULT)
346 ireg = XEXP (addr, 1);
347 addr = XEXP (addr, 0);
349 else if (GET_CODE (XEXP (addr, 0)) == MULT)
351 ireg = XEXP (addr, 0);
352 addr = XEXP (addr, 1);
354 else if (REG_P (XEXP (addr, 1)))
356 reg1 = XEXP (addr, 1);
357 addr = XEXP (addr, 0);
359 else if (REG_P (XEXP (addr, 0)))
361 reg1 = XEXP (addr, 0);
362 addr = XEXP (addr, 1);
364 else
365 gcc_unreachable ();
367 if (REG_P (addr))
369 if (reg1)
370 ireg = addr;
371 else
372 reg1 = addr;
374 else if (GET_CODE (addr) == MULT)
375 ireg = addr;
376 else
378 gcc_assert (GET_CODE (addr) == PLUS);
379 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
380 || MEM_P (XEXP (addr, 0)))
382 if (offset)
384 if (CONST_INT_P (offset))
385 offset = plus_constant (Pmode, XEXP (addr, 0),
386 INTVAL (offset));
387 else
389 gcc_assert (CONST_INT_P (XEXP (addr, 0)));
390 offset = plus_constant (Pmode, offset,
391 INTVAL (XEXP (addr, 0)));
394 offset = XEXP (addr, 0);
396 else if (REG_P (XEXP (addr, 0)))
398 if (reg1)
399 ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
400 else
401 reg1 = XEXP (addr, 0);
403 else
405 gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
406 gcc_assert (!ireg);
407 ireg = XEXP (addr, 0);
410 if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
411 || MEM_P (XEXP (addr, 1)))
413 if (offset)
415 if (CONST_INT_P (offset))
416 offset = plus_constant (Pmode, XEXP (addr, 1),
417 INTVAL (offset));
418 else
420 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
421 offset = plus_constant (Pmode, offset,
422 INTVAL (XEXP (addr, 1)));
425 offset = XEXP (addr, 1);
427 else if (REG_P (XEXP (addr, 1)))
429 if (reg1)
430 ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
431 else
432 reg1 = XEXP (addr, 1);
434 else
436 gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
437 gcc_assert (!ireg);
438 ireg = XEXP (addr, 1);
442 /* If REG1 is nonzero, figure out if it is a base or index register. */
443 if (reg1)
445 if (breg
446 || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
447 || (offset
448 && (MEM_P (offset)
449 || (flag_pic && symbolic_operand (offset, SImode)))))
451 gcc_assert (!ireg);
452 ireg = reg1;
454 else
455 breg = reg1;
458 if (offset != 0)
460 if (flag_pic && symbolic_operand (offset, SImode))
462 if (breg && ireg)
464 debug_rtx (orig);
465 output_operand_lossage ("symbol used with both base and indexed registers");
468 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
469 if (flag_pic > 1 && GET_CODE (offset) == CONST
470 && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
471 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
473 debug_rtx (orig);
474 output_operand_lossage ("symbol with offset used in PIC mode");
476 #endif
478 /* symbol(reg) isn't PIC, but symbol[reg] is. */
479 if (breg)
481 ireg = breg;
482 breg = 0;
487 output_address (VOIDmode, offset);
490 if (breg != 0)
491 fprintf (file, "(%s)", reg_names[REGNO (breg)]);
493 if (ireg != 0)
495 if (GET_CODE (ireg) == MULT)
496 ireg = XEXP (ireg, 0);
497 gcc_assert (REG_P (ireg));
498 fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
500 break;
502 default:
503 output_addr_const (file, addr);
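/* Output operand X to FILE.  CODE is a punctuation or format character:
   '#' prints the D/G double-float letter, '|' the register prefix,
   'c'/'C' a (reversed) condition name, and 'D', 'P', 'N', 'R', 'H', 'h',
   'B', 'b', 'M' and 'x' print variously transformed integer constants
   as immediate operands.  */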
507 void
508 print_operand (FILE *file, rtx x, int code)
510 if (code == '#')
511 fputc (ASM_DOUBLE_CHAR, file);
512 else if (code == '|')
513 fputs (REGISTER_PREFIX, file);
514 else if (code == 'c')
515 fputs (cond_name (x), file);
516 else if (code == 'C')
517 fputs (rev_cond_name (x), file);
518 else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
519 fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
520 else if (code == 'P' && CONST_INT_P (x))
521 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
522 else if (code == 'N' && CONST_INT_P (x))
523 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
524 /* rotl instruction cannot deal with negative arguments. */
525 else if (code == 'R' && CONST_INT_P (x))
526 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
527 else if (code == 'H' && CONST_INT_P (x))
528 fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
529 else if (code == 'h' && CONST_INT_P (x))
530 fprintf (file, "$%d", (short) - INTVAL (x));
531 else if (code == 'B' && CONST_INT_P (x))
532 fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
533 else if (code == 'b' && CONST_INT_P (x))
534 fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
535 else if (code == 'M' && CONST_INT_P (x))
536 fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
537 else if (code == 'x' && CONST_INT_P (x))
538 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
539 else if (REG_P (x))
540 fprintf (file, "%s", reg_names[REGNO (x)]);
541 else if (MEM_P (x))
542 output_address (GET_MODE (x), XEXP (x, 0));
543 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
545 char dstr[30];
546 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
547 sizeof (dstr), 0, 1);
548 fprintf (file, "$0f%s", dstr);
550 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
552 char dstr[30];
553 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
554 sizeof (dstr), 0, 1);
555 fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
557 else
559 if (flag_pic > 1 && symbolic_operand (x, SImode))
561 debug_rtx (x);
562 output_operand_lossage ("symbol used as immediate operand");
564 putc ('$', file);
565 output_addr_const (file, x);
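/* Return the VAX branch-condition name for comparison code OP.  */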
569 const char *
570 cond_name (rtx op)
572 switch (GET_CODE (op))
574 case NE:
575 return "neq";
576 case EQ:
577 return "eql";
578 case GE:
579 return "geq";
580 case GT:
581 return "gtr";
582 case LE:
583 return "leq";
584 case LT:
585 return "lss";
586 case GEU:
587 return "gequ";
588 case GTU:
589 return "gtru";
590 case LEU:
591 return "lequ";
592 case LTU:
593 return "lssu";
595 default:
596 gcc_unreachable ();
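/* Return the VAX branch-condition name for the reverse of comparison
   code OP.  */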
600 const char *
601 rev_cond_name (rtx op)
603 switch (GET_CODE (op))
605 case EQ:
606 return "neq";
607 case NE:
608 return "eql";
609 case LT:
610 return "geq";
611 case LE:
612 return "gtr";
613 case GT:
614 return "leq";
615 case GE:
616 return "lss";
617 case LTU:
618 return "gequ";
619 case LEU:
620 return "gtru";
621 case GTU:
622 return "lequ";
623 case GEU:
624 return "lssu";
626 default:
627 gcc_unreachable ();
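/* Return true if C is a CONST_DOUBLE the VAX can encode as a short
   floating-point literal: zero, a power of two from 1 to 64, or the
   exact reciprocal of such a power of two.  */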
631 static bool
632 vax_float_literal (rtx c)
634 machine_mode mode;
635 const REAL_VALUE_TYPE *r;
636 REAL_VALUE_TYPE s;
637 int i;
639 if (GET_CODE (c) != CONST_DOUBLE)
640 return false;
642 mode = GET_MODE (c);
644 if (c == const_tiny_rtx[(int) mode][0]
645 || c == const_tiny_rtx[(int) mode][1]
646 || c == const_tiny_rtx[(int) mode][2])
647 return true;
649 r = CONST_DOUBLE_REAL_VALUE (c);
651 for (i = 0; i < 7; i++)
653 int x = 1 << i;
654 bool ok;
655 real_from_integer (&s, mode, x, SIGNED);
657 if (real_equal (r, &s))
658 return true;
659 ok = exact_real_inverse (mode, &s);
660 gcc_assert (ok);
661 if (real_equal (r, &s))
662 return true;
664 return false;
668 /* Return the cost in cycles of a memory address, relative to register
669 indirect.
671 Each of the following adds the indicated number of cycles:
673 1 - symbolic address
674 1 - pre-decrement
675 1 - indexing and/or offset(register)
676 2 - indirect */
679 static int
680 vax_address_cost_1 (rtx addr)
682 int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
683 rtx plus_op0 = 0, plus_op1 = 0;
684 restart:
685 switch (GET_CODE (addr))
687 case PRE_DEC:
688 predec = 1;
689 /* FALLTHRU */
690 case REG:
691 case SUBREG:
692 case POST_INC:
693 reg = 1;
694 break;
695 case MULT:
696 indexed = 1; /* 2 on VAX 2 */
697 break;
698 case CONST_INT:
699 /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
700 if (offset == 0)
701 offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
702 break;
703 case CONST:
704 case SYMBOL_REF:
705 offset = 1; /* 2 on VAX 2 */
706 break;
707 case LABEL_REF: /* this is probably a byte offset from the pc */
708 if (offset == 0)
709 offset = 1;
710 break;
711 case PLUS:
712 if (plus_op0)
713 plus_op1 = XEXP (addr, 0);
714 else
715 plus_op0 = XEXP (addr, 0);
716 addr = XEXP (addr, 1);
717 goto restart;
718 case MEM:
719 indir = 2; /* 3 on VAX 2 */
720 addr = XEXP (addr, 0);
721 goto restart;
722 default:
723 break;
726 /* Up to 3 things can be added in an address. They are stored in
727 plus_op0, plus_op1, and addr. */
729 if (plus_op0)
731 addr = plus_op0;
732 plus_op0 = 0;
733 goto restart;
735 if (plus_op1)
737 addr = plus_op1;
738 plus_op1 = 0;
739 goto restart;
741 /* Indexing and register+offset can both be used (except on a VAX 2)
742 without increasing execution time over either one alone. */
743 if (reg && indexed && offset)
744 return reg + indir + offset + predec;
745 return reg + indexed + indir + offset + predec;
748 static int
749 vax_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
750 addr_space_t as ATTRIBUTE_UNUSED,
751 bool speed ATTRIBUTE_UNUSED)
753 return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
756 /* Cost of an expression on a VAX. This version has costs tuned for the
757 CVAX chip (found in the VAX 3 series) with comments for variations on
758 other models.
760 FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
761 and FLOAT_TRUNCATE. We need a -mcpu option to allow provision of
762 costs on a per cpu basis. */
764 static bool
765 vax_rtx_costs (rtx x, machine_mode mode, int outer_code,
766 int opno ATTRIBUTE_UNUSED,
767 int *total, bool speed ATTRIBUTE_UNUSED)
769 int code = GET_CODE (x);
770 int i = 0; /* may be modified in switch */
771 const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */
773 switch (code)
775 /* On a VAX, constants from 0..63 are cheap because they can use the
1-byte literal constant format.  A compare with -1 should be made cheap
777 so that decrement-and-branch insns can be formed more easily (if
778 the value -1 is copied to a register some decrement-and-branch
779 patterns will not match). */
780 case CONST_INT:
781 if (INTVAL (x) == 0)
783 *total = 0;
784 return true;
786 if (outer_code == AND)
788 *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
789 return true;
791 if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
792 || (outer_code == COMPARE
793 && INTVAL (x) == -1)
794 || ((outer_code == PLUS || outer_code == MINUS)
795 && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
797 *total = 1;
798 return true;
800 /* FALLTHRU */
802 case CONST:
803 case LABEL_REF:
804 case SYMBOL_REF:
805 *total = 3;
806 return true;
808 case CONST_DOUBLE:
809 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
810 *total = vax_float_literal (x) ? 5 : 8;
811 else
812 *total = ((CONST_DOUBLE_HIGH (x) == 0
813 && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
814 || (outer_code == PLUS
815 && CONST_DOUBLE_HIGH (x) == -1
816 && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
817 ? 2 : 5;
818 return true;
820 case POST_INC:
821 *total = 2;
822 return true; /* Implies register operand. */
824 case PRE_DEC:
825 *total = 3;
826 return true; /* Implies register operand. */
828 case MULT:
829 switch (mode)
831 case E_DFmode:
832 *total = 16; /* 4 on VAX 9000 */
833 break;
834 case E_SFmode:
835 *total = 9; /* 4 on VAX 9000, 12 on VAX 2 */
836 break;
837 case E_DImode:
838 *total = 16; /* 6 on VAX 9000, 28 on VAX 2 */
839 break;
840 case E_SImode:
841 case E_HImode:
842 case E_QImode:
843 *total = 10; /* 3-4 on VAX 9000, 20-28 on VAX 2 */
844 break;
845 default:
846 *total = MAX_COST; /* Mode is not supported. */
847 return true;
849 break;
851 case UDIV:
852 if (mode != SImode)
854 *total = MAX_COST; /* Mode is not supported. */
855 return true;
857 *total = 17;
858 break;
860 case DIV:
861 if (mode == DImode)
862 *total = 30; /* Highly variable. */
863 else if (mode == DFmode)
864 /* divide takes 28 cycles if the result is not zero, 13 otherwise */
865 *total = 24;
866 else
867 *total = 11; /* 25 on VAX 2 */
868 break;
870 case MOD:
871 *total = 23;
872 break;
874 case UMOD:
875 if (mode != SImode)
877 *total = MAX_COST; /* Mode is not supported. */
878 return true;
880 *total = 29;
881 break;
883 case FLOAT:
884 *total = (6 /* 4 on VAX 9000 */
885 + (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
886 break;
888 case FIX:
889 *total = 7; /* 17 on VAX 2 */
890 break;
892 case ASHIFT:
893 case LSHIFTRT:
894 case ASHIFTRT:
895 if (mode == DImode)
896 *total = 12;
897 else
898 *total = 10; /* 6 on VAX 9000 */
899 break;
901 case ROTATE:
902 case ROTATERT:
903 *total = 6; /* 5 on VAX 2, 4 on VAX 9000 */
904 if (CONST_INT_P (XEXP (x, 1)))
905 fmt = "e"; /* all constant rotate counts are short */
906 break;
908 case PLUS:
909 case MINUS:
910 *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
911 /* Small integer operands can use subl2 and addl2. */
912 if ((CONST_INT_P (XEXP (x, 1)))
913 && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
914 fmt = "e";
915 break;
917 case IOR:
918 case XOR:
919 *total = 3;
920 break;
922 case AND:
923 /* AND is special because the first operand is complemented. */
924 *total = 3;
925 if (CONST_INT_P (XEXP (x, 0)))
927 if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
928 *total = 4;
929 fmt = "e";
930 i = 1;
932 break;
934 case NEG:
935 if (mode == DFmode)
936 *total = 9;
937 else if (mode == SFmode)
938 *total = 6;
939 else if (mode == DImode)
940 *total = 4;
941 else
942 *total = 2;
943 break;
945 case NOT:
946 *total = 2;
947 break;
949 case ZERO_EXTRACT:
950 case SIGN_EXTRACT:
951 *total = 15;
952 break;
954 case MEM:
955 if (mode == DImode || mode == DFmode)
956 *total = 5; /* 7 on VAX 2 */
957 else
958 *total = 3; /* 4 on VAX 2 */
959 x = XEXP (x, 0);
960 if (!REG_P (x) && GET_CODE (x) != POST_INC)
961 *total += vax_address_cost_1 (x);
962 return true;
964 case FLOAT_EXTEND:
965 case FLOAT_TRUNCATE:
966 case TRUNCATE:
967 *total = 3; /* FIXME: Costs need to be checked */
968 break;
970 default:
971 return false;
974 /* Now look inside the expression. Operands which are not registers or
975 short constants add to the cost.
977 FMT and I may have been adjusted in the switch above for instructions
978 which require special handling. */
980 while (*fmt++ == 'e')
982 rtx op = XEXP (x, i);
984 i += 1;
985 code = GET_CODE (op);
987 /* A NOT is likely to be found as the first operand of an AND
988 (in which case the relevant cost is of the operand inside
989 the not) and not likely to be found anywhere else. */
990 if (code == NOT)
991 op = XEXP (op, 0), code = GET_CODE (op);
993 switch (code)
995 case CONST_INT:
996 if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
997 && mode != QImode)
998 *total += 1; /* 2 on VAX 2 */
999 break;
1000 case CONST:
1001 case LABEL_REF:
1002 case SYMBOL_REF:
1003 *total += 1; /* 2 on VAX 2 */
1004 break;
1005 case CONST_DOUBLE:
1006 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
1008 /* Registers are faster than floating point constants -- even
1009 those constants which can be encoded in a single byte. */
1010 if (vax_float_literal (op))
1011 *total += 1;
1012 else
1013 *total += (GET_MODE (x) == DFmode) ? 3 : 2;
1015 else
1017 if (CONST_DOUBLE_HIGH (op) != 0
1018 || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
1019 *total += 2;
1021 break;
1022 case MEM:
1023 *total += 1; /* 2 on VAX 2 */
1024 if (!REG_P (XEXP (op, 0)))
1025 *total += vax_address_cost_1 (XEXP (op, 0));
1026 break;
1027 case REG:
1028 case SUBREG:
1029 break;
1030 default:
1031 *total += 1;
1032 break;
1035 return true;
1038 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
1039 Used for C++ multiple inheritance.
1040 .mask ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11> #conservative entry mask
1041 addl2 $DELTA, 4(ap) #adjust first argument
jmp FUNCTION+2 #jump beyond FUNCTION's entry mask
*/
1045 static void
1046 vax_output_mi_thunk (FILE * file,
1047 tree thunk ATTRIBUTE_UNUSED,
1048 HOST_WIDE_INT delta,
1049 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1050 tree function)
1052 fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
1053 asm_fprintf (file, ",4(%Rap)\n");
1054 fprintf (file, "\tjmp ");
1055 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1056 fprintf (file, "+2\n");
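/* Worker for TARGET_STRUCT_VALUE_RTX: the address of a returned
   structure is passed in VAX_STRUCT_VALUE_REGNUM rather than on the
   stack.  */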
1059 static rtx
1060 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1061 int incoming ATTRIBUTE_UNUSED)
1063 return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
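/* Worker for TARGET_BUILTIN_SETJMP_FRAME_VALUE: __builtin_setjmp saves
   the hard frame pointer rather than the virtual frame pointer.  */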
1066 static rtx
1067 vax_builtin_setjmp_frame_value (void)
1069 return hard_frame_pointer_rtx;
1072 /* Worker function for NOTICE_UPDATE_CC. */
1074 void
1075 vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
1077 if (GET_CODE (exp) == SET)
1079 if (GET_CODE (SET_SRC (exp)) == CALL)
1080 CC_STATUS_INIT;
1081 else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
1082 && GET_CODE (SET_DEST (exp)) != PC)
1084 cc_status.flags = 0;
1085 /* The integer operations below don't set carry or
1086 set it in an incompatible way. That's ok though
1087 as the Z bit is all we need when doing unsigned
1088 comparisons on the result of these insns (since
1089 they're always with 0). Set CC_NO_OVERFLOW to
1090 generate the correct unsigned branches. */
1091 switch (GET_CODE (SET_SRC (exp)))
1093 case NEG:
1094 if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
1095 break;
1096 /* FALLTHRU */
1097 case AND:
1098 case IOR:
1099 case XOR:
1100 case NOT:
1101 case MEM:
1102 case REG:
1103 cc_status.flags = CC_NO_OVERFLOW;
1104 break;
1105 default:
1106 break;
1108 cc_status.value1 = SET_DEST (exp);
1109 cc_status.value2 = SET_SRC (exp);
1112 else if (GET_CODE (exp) == PARALLEL
1113 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
1115 if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
1116 CC_STATUS_INIT;
1117 else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
1119 cc_status.flags = 0;
1120 cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
1121 cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
1123 else
1124 /* PARALLELs whose first element sets the PC are aob,
1125 sob insns. They do change the cc's. */
1126 CC_STATUS_INIT;
1128 else
1129 CC_STATUS_INIT;
1130 if (cc_status.value1 && REG_P (cc_status.value1)
1131 && cc_status.value2
1132 && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
1133 cc_status.value2 = 0;
1134 if (cc_status.value1 && MEM_P (cc_status.value1)
1135 && cc_status.value2
1136 && MEM_P (cc_status.value2))
1137 cc_status.value2 = 0;
1138 /* Actual condition, one line up, should be that value2's address
1139 depends on value1, but that is too much of a pain. */
1142 /* Output integer move instructions. */
1144 const char *
1145 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1146 machine_mode mode)
1148 rtx hi[3], lo[3];
1149 const char *pattern_hi, *pattern_lo;
1151 switch (mode)
1153 case E_DImode:
1154 if (operands[1] == const0_rtx)
1155 return "clrq %0";
1156 if (TARGET_QMATH && optimize_size
1157 && (CONST_INT_P (operands[1])
1158 || GET_CODE (operands[1]) == CONST_DOUBLE))
1160 unsigned HOST_WIDE_INT hval, lval;
1161 int n;
1163 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1165 gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1167 /* Make sure only the low 32 bits are valid. */
1168 lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1169 hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1171 else
1173 lval = INTVAL (operands[1]);
1174 hval = 0;
/* Check whether the 64-bit value is really a 6-bit value shifted left by
   some arbitrary amount.  If so, we can use ashq to shift it into place,
   saving 7 bytes (1 addr-mode byte + 8 immediate bytes - 1 shift byte
   - 1 short-literal byte).  */
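/* For example (assuming a 64-bit HOST_WIDE_INT, so the constant arrives
   here as a CONST_INT): 0x1000000000 is 1 << 36, so instead of a movq
   with an 8-byte immediate it can be emitted as "ashq $36,$1,dst".  */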
1181 if (lval != 0
1182 && (n = exact_log2 (lval & (- lval))) != -1
1183 && (lval >> n) < 64)
1185 lval >>= n;
/* On 32-bit hosts, if the 6 bits did not shift into the upper
   32-bit word, that word had better be 0.  If they did shift into
   it, make sure the overflow still fits.  */
1190 if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
1192 if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1193 n = 0; /* failure */
1194 else
1195 lval |= hval << (32 - n);
1197 /* If n is 0, then ashq is not the best way to emit this. */
1198 if (n > 0)
1200 operands[1] = GEN_INT (lval);
1201 operands[2] = GEN_INT (n);
1202 return "ashq %2,%D1,%0";
1204 #if HOST_BITS_PER_WIDE_INT == 32
/* On 32-bit hosts, if the low 32-bit value is 0, check the
   upper 32-bit value.  */
1208 else if (hval != 0
1209 && (n = exact_log2 (hval & (- hval)) - 1) != -1
1210 && (hval >> n) < 64)
1212 operands[1] = GEN_INT (hval >> n);
1213 operands[2] = GEN_INT (n + 32);
1214 return "ashq %2,%D1,%0";
1215 #endif
1219 if (TARGET_QMATH
1220 && (!MEM_P (operands[0])
1221 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1222 || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1223 || !illegal_addsub_di_memory_operand (operands[0], DImode))
1224 && ((CONST_INT_P (operands[1])
1225 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1226 || GET_CODE (operands[1]) == CONST_DOUBLE))
1228 hi[0] = operands[0];
1229 hi[1] = operands[1];
1231 split_quadword_operands (insn, SET, hi, lo, 2);
1233 pattern_lo = vax_output_int_move (NULL, lo, SImode);
1234 pattern_hi = vax_output_int_move (NULL, hi, SImode);
/* If the patterns are just movl/movl or pushl/pushl, then a movq will
   be shorter (1 opcode byte + 1 addr-mode byte + 8 immediate bytes
   vs. 2 opcode bytes + 2 addr-mode bytes + 8 immediate bytes).  */
1240 if ((!strncmp (pattern_lo, "movl", 4)
1241 && !strncmp (pattern_hi, "movl", 4))
1242 || (!strncmp (pattern_lo, "pushl", 5)
1243 && !strncmp (pattern_hi, "pushl", 5)))
1244 return "movq %1,%0";
1246 if (MEM_P (operands[0])
1247 && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1249 output_asm_insn (pattern_hi, hi);
1250 operands[0] = lo[0];
1251 operands[1] = lo[1];
1252 operands[2] = lo[2];
1253 return pattern_lo;
1255 else
1257 output_asm_insn (pattern_lo, lo);
1258 operands[0] = hi[0];
1259 operands[1] = hi[1];
1260 operands[2] = hi[2];
1261 return pattern_hi;
1264 return "movq %1,%0";
1266 case E_SImode:
1267 if (symbolic_operand (operands[1], SImode))
1269 if (push_operand (operands[0], SImode))
1270 return "pushab %a1";
1271 return "movab %a1,%0";
1274 if (operands[1] == const0_rtx)
if (push_operand (operands[0], SImode))
1277 return "pushl %1";
1278 return "clrl %0";
1281 if (CONST_INT_P (operands[1])
1282 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1284 HOST_WIDE_INT i = INTVAL (operands[1]);
1285 int n;
1286 if ((unsigned HOST_WIDE_INT)(~i) < 64)
1287 return "mcoml %N1,%0";
1288 if ((unsigned HOST_WIDE_INT)i < 0x100)
1289 return "movzbl %1,%0";
1290 if (i >= -0x80 && i < 0)
1291 return "cvtbl %1,%0";
1292 if (optimize_size
1293 && (n = exact_log2 (i & (-i))) != -1
1294 && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1296 operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1297 operands[2] = GEN_INT (n);
1298 return "ashl %2,%1,%0";
1300 if ((unsigned HOST_WIDE_INT)i < 0x10000)
1301 return "movzwl %1,%0";
1302 if (i >= -0x8000 && i < 0)
1303 return "cvtwl %1,%0";
1305 if (push_operand (operands[0], SImode))
1306 return "pushl %1";
1307 return "movl %1,%0";
1309 case E_HImode:
1310 if (CONST_INT_P (operands[1]))
1312 HOST_WIDE_INT i = INTVAL (operands[1]);
1313 if (i == 0)
1314 return "clrw %0";
1315 else if ((unsigned HOST_WIDE_INT)i < 64)
1316 return "movw %1,%0";
1317 else if ((unsigned HOST_WIDE_INT)~i < 64)
1318 return "mcomw %H1,%0";
1319 else if ((unsigned HOST_WIDE_INT)i < 256)
1320 return "movzbw %1,%0";
1321 else if (i >= -0x80 && i < 0)
1322 return "cvtbw %1,%0";
1324 return "movw %1,%0";
1326 case E_QImode:
1327 if (CONST_INT_P (operands[1]))
1329 HOST_WIDE_INT i = INTVAL (operands[1]);
1330 if (i == 0)
1331 return "clrb %0";
1332 else if ((unsigned HOST_WIDE_INT)~i < 64)
1333 return "mcomb %B1,%0";
1335 return "movb %1,%0";
1337 default:
1338 gcc_unreachable ();
1342 /* Output integer add instructions.
1344 The space-time-opcode tradeoffs for addition vary by model of VAX.
1346 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
but it is not faster on other models.
1349 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1350 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1351 a register is used in an address too soon after it is set.
1352 Compromise by using movab only when it is shorter than the add
1353 or the base register in the address is one of sp, ap, and fp,
1354 which are not modified very often. */
1356 const char *
1357 vax_output_int_add (rtx insn, rtx *operands, machine_mode mode)
1359 switch (mode)
1361 case E_DImode:
1363 rtx low[3];
1364 const char *pattern;
1365 int carry = 1;
1366 bool sub;
1368 if (TARGET_QMATH && 0)
1369 debug_rtx (insn);
1371 split_quadword_operands (insn, PLUS, operands, low, 3);
1373 if (TARGET_QMATH)
1375 gcc_assert (rtx_equal_p (operands[0], operands[1]));
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1377 gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1378 gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1379 #endif
1381 /* No reason to add a 0 to the low part and thus no carry, so just
1382 emit the appropriate add/sub instruction. */
1383 if (low[2] == const0_rtx)
1384 return vax_output_int_add (NULL, operands, SImode);
1386 /* Are we doing addition or subtraction? */
1387 sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
/* We can't use vax_output_int_add since some of the patterns don't
   modify the carry bit.  */
1391 if (sub)
1393 if (low[2] == constm1_rtx)
1394 pattern = "decl %0";
1395 else
1396 pattern = "subl2 $%n2,%0";
1398 else
1400 if (low[2] == const1_rtx)
1401 pattern = "incl %0";
1402 else
1403 pattern = "addl2 %2,%0";
1405 output_asm_insn (pattern, low);
/* In 2's complement, -n = ~n + 1.  Since we are dealing with
   two 32-bit parts, we complement each and then add one to the
   low part.  We know that the low part can't overflow since
   its value can never be 0.  */
1411 if (sub)
1412 return "sbwc %N2,%0";
1413 return "adwc %2,%0";
1416 /* Add low parts. */
1417 if (rtx_equal_p (operands[0], operands[1]))
1419 if (low[2] == const0_rtx)
1420 /* Should examine operand, punt if not POST_INC. */
1421 pattern = "tstl %0", carry = 0;
1422 else if (low[2] == const1_rtx)
1423 pattern = "incl %0";
1424 else
1425 pattern = "addl2 %2,%0";
1427 else
1429 if (low[2] == const0_rtx)
1430 pattern = "movl %1,%0", carry = 0;
1431 else
1432 pattern = "addl3 %2,%1,%0";
1434 if (pattern)
1435 output_asm_insn (pattern, low);
1436 if (!carry)
1437 /* If CARRY is 0, we don't have any carry value to worry about. */
1438 return get_insn_template (CODE_FOR_addsi3, insn);
1439 /* %0 = C + %1 + %2 */
1440 if (!rtx_equal_p (operands[0], operands[1]))
1441 output_asm_insn ((operands[1] == const0_rtx
1442 ? "clrl %0"
1443 : "movl %1,%0"), operands);
1444 return "adwc %2,%0";
1447 case E_SImode:
1448 if (rtx_equal_p (operands[0], operands[1]))
1450 if (operands[2] == const1_rtx)
1451 return "incl %0";
1452 if (operands[2] == constm1_rtx)
1453 return "decl %0";
1454 if (CONST_INT_P (operands[2])
1455 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1456 return "subl2 $%n2,%0";
1457 if (CONST_INT_P (operands[2])
1458 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1459 && REG_P (operands[1])
1460 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1461 || REGNO (operands[1]) > 11))
1462 return "movab %c2(%1),%0";
1463 if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1464 return "movab %a2[%0],%0";
1465 return "addl2 %2,%0";
1468 if (rtx_equal_p (operands[0], operands[2]))
1470 if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1471 return "movab %a1[%0],%0";
1472 return "addl2 %1,%0";
1475 if (CONST_INT_P (operands[2])
1476 && INTVAL (operands[2]) < 32767
1477 && INTVAL (operands[2]) > -32768
1478 && REG_P (operands[1])
1479 && push_operand (operands[0], SImode))
1480 return "pushab %c2(%1)";
1482 if (CONST_INT_P (operands[2])
1483 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1484 return "subl3 $%n2,%1,%0";
1486 if (CONST_INT_P (operands[2])
1487 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1488 && REG_P (operands[1])
1489 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1490 || REGNO (operands[1]) > 11))
1491 return "movab %c2(%1),%0";
/* Add this if using gcc on a VAX 3xxx:
if (REG_P (operands[1]) && REG_P (operands[2]))
return "movab (%1)[%2],%0";
*/
1498 if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1500 if (push_operand (operands[0], SImode))
1501 return "pushab %a2[%1]";
1502 return "movab %a2[%1],%0";
1505 if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1507 if (push_operand (operands[0], SImode))
1508 return "pushab %a1[%2]";
1509 return "movab %a1[%2],%0";
1512 if (flag_pic && REG_P (operands[0])
1513 && symbolic_operand (operands[2], SImode))
1514 return "movab %a2,%0;addl2 %1,%0";
1516 if (flag_pic
1517 && (symbolic_operand (operands[1], SImode)
|| symbolic_operand (operands[2], SImode)))
1519 debug_rtx (insn);
1521 return "addl3 %1,%2,%0";
1523 case E_HImode:
1524 if (rtx_equal_p (operands[0], operands[1]))
1526 if (operands[2] == const1_rtx)
1527 return "incw %0";
1528 if (operands[2] == constm1_rtx)
1529 return "decw %0";
1530 if (CONST_INT_P (operands[2])
1531 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1532 return "subw2 $%n2,%0";
1533 return "addw2 %2,%0";
1535 if (rtx_equal_p (operands[0], operands[2]))
1536 return "addw2 %1,%0";
1537 if (CONST_INT_P (operands[2])
1538 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1539 return "subw3 $%n2,%1,%0";
1540 return "addw3 %1,%2,%0";
1542 case E_QImode:
1543 if (rtx_equal_p (operands[0], operands[1]))
1545 if (operands[2] == const1_rtx)
1546 return "incb %0";
1547 if (operands[2] == constm1_rtx)
1548 return "decb %0";
1549 if (CONST_INT_P (operands[2])
1550 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1551 return "subb2 $%n2,%0";
1552 return "addb2 %2,%0";
1554 if (rtx_equal_p (operands[0], operands[2]))
1555 return "addb2 %1,%0";
1556 if (CONST_INT_P (operands[2])
1557 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1558 return "subb3 $%n2,%1,%0";
1559 return "addb3 %1,%2,%0";
1561 default:
1562 gcc_unreachable ();
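/* Output the assembler for a DImode integer subtraction: subtract the
   low words, then subtract the high words with borrow (sbwc).  Only
   DImode is handled here.  */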
1566 const char *
1567 vax_output_int_subtract (rtx insn, rtx *operands, machine_mode mode)
1569 switch (mode)
1571 case E_DImode:
1573 rtx low[3];
1574 const char *pattern;
1575 int carry = 1;
1577 if (TARGET_QMATH && 0)
1578 debug_rtx (insn);
1580 split_quadword_operands (insn, MINUS, operands, low, 3);
1582 if (TARGET_QMATH)
1584 if (operands[1] == const0_rtx && low[1] == const0_rtx)
1586 /* Negation is tricky. It's basically complement and increment.
1587 Negate hi, then lo, and subtract the carry back. */
1588 if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
1589 || (MEM_P (operands[0])
1590 && GET_CODE (XEXP (operands[0], 0)) == POST_INC))
1591 fatal_insn ("illegal operand detected", insn);
1592 output_asm_insn ("mnegl %2,%0", operands);
1593 output_asm_insn ("mnegl %2,%0", low);
1594 return "sbwc $0,%0";
1596 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1597 gcc_assert (rtx_equal_p (low[0], low[1]));
1598 if (low[2] == const1_rtx)
1599 output_asm_insn ("decl %0", low);
1600 else
1601 output_asm_insn ("subl2 %2,%0", low);
1602 return "sbwc %2,%0";
1605 /* Subtract low parts. */
1606 if (rtx_equal_p (operands[0], operands[1]))
1608 if (low[2] == const0_rtx)
1609 pattern = 0, carry = 0;
1610 else if (low[2] == constm1_rtx)
1611 pattern = "decl %0";
1612 else
1613 pattern = "subl2 %2,%0";
1615 else
1617 if (low[2] == constm1_rtx)
1618 pattern = "decl %0";
1619 else if (low[2] == const0_rtx)
1620 pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
1621 else
1622 pattern = "subl3 %2,%1,%0";
1624 if (pattern)
1625 output_asm_insn (pattern, low);
1626 if (carry)
1628 if (!rtx_equal_p (operands[0], operands[1]))
1629 return "movl %1,%0;sbwc %2,%0";
1630 return "sbwc %2,%0";
1631 /* %0 = %2 - %1 - C */
1633 return get_insn_template (CODE_FOR_subsi3, insn);
1636 default:
1637 gcc_unreachable ();
1641 /* True if X is an rtx for a constant that is a valid address. */
1643 bool
1644 legitimate_constant_address_p (rtx x)
1646 if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1647 || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1648 return true;
1649 if (GET_CODE (x) != CONST)
1650 return false;
1651 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1652 if (flag_pic
1653 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1654 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1655 return false;
1656 #endif
1657 return true;
1660 /* The other macros defined here are used only in legitimate_address_p (). */
1662 /* Nonzero if X is a hard reg that can be used as an index
1663 or, if not strict, if it is a pseudo reg. */
1664 #define INDEX_REGISTER_P(X, STRICT) \
1665 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))
1667 /* Nonzero if X is a hard reg that can be used as a base reg
1668 or, if not strict, if it is a pseudo reg. */
1669 #define BASE_REGISTER_P(X, STRICT) \
1670 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1672 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1674 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1675 are no SYMBOL_REFs for external symbols present. */
1677 static bool
1678 indirectable_constant_address_p (rtx x, bool indirect)
1680 if (GET_CODE (x) == SYMBOL_REF)
1681 return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;
1683 if (GET_CODE (x) == CONST)
1684 return !flag_pic
1685 || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1686 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));
1688 return CONSTANT_ADDRESS_P (x);
1691 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1693 static bool
1694 indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
1696 return CONSTANT_ADDRESS_P (x);
1699 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1701 /* True if X is an address which can be indirected. External symbols
1702 could be in a sharable image library, so we disallow those. */
1704 static bool
1705 indirectable_address_p (rtx x, bool strict, bool indirect)
1707 if (indirectable_constant_address_p (x, indirect)
1708 || BASE_REGISTER_P (x, strict))
1709 return true;
1710 if (GET_CODE (x) != PLUS
1711 || !BASE_REGISTER_P (XEXP (x, 0), strict)
1712 || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1713 return false;
1714 return indirectable_constant_address_p (XEXP (x, 1), indirect);
1717 /* Return true if x is a valid address not using indexing.
1718 (This much is the easy part.) */
1719 static bool
1720 nonindexed_address_p (rtx x, bool strict)
1722 rtx xfoo0;
1723 if (REG_P (x))
1725 if (! reload_in_progress
1726 || reg_equiv_mem (REGNO (x)) == 0
1727 || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
1728 return true;
1730 if (indirectable_constant_address_p (x, false))
1731 return true;
1732 if (indirectable_address_p (x, strict, false))
1733 return true;
1734 xfoo0 = XEXP (x, 0);
1735 if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
1736 return true;
1737 if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1738 && BASE_REGISTER_P (xfoo0, strict))
1739 return true;
1740 return false;
1743 /* True if PROD is either a reg times size of mode MODE and MODE is less
than or equal to 8 bytes, or just a reg if MODE is one byte.  */
1746 static bool
1747 index_term_p (rtx prod, machine_mode mode, bool strict)
1749 rtx xfoo0, xfoo1;
1751 if (GET_MODE_SIZE (mode) == 1)
1752 return BASE_REGISTER_P (prod, strict);
1754 if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1755 return false;
1757 xfoo0 = XEXP (prod, 0);
1758 xfoo1 = XEXP (prod, 1);
1760 if (CONST_INT_P (xfoo0)
1761 && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1762 && INDEX_REGISTER_P (xfoo1, strict))
1763 return true;
1765 if (CONST_INT_P (xfoo1)
1766 && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1767 && INDEX_REGISTER_P (xfoo0, strict))
1768 return true;
1770 return false;
1773 /* Return true if X is the sum of a register
1774 and a valid index term for mode MODE. */
1775 static bool
1776 reg_plus_index_p (rtx x, machine_mode mode, bool strict)
1778 rtx xfoo0, xfoo1;
1780 if (GET_CODE (x) != PLUS)
1781 return false;
1783 xfoo0 = XEXP (x, 0);
1784 xfoo1 = XEXP (x, 1);
1786 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1787 return true;
1789 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1790 return true;
1792 return false;
1795 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1796 static bool
1797 indexable_address_p (rtx xfoo0, rtx xfoo1, machine_mode mode, bool strict)
1799 if (!CONSTANT_ADDRESS_P (xfoo0))
1800 return false;
1801 if (BASE_REGISTER_P (xfoo1, strict))
1802 return !flag_pic || mode == QImode;
1803 if (flag_pic && symbolic_operand (xfoo0, SImode))
1804 return false;
1805 return reg_plus_index_p (xfoo1, mode, strict);
1808 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1809 that is a valid memory address for an instruction.
1810 The MODE argument is the machine mode for the MEM expression
1811 that wants to use this address. */
1812 bool
1813 vax_legitimate_address_p (machine_mode mode, rtx x, bool strict)
1815 rtx xfoo0, xfoo1;
1817 if (nonindexed_address_p (x, strict))
1818 return true;
1820 if (GET_CODE (x) != PLUS)
1821 return false;
1823 /* Handle <address>[index] represented with index-sum outermost */
1825 xfoo0 = XEXP (x, 0);
1826 xfoo1 = XEXP (x, 1);
1828 if (index_term_p (xfoo0, mode, strict)
1829 && nonindexed_address_p (xfoo1, strict))
1830 return true;
1832 if (index_term_p (xfoo1, mode, strict)
1833 && nonindexed_address_p (xfoo0, strict))
1834 return true;
1836 /* Handle offset(reg)[index] with offset added outermost */
1838 if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1839 || indexable_address_p (xfoo1, xfoo0, mode, strict))
1840 return true;
1842 return false;
1845 /* Return true if x (a legitimate address expression) has an effect that
1846 depends on the machine mode it is used for. On the VAX, the predecrement
1847 and postincrement address depend thus (the amount of decrement or
increment being the length of the operand) and all indexed addresses depend
1849 thus (because the index scale factor is the length of the operand). */
1851 static bool
1852 vax_mode_dependent_address_p (const_rtx x, addr_space_t as ATTRIBUTE_UNUSED)
1854 rtx xfoo0, xfoo1;
1856 /* Auto-increment cases are now dealt with generically in recog.c. */
1857 if (GET_CODE (x) != PLUS)
1858 return false;
1860 xfoo0 = XEXP (x, 0);
1861 xfoo1 = XEXP (x, 1);
1863 if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1864 return false;
1865 if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1866 return false;
1867 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1868 return false;
1869 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
1870 return false;
1872 return true;
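/* If X is a DImode memory operand whose address the double-word
   add/subtract patterns cannot use directly, load the address into a
   temporary register and return a MEM based on it; otherwise return X
   unchanged.  */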
1875 static rtx
1876 fixup_mathdi_operand (rtx x, machine_mode mode)
1878 if (illegal_addsub_di_memory_operand (x, mode))
1880 rtx addr = XEXP (x, 0);
1881 rtx temp = gen_reg_rtx (Pmode);
1882 rtx offset = 0;
1883 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1884 if (GET_CODE (addr) == CONST && flag_pic)
1886 offset = XEXP (XEXP (addr, 0), 1);
1887 addr = XEXP (XEXP (addr, 0), 0);
1889 #endif
1890 emit_move_insn (temp, addr);
1891 if (offset)
1892 temp = gen_rtx_PLUS (Pmode, temp, offset);
1893 x = gen_rtx_MEM (DImode, temp);
1895 return x;
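/* Expand a DImode add or subtract (CODE is PLUS or MINUS) of OPERANDS.
   Without TARGET_QMATH the old double-word patterns are used; otherwise
   the operation is done with an SImode op on the high words alone when
   the low word of the addend is zero, or with the add-with-carry /
   subtract-with-carry sequences.  */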
1898 void
1899 vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
1901 int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
1902 rtx temp;
1904 rtx (*gen_old_insn)(rtx, rtx, rtx);
1905 rtx (*gen_si_insn)(rtx, rtx, rtx);
1906 rtx (*gen_insn)(rtx, rtx, rtx);
1908 if (code == PLUS)
1910 gen_old_insn = gen_adddi3_old;
1911 gen_si_insn = gen_addsi3;
1912 gen_insn = gen_adcdi3;
1914 else if (code == MINUS)
1916 gen_old_insn = gen_subdi3_old;
1917 gen_si_insn = gen_subsi3;
1918 gen_insn = gen_sbcdi3;
1920 else
1921 gcc_unreachable ();
1923 /* If this is addition (thus operands are commutative) and if there is one
addend that duplicates the destination, we want that addend to be the
1925 first addend. */
1926 if (code == PLUS
1927 && rtx_equal_p (operands[0], operands[2])
1928 && !rtx_equal_p (operands[1], operands[2]))
1930 temp = operands[2];
1931 operands[2] = operands[1];
1932 operands[1] = temp;
1935 if (!TARGET_QMATH)
1937 emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
1939 else if (hi_only)
1941 if (!rtx_equal_p (operands[0], operands[1])
1942 && (REG_P (operands[0]) && MEM_P (operands[1])))
1944 emit_move_insn (operands[0], operands[1]);
1945 operands[1] = operands[0];
1948 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1949 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1950 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1952 if (!rtx_equal_p (operands[0], operands[1]))
1953 emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
1954 operand_subword (operands[1], 0, 0, DImode));
1956 emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
1957 operand_subword (operands[1], 1, 0, DImode),
1958 operand_subword (operands[2], 1, 0, DImode)));
1960 else
/* If we are adding a value to itself, that's really a multiply by 2,
   which is just a left shift by 1.  */
1964 if (rtx_equal_p (operands[1], operands[2]))
1966 gcc_assert (code != MINUS);
1967 emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
1968 return;
1971 operands[0] = fixup_mathdi_operand (operands[0], DImode);
/* If an operand is the same as operands[0], use the operands[0] rtx
   because fixup will return an equivalent rtx but not an equal one.  */
1976 if (rtx_equal_p (operands[0], operands[1]))
1977 operands[1] = operands[0];
1978 else
1979 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1981 if (rtx_equal_p (operands[0], operands[2]))
1982 operands[2] = operands[0];
1983 else
1984 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1986 /* If we are subtracting not from ourselves [d = a - b], and because the
1987 carry ops are two operand only, we would need to do a move prior to
1988 the subtract. And if d == b, we would need a temp otherwise
1989 [d = a, d -= d] and we end up with 0. Instead we rewrite d = a - b
1990 into d = -b, d += a. Since -b can never overflow, even if b == d,
1991 no temp is needed.
1993 If we are doing addition, since the carry ops are two operand, if
1994 we aren't adding to ourselves, move the first addend to the
1995 destination first. */
1997 gcc_assert (operands[1] != const0_rtx || code == MINUS);
1998 if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
2000 if (code == MINUS && CONSTANT_P (operands[1]))
2002 temp = gen_reg_rtx (DImode);
2003 emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
2004 code = PLUS;
2005 gen_insn = gen_adcdi3;
2006 operands[2] = operands[1];
2007 operands[1] = operands[0];
2009 else
2010 emit_move_insn (operands[0], operands[1]);
2013 /* Subtracting a constant will have been rewritten to an addition of the
2014 negative of that constant before we get here. */
2015 gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
2016 emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
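/* Return true if LO and HI can be treated together as one double-word
   operand: consecutive registers, a short-literal constant whose high
   word is zero, or two memory references exactly MODE bytes apart.  */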
2020 bool
2021 adjacent_operands_p (rtx lo, rtx hi, machine_mode mode)
2023 HOST_WIDE_INT lo_offset;
2024 HOST_WIDE_INT hi_offset;
2026 if (GET_CODE (lo) != GET_CODE (hi))
2027 return false;
2029 if (REG_P (lo))
2030 return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
2031 if (CONST_INT_P (lo))
2032 return INTVAL (hi) == 0 && UINTVAL (lo) < 64;
2033 if (CONST_INT_P (lo))
2034 return mode != SImode;
2036 if (!MEM_P (lo))
2037 return false;
2039 if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
2040 return false;
2042 lo = XEXP (lo, 0);
2043 hi = XEXP (hi, 0);
2045 if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
2046 return rtx_equal_p (lo, hi);
2048 switch (GET_CODE (lo))
2050 case REG:
2051 case SYMBOL_REF:
2052 lo_offset = 0;
2053 break;
2054 case CONST:
2055 lo = XEXP (lo, 0);
2056 /* FALLTHROUGH */
2057 case PLUS:
2058 if (!CONST_INT_P (XEXP (lo, 1)))
2059 return false;
2060 lo_offset = INTVAL (XEXP (lo, 1));
2061 lo = XEXP (lo, 0);
2062 break;
2063 default:
2064 return false;
2067 switch (GET_CODE (hi))
2069 case REG:
2070 case SYMBOL_REF:
2071 hi_offset = 0;
2072 break;
2073 case CONST:
2074 hi = XEXP (hi, 0);
2075 /* FALLTHROUGH */
2076 case PLUS:
2077 if (!CONST_INT_P (XEXP (hi, 1)))
2078 return false;
2079 hi_offset = INTVAL (XEXP (hi, 1));
2080 hi = XEXP (hi, 0);
2081 break;
2082 default:
2083 return false;
2086 if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
2087 return false;
2089 return rtx_equal_p (lo, hi)
2090 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
2093 /* Output assembler code for a block containing the constant parts
2094 of a trampoline, leaving space for the variable parts. */
2096 /* On the VAX, the trampoline contains an entry mask and two instructions:
2097 .word NN
movl $STATIC,r0 (store the function's static chain)
2099 jmp *$FUNCTION (jump to function code at address FUNCTION) */
2101 static void
2102 vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
2104 assemble_aligned_integer (2, const0_rtx);
2105 assemble_aligned_integer (2, GEN_INT (0x8fd0));
2106 assemble_aligned_integer (4, const0_rtx);
2107 assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
2108 assemble_aligned_integer (2, GEN_INT (0x9f17));
2109 assemble_aligned_integer (4, const0_rtx);
2112 /* We copy the register-mask from the function's pure code
2113 to the start of the trampoline. */
2115 static void
2116 vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
2118 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2119 rtx mem;
2121 emit_block_move (m_tramp, assemble_trampoline_template (),
2122 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2124 mem = adjust_address (m_tramp, HImode, 0);
2125 emit_move_insn (mem, gen_const_mem (HImode, fnaddr));
2127 mem = adjust_address (m_tramp, SImode, 4);
2128 emit_move_insn (mem, cxt);
2129 mem = adjust_address (m_tramp, SImode, 11);
2130 emit_move_insn (mem, plus_constant (Pmode, fnaddr, 2));
2131 emit_insn (gen_sync_istream ());
2134 /* Value is the number of bytes of arguments automatically
2135 popped when returning from a subroutine call.
2136 FUNDECL is the declaration node of the function (as a tree),
2137 FUNTYPE is the data type of the function (as a tree),
2138 or for a library call it is an identifier node for the subroutine name.
2139 SIZE is the number of bytes of arguments passed on the stack.
2141 On the VAX, the RET insn pops a maximum of 255 args for any function. */
2143 static poly_int64
2144 vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2145 tree funtype ATTRIBUTE_UNUSED, poly_int64 size)
2147 return size > 255 * 4 ? 0 : (HOST_WIDE_INT) size;
2150 /* Define where to put the arguments to a function.
2151 Value is zero to push the argument on the stack,
2152 or a hard register in which to store the argument.
2154 MODE is the argument's machine mode.
2155 TYPE is the data type of the argument (as a tree).
2156 This is null for libcalls where that information may
2157 not be available.
2158 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2159 the preceding args and about the function being called.
2160 NAMED is nonzero if this argument is a named parameter
2161 (otherwise it is an extra parameter matching an ellipsis). */
2163 /* On the VAX all args are pushed. */
2165 static rtx
2166 vax_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
2167 machine_mode mode ATTRIBUTE_UNUSED,
2168 const_tree type ATTRIBUTE_UNUSED,
2169 bool named ATTRIBUTE_UNUSED)
2171 return NULL_RTX;
2174 /* Update the data in CUM to advance over an argument of mode MODE and
2175 data type TYPE. (TYPE is null for libcalls where that information
2176 may not be available.) */
2178 static void
2179 vax_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
2180 const_tree type, bool named ATTRIBUTE_UNUSED)
2182 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2184 *cum += (mode != BLKmode
2185 ? (GET_MODE_SIZE (mode) + 3) & ~3
2186 : (int_size_in_bytes (type) + 3) & ~3);
2189 static HOST_WIDE_INT
2190 vax_starting_frame_offset (void)
2192 /* On ELF targets, reserve the top of the stack for exception handler
2193 stackadj value. */
2194 return TARGET_ELF ? -4 : 0;