1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
31 #include "cfgcleanup.h"
36 #include "insn-config.h"
37 #include "conditions.h"
39 #include "insn-attr.h"
48 #include "insn-codes.h"
51 #include "diagnostic-core.h"
54 #include "tm-constrs.h"
59 /* This file should be included last. */
60 #include "target-def.h"
/* Forward declarations for the static target-hook implementations defined
   later in this file.  NOTE(review): this extract is line-wrapped
   mid-declaration and carries the original file's line numbers embedded in
   the text; some trailing parameters (e.g. of vax_output_mi_thunk and
   vax_function_arg) were lost in extraction.  Text preserved verbatim.  */
62 static void vax_option_override (void);
63 static bool vax_legitimate_address_p (machine_mode
, rtx
, bool);
64 static void vax_file_start (void);
65 static void vax_init_libfuncs (void);
66 static void vax_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
,
68 static int vax_address_cost_1 (rtx
);
69 static int vax_address_cost (rtx
, machine_mode
, addr_space_t
, bool);
70 static bool vax_rtx_costs (rtx
, int, int, int, int *, bool);
71 static rtx
vax_function_arg (cumulative_args_t
, machine_mode
,
73 static void vax_function_arg_advance (cumulative_args_t
, machine_mode
,
75 static rtx
vax_struct_value_rtx (tree
, int);
76 static rtx
vax_builtin_setjmp_frame_value (void);
77 static void vax_asm_trampoline_template (FILE *);
78 static void vax_trampoline_init (rtx
, tree
, rtx
);
79 static int vax_return_pops_args (tree
, tree
, int);
80 static bool vax_mode_dependent_address_p (const_rtx
, addr_space_t
);
/* TARGET_* hook macro table: each #undef/#define pair installs one of the
   static functions declared above (or a generic default hook) into the
   TARGET_INITIALIZER expansion used to build `targetm' below.  */
82 /* Initialize the GCC target structure. */
83 #undef TARGET_ASM_ALIGNED_HI_OP
84 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
86 #undef TARGET_ASM_FILE_START
87 #define TARGET_ASM_FILE_START vax_file_start
88 #undef TARGET_ASM_FILE_START_APP_OFF
89 #define TARGET_ASM_FILE_START_APP_OFF true
91 #undef TARGET_INIT_LIBFUNCS
92 #define TARGET_INIT_LIBFUNCS vax_init_libfuncs
94 #undef TARGET_ASM_OUTPUT_MI_THUNK
95 #define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
96 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
97 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
99 #undef TARGET_RTX_COSTS
100 #define TARGET_RTX_COSTS vax_rtx_costs
101 #undef TARGET_ADDRESS_COST
102 #define TARGET_ADDRESS_COST vax_address_cost
104 #undef TARGET_PROMOTE_PROTOTYPES
105 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
107 #undef TARGET_FUNCTION_ARG
108 #define TARGET_FUNCTION_ARG vax_function_arg
109 #undef TARGET_FUNCTION_ARG_ADVANCE
110 #define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance
112 #undef TARGET_STRUCT_VALUE_RTX
113 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
115 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
116 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
118 #undef TARGET_LEGITIMATE_ADDRESS_P
119 #define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
120 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
121 #define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p
123 #undef TARGET_FRAME_POINTER_REQUIRED
124 #define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true
126 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
127 #define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
128 #undef TARGET_TRAMPOLINE_INIT
129 #define TARGET_TRAMPOLINE_INIT vax_trampoline_init
130 #undef TARGET_RETURN_POPS_ARGS
131 #define TARGET_RETURN_POPS_ARGS vax_return_pops_args
133 #undef TARGET_OPTION_OVERRIDE
134 #define TARGET_OPTION_OVERRIDE vax_option_override
/* The global target hook vector; TARGET_INITIALIZER expands using the
   TARGET_* macros defined in this file.  */
136 struct gcc_target targetm
= TARGET_INITIALIZER
;
/* TARGET_OPTION_OVERRIDE hook: switch DFmode to the VAX G floating-point
   format and run any subtarget option override.  NOTE(review): original
   lines between the visible numbered lines are missing (return type,
   braces, and possibly a guard condition around the format assignment) --
   text preserved verbatim, do not assume the assignment is unconditional.  */
138 /* Set global variables as needed for the options enabled. */
141 vax_option_override (void)
143 /* We're VAX floating point, not IEEE floating point. */
145 REAL_MODE_FORMAT (DFmode
) = &vax_g_format
;
147 #ifdef SUBTARGET_OVERRIDE_OPTIONS
148 SUBTARGET_OVERRIDE_OPTIONS
;
/* Attach a REG_CFA_OFFSET note to INSN recording that SRC was stored at
   OFFSET bytes from the frame pointer: builds a MEM at fp+OFFSET, wraps it
   in a SET from SRC, and pushes it as a note for CFI generation.
   NOTE(review): the declaration of the local `x' and the surrounding braces
   fall in missing original lines; text preserved verbatim.  */
153 vax_add_reg_cfa_offset (rtx insn
, int offset
, rtx src
)
157 x
= plus_constant (Pmode
, frame_pointer_rtx
, offset
);
158 x
= gen_rtx_MEM (SImode
, x
);
159 x
= gen_rtx_SET (x
, src
);
160 add_reg_note (insn
, REG_CFA_OFFSET
, x
);
/* Prologue expander: computes the procedure entry mask from the registers
   that are live and not call-used, emits the entry-mask insn with CFA notes
   describing where ap/fp/pc and the masked registers are saved, then
   adjusts SP for the local frame and emits a blockage.  NOTE(review):
   several original lines (declarations of `regno', `mask', `offset',
   `size', `insn', loop bodies and braces) are missing from this extract;
   text preserved verbatim.  */
163 /* Generate the assembly code for function entry. FILE is a stdio
164 stream to output the code to. SIZE is an int: how many units of
165 temporary storage to allocate.
167 Refer to the array `regs_ever_live' to determine which registers to
168 save; `regs_ever_live[I]' is nonzero if register number I is ever
169 used in the function. This function is responsible for knowing
170 which registers should not be saved even if used. */
173 vax_expand_prologue (void)
180 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
181 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
184 insn
= emit_insn (gen_procedure_entry_mask (GEN_INT (mask
)));
185 RTX_FRAME_RELATED_P (insn
) = 1;
187 /* The layout of the CALLG/S stack frame is follows:
192 ... Registers saved as specified by MASK
202 The rest of the prologue will adjust the SP for the local frame. */
204 vax_add_reg_cfa_offset (insn
, 4, arg_pointer_rtx
);
205 vax_add_reg_cfa_offset (insn
, 8, frame_pointer_rtx
);
206 vax_add_reg_cfa_offset (insn
, 12, pc_rtx
);
209 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
210 if (mask
& (1 << regno
))
212 vax_add_reg_cfa_offset (insn
, offset
, gen_rtx_REG (SImode
, regno
));
216 /* Because add_reg_note pushes the notes, adding this last means that
217 it will be processed first. This is required to allow the other
218 notes be interpreted properly. */
219 add_reg_note (insn
, REG_CFA_DEF_CFA
,
220 plus_constant (Pmode
, frame_pointer_rtx
, offset
));
222 /* Allocate the local stack frame. */
223 size
= get_frame_size ();
224 size
-= STARTING_FRAME_OFFSET
;
225 emit_insn (gen_addsi3 (stack_pointer_rtx
,
226 stack_pointer_rtx
, GEN_INT (-size
)));
228 /* Do not allow instructions referencing local stack memory to be
229 scheduled before the frame is allocated. This is more pedantic
230 than anything else, given that VAX does not currently have a
231 scheduling description. */
232 emit_insn (gen_blockage ());
/* TARGET_ASM_FILE_START hook: default file start, plus (when emitting
   stabs debug info) a dummy label telling gas whether doubles are
   D_float or G_float.  */
235 /* When debugging with stabs, we want to output an extra dummy label
236 so that gas can distinguish between D_float and G_float prior to
237 processing the .stabs directive identifying type double. */
239 vax_file_start (void)
241 default_file_start ();
243 if (write_symbols
== DBX_DEBUG
)
244 fprintf (asm_out_file
, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR
);
/* TARGET_INIT_LIBFUNCS hook: when using the BSD divmod routines, point
   the unsigned SImode divide/modulo optabs at the C library entry points,
   using the double-underscore names under ELF to stay out of the user's
   namespace.  */
247 /* We can use the BSD C library routines for the libgcc calls that are
248 still generated, since that's what they boil down to anyways. When
249 ELF, avoid the user's namespace. */
252 vax_init_libfuncs (void)
254 if (TARGET_BSD_DIVMOD
)
256 set_optab_libfunc (udiv_optab
, SImode
, TARGET_ELF
? "*__udiv" : "*udiv");
257 set_optab_libfunc (umod_optab
, SImode
, TARGET_ELF
? "*__urem" : "*urem");
/* Split N DImode OPERANDS of INSN into SImode high parts (rewritten in
   place in OPERANDS) and low parts (stored in LOW): autoincrement/
   autodecrement MEMs keep the same address for both halves; when
   optimizing for size a dying base register may be reused with POST_INC
   for the low half; everything else goes through operand_subword.
   NOTE(review): the comment on the first line below belongs to a
   different (missing) function in the original file; many interior lines
   (the declaration of `i'/`low', braces, `n` parameter) are absent from
   this extract.  Text preserved verbatim.  */
261 /* This is like nonimmediate_operand with a restriction on the type of MEM. */
264 split_quadword_operands (rtx insn
, enum rtx_code code
, rtx
* operands
,
269 for (i
= 0; i
< n
; i
++)
272 for (i
= 0; i
< n
; i
++)
274 if (MEM_P (operands
[i
])
275 && (GET_CODE (XEXP (operands
[i
], 0)) == PRE_DEC
276 || GET_CODE (XEXP (operands
[i
], 0)) == POST_INC
))
278 rtx addr
= XEXP (operands
[i
], 0);
279 operands
[i
] = low
[i
] = gen_rtx_MEM (SImode
, addr
);
281 else if (optimize_size
&& MEM_P (operands
[i
])
282 && REG_P (XEXP (operands
[i
], 0))
283 && (code
!= MINUS
|| operands
[1] != const0_rtx
)
284 && find_regno_note (insn
, REG_DEAD
,
285 REGNO (XEXP (operands
[i
], 0))))
287 low
[i
] = gen_rtx_MEM (SImode
,
288 gen_rtx_POST_INC (Pmode
,
289 XEXP (operands
[i
], 0)));
290 operands
[i
] = gen_rtx_MEM (SImode
, XEXP (operands
[i
], 0));
294 low
[i
] = operand_subword (operands
[i
], 0, 0, DImode
);
295 operands
[i
] = operand_subword (operands
[i
], 1, 0, DImode
);
/* Print a memory address ADDR to FILE in VAX assembler syntax, decomposing
   PLUS addresses into an offset (constant or MEM), a base register
   (printed as "(reg)") and an index register (printed as "[reg]"), and
   diagnosing address forms that are invalid under PIC.  NOTE(review):
   large parts of the original switch (case labels, braces, the
   declaration of `offset', several else-branches) fall in missing lines;
   text preserved verbatim -- do not infer control flow from adjacency.  */
301 print_operand_address (FILE * file
, rtx addr
)
304 rtx reg1
, breg
, ireg
;
308 switch (GET_CODE (addr
))
312 addr
= XEXP (addr
, 0);
316 fprintf (file
, "(%s)", reg_names
[REGNO (addr
)]);
320 fprintf (file
, "-(%s)", reg_names
[REGNO (XEXP (addr
, 0))]);
324 fprintf (file
, "(%s)+", reg_names
[REGNO (XEXP (addr
, 0))]);
328 /* There can be either two or three things added here. One must be a
329 REG. One can be either a REG or a MULT of a REG and an appropriate
330 constant, and the third can only be a constant or a MEM.
332 We get these two or three things and put the constant or MEM in
333 OFFSET, the MULT or REG in IREG, and the REG in BREG. If we have
334 a register and can't tell yet if it is a base or index register,
337 reg1
= 0; ireg
= 0; breg
= 0; offset
= 0;
339 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0))
340 || MEM_P (XEXP (addr
, 0)))
342 offset
= XEXP (addr
, 0);
343 addr
= XEXP (addr
, 1);
345 else if (CONSTANT_ADDRESS_P (XEXP (addr
, 1))
346 || MEM_P (XEXP (addr
, 1)))
348 offset
= XEXP (addr
, 1);
349 addr
= XEXP (addr
, 0);
351 else if (GET_CODE (XEXP (addr
, 1)) == MULT
)
353 ireg
= XEXP (addr
, 1);
354 addr
= XEXP (addr
, 0);
356 else if (GET_CODE (XEXP (addr
, 0)) == MULT
)
358 ireg
= XEXP (addr
, 0);
359 addr
= XEXP (addr
, 1);
361 else if (REG_P (XEXP (addr
, 1)))
363 reg1
= XEXP (addr
, 1);
364 addr
= XEXP (addr
, 0);
366 else if (REG_P (XEXP (addr
, 0)))
368 reg1
= XEXP (addr
, 0);
369 addr
= XEXP (addr
, 1);
381 else if (GET_CODE (addr
) == MULT
)
385 gcc_assert (GET_CODE (addr
) == PLUS
);
386 if (CONSTANT_ADDRESS_P (XEXP (addr
, 0))
387 || MEM_P (XEXP (addr
, 0)))
391 if (CONST_INT_P (offset
))
392 offset
= plus_constant (Pmode
, XEXP (addr
, 0),
396 gcc_assert (CONST_INT_P (XEXP (addr
, 0)));
397 offset
= plus_constant (Pmode
, offset
,
398 INTVAL (XEXP (addr
, 0)));
401 offset
= XEXP (addr
, 0);
403 else if (REG_P (XEXP (addr
, 0)))
406 ireg
= reg1
, breg
= XEXP (addr
, 0), reg1
= 0;
408 reg1
= XEXP (addr
, 0);
412 gcc_assert (GET_CODE (XEXP (addr
, 0)) == MULT
);
414 ireg
= XEXP (addr
, 0);
417 if (CONSTANT_ADDRESS_P (XEXP (addr
, 1))
418 || MEM_P (XEXP (addr
, 1)))
422 if (CONST_INT_P (offset
))
423 offset
= plus_constant (Pmode
, XEXP (addr
, 1),
427 gcc_assert (CONST_INT_P (XEXP (addr
, 1)));
428 offset
= plus_constant (Pmode
, offset
,
429 INTVAL (XEXP (addr
, 1)));
432 offset
= XEXP (addr
, 1);
434 else if (REG_P (XEXP (addr
, 1)))
437 ireg
= reg1
, breg
= XEXP (addr
, 1), reg1
= 0;
439 reg1
= XEXP (addr
, 1);
443 gcc_assert (GET_CODE (XEXP (addr
, 1)) == MULT
);
445 ireg
= XEXP (addr
, 1);
449 /* If REG1 is nonzero, figure out if it is a base or index register. */
453 || (flag_pic
&& GET_CODE (addr
) == SYMBOL_REF
)
456 || (flag_pic
&& symbolic_operand (offset
, SImode
)))))
467 if (flag_pic
&& symbolic_operand (offset
, SImode
))
472 output_operand_lossage ("symbol used with both base and indexed registers");
475 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
476 if (flag_pic
> 1 && GET_CODE (offset
) == CONST
477 && GET_CODE (XEXP (XEXP (offset
, 0), 0)) == SYMBOL_REF
478 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset
, 0), 0)))
481 output_operand_lossage ("symbol with offset used in PIC mode");
485 /* symbol(reg) isn't PIC, but symbol[reg] is. */
494 output_address (offset
);
498 fprintf (file
, "(%s)", reg_names
[REGNO (breg
)]);
502 if (GET_CODE (ireg
) == MULT
)
503 ireg
= XEXP (ireg
, 0);
504 gcc_assert (REG_P (ireg
));
505 fprintf (file
, "[%s]", reg_names
[REGNO (ireg
)]);
510 output_addr_const (file
, addr
);
/* Print operand X to FILE using the one-letter modifier CODE: condition
   names ('c'/'C'), various constant transformations ('D','P','N','R','H',
   'h','B','b','M','x'), registers, memory, and float constants in VAX
   literal syntax; symbolic immediates are rejected under PIC level > 1.
   NOTE(review): several original lines (leading `if (code == '#')`-style
   tests, `dstr' declarations, braces) are missing from this extract;
   text preserved verbatim.  */
515 print_operand (FILE *file
, rtx x
, int code
)
518 fputc (ASM_DOUBLE_CHAR
, file
);
519 else if (code
== '|')
520 fputs (REGISTER_PREFIX
, file
);
521 else if (code
== 'c')
522 fputs (cond_name (x
), file
);
523 else if (code
== 'C')
524 fputs (rev_cond_name (x
), file
);
525 else if (code
== 'D' && CONST_INT_P (x
) && INTVAL (x
) < 0)
526 fprintf (file
, "$" NEG_HWI_PRINT_HEX16
, INTVAL (x
));
527 else if (code
== 'P' && CONST_INT_P (x
))
528 fprintf (file
, "$" HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) + 1);
529 else if (code
== 'N' && CONST_INT_P (x
))
530 fprintf (file
, "$" HOST_WIDE_INT_PRINT_DEC
, ~ INTVAL (x
));
531 /* rotl instruction cannot deal with negative arguments. */
532 else if (code
== 'R' && CONST_INT_P (x
))
533 fprintf (file
, "$" HOST_WIDE_INT_PRINT_DEC
, 32 - INTVAL (x
));
534 else if (code
== 'H' && CONST_INT_P (x
))
535 fprintf (file
, "$%d", (int) (0xffff & ~ INTVAL (x
)));
536 else if (code
== 'h' && CONST_INT_P (x
))
537 fprintf (file
, "$%d", (short) - INTVAL (x
));
538 else if (code
== 'B' && CONST_INT_P (x
))
539 fprintf (file
, "$%d", (int) (0xff & ~ INTVAL (x
)));
540 else if (code
== 'b' && CONST_INT_P (x
))
541 fprintf (file
, "$%d", (int) (0xff & - INTVAL (x
)));
542 else if (code
== 'M' && CONST_INT_P (x
))
543 fprintf (file
, "$%d", ~((1 << INTVAL (x
)) - 1));
544 else if (code
== 'x' && CONST_INT_P (x
))
545 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, INTVAL (x
));
547 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
549 output_address (XEXP (x
, 0));
550 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
553 real_to_decimal (dstr
, CONST_DOUBLE_REAL_VALUE (x
),
554 sizeof (dstr
), 0, 1);
555 fprintf (file
, "$0f%s", dstr
);
557 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
560 real_to_decimal (dstr
, CONST_DOUBLE_REAL_VALUE (x
),
561 sizeof (dstr
), 0, 1);
562 fprintf (file
, "$0%c%s", ASM_DOUBLE_CHAR
, dstr
);
566 if (flag_pic
> 1 && symbolic_operand (x
, SImode
))
569 output_operand_lossage ("symbol used as immediate operand");
572 output_addr_const (file
, x
);
/* NOTE(review): orphaned fragment -- this switch belongs to a function
   (presumably cond_name, judging by the callers above) whose definition
   line is missing from this extract.  Preserved verbatim.  */
579 switch (GET_CODE (op
))
/* Return the assembler name for the reversed condition of comparison OP.
   NOTE(review): return type, braces and all case labels fall in missing
   original lines; preserved verbatim.  */
608 rev_cond_name (rtx op
)
610 switch (GET_CODE (op
))
/* Predicate: can CONST_DOUBLE C be used as a VAX short floating-point
   literal?  Accepts the tiny constants and values (or reciprocals) equal
   to small integers produced in the loop.  NOTE(review): the declarations
   of `mode', `i', `x', `ok', the loop body computing `x', and the return
   statements fall in missing original lines; preserved verbatim.  */
639 vax_float_literal (rtx c
)
642 REAL_VALUE_TYPE r
, s
;
645 if (GET_CODE (c
) != CONST_DOUBLE
)
650 if (c
== const_tiny_rtx
[(int) mode
][0]
651 || c
== const_tiny_rtx
[(int) mode
][1]
652 || c
== const_tiny_rtx
[(int) mode
][2])
655 REAL_VALUE_FROM_CONST_DOUBLE (r
, c
);
657 for (i
= 0; i
< 7; i
++)
661 real_from_integer (&s
, mode
, x
, SIGNED
);
663 if (REAL_VALUES_EQUAL (r
, s
))
665 ok
= exact_real_inverse (mode
, &s
);
667 if (REAL_VALUES_EQUAL (r
, s
))
/* Estimate the cycle cost of address ADDR relative to a plain register
   operand, accumulating separate charges for register use, indexing,
   indirection, offsets and predecrement; indexing combined with
   register+offset is not double-charged.  NOTE(review): several case
   labels of the switch and the accumulation of plus_op0/plus_op1 fall in
   missing original lines; preserved verbatim.  */
674 /* Return the cost in cycles of a memory address, relative to register
677 Each of the following adds the indicated number of cycles:
681 1 - indexing and/or offset(register)
686 vax_address_cost_1 (rtx addr
)
688 int reg
= 0, indexed
= 0, indir
= 0, offset
= 0, predec
= 0;
689 rtx plus_op0
= 0, plus_op1
= 0;
691 switch (GET_CODE (addr
))
701 indexed
= 1; /* 2 on VAX 2 */
704 /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
706 offset
= (unsigned HOST_WIDE_INT
)(INTVAL(addr
)+128) > 256;
710 offset
= 1; /* 2 on VAX 2 */
712 case LABEL_REF
: /* this is probably a byte offset from the pc */
718 plus_op1
= XEXP (addr
, 0);
720 plus_op0
= XEXP (addr
, 0);
721 addr
= XEXP (addr
, 1);
724 indir
= 2; /* 3 on VAX 2 */
725 addr
= XEXP (addr
, 0);
731 /* Up to 3 things can be added in an address. They are stored in
732 plus_op0, plus_op1, and addr. */
746 /* Indexing and register+offset can both be used (except on a VAX 2)
747 without increasing execution time over either one alone. */
748 if (reg
&& indexed
&& offset
)
749 return reg
+ indir
+ offset
+ predec
;
750 return reg
+ indexed
+ indir
+ offset
+ predec
;
/* TARGET_ADDRESS_COST hook: 1 for a bare register, otherwise 1 plus the
   detailed estimate from vax_address_cost_1; MODE, AS and SPEED are
   ignored.  */
754 vax_address_cost (rtx x
, machine_mode mode ATTRIBUTE_UNUSED
,
755 addr_space_t as ATTRIBUTE_UNUSED
,
756 bool speed ATTRIBUTE_UNUSED
)
758 return (1 + (REG_P (x
) ? 0 : vax_address_cost_1 (x
)));
/* TARGET_RTX_COSTS hook, tuned for the CVAX (VAX 3): sets *TOTAL for the
   outer code of X, then walks X's 'e' operands adding charges for
   non-short constants, non-register operands and costly addresses.
   NOTE(review): the switch's case labels, most braces and many branches
   fall in missing original lines; preserved verbatim -- adjacent numbered
   lines are often from different cases.  */
761 /* Cost of an expression on a VAX. This version has costs tuned for the
762 CVAX chip (found in the VAX 3 series) with comments for variations on
765 FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
766 and FLOAT_TRUNCATE. We need a -mcpu option to allow provision of
767 costs on a per cpu basis. */
770 vax_rtx_costs (rtx x
, int code
, int outer_code
, int opno ATTRIBUTE_UNUSED
,
771 int *total
, bool speed ATTRIBUTE_UNUSED
)
773 machine_mode mode
= GET_MODE (x
);
774 int i
= 0; /* may be modified in switch */
775 const char *fmt
= GET_RTX_FORMAT (code
); /* may be modified in switch */
779 /* On a VAX, constants from 0..63 are cheap because they can use the
780 1 byte literal constant format. Compare to -1 should be made cheap
781 so that decrement-and-branch insns can be formed more easily (if
782 the value -1 is copied to a register some decrement-and-branch
783 patterns will not match). */
790 if (outer_code
== AND
)
792 *total
= ((unsigned HOST_WIDE_INT
) ~INTVAL (x
) <= 077) ? 1 : 2;
795 if ((unsigned HOST_WIDE_INT
) INTVAL (x
) <= 077
796 || (outer_code
== COMPARE
798 || ((outer_code
== PLUS
|| outer_code
== MINUS
)
799 && (unsigned HOST_WIDE_INT
) -INTVAL (x
) <= 077))
813 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
)
814 *total
= vax_float_literal (x
) ? 5 : 8;
816 *total
= ((CONST_DOUBLE_HIGH (x
) == 0
817 && (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (x
) < 64)
818 || (outer_code
== PLUS
819 && CONST_DOUBLE_HIGH (x
) == -1
820 && (unsigned HOST_WIDE_INT
)-CONST_DOUBLE_LOW (x
) < 64))
826 return true; /* Implies register operand. */
830 return true; /* Implies register operand. */
836 *total
= 16; /* 4 on VAX 9000 */
839 *total
= 9; /* 4 on VAX 9000, 12 on VAX 2 */
842 *total
= 16; /* 6 on VAX 9000, 28 on VAX 2 */
847 *total
= 10; /* 3-4 on VAX 9000, 20-28 on VAX 2 */
850 *total
= MAX_COST
; /* Mode is not supported. */
858 *total
= MAX_COST
; /* Mode is not supported. */
866 *total
= 30; /* Highly variable. */
867 else if (mode
== DFmode
)
868 /* divide takes 28 cycles if the result is not zero, 13 otherwise */
871 *total
= 11; /* 25 on VAX 2 */
881 *total
= MAX_COST
; /* Mode is not supported. */
888 *total
= (6 /* 4 on VAX 9000 */
889 + (mode
== DFmode
) + (GET_MODE (XEXP (x
, 0)) != SImode
));
893 *total
= 7; /* 17 on VAX 2 */
902 *total
= 10; /* 6 on VAX 9000 */
907 *total
= 6; /* 5 on VAX 2, 4 on VAX 9000 */
908 if (CONST_INT_P (XEXP (x
, 1)))
909 fmt
= "e"; /* all constant rotate counts are short */
914 *total
= (mode
== DFmode
) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
915 /* Small integer operands can use subl2 and addl2. */
916 if ((CONST_INT_P (XEXP (x
, 1)))
917 && (unsigned HOST_WIDE_INT
)(INTVAL (XEXP (x
, 1)) + 63) < 127)
927 /* AND is special because the first operand is complemented. */
929 if (CONST_INT_P (XEXP (x
, 0)))
931 if ((unsigned HOST_WIDE_INT
)~INTVAL (XEXP (x
, 0)) > 63)
941 else if (mode
== SFmode
)
943 else if (mode
== DImode
)
959 if (mode
== DImode
|| mode
== DFmode
)
960 *total
= 5; /* 7 on VAX 2 */
962 *total
= 3; /* 4 on VAX 2 */
964 if (!REG_P (x
) && GET_CODE (x
) != POST_INC
)
965 *total
+= vax_address_cost_1 (x
);
971 *total
= 3; /* FIXME: Costs need to be checked */
978 /* Now look inside the expression. Operands which are not registers or
979 short constants add to the cost.
981 FMT and I may have been adjusted in the switch above for instructions
982 which require special handling. */
984 while (*fmt
++ == 'e')
986 rtx op
= XEXP (x
, i
);
989 code
= GET_CODE (op
);
991 /* A NOT is likely to be found as the first operand of an AND
992 (in which case the relevant cost is of the operand inside
993 the not) and not likely to be found anywhere else. */
995 op
= XEXP (op
, 0), code
= GET_CODE (op
);
1000 if ((unsigned HOST_WIDE_INT
)INTVAL (op
) > 63
1001 && GET_MODE (x
) != QImode
)
1002 *total
+= 1; /* 2 on VAX 2 */
1007 *total
+= 1; /* 2 on VAX 2 */
1010 if (GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
)
1012 /* Registers are faster than floating point constants -- even
1013 those constants which can be encoded in a single byte. */
1014 if (vax_float_literal (op
))
1017 *total
+= (GET_MODE (x
) == DFmode
) ? 3 : 2;
1021 if (CONST_DOUBLE_HIGH (op
) != 0
1022 || (unsigned HOST_WIDE_INT
)CONST_DOUBLE_LOW (op
) > 63)
1027 *total
+= 1; /* 2 on VAX 2 */
1028 if (!REG_P (XEXP (op
, 0)))
1029 *total
+= vax_address_cost_1 (XEXP (op
, 0));
/* TARGET_ASM_OUTPUT_MI_THUNK hook: emit a C++ thunk that adds DELTA to
   the first (this) argument at 4(ap) and jumps to FUNCTION just past its
   2-byte entry mask, using a conservative register-save mask of its own.
   NOTE(review): the `tree function' parameter line is missing from this
   extract; preserved verbatim.  */
1042 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
1043 Used for C++ multiple inheritance.
1044 .mask ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11> #conservative entry mask
1045 addl2 $DELTA, 4(ap) #adjust first argument
1046 jmp FUNCTION+2 #jump beyond FUNCTION's entry mask
1050 vax_output_mi_thunk (FILE * file
,
1051 tree thunk ATTRIBUTE_UNUSED
,
1052 HOST_WIDE_INT delta
,
1053 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED
,
1056 fprintf (file
, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC
, delta
);
1057 asm_fprintf (file
, ",4(%Rap)\n");
1058 fprintf (file
, "\tjmp ");
1059 assemble_name (file
, XSTR (XEXP (DECL_RTL (function
), 0), 0));
1060 fprintf (file
, "+2\n");
/* TARGET_STRUCT_VALUE_RTX hook: aggregates are returned via the register
   VAX_STRUCT_VALUE_REGNUM regardless of function type or direction.  */
1064 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED
,
1065 int incoming ATTRIBUTE_UNUSED
)
1067 return gen_rtx_REG (Pmode
, VAX_STRUCT_VALUE_REGNUM
);
/* TARGET_BUILTIN_SETJMP_FRAME_VALUE hook: __builtin_setjmp saves the hard
   frame pointer, not the virtual one.  */
1071 vax_builtin_setjmp_frame_value (void)
1073 return hard_frame_pointer_rtx
;
/* NOTICE_UPDATE_CC worker: track which values the condition codes
   describe after EXP.  SETs (and the first SET of a PARALLEL) record
   dest/src in cc_status with CC_NO_OVERFLOW for the listed integer ops;
   calls and PC-setting insns invalidate the tracked state; aliasing
   dest/src or two MEMs force value2 to be dropped.  NOTE(review):
   CC_STATUS_INIT calls, case labels and several braces fall in missing
   original lines; preserved verbatim.  */
1076 /* Worker function for NOTICE_UPDATE_CC. */
1079 vax_notice_update_cc (rtx exp
, rtx insn ATTRIBUTE_UNUSED
)
1081 if (GET_CODE (exp
) == SET
)
1083 if (GET_CODE (SET_SRC (exp
)) == CALL
)
1085 else if (GET_CODE (SET_DEST (exp
)) != ZERO_EXTRACT
1086 && GET_CODE (SET_DEST (exp
)) != PC
)
1088 cc_status
.flags
= 0;
1089 /* The integer operations below don't set carry or
1090 set it in an incompatible way. That's ok though
1091 as the Z bit is all we need when doing unsigned
1092 comparisons on the result of these insns (since
1093 they're always with 0). Set CC_NO_OVERFLOW to
1094 generate the correct unsigned branches. */
1095 switch (GET_CODE (SET_SRC (exp
)))
1098 if (GET_MODE_CLASS (GET_MODE (exp
)) == MODE_FLOAT
)
1106 cc_status
.flags
= CC_NO_OVERFLOW
;
1111 cc_status
.value1
= SET_DEST (exp
);
1112 cc_status
.value2
= SET_SRC (exp
);
1115 else if (GET_CODE (exp
) == PARALLEL
1116 && GET_CODE (XVECEXP (exp
, 0, 0)) == SET
)
1118 if (GET_CODE (SET_SRC (XVECEXP (exp
, 0, 0))) == CALL
)
1120 else if (GET_CODE (SET_DEST (XVECEXP (exp
, 0, 0))) != PC
)
1122 cc_status
.flags
= 0;
1123 cc_status
.value1
= SET_DEST (XVECEXP (exp
, 0, 0));
1124 cc_status
.value2
= SET_SRC (XVECEXP (exp
, 0, 0));
1127 /* PARALLELs whose first element sets the PC are aob,
1128 sob insns. They do change the cc's. */
1133 if (cc_status
.value1
&& REG_P (cc_status
.value1
)
1135 && reg_overlap_mentioned_p (cc_status
.value1
, cc_status
.value2
))
1136 cc_status
.value2
= 0;
1137 if (cc_status
.value1
&& MEM_P (cc_status
.value1
)
1139 && MEM_P (cc_status
.value2
))
1140 cc_status
.value2
= 0;
1141 /* Actual condition, one line up, should be that value2's address
1142 depends on value1, but that is too much of a pain. */
/* Return the assembler template for an integer move of OPERANDS in MODE:
   DImode tries ashq-based encodings of shifted 6-bit literals and
   movq/split moves; SImode/HImode/QImode pick the shortest of clr/mov/
   mcom/movz/cvt/ashl forms based on the constant's value.  NOTE(review):
   many original lines (case labels, braces, clrl/clrq returns, the
   declarations of hi/lo/n) are missing from this extract; preserved
   verbatim -- adjacent numbered lines are often from different branches.  */
1145 /* Output integer move instructions. */
1148 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
,
1152 const char *pattern_hi
, *pattern_lo
;
1157 if (operands
[1] == const0_rtx
)
1159 if (TARGET_QMATH
&& optimize_size
1160 && (CONST_INT_P (operands
[1])
1161 || GET_CODE (operands
[1]) == CONST_DOUBLE
))
1163 unsigned HOST_WIDE_INT hval
, lval
;
1166 if (GET_CODE (operands
[1]) == CONST_DOUBLE
)
1168 gcc_assert (HOST_BITS_PER_WIDE_INT
!= 64);
1170 /* Make sure only the low 32 bits are valid. */
1171 lval
= CONST_DOUBLE_LOW (operands
[1]) & 0xffffffff;
1172 hval
= CONST_DOUBLE_HIGH (operands
[1]) & 0xffffffff;
1176 lval
= INTVAL (operands
[1]);
1180 /* Here we see if we are trying to see if the 64bit value is really
1181 a 6bit shifted some arbitrary amount. If so, we can use ashq to
1182 shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1183 8 bytes - 1 shift byte - 1 short literal byte. */
1185 && (n
= exact_log2 (lval
& (- lval
))) != -1
1186 && (lval
>> n
) < 64)
1190 /* On 32bit platforms, if the 6bits didn't overflow into the
1191 upper 32bit value that value better be 0. If we have
1192 overflowed, make sure it wasn't too much. */
1193 if (HOST_BITS_PER_WIDE_INT
== 32 && hval
!= 0)
1195 if (n
<= 26 || hval
>= ((unsigned)1 << (n
- 26)))
1196 n
= 0; /* failure */
1198 lval
|= hval
<< (32 - n
);
1200 /* If n is 0, then ashq is not the best way to emit this. */
1203 operands
[1] = GEN_INT (lval
);
1204 operands
[2] = GEN_INT (n
);
1205 return "ashq %2,%D1,%0";
1207 #if HOST_BITS_PER_WIDE_INT == 32
1209 /* On 32bit platforms, if the low 32bit value is 0, checkout the
1210 upper 32bit value. */
1212 && (n
= exact_log2 (hval
& (- hval
)) - 1) != -1
1213 && (hval
>> n
) < 64)
1215 operands
[1] = GEN_INT (hval
>> n
);
1216 operands
[2] = GEN_INT (n
+ 32);
1217 return "ashq %2,%D1,%0";
1223 && (!MEM_P (operands
[0])
1224 || GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
1225 || GET_CODE (XEXP (operands
[0], 0)) == POST_INC
1226 || !illegal_addsub_di_memory_operand (operands
[0], DImode
))
1227 && ((CONST_INT_P (operands
[1])
1228 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[1]) >= 64)
1229 || GET_CODE (operands
[1]) == CONST_DOUBLE
))
1231 hi
[0] = operands
[0];
1232 hi
[1] = operands
[1];
1234 split_quadword_operands (insn
, SET
, hi
, lo
, 2);
1236 pattern_lo
= vax_output_int_move (NULL
, lo
, SImode
);
1237 pattern_hi
= vax_output_int_move (NULL
, hi
, SImode
);
1239 /* The patterns are just movl/movl or pushl/pushl then a movq will
1240 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1241 bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1243 if ((!strncmp (pattern_lo
, "movl", 4)
1244 && !strncmp (pattern_hi
, "movl", 4))
1245 || (!strncmp (pattern_lo
, "pushl", 5)
1246 && !strncmp (pattern_hi
, "pushl", 5)))
1247 return "movq %1,%0";
1249 if (MEM_P (operands
[0])
1250 && GET_CODE (XEXP (operands
[0], 0)) == PRE_DEC
)
1252 output_asm_insn (pattern_hi
, hi
);
1253 operands
[0] = lo
[0];
1254 operands
[1] = lo
[1];
1255 operands
[2] = lo
[2];
1260 output_asm_insn (pattern_lo
, lo
);
1261 operands
[0] = hi
[0];
1262 operands
[1] = hi
[1];
1263 operands
[2] = hi
[2];
1267 return "movq %1,%0";
1270 if (symbolic_operand (operands
[1], SImode
))
1272 if (push_operand (operands
[0], SImode
))
1273 return "pushab %a1";
1274 return "movab %a1,%0";
1277 if (operands
[1] == const0_rtx
)
1279 if (push_operand (operands
[1], SImode
))
1284 if (CONST_INT_P (operands
[1])
1285 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[1]) >= 64)
1287 HOST_WIDE_INT i
= INTVAL (operands
[1]);
1289 if ((unsigned HOST_WIDE_INT
)(~i
) < 64)
1290 return "mcoml %N1,%0";
1291 if ((unsigned HOST_WIDE_INT
)i
< 0x100)
1292 return "movzbl %1,%0";
1293 if (i
>= -0x80 && i
< 0)
1294 return "cvtbl %1,%0";
1296 && (n
= exact_log2 (i
& (-i
))) != -1
1297 && ((unsigned HOST_WIDE_INT
)i
>> n
) < 64)
1299 operands
[1] = GEN_INT ((unsigned HOST_WIDE_INT
)i
>> n
);
1300 operands
[2] = GEN_INT (n
);
1301 return "ashl %2,%1,%0";
1303 if ((unsigned HOST_WIDE_INT
)i
< 0x10000)
1304 return "movzwl %1,%0";
1305 if (i
>= -0x8000 && i
< 0)
1306 return "cvtwl %1,%0";
1308 if (push_operand (operands
[0], SImode
))
1310 return "movl %1,%0";
1313 if (CONST_INT_P (operands
[1]))
1315 HOST_WIDE_INT i
= INTVAL (operands
[1]);
1318 else if ((unsigned HOST_WIDE_INT
)i
< 64)
1319 return "movw %1,%0";
1320 else if ((unsigned HOST_WIDE_INT
)~i
< 64)
1321 return "mcomw %H1,%0";
1322 else if ((unsigned HOST_WIDE_INT
)i
< 256)
1323 return "movzbw %1,%0";
1324 else if (i
>= -0x80 && i
< 0)
1325 return "cvtbw %1,%0";
1327 return "movw %1,%0";
1330 if (CONST_INT_P (operands
[1]))
1332 HOST_WIDE_INT i
= INTVAL (operands
[1]);
1335 else if ((unsigned HOST_WIDE_INT
)~i
< 64)
1336 return "mcomb %B1,%0";
1338 return "movb %1,%0";
/* Return the assembler template for an integer add of OPERANDS in MODE:
   DImode splits into an add/sub of the low words followed by adwc/sbwc of
   the high words; SImode/HImode/QImode choose among inc/dec, two- and
   three-operand add/sub with negated short literals, and movab/pushab
   address-arithmetic forms where they are shorter (see the tradeoff
   comment below).  NOTE(review): case labels, braces, declarations of
   low[]/carry/sub and several branches fall in missing original lines;
   note also the misspelled NO_EXTERNAL_INDIRECT_ADDRESSS guard in the
   original text -- preserved verbatim.  */
1345 /* Output integer add instructions.
1347 The space-time-opcode tradeoffs for addition vary by model of VAX.
1349 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1350 but it not faster on other models.
1352 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1353 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1354 a register is used in an address too soon after it is set.
1355 Compromise by using movab only when it is shorter than the add
1356 or the base register in the address is one of sp, ap, and fp,
1357 which are not modified very often. */
1360 vax_output_int_add (rtx insn
, rtx
*operands
, machine_mode mode
)
1367 const char *pattern
;
1371 if (TARGET_QMATH
&& 0)
1374 split_quadword_operands (insn
, PLUS
, operands
, low
, 3);
1378 gcc_assert (rtx_equal_p (operands
[0], operands
[1]));
1379 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1380 gcc_assert (!flag_pic
|| !external_memory_operand (low
[2], SImode
));
1381 gcc_assert (!flag_pic
|| !external_memory_operand (low
[0], SImode
));
1384 /* No reason to add a 0 to the low part and thus no carry, so just
1385 emit the appropriate add/sub instruction. */
1386 if (low
[2] == const0_rtx
)
1387 return vax_output_int_add (NULL
, operands
, SImode
);
1389 /* Are we doing addition or subtraction? */
1390 sub
= CONST_INT_P (operands
[2]) && INTVAL (operands
[2]) < 0;
1392 /* We can't use vax_output_int_add since some the patterns don't
1393 modify the carry bit. */
1396 if (low
[2] == constm1_rtx
)
1397 pattern
= "decl %0";
1399 pattern
= "subl2 $%n2,%0";
1403 if (low
[2] == const1_rtx
)
1404 pattern
= "incl %0";
1406 pattern
= "addl2 %2,%0";
1408 output_asm_insn (pattern
, low
);
1410 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1411 two 32bit parts, we complement each and then add one to
1412 low part. We know that the low part can't overflow since
1413 it's value can never be 0. */
1415 return "sbwc %N2,%0";
1416 return "adwc %2,%0";
1419 /* Add low parts. */
1420 if (rtx_equal_p (operands
[0], operands
[1]))
1422 if (low
[2] == const0_rtx
)
1423 /* Should examine operand, punt if not POST_INC. */
1424 pattern
= "tstl %0", carry
= 0;
1425 else if (low
[2] == const1_rtx
)
1426 pattern
= "incl %0";
1428 pattern
= "addl2 %2,%0";
1432 if (low
[2] == const0_rtx
)
1433 pattern
= "movl %1,%0", carry
= 0;
1435 pattern
= "addl3 %2,%1,%0";
1438 output_asm_insn (pattern
, low
);
1440 /* If CARRY is 0, we don't have any carry value to worry about. */
1441 return get_insn_template (CODE_FOR_addsi3
, insn
);
1442 /* %0 = C + %1 + %2 */
1443 if (!rtx_equal_p (operands
[0], operands
[1]))
1444 output_asm_insn ((operands
[1] == const0_rtx
1446 : "movl %1,%0"), operands
);
1447 return "adwc %2,%0";
1451 if (rtx_equal_p (operands
[0], operands
[1]))
1453 if (operands
[2] == const1_rtx
)
1455 if (operands
[2] == constm1_rtx
)
1457 if (CONST_INT_P (operands
[2])
1458 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1459 return "subl2 $%n2,%0";
1460 if (CONST_INT_P (operands
[2])
1461 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[2]) >= 64
1462 && REG_P (operands
[1])
1463 && ((INTVAL (operands
[2]) < 32767 && INTVAL (operands
[2]) > -32768)
1464 || REGNO (operands
[1]) > 11))
1465 return "movab %c2(%1),%0";
1466 if (REG_P (operands
[0]) && symbolic_operand (operands
[2], SImode
))
1467 return "movab %a2[%0],%0";
1468 return "addl2 %2,%0";
1471 if (rtx_equal_p (operands
[0], operands
[2]))
1473 if (REG_P (operands
[0]) && symbolic_operand (operands
[1], SImode
))
1474 return "movab %a1[%0],%0";
1475 return "addl2 %1,%0";
1478 if (CONST_INT_P (operands
[2])
1479 && INTVAL (operands
[2]) < 32767
1480 && INTVAL (operands
[2]) > -32768
1481 && REG_P (operands
[1])
1482 && push_operand (operands
[0], SImode
))
1483 return "pushab %c2(%1)";
1485 if (CONST_INT_P (operands
[2])
1486 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1487 return "subl3 $%n2,%1,%0";
1489 if (CONST_INT_P (operands
[2])
1490 && (unsigned HOST_WIDE_INT
) INTVAL (operands
[2]) >= 64
1491 && REG_P (operands
[1])
1492 && ((INTVAL (operands
[2]) < 32767 && INTVAL (operands
[2]) > -32768)
1493 || REGNO (operands
[1]) > 11))
1494 return "movab %c2(%1),%0";
1496 /* Add this if using gcc on a VAX 3xxx:
1497 if (REG_P (operands[1]) && REG_P (operands[2]))
1498 return "movab (%1)[%2],%0";
1501 if (REG_P (operands
[1]) && symbolic_operand (operands
[2], SImode
))
1503 if (push_operand (operands
[0], SImode
))
1504 return "pushab %a2[%1]";
1505 return "movab %a2[%1],%0";
1508 if (REG_P (operands
[2]) && symbolic_operand (operands
[1], SImode
))
1510 if (push_operand (operands
[0], SImode
))
1511 return "pushab %a1[%2]";
1512 return "movab %a1[%2],%0";
1515 if (flag_pic
&& REG_P (operands
[0])
1516 && symbolic_operand (operands
[2], SImode
))
1517 return "movab %a2,%0;addl2 %1,%0";
1520 && (symbolic_operand (operands
[1], SImode
)
1521 || symbolic_operand (operands
[1], SImode
)))
1524 return "addl3 %1,%2,%0";
1527 if (rtx_equal_p (operands
[0], operands
[1]))
1529 if (operands
[2] == const1_rtx
)
1531 if (operands
[2] == constm1_rtx
)
1533 if (CONST_INT_P (operands
[2])
1534 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1535 return "subw2 $%n2,%0";
1536 return "addw2 %2,%0";
1538 if (rtx_equal_p (operands
[0], operands
[2]))
1539 return "addw2 %1,%0";
1540 if (CONST_INT_P (operands
[2])
1541 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1542 return "subw3 $%n2,%1,%0";
1543 return "addw3 %1,%2,%0";
1546 if (rtx_equal_p (operands
[0], operands
[1]))
1548 if (operands
[2] == const1_rtx
)
1550 if (operands
[2] == constm1_rtx
)
1552 if (CONST_INT_P (operands
[2])
1553 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1554 return "subb2 $%n2,%0";
1555 return "addb2 %2,%0";
1557 if (rtx_equal_p (operands
[0], operands
[2]))
1558 return "addb2 %1,%0";
1559 if (CONST_INT_P (operands
[2])
1560 && (unsigned HOST_WIDE_INT
) (- INTVAL (operands
[2])) < 64)
1561 return "subb3 $%n2,%1,%0";
1562 return "addb3 %1,%2,%0";
1570 vax_output_int_subtract (rtx insn
, rtx
*operands
, machine_mode mode
)
1577 const char *pattern
;
1580 if (TARGET_QMATH
&& 0)
1583 split_quadword_operands (insn
, MINUS
, operands
, low
, 3);
1587 if (operands
[1] == const0_rtx
&& low
[1] == const0_rtx
)
1589 /* Negation is tricky. It's basically complement and increment.
1590 Negate hi, then lo, and subtract the carry back. */
1591 if ((MEM_P (low
[0]) && GET_CODE (XEXP (low
[0], 0)) == POST_INC
)
1592 || (MEM_P (operands
[0])
1593 && GET_CODE (XEXP (operands
[0], 0)) == POST_INC
))
1594 fatal_insn ("illegal operand detected", insn
);
1595 output_asm_insn ("mnegl %2,%0", operands
);
1596 output_asm_insn ("mnegl %2,%0", low
);
1597 return "sbwc $0,%0";
1599 gcc_assert (rtx_equal_p (operands
[0], operands
[1]));
1600 gcc_assert (rtx_equal_p (low
[0], low
[1]));
1601 if (low
[2] == const1_rtx
)
1602 output_asm_insn ("decl %0", low
);
1604 output_asm_insn ("subl2 %2,%0", low
);
1605 return "sbwc %2,%0";
1608 /* Subtract low parts. */
1609 if (rtx_equal_p (operands
[0], operands
[1]))
1611 if (low
[2] == const0_rtx
)
1612 pattern
= 0, carry
= 0;
1613 else if (low
[2] == constm1_rtx
)
1614 pattern
= "decl %0";
1616 pattern
= "subl2 %2,%0";
1620 if (low
[2] == constm1_rtx
)
1621 pattern
= "decl %0";
1622 else if (low
[2] == const0_rtx
)
1623 pattern
= get_insn_template (CODE_FOR_movsi
, insn
), carry
= 0;
1625 pattern
= "subl3 %2,%1,%0";
1628 output_asm_insn (pattern
, low
);
1631 if (!rtx_equal_p (operands
[0], operands
[1]))
1632 return "movl %1,%0;sbwc %2,%0";
1633 return "sbwc %2,%0";
1634 /* %0 = %2 - %1 - C */
1636 return get_insn_template (CODE_FOR_subsi3
, insn
);
1644 /* True if X is an rtx for a constant that is a valid address. */
1647 legitimate_constant_address_p (rtx x
)
1649 if (GET_CODE (x
) == LABEL_REF
|| GET_CODE (x
) == SYMBOL_REF
1650 || CONST_INT_P (x
) || GET_CODE (x
) == HIGH
)
1652 if (GET_CODE (x
) != CONST
)
1654 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1656 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
1657 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x
, 0), 0)))
/* The other macros defined here are used only in legitimate_address_p ().  */

/* Nonzero if X is a hard reg that can be used as an index
   or, if not strict, if it is a pseudo reg.  */
#define INDEX_REGISTER_P(X, STRICT) \
  (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))

/* Nonzero if X is a hard reg that can be used as a base reg
   or, if not strict, if it is a pseudo reg.  */
#define BASE_REGISTER_P(X, STRICT) \
  (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1675 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1677 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1678 are no SYMBOL_REFs for external symbols present. */
1681 indirectable_constant_address_p (rtx x
, bool indirect
)
1683 if (GET_CODE (x
) == SYMBOL_REF
)
1684 return !flag_pic
|| SYMBOL_REF_LOCAL_P (x
) || !indirect
;
1686 if (GET_CODE (x
) == CONST
)
1688 || GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
1689 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x
, 0), 0));
1691 return CONSTANT_ADDRESS_P (x
);
1694 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1697 indirectable_constant_address_p (rtx x
, bool indirect ATTRIBUTE_UNUSED
)
1699 return CONSTANT_ADDRESS_P (x
);
1702 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1704 /* True if X is an address which can be indirected. External symbols
1705 could be in a sharable image library, so we disallow those. */
1708 indirectable_address_p (rtx x
, bool strict
, bool indirect
)
1710 if (indirectable_constant_address_p (x
, indirect
)
1711 || BASE_REGISTER_P (x
, strict
))
1713 if (GET_CODE (x
) != PLUS
1714 || !BASE_REGISTER_P (XEXP (x
, 0), strict
)
1715 || (flag_pic
&& !CONST_INT_P (XEXP (x
, 1))))
1717 return indirectable_constant_address_p (XEXP (x
, 1), indirect
);
1720 /* Return true if x is a valid address not using indexing.
1721 (This much is the easy part.) */
1723 nonindexed_address_p (rtx x
, bool strict
)
1728 if (! reload_in_progress
1729 || reg_equiv_mem (REGNO (x
)) == 0
1730 || indirectable_address_p (reg_equiv_mem (REGNO (x
)), strict
, false))
1733 if (indirectable_constant_address_p (x
, false))
1735 if (indirectable_address_p (x
, strict
, false))
1737 xfoo0
= XEXP (x
, 0);
1738 if (MEM_P (x
) && indirectable_address_p (xfoo0
, strict
, true))
1740 if ((GET_CODE (x
) == PRE_DEC
|| GET_CODE (x
) == POST_INC
)
1741 && BASE_REGISTER_P (xfoo0
, strict
))
1746 /* True if PROD is either a reg times size of mode MODE and MODE is less
1747 than or equal 8 bytes, or just a reg if MODE is one byte. */
1750 index_term_p (rtx prod
, machine_mode mode
, bool strict
)
1754 if (GET_MODE_SIZE (mode
) == 1)
1755 return BASE_REGISTER_P (prod
, strict
);
1757 if (GET_CODE (prod
) != MULT
|| GET_MODE_SIZE (mode
) > 8)
1760 xfoo0
= XEXP (prod
, 0);
1761 xfoo1
= XEXP (prod
, 1);
1763 if (CONST_INT_P (xfoo0
)
1764 && INTVAL (xfoo0
) == (int)GET_MODE_SIZE (mode
)
1765 && INDEX_REGISTER_P (xfoo1
, strict
))
1768 if (CONST_INT_P (xfoo1
)
1769 && INTVAL (xfoo1
) == (int)GET_MODE_SIZE (mode
)
1770 && INDEX_REGISTER_P (xfoo0
, strict
))
1776 /* Return true if X is the sum of a register
1777 and a valid index term for mode MODE. */
1779 reg_plus_index_p (rtx x
, machine_mode mode
, bool strict
)
1783 if (GET_CODE (x
) != PLUS
)
1786 xfoo0
= XEXP (x
, 0);
1787 xfoo1
= XEXP (x
, 1);
1789 if (BASE_REGISTER_P (xfoo0
, strict
) && index_term_p (xfoo1
, mode
, strict
))
1792 if (BASE_REGISTER_P (xfoo1
, strict
) && index_term_p (xfoo0
, mode
, strict
))
1798 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1800 indexable_address_p (rtx xfoo0
, rtx xfoo1
, machine_mode mode
, bool strict
)
1802 if (!CONSTANT_ADDRESS_P (xfoo0
))
1804 if (BASE_REGISTER_P (xfoo1
, strict
))
1805 return !flag_pic
|| mode
== QImode
;
1806 if (flag_pic
&& symbolic_operand (xfoo0
, SImode
))
1808 return reg_plus_index_p (xfoo1
, mode
, strict
);
1811 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1812 that is a valid memory address for an instruction.
1813 The MODE argument is the machine mode for the MEM expression
1814 that wants to use this address. */
1816 vax_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
)
1820 if (nonindexed_address_p (x
, strict
))
1823 if (GET_CODE (x
) != PLUS
)
1826 /* Handle <address>[index] represented with index-sum outermost */
1828 xfoo0
= XEXP (x
, 0);
1829 xfoo1
= XEXP (x
, 1);
1831 if (index_term_p (xfoo0
, mode
, strict
)
1832 && nonindexed_address_p (xfoo1
, strict
))
1835 if (index_term_p (xfoo1
, mode
, strict
)
1836 && nonindexed_address_p (xfoo0
, strict
))
1839 /* Handle offset(reg)[index] with offset added outermost */
1841 if (indexable_address_p (xfoo0
, xfoo1
, mode
, strict
)
1842 || indexable_address_p (xfoo1
, xfoo0
, mode
, strict
))
1848 /* Return true if x (a legitimate address expression) has an effect that
1849 depends on the machine mode it is used for. On the VAX, the predecrement
1850 and postincrement address depend thus (the amount of decrement or
1851 increment being the length of the operand) and all indexed address depend
1852 thus (because the index scale factor is the length of the operand). */
1855 vax_mode_dependent_address_p (const_rtx x
, addr_space_t as ATTRIBUTE_UNUSED
)
1859 /* Auto-increment cases are now dealt with generically in recog.c. */
1860 if (GET_CODE (x
) != PLUS
)
1863 xfoo0
= XEXP (x
, 0);
1864 xfoo1
= XEXP (x
, 1);
1866 if (CONST_INT_P (xfoo0
) && REG_P (xfoo1
))
1868 if (CONST_INT_P (xfoo1
) && REG_P (xfoo0
))
1870 if (!flag_pic
&& CONSTANT_ADDRESS_P (xfoo0
) && REG_P (xfoo1
))
1872 if (!flag_pic
&& CONSTANT_ADDRESS_P (xfoo1
) && REG_P (xfoo0
))
1879 fixup_mathdi_operand (rtx x
, machine_mode mode
)
1881 if (illegal_addsub_di_memory_operand (x
, mode
))
1883 rtx addr
= XEXP (x
, 0);
1884 rtx temp
= gen_reg_rtx (Pmode
);
1886 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1887 if (GET_CODE (addr
) == CONST
&& flag_pic
)
1889 offset
= XEXP (XEXP (addr
, 0), 1);
1890 addr
= XEXP (XEXP (addr
, 0), 0);
1893 emit_move_insn (temp
, addr
);
1895 temp
= gen_rtx_PLUS (Pmode
, temp
, offset
);
1896 x
= gen_rtx_MEM (DImode
, temp
);
1902 vax_expand_addsub_di_operands (rtx
* operands
, enum rtx_code code
)
1904 int hi_only
= operand_subword (operands
[2], 0, 0, DImode
) == const0_rtx
;
1907 rtx (*gen_old_insn
)(rtx
, rtx
, rtx
);
1908 rtx (*gen_si_insn
)(rtx
, rtx
, rtx
);
1909 rtx (*gen_insn
)(rtx
, rtx
, rtx
);
1913 gen_old_insn
= gen_adddi3_old
;
1914 gen_si_insn
= gen_addsi3
;
1915 gen_insn
= gen_adcdi3
;
1917 else if (code
== MINUS
)
1919 gen_old_insn
= gen_subdi3_old
;
1920 gen_si_insn
= gen_subsi3
;
1921 gen_insn
= gen_sbcdi3
;
1926 /* If this is addition (thus operands are commutative) and if there is one
1927 addend that duplicates the desination, we want that addend to be the
1930 && rtx_equal_p (operands
[0], operands
[2])
1931 && !rtx_equal_p (operands
[1], operands
[2]))
1934 operands
[2] = operands
[1];
1940 emit_insn ((*gen_old_insn
) (operands
[0], operands
[1], operands
[2]));
1944 if (!rtx_equal_p (operands
[0], operands
[1])
1945 && (REG_P (operands
[0]) && MEM_P (operands
[1])))
1947 emit_move_insn (operands
[0], operands
[1]);
1948 operands
[1] = operands
[0];
1951 operands
[0] = fixup_mathdi_operand (operands
[0], DImode
);
1952 operands
[1] = fixup_mathdi_operand (operands
[1], DImode
);
1953 operands
[2] = fixup_mathdi_operand (operands
[2], DImode
);
1955 if (!rtx_equal_p (operands
[0], operands
[1]))
1956 emit_move_insn (operand_subword (operands
[0], 0, 0, DImode
),
1957 operand_subword (operands
[1], 0, 0, DImode
));
1959 emit_insn ((*gen_si_insn
) (operand_subword (operands
[0], 1, 0, DImode
),
1960 operand_subword (operands
[1], 1, 0, DImode
),
1961 operand_subword (operands
[2], 1, 0, DImode
)));
1965 /* If are adding the same value together, that's really a multiply by 2,
1966 and that's just a left shift of 1. */
1967 if (rtx_equal_p (operands
[1], operands
[2]))
1969 gcc_assert (code
!= MINUS
);
1970 emit_insn (gen_ashldi3 (operands
[0], operands
[1], const1_rtx
));
1974 operands
[0] = fixup_mathdi_operand (operands
[0], DImode
);
1976 /* If an operand is the same as operand[0], use the operand[0] rtx
1977 because fixup will an equivalent rtx but not an equal one. */
1979 if (rtx_equal_p (operands
[0], operands
[1]))
1980 operands
[1] = operands
[0];
1982 operands
[1] = fixup_mathdi_operand (operands
[1], DImode
);
1984 if (rtx_equal_p (operands
[0], operands
[2]))
1985 operands
[2] = operands
[0];
1987 operands
[2] = fixup_mathdi_operand (operands
[2], DImode
);
1989 /* If we are subtracting not from ourselves [d = a - b], and because the
1990 carry ops are two operand only, we would need to do a move prior to
1991 the subtract. And if d == b, we would need a temp otherwise
1992 [d = a, d -= d] and we end up with 0. Instead we rewrite d = a - b
1993 into d = -b, d += a. Since -b can never overflow, even if b == d,
1996 If we are doing addition, since the carry ops are two operand, if
1997 we aren't adding to ourselves, move the first addend to the
1998 destination first. */
2000 gcc_assert (operands
[1] != const0_rtx
|| code
== MINUS
);
2001 if (!rtx_equal_p (operands
[0], operands
[1]) && operands
[1] != const0_rtx
)
2003 if (code
== MINUS
&& CONSTANT_P (operands
[1]))
2005 temp
= gen_reg_rtx (DImode
);
2006 emit_insn (gen_sbcdi3 (operands
[0], const0_rtx
, operands
[2]));
2008 gen_insn
= gen_adcdi3
;
2009 operands
[2] = operands
[1];
2010 operands
[1] = operands
[0];
2013 emit_move_insn (operands
[0], operands
[1]);
2016 /* Subtracting a constant will have been rewritten to an addition of the
2017 negative of that constant before we get here. */
2018 gcc_assert (!CONSTANT_P (operands
[2]) || code
== PLUS
);
2019 emit_insn ((*gen_insn
) (operands
[0], operands
[1], operands
[2]));
2024 adjacent_operands_p (rtx lo
, rtx hi
, machine_mode mode
)
2026 HOST_WIDE_INT lo_offset
;
2027 HOST_WIDE_INT hi_offset
;
2029 if (GET_CODE (lo
) != GET_CODE (hi
))
2033 return mode
== SImode
&& REGNO (lo
) + 1 == REGNO (hi
);
2034 if (CONST_INT_P (lo
))
2035 return INTVAL (hi
) == 0 && 0 <= INTVAL (lo
) && INTVAL (lo
) < 64;
2036 if (CONST_INT_P (lo
))
2037 return mode
!= SImode
;
2042 if (MEM_VOLATILE_P (lo
) || MEM_VOLATILE_P (hi
))
2048 if (GET_CODE (lo
) == POST_INC
/* || GET_CODE (lo) == PRE_DEC */)
2049 return rtx_equal_p (lo
, hi
);
2051 switch (GET_CODE (lo
))
2061 if (!CONST_INT_P (XEXP (lo
, 1)))
2063 lo_offset
= INTVAL (XEXP (lo
, 1));
2070 switch (GET_CODE (hi
))
2080 if (!CONST_INT_P (XEXP (hi
, 1)))
2082 hi_offset
= INTVAL (XEXP (hi
, 1));
2089 if (GET_CODE (lo
) == MULT
|| GET_CODE (lo
) == PLUS
)
2092 return rtx_equal_p (lo
, hi
)
2093 && hi_offset
- lo_offset
== GET_MODE_SIZE (mode
);
2096 /* Output assembler code for a block containing the constant parts
2097 of a trampoline, leaving space for the variable parts. */
2099 /* On the VAX, the trampoline contains an entry mask and two instructions:
2101 movl $STATIC,r0 (store the functions static chain)
2102 jmp *$FUNCTION (jump to function code at address FUNCTION) */
2105 vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED
)
2107 assemble_aligned_integer (2, const0_rtx
);
2108 assemble_aligned_integer (2, GEN_INT (0x8fd0));
2109 assemble_aligned_integer (4, const0_rtx
);
2110 assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM
));
2111 assemble_aligned_integer (2, GEN_INT (0x9f17));
2112 assemble_aligned_integer (4, const0_rtx
);
2115 /* We copy the register-mask from the function's pure code
2116 to the start of the trampoline. */
2119 vax_trampoline_init (rtx m_tramp
, tree fndecl
, rtx cxt
)
2121 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
2124 emit_block_move (m_tramp
, assemble_trampoline_template (),
2125 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
2127 mem
= adjust_address (m_tramp
, HImode
, 0);
2128 emit_move_insn (mem
, gen_const_mem (HImode
, fnaddr
));
2130 mem
= adjust_address (m_tramp
, SImode
, 4);
2131 emit_move_insn (mem
, cxt
);
2132 mem
= adjust_address (m_tramp
, SImode
, 11);
2133 emit_move_insn (mem
, plus_constant (Pmode
, fnaddr
, 2));
2134 emit_insn (gen_sync_istream ());
2137 /* Value is the number of bytes of arguments automatically
2138 popped when returning from a subroutine call.
2139 FUNDECL is the declaration node of the function (as a tree),
2140 FUNTYPE is the data type of the function (as a tree),
2141 or for a library call it is an identifier node for the subroutine name.
2142 SIZE is the number of bytes of arguments passed on the stack.
2144 On the VAX, the RET insn pops a maximum of 255 args for any function. */
2147 vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED
,
2148 tree funtype ATTRIBUTE_UNUSED
, int size
)
2150 return size
> 255 * 4 ? 0 : size
;
2153 /* Define where to put the arguments to a function.
2154 Value is zero to push the argument on the stack,
2155 or a hard register in which to store the argument.
2157 MODE is the argument's machine mode.
2158 TYPE is the data type of the argument (as a tree).
2159 This is null for libcalls where that information may
2161 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2162 the preceding args and about the function being called.
2163 NAMED is nonzero if this argument is a named parameter
2164 (otherwise it is an extra parameter matching an ellipsis). */
2166 /* On the VAX all args are pushed. */
2169 vax_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED
,
2170 machine_mode mode ATTRIBUTE_UNUSED
,
2171 const_tree type ATTRIBUTE_UNUSED
,
2172 bool named ATTRIBUTE_UNUSED
)
2177 /* Update the data in CUM to advance over an argument of mode MODE and
2178 data type TYPE. (TYPE is null for libcalls where that information
2179 may not be available.) */
2182 vax_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
2183 const_tree type
, bool named ATTRIBUTE_UNUSED
)
2185 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
2187 *cum
+= (mode
!= BLKmode
2188 ? (GET_MODE_SIZE (mode
) + 3) & ~3
2189 : (int_size_in_bytes (type
) + 3) & ~3);