1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987, 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002,
3 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "df.h"
28 #include "tree.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "function.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "recog.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "flags.h"
40 #include "debug.h"
41 #include "diagnostic-core.h"
42 #include "toplev.h"
43 #include "tm-preds.h"
44 #include "tm-constrs.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "target-def.h"
49 static void vax_option_override (void);
50 static bool vax_legitimate_address_p (enum machine_mode, rtx, bool);
51 static void vax_output_function_prologue (FILE *, HOST_WIDE_INT);
52 static void vax_file_start (void);
53 static void vax_init_libfuncs (void);
54 static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
55 HOST_WIDE_INT, tree);
56 static int vax_address_cost_1 (rtx);
57 static int vax_address_cost (rtx, bool);
58 static bool vax_rtx_costs (rtx, int, int, int *, bool);
59 static rtx vax_struct_value_rtx (tree, int);
60 static rtx vax_builtin_setjmp_frame_value (void);
61 static void vax_asm_trampoline_template (FILE *);
62 static void vax_trampoline_init (rtx, tree, rtx);
63 static int vax_return_pops_args (tree, tree, int);
65 /* Initialize the GCC target structure. */
66 #undef TARGET_ASM_ALIGNED_HI_OP
67 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
69 #undef TARGET_ASM_FUNCTION_PROLOGUE
70 #define TARGET_ASM_FUNCTION_PROLOGUE vax_output_function_prologue
72 #undef TARGET_ASM_FILE_START
73 #define TARGET_ASM_FILE_START vax_file_start
74 #undef TARGET_ASM_FILE_START_APP_OFF
75 #define TARGET_ASM_FILE_START_APP_OFF true
77 #undef TARGET_INIT_LIBFUNCS
78 #define TARGET_INIT_LIBFUNCS vax_init_libfuncs
80 #undef TARGET_ASM_OUTPUT_MI_THUNK
81 #define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
82 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
83 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
85 #undef TARGET_DEFAULT_TARGET_FLAGS
86 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
88 #undef TARGET_RTX_COSTS
89 #define TARGET_RTX_COSTS vax_rtx_costs
90 #undef TARGET_ADDRESS_COST
91 #define TARGET_ADDRESS_COST vax_address_cost
93 #undef TARGET_PROMOTE_PROTOTYPES
94 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
96 #undef TARGET_STRUCT_VALUE_RTX
97 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
99 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
100 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
102 #undef TARGET_LEGITIMATE_ADDRESS_P
103 #define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
105 #undef TARGET_FRAME_POINTER_REQUIRED
106 #define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true
108 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
109 #define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
110 #undef TARGET_TRAMPOLINE_INIT
111 #define TARGET_TRAMPOLINE_INIT vax_trampoline_init
112 #undef TARGET_RETURN_POPS_ARGS
113 #define TARGET_RETURN_POPS_ARGS vax_return_pops_args
115 #undef TARGET_OPTION_OVERRIDE
116 #define TARGET_OPTION_OVERRIDE vax_option_override
118 struct gcc_target targetm = TARGET_INITIALIZER;
120 /* Set global variables as needed for the options enabled. */
122 static void
123 vax_option_override (void)
125 /* We're VAX floating point, not IEEE floating point. */
126 if (TARGET_G_FLOAT)
127 REAL_MODE_FORMAT (DFmode) = &vax_g_format;
129 #ifdef SUBTARGET_OVERRIDE_OPTIONS
130 SUBTARGET_OVERRIDE_OPTIONS;
131 #endif
134 /* Generate the assembly code for function entry. FILE is a stdio
135 stream to output the code to. SIZE is an int: how many units of
136 temporary storage to allocate.
138 Refer to the array `regs_ever_live' to determine which registers to
139 save; `regs_ever_live[I]' is nonzero if register number I is ever
140 used in the function. This function is responsible for knowing
141 which registers should not be saved even if used. */
143 static void
144 vax_output_function_prologue (FILE * file, HOST_WIDE_INT size)
146 int regno;
147 int mask = 0;
149 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
150 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
151 mask |= 1 << regno;
153 fprintf (file, "\t.word 0x%x\n", mask);
155 if (dwarf2out_do_frame ())
157 const char *label = dwarf2out_cfi_label (false);
158 int offset = 0;
160 for (regno = FIRST_PSEUDO_REGISTER-1; regno >= 0; --regno)
161 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
162 dwarf2out_reg_save (label, regno, offset -= 4);
164 dwarf2out_reg_save (label, PC_REGNUM, offset -= 4);
165 dwarf2out_reg_save (label, FRAME_POINTER_REGNUM, offset -= 4);
166 dwarf2out_reg_save (label, ARG_POINTER_REGNUM, offset -= 4);
167 dwarf2out_def_cfa (label, FRAME_POINTER_REGNUM, -(offset - 4));
170 size -= STARTING_FRAME_OFFSET;
171 if (size >= 64)
172 asm_fprintf (file, "\tmovab %wd(%Rsp),%Rsp\n", -size);
173 else if (size)
174 asm_fprintf (file, "\tsubl2 $%wd,%Rsp\n", size);
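/* For illustration: a function whose only live call-saved registers are
   r6 and r7 and which needs 12 bytes of local storage gets an entry mask
   and stack adjustment along the lines of

	.word 0xc0
	subl2 $12,sp

   while a frame of 64 bytes or more is set up with movab instead
   (register prefix omitted; this sketch assumes STARTING_FRAME_OFFSET
   is zero).  */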
177 /* When debugging with stabs, we want to output an extra dummy label
178 so that gas can distinguish between D_float and G_float prior to
179 processing the .stabs directive identifying type double. */
180 static void
181 vax_file_start (void)
183 default_file_start ();
185 if (write_symbols == DBX_DEBUG)
186 fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
189 /* We can use the BSD C library routines for the libgcc calls that are
190 still generated, since that's what they boil down to anyway. When
191 targeting ELF, avoid the user's namespace. */
193 static void
194 vax_init_libfuncs (void)
196 if (TARGET_BSD_DIVMOD)
198 set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
199 set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
203 /* Split the N quadword (DImode) OPERANDS of INSN (operation CODE) into word-sized halves: the low words go in LOW[], the high words replace OPERANDS[]; autoincrement addresses and (when optimizing for size) dying register-indirect addresses get special handling. */
205 static void
206 split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
207 rtx * low, int n)
209 int i;
211 for (i = 0; i < n; i++)
212 low[i] = 0;
214 for (i = 0; i < n; i++)
216 if (MEM_P (operands[i])
217 && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
218 || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
220 rtx addr = XEXP (operands[i], 0);
221 operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
223 else if (optimize_size && MEM_P (operands[i])
224 && REG_P (XEXP (operands[i], 0))
225 && (code != MINUS || operands[1] != const0_rtx)
226 && find_regno_note (insn, REG_DEAD,
227 REGNO (XEXP (operands[i], 0))))
229 low[i] = gen_rtx_MEM (SImode,
230 gen_rtx_POST_INC (Pmode,
231 XEXP (operands[i], 0)));
232 operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
234 else
236 low[i] = operand_subword (operands[i], 0, 0, DImode);
237 operands[i] = operand_subword (operands[i], 1, 0, DImode);
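/* For example, a DImode value held in the hard register pair r0/r1 splits
   into (reg:SI 0) in LOW[i] (the least significant word on this
   little-endian target) and (reg:SI 1) back in OPERANDS[i], whereas a
   (post_inc) memory operand is simply turned into two SImode references
   through the same autoincremented address.  */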
242 void
243 print_operand_address (FILE * file, rtx addr)
245 rtx orig = addr;
246 rtx reg1, breg, ireg;
247 rtx offset;
249 retry:
250 switch (GET_CODE (addr))
252 case MEM:
253 fprintf (file, "*");
254 addr = XEXP (addr, 0);
255 goto retry;
257 case REG:
258 fprintf (file, "(%s)", reg_names[REGNO (addr)]);
259 break;
261 case PRE_DEC:
262 fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
263 break;
265 case POST_INC:
266 fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
267 break;
269 case PLUS:
270 /* There can be either two or three things added here. One must be a
271 REG. One can be either a REG or a MULT of a REG and an appropriate
272 constant, and the third can only be a constant or a MEM.
274 We get these two or three things and put the constant or MEM in
275 OFFSET, the MULT or REG in IREG, and the REG in BREG. If we have
276 a register and can't tell yet if it is a base or index register,
277 put it into REG1. */
279 reg1 = 0; ireg = 0; breg = 0; offset = 0;
281 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
282 || MEM_P (XEXP (addr, 0)))
284 offset = XEXP (addr, 0);
285 addr = XEXP (addr, 1);
287 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
288 || MEM_P (XEXP (addr, 1)))
290 offset = XEXP (addr, 1);
291 addr = XEXP (addr, 0);
293 else if (GET_CODE (XEXP (addr, 1)) == MULT)
295 ireg = XEXP (addr, 1);
296 addr = XEXP (addr, 0);
298 else if (GET_CODE (XEXP (addr, 0)) == MULT)
300 ireg = XEXP (addr, 0);
301 addr = XEXP (addr, 1);
303 else if (REG_P (XEXP (addr, 1)))
305 reg1 = XEXP (addr, 1);
306 addr = XEXP (addr, 0);
308 else if (REG_P (XEXP (addr, 0)))
310 reg1 = XEXP (addr, 0);
311 addr = XEXP (addr, 1);
313 else
314 gcc_unreachable ();
316 if (REG_P (addr))
318 if (reg1)
319 ireg = addr;
320 else
321 reg1 = addr;
323 else if (GET_CODE (addr) == MULT)
324 ireg = addr;
325 else
327 gcc_assert (GET_CODE (addr) == PLUS);
328 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
329 || MEM_P (XEXP (addr, 0)))
331 if (offset)
333 if (CONST_INT_P (offset))
334 offset = plus_constant (XEXP (addr, 0), INTVAL (offset));
335 else
337 gcc_assert (CONST_INT_P (XEXP (addr, 0)));
338 offset = plus_constant (offset, INTVAL (XEXP (addr, 0)));
341 offset = XEXP (addr, 0);
343 else if (REG_P (XEXP (addr, 0)))
345 if (reg1)
346 ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
347 else
348 reg1 = XEXP (addr, 0);
350 else
352 gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
353 gcc_assert (!ireg);
354 ireg = XEXP (addr, 0);
357 if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
358 || MEM_P (XEXP (addr, 1)))
360 if (offset)
362 if (CONST_INT_P (offset))
363 offset = plus_constant (XEXP (addr, 1), INTVAL (offset));
364 else
366 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
367 offset = plus_constant (offset, INTVAL (XEXP (addr, 1)));
370 offset = XEXP (addr, 1);
372 else if (REG_P (XEXP (addr, 1)))
374 if (reg1)
375 ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
376 else
377 reg1 = XEXP (addr, 1);
379 else
381 gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
382 gcc_assert (!ireg);
383 ireg = XEXP (addr, 1);
387 /* If REG1 is nonzero, figure out if it is a base or index register. */
388 if (reg1)
390 if (breg
391 || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
392 || (offset
393 && (MEM_P (offset)
394 || (flag_pic && symbolic_operand (offset, SImode)))))
396 gcc_assert (!ireg);
397 ireg = reg1;
399 else
400 breg = reg1;
403 if (offset != 0)
405 if (flag_pic && symbolic_operand (offset, SImode))
407 if (breg && ireg)
409 debug_rtx (orig);
410 output_operand_lossage ("symbol used with both base and indexed registers");
413 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
414 if (flag_pic > 1 && GET_CODE (offset) == CONST
415 && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
416 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
418 debug_rtx (orig);
419 output_operand_lossage ("symbol with offset used in PIC mode");
421 #endif
423 /* symbol(reg) isn't PIC, but symbol[reg] is. */
424 if (breg)
426 ireg = breg;
427 breg = 0;
432 output_address (offset);
435 if (breg != 0)
436 fprintf (file, "(%s)", reg_names[REGNO (breg)]);
438 if (ireg != 0)
440 if (GET_CODE (ireg) == MULT)
441 ireg = XEXP (ireg, 0);
442 gcc_assert (REG_P (ireg));
443 fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
445 break;
447 default:
448 output_addr_const (file, addr);
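/* As a concrete case of the PLUS handling above, an address such as
   (plus (plus (reg r1) (mult (reg r2) (const_int 4))) (const_int 8))
   is printed in VAX syntax as "8(r1)[r2]": the constant lands in OFFSET,
   the MULT supplies the index register, and the remaining REG becomes
   the base.  */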
452 void
453 print_operand (FILE *file, rtx x, int code)
455 if (code == '#')
456 fputc (ASM_DOUBLE_CHAR, file);
457 else if (code == '|')
458 fputs (REGISTER_PREFIX, file);
459 else if (code == 'c')
460 fputs (cond_name (x), file);
461 else if (code == 'C')
462 fputs (rev_cond_name (x), file);
463 else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
464 fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
465 else if (code == 'P' && CONST_INT_P (x))
466 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
467 else if (code == 'N' && CONST_INT_P (x))
468 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
469 /* rotl instruction cannot deal with negative arguments. */
470 else if (code == 'R' && CONST_INT_P (x))
471 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
472 else if (code == 'H' && CONST_INT_P (x))
473 fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
474 else if (code == 'h' && CONST_INT_P (x))
475 fprintf (file, "$%d", (short) - INTVAL (x));
476 else if (code == 'B' && CONST_INT_P (x))
477 fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
478 else if (code == 'b' && CONST_INT_P (x))
479 fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
480 else if (code == 'M' && CONST_INT_P (x))
481 fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
482 else if (REG_P (x))
483 fprintf (file, "%s", reg_names[REGNO (x)]);
484 else if (MEM_P (x))
485 output_address (XEXP (x, 0));
486 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
488 char dstr[30];
489 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
490 sizeof (dstr), 0, 1);
491 fprintf (file, "$0f%s", dstr);
493 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
495 char dstr[30];
496 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
497 sizeof (dstr), 0, 1);
498 fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
500 else
502 if (flag_pic > 1 && symbolic_operand (x, SImode))
504 debug_rtx (x);
505 output_operand_lossage ("symbol used as immediate operand");
507 putc ('$', file);
508 output_addr_const (file, x);
512 const char *
513 cond_name (rtx op)
515 switch (GET_CODE (op))
517 case NE:
518 return "neq";
519 case EQ:
520 return "eql";
521 case GE:
522 return "geq";
523 case GT:
524 return "gtr";
525 case LE:
526 return "leq";
527 case LT:
528 return "lss";
529 case GEU:
530 return "gequ";
531 case GTU:
532 return "gtru";
533 case LEU:
534 return "lequ";
535 case LTU:
536 return "lssu";
538 default:
539 gcc_unreachable ();
543 const char *
544 rev_cond_name (rtx op)
546 switch (GET_CODE (op))
548 case EQ:
549 return "neq";
550 case NE:
551 return "eql";
552 case LT:
553 return "geq";
554 case LE:
555 return "gtr";
556 case GT:
557 return "leq";
558 case GE:
559 return "lss";
560 case LTU:
561 return "gequ";
562 case LEU:
563 return "gtru";
564 case GTU:
565 return "lequ";
566 case GEU:
567 return "lssu";
569 default:
570 gcc_unreachable ();
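/* Return true if C is a floating-point CONST_DOUBLE equal to 0.0, 1.0,
   2.0, another power of two up to 64.0, or the reciprocal of such a
   power of two; vax_rtx_costs below charges these constants less than
   arbitrary floating-point literals.  */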
574 static bool
575 vax_float_literal (rtx c)
577 enum machine_mode mode;
578 REAL_VALUE_TYPE r, s;
579 int i;
581 if (GET_CODE (c) != CONST_DOUBLE)
582 return false;
584 mode = GET_MODE (c);
586 if (c == const_tiny_rtx[(int) mode][0]
587 || c == const_tiny_rtx[(int) mode][1]
588 || c == const_tiny_rtx[(int) mode][2])
589 return true;
591 REAL_VALUE_FROM_CONST_DOUBLE (r, c);
593 for (i = 0; i < 7; i++)
595 int x = 1 << i;
596 bool ok;
597 REAL_VALUE_FROM_INT (s, x, 0, mode);
599 if (REAL_VALUES_EQUAL (r, s))
600 return true;
601 ok = exact_real_inverse (mode, &s);
602 gcc_assert (ok);
603 if (REAL_VALUES_EQUAL (r, s))
604 return true;
606 return false;
610 /* Return the cost in cycles of a memory address, relative to register
611 indirect.
613 Each of the following adds the indicated number of cycles:
615 1 - symbolic address
616 1 - pre-decrement
617 1 - indexing and/or offset(register)
618 2 - indirect */
621 static int
622 vax_address_cost_1 (rtx addr)
624 int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
625 rtx plus_op0 = 0, plus_op1 = 0;
626 restart:
627 switch (GET_CODE (addr))
629 case PRE_DEC:
630 predec = 1;
631 case REG:
632 case SUBREG:
633 case POST_INC:
634 reg = 1;
635 break;
636 case MULT:
637 indexed = 1; /* 2 on VAX 2 */
638 break;
639 case CONST_INT:
640 /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
641 if (offset == 0)
642 offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
643 break;
644 case CONST:
645 case SYMBOL_REF:
646 offset = 1; /* 2 on VAX 2 */
647 break;
648 case LABEL_REF: /* this is probably a byte offset from the pc */
649 if (offset == 0)
650 offset = 1;
651 break;
652 case PLUS:
653 if (plus_op0)
654 plus_op1 = XEXP (addr, 0);
655 else
656 plus_op0 = XEXP (addr, 0);
657 addr = XEXP (addr, 1);
658 goto restart;
659 case MEM:
660 indir = 2; /* 3 on VAX 2 */
661 addr = XEXP (addr, 0);
662 goto restart;
663 default:
664 break;
667 /* Up to 3 things can be added in an address. They are stored in
668 plus_op0, plus_op1, and addr. */
670 if (plus_op0)
672 addr = plus_op0;
673 plus_op0 = 0;
674 goto restart;
676 if (plus_op1)
678 addr = plus_op1;
679 plus_op1 = 0;
680 goto restart;
682 /* Indexing and register+offset can both be used (except on a VAX 2)
683 without increasing execution time over either one alone. */
684 if (reg && indexed && offset)
685 return reg + indir + offset + predec;
686 return reg + indexed + indir + offset + predec;
689 static int
690 vax_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
692 return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
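/* As an example of the scheme above, the indexed address "4(r1)[r2]",
   i.e. (plus (plus (mult (reg r2) (const_int 4)) (reg r1)) (const_int 4)),
   scores 2 in vax_address_cost_1 (one for the register, one for indexing;
   the byte displacement is free), so vax_address_cost reports 3.  */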
695 /* Cost of an expression on a VAX. This version has costs tuned for the
696 CVAX chip (found in the VAX 3 series) with comments for variations on
697 other models.
699 FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
700 and FLOAT_TRUNCATE. We need a -mcpu option to allow provision of
701 costs on a per cpu basis. */
703 static bool
704 vax_rtx_costs (rtx x, int code, int outer_code, int *total,
705 bool speed ATTRIBUTE_UNUSED)
707 enum machine_mode mode = GET_MODE (x);
708 int i = 0; /* may be modified in switch */
709 const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */
711 switch (code)
713 /* On a VAX, constants from 0..63 are cheap because they can use the
714 1-byte literal constant format. A compare with -1 should be made cheap
715 so that decrement-and-branch insns can be formed more easily (if
716 the value -1 is copied to a register, some decrement-and-branch
717 patterns will not match). */
718 case CONST_INT:
719 if (INTVAL (x) == 0)
721 *total = 0;
722 return true;
724 if (outer_code == AND)
726 *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
727 return true;
729 if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
730 || (outer_code == COMPARE
731 && INTVAL (x) == -1)
732 || ((outer_code == PLUS || outer_code == MINUS)
733 && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
735 *total = 1;
736 return true;
738 /* FALLTHRU */
740 case CONST:
741 case LABEL_REF:
742 case SYMBOL_REF:
743 *total = 3;
744 return true;
746 case CONST_DOUBLE:
747 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
748 *total = vax_float_literal (x) ? 5 : 8;
749 else
750 *total = ((CONST_DOUBLE_HIGH (x) == 0
751 && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
752 || (outer_code == PLUS
753 && CONST_DOUBLE_HIGH (x) == -1
754 && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
755 ? 2 : 5;
756 return true;
758 case POST_INC:
759 *total = 2;
760 return true; /* Implies register operand. */
762 case PRE_DEC:
763 *total = 3;
764 return true; /* Implies register operand. */
766 case MULT:
767 switch (mode)
769 case DFmode:
770 *total = 16; /* 4 on VAX 9000 */
771 break;
772 case SFmode:
773 *total = 9; /* 4 on VAX 9000, 12 on VAX 2 */
774 break;
775 case DImode:
776 *total = 16; /* 6 on VAX 9000, 28 on VAX 2 */
777 break;
778 case SImode:
779 case HImode:
780 case QImode:
781 *total = 10; /* 3-4 on VAX 9000, 20-28 on VAX 2 */
782 break;
783 default:
784 *total = MAX_COST; /* Mode is not supported. */
785 return true;
787 break;
789 case UDIV:
790 if (mode != SImode)
792 *total = MAX_COST; /* Mode is not supported. */
793 return true;
795 *total = 17;
796 break;
798 case DIV:
799 if (mode == DImode)
800 *total = 30; /* Highly variable. */
801 else if (mode == DFmode)
802 /* divide takes 28 cycles if the result is not zero, 13 otherwise */
803 *total = 24;
804 else
805 *total = 11; /* 25 on VAX 2 */
806 break;
808 case MOD:
809 *total = 23;
810 break;
812 case UMOD:
813 if (mode != SImode)
815 *total = MAX_COST; /* Mode is not supported. */
816 return true;
818 *total = 29;
819 break;
821 case FLOAT:
822 *total = (6 /* 4 on VAX 9000 */
823 + (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
824 break;
826 case FIX:
827 *total = 7; /* 17 on VAX 2 */
828 break;
830 case ASHIFT:
831 case LSHIFTRT:
832 case ASHIFTRT:
833 if (mode == DImode)
834 *total = 12;
835 else
836 *total = 10; /* 6 on VAX 9000 */
837 break;
839 case ROTATE:
840 case ROTATERT:
841 *total = 6; /* 5 on VAX 2, 4 on VAX 9000 */
842 if (CONST_INT_P (XEXP (x, 1)))
843 fmt = "e"; /* all constant rotate counts are short */
844 break;
846 case PLUS:
847 case MINUS:
848 *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
849 /* Small integer operands can use subl2 and addl2. */
850 if ((CONST_INT_P (XEXP (x, 1)))
851 && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
852 fmt = "e";
853 break;
855 case IOR:
856 case XOR:
857 *total = 3;
858 break;
860 case AND:
861 /* AND is special because the first operand is complemented. */
862 *total = 3;
863 if (CONST_INT_P (XEXP (x, 0)))
865 if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
866 *total = 4;
867 fmt = "e";
868 i = 1;
870 break;
872 case NEG:
873 if (mode == DFmode)
874 *total = 9;
875 else if (mode == SFmode)
876 *total = 6;
877 else if (mode == DImode)
878 *total = 4;
879 else
880 *total = 2;
881 break;
883 case NOT:
884 *total = 2;
885 break;
887 case ZERO_EXTRACT:
888 case SIGN_EXTRACT:
889 *total = 15;
890 break;
892 case MEM:
893 if (mode == DImode || mode == DFmode)
894 *total = 5; /* 7 on VAX 2 */
895 else
896 *total = 3; /* 4 on VAX 2 */
897 x = XEXP (x, 0);
898 if (!REG_P (x) && GET_CODE (x) != POST_INC)
899 *total += vax_address_cost_1 (x);
900 return true;
902 case FLOAT_EXTEND:
903 case FLOAT_TRUNCATE:
904 case TRUNCATE:
905 *total = 3; /* FIXME: Costs need to be checked */
906 break;
908 default:
909 return false;
912 /* Now look inside the expression. Operands which are not registers or
913 short constants add to the cost.
915 FMT and I may have been adjusted in the switch above for instructions
916 which require special handling. */
918 while (*fmt++ == 'e')
920 rtx op = XEXP (x, i);
922 i += 1;
923 code = GET_CODE (op);
925 /* A NOT is likely to be found as the first operand of an AND
926 (in which case the relevant cost is of the operand inside
927 the not) and not likely to be found anywhere else. */
928 if (code == NOT)
929 op = XEXP (op, 0), code = GET_CODE (op);
931 switch (code)
933 case CONST_INT:
934 if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
935 && GET_MODE (x) != QImode)
936 *total += 1; /* 2 on VAX 2 */
937 break;
938 case CONST:
939 case LABEL_REF:
940 case SYMBOL_REF:
941 *total += 1; /* 2 on VAX 2 */
942 break;
943 case CONST_DOUBLE:
944 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
946 /* Registers are faster than floating point constants -- even
947 those constants which can be encoded in a single byte. */
948 if (vax_float_literal (op))
949 *total += 1;
950 else
951 *total += (GET_MODE (x) == DFmode) ? 3 : 2;
953 else
955 if (CONST_DOUBLE_HIGH (op) != 0
956 || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
957 *total += 2;
959 break;
960 case MEM:
961 *total += 1; /* 2 on VAX 2 */
962 if (!REG_P (XEXP (op, 0)))
963 *total += vax_address_cost_1 (XEXP (op, 0));
964 break;
965 case REG:
966 case SUBREG:
967 break;
968 default:
969 *total += 1;
970 break;
973 return true;
976 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
977 Used for C++ multiple inheritance.
978 .mask ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11> #conservative entry mask
979 addl2 $DELTA, 4(ap) #adjust first argument
980 jmp FUNCTION+2 #jump beyond FUNCTION's entry mask
983 static void
984 vax_output_mi_thunk (FILE * file,
985 tree thunk ATTRIBUTE_UNUSED,
986 HOST_WIDE_INT delta,
987 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
988 tree function)
990 fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
991 asm_fprintf (file, ",4(%Rap)\n");
992 fprintf (file, "\tjmp ");
993 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
994 fprintf (file, "+2\n");
997 static rtx
998 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
999 int incoming ATTRIBUTE_UNUSED)
1001 return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
1004 static rtx
1005 vax_builtin_setjmp_frame_value (void)
1007 return hard_frame_pointer_rtx;
1010 /* Worker function for NOTICE_UPDATE_CC. */
1012 void
1013 vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
1015 if (GET_CODE (exp) == SET)
1017 if (GET_CODE (SET_SRC (exp)) == CALL)
1018 CC_STATUS_INIT;
1019 else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
1020 && GET_CODE (SET_DEST (exp)) != PC)
1022 cc_status.flags = 0;
1023 /* The integer operations below don't set carry or
1024 set it in an incompatible way. That's ok though
1025 as the Z bit is all we need when doing unsigned
1026 comparisons on the result of these insns (since
1027 they're always with 0). Set CC_NO_OVERFLOW to
1028 generate the correct unsigned branches. */
1029 switch (GET_CODE (SET_SRC (exp)))
1031 case NEG:
1032 if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
1033 break;
1034 case AND:
1035 case IOR:
1036 case XOR:
1037 case NOT:
1038 case MEM:
1039 case REG:
1040 cc_status.flags = CC_NO_OVERFLOW;
1041 break;
1042 default:
1043 break;
1045 cc_status.value1 = SET_DEST (exp);
1046 cc_status.value2 = SET_SRC (exp);
1049 else if (GET_CODE (exp) == PARALLEL
1050 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
1052 if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
1053 CC_STATUS_INIT;
1054 else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
1056 cc_status.flags = 0;
1057 cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
1058 cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
1060 else
1061 /* PARALLELs whose first element sets the PC are aob,
1062 sob insns. They do change the cc's. */
1063 CC_STATUS_INIT;
1065 else
1066 CC_STATUS_INIT;
1067 if (cc_status.value1 && REG_P (cc_status.value1)
1068 && cc_status.value2
1069 && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
1070 cc_status.value2 = 0;
1071 if (cc_status.value1 && MEM_P (cc_status.value1)
1072 && cc_status.value2
1073 && MEM_P (cc_status.value2))
1074 cc_status.value2 = 0;
1075 /* Actual condition, one line up, should be that value2's address
1076 depends on value1, but that is too much of a pain. */
1079 /* Output integer move instructions. */
1081 const char *
1082 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1083 enum machine_mode mode)
1085 rtx hi[3], lo[3];
1086 const char *pattern_hi, *pattern_lo;
1088 switch (mode)
1090 case DImode:
1091 if (operands[1] == const0_rtx)
1092 return "clrq %0";
1093 if (TARGET_QMATH && optimize_size
1094 && (CONST_INT_P (operands[1])
1095 || GET_CODE (operands[1]) == CONST_DOUBLE))
1097 unsigned HOST_WIDE_INT hval, lval;
1098 int n;
1100 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1102 gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1104 /* Make sure only the low 32 bits are valid. */
1105 lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1106 hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1108 else
1110 lval = INTVAL (operands[1]);
1111 hval = 0;
1114 /* Here we see if the 64-bit value is really a 6-bit value shifted
1115 left by some arbitrary amount. If so, we can use ashq to
1116 shift it to the correct value, saving 7 bytes (1 addr-mode byte +
1117 8 immediate bytes - 1 shift byte - 1 short literal byte). */
1118 if (lval != 0
1119 && (n = exact_log2 (lval & (- lval))) != -1
1120 && (lval >> n) < 64)
1122 lval >>= n;
1124 #if HOST_BITS_PER_WIDE_INT == 32
1125 /* On 32-bit hosts, if the 6 bits didn't overflow into the
1126 upper 32-bit value, that value had better be 0. If we have
1127 overflowed, make sure it wasn't by too much. */
1128 if (hval != 0)
1130 if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1131 n = 0; /* failure */
1132 else
1133 lval |= hval << (32 - n);
1135 #endif
1136 /* If n is 0, then ashq is not the best way to emit this. */
1137 if (n > 0)
1139 operands[1] = GEN_INT (lval);
1140 operands[2] = GEN_INT (n);
1141 return "ashq %2,%1,%0";
1143 #if HOST_BITS_PER_WIDE_INT == 32
1145 /* On 32-bit hosts, if the low 32-bit value is 0, check the
1146 upper 32-bit value. */
1147 else if (hval != 0
1148 && (n = exact_log2 (hval & (- hval)) - 1) != -1
1149 && (hval >> n) < 64)
1151 operands[1] = GEN_INT (hval >> n);
1152 operands[2] = GEN_INT (n + 32);
1153 return "ashq %2,%1,%0";
1154 #endif
1158 if (TARGET_QMATH
1159 && (!MEM_P (operands[0])
1160 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1161 || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1162 || !illegal_addsub_di_memory_operand (operands[0], DImode))
1163 && ((CONST_INT_P (operands[1])
1164 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1165 || GET_CODE (operands[1]) == CONST_DOUBLE))
1167 hi[0] = operands[0];
1168 hi[1] = operands[1];
1170 split_quadword_operands (insn, SET, hi, lo, 2);
1172 pattern_lo = vax_output_int_move (NULL, lo, SImode);
1173 pattern_hi = vax_output_int_move (NULL, hi, SImode);
1175 /* If the patterns are just movl/movl or pushl/pushl then a movq will
1176 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1177 bytes vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate
1178 value bytes). */
1179 if ((!strncmp (pattern_lo, "movl", 4)
1180 && !strncmp (pattern_hi, "movl", 4))
1181 || (!strncmp (pattern_lo, "pushl", 5)
1182 && !strncmp (pattern_hi, "pushl", 5)))
1183 return "movq %1,%0";
1185 if (MEM_P (operands[0])
1186 && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1188 output_asm_insn (pattern_hi, hi);
1189 operands[0] = lo[0];
1190 operands[1] = lo[1];
1191 operands[2] = lo[2];
1192 return pattern_lo;
1194 else
1196 output_asm_insn (pattern_lo, lo);
1197 operands[0] = hi[0];
1198 operands[1] = hi[1];
1199 operands[2] = hi[2];
1200 return pattern_hi;
1203 return "movq %1,%0";
1205 case SImode:
1206 if (symbolic_operand (operands[1], SImode))
1208 if (push_operand (operands[0], SImode))
1209 return "pushab %a1";
1210 return "movab %a1,%0";
1213 if (operands[1] == const0_rtx)
1215 if (push_operand (operands[0], SImode))
1216 return "pushl %1";
1217 return "clrl %0";
1220 if (CONST_INT_P (operands[1])
1221 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1223 HOST_WIDE_INT i = INTVAL (operands[1]);
1224 int n;
1225 if ((unsigned HOST_WIDE_INT)(~i) < 64)
1226 return "mcoml %N1,%0";
1227 if ((unsigned HOST_WIDE_INT)i < 0x100)
1228 return "movzbl %1,%0";
1229 if (i >= -0x80 && i < 0)
1230 return "cvtbl %1,%0";
1231 if (optimize_size
1232 && (n = exact_log2 (i & (-i))) != -1
1233 && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1235 operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1236 operands[2] = GEN_INT (n);
1237 return "ashl %2,%1,%0";
1239 if ((unsigned HOST_WIDE_INT)i < 0x10000)
1240 return "movzwl %1,%0";
1241 if (i >= -0x8000 && i < 0)
1242 return "cvtwl %1,%0";
1244 if (push_operand (operands[0], SImode))
1245 return "pushl %1";
1246 return "movl %1,%0";
1248 case HImode:
1249 if (CONST_INT_P (operands[1]))
1251 HOST_WIDE_INT i = INTVAL (operands[1]);
1252 if (i == 0)
1253 return "clrw %0";
1254 else if ((unsigned HOST_WIDE_INT)i < 64)
1255 return "movw %1,%0";
1256 else if ((unsigned HOST_WIDE_INT)~i < 64)
1257 return "mcomw %H1,%0";
1258 else if ((unsigned HOST_WIDE_INT)i < 256)
1259 return "movzbw %1,%0";
1260 else if (i >= -0x80 && i < 0)
1261 return "cvtbw %1,%0";
1263 return "movw %1,%0";
1265 case QImode:
1266 if (CONST_INT_P (operands[1]))
1268 HOST_WIDE_INT i = INTVAL (operands[1]);
1269 if (i == 0)
1270 return "clrb %0";
1271 else if ((unsigned HOST_WIDE_INT)~i < 64)
1272 return "mcomb %B1,%0";
1274 return "movb %1,%0";
1276 default:
1277 gcc_unreachable ();
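/* A few sample SImode choices made by the code above, writing "dst" for
   whatever operand 0 happens to be: constant 0 becomes "clrl dst",
   128 becomes "movzbl $128,dst", -100 becomes "cvtbl $-100,dst", and
   under -Os a constant such as 1<<20 is rebuilt as "ashl $20,$1,dst"
   rather than spending four immediate bytes.  */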
1281 /* Output integer add instructions.
1283 The space-time-opcode tradeoffs for addition vary by model of VAX.
1285 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1286 but it is not faster on other models.
1288 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1289 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1290 a register is used in an address too soon after it is set.
1291 Compromise by using movab only when it is shorter than the add
1292 or the base register in the address is one of sp, ap, and fp,
1293 which are not modified very often. */
1295 const char *
1296 vax_output_int_add (rtx insn, rtx *operands, enum machine_mode mode)
1298 switch (mode)
1300 case DImode:
1302 rtx low[3];
1303 const char *pattern;
1304 int carry = 1;
1305 bool sub;
1307 if (TARGET_QMATH && 0)
1308 debug_rtx (insn);
1310 split_quadword_operands (insn, PLUS, operands, low, 3);
1312 if (TARGET_QMATH)
1314 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1315 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1316 gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1317 gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1318 #endif
1320 /* No reason to add a 0 to the low part and thus no carry, so just
1321 emit the appropriate add/sub instruction. */
1322 if (low[2] == const0_rtx)
1323 return vax_output_int_add (NULL, operands, SImode);
1325 /* Are we doing addition or subtraction? */
1326 sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1328 /* We can't use vax_output_int_add since some of the patterns don't
1329 modify the carry bit. */
1330 if (sub)
1332 if (low[2] == constm1_rtx)
1333 pattern = "decl %0";
1334 else
1335 pattern = "subl2 $%n2,%0";
1337 else
1339 if (low[2] == const1_rtx)
1340 pattern = "incl %0";
1341 else
1342 pattern = "addl2 %2,%0";
1344 output_asm_insn (pattern, low);
1346 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1347 two 32-bit parts, we complement each and then add one to the
1348 low part. We know that the low part can't overflow since
1349 its value can never be 0. */
1350 if (sub)
1351 return "sbwc %N2,%0";
1352 return "adwc %2,%0";
1355 /* Add low parts. */
1356 if (rtx_equal_p (operands[0], operands[1]))
1358 if (low[2] == const0_rtx)
1359 /* Should examine operand, punt if not POST_INC. */
1360 pattern = "tstl %0", carry = 0;
1361 else if (low[2] == const1_rtx)
1362 pattern = "incl %0";
1363 else
1364 pattern = "addl2 %2,%0";
1366 else
1368 if (low[2] == const0_rtx)
1369 pattern = "movl %1,%0", carry = 0;
1370 else
1371 pattern = "addl3 %2,%1,%0";
1373 if (pattern)
1374 output_asm_insn (pattern, low);
1375 if (!carry)
1376 /* If CARRY is 0, we don't have any carry value to worry about. */
1377 return get_insn_template (CODE_FOR_addsi3, insn);
1378 /* %0 = C + %1 + %2 */
1379 if (!rtx_equal_p (operands[0], operands[1]))
1380 output_asm_insn ((operands[1] == const0_rtx
1381 ? "clrl %0"
1382 : "movl %1,%0"), operands);
1383 return "adwc %2,%0";
1386 case SImode:
1387 if (rtx_equal_p (operands[0], operands[1]))
1389 if (operands[2] == const1_rtx)
1390 return "incl %0";
1391 if (operands[2] == constm1_rtx)
1392 return "decl %0";
1393 if (CONST_INT_P (operands[2])
1394 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1395 return "subl2 $%n2,%0";
1396 if (CONST_INT_P (operands[2])
1397 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1398 && REG_P (operands[1])
1399 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1400 || REGNO (operands[1]) > 11))
1401 return "movab %c2(%1),%0";
1402 if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1403 return "movab %a2[%0],%0";
1404 return "addl2 %2,%0";
1407 if (rtx_equal_p (operands[0], operands[2]))
1409 if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1410 return "movab %a1[%0],%0";
1411 return "addl2 %1,%0";
1414 if (CONST_INT_P (operands[2])
1415 && INTVAL (operands[2]) < 32767
1416 && INTVAL (operands[2]) > -32768
1417 && REG_P (operands[1])
1418 && push_operand (operands[0], SImode))
1419 return "pushab %c2(%1)";
1421 if (CONST_INT_P (operands[2])
1422 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1423 return "subl3 $%n2,%1,%0";
1425 if (CONST_INT_P (operands[2])
1426 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1427 && REG_P (operands[1])
1428 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1429 || REGNO (operands[1]) > 11))
1430 return "movab %c2(%1),%0";
1432 /* Add this if using gcc on a VAX 3xxx:
1433 if (REG_P (operands[1]) && REG_P (operands[2]))
1434 return "movab (%1)[%2],%0";
1437 if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1439 if (push_operand (operands[0], SImode))
1440 return "pushab %a2[%1]";
1441 return "movab %a2[%1],%0";
1444 if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1446 if (push_operand (operands[0], SImode))
1447 return "pushab %a1[%2]";
1448 return "movab %a1[%2],%0";
1451 if (flag_pic && REG_P (operands[0])
1452 && symbolic_operand (operands[2], SImode))
1453 return "movab %a2,%0;addl2 %1,%0";
1455 if (flag_pic
1456 && (symbolic_operand (operands[1], SImode)
1457 || symbolic_operand (operands[2], SImode)))
1458 debug_rtx (insn);
1460 return "addl3 %1,%2,%0";
1462 case HImode:
1463 if (rtx_equal_p (operands[0], operands[1]))
1465 if (operands[2] == const1_rtx)
1466 return "incw %0";
1467 if (operands[2] == constm1_rtx)
1468 return "decw %0";
1469 if (CONST_INT_P (operands[2])
1470 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1471 return "subw2 $%n2,%0";
1472 return "addw2 %2,%0";
1474 if (rtx_equal_p (operands[0], operands[2]))
1475 return "addw2 %1,%0";
1476 if (CONST_INT_P (operands[2])
1477 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1478 return "subw3 $%n2,%1,%0";
1479 return "addw3 %1,%2,%0";
1481 case QImode:
1482 if (rtx_equal_p (operands[0], operands[1]))
1484 if (operands[2] == const1_rtx)
1485 return "incb %0";
1486 if (operands[2] == constm1_rtx)
1487 return "decb %0";
1488 if (CONST_INT_P (operands[2])
1489 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1490 return "subb2 $%n2,%0";
1491 return "addb2 %2,%0";
1493 if (rtx_equal_p (operands[0], operands[2]))
1494 return "addb2 %1,%0";
1495 if (CONST_INT_P (operands[2])
1496 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1497 return "subb3 $%n2,%1,%0";
1498 return "addb3 %1,%2,%0";
1500 default:
1501 gcc_unreachable ();
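/* Following the movab-vs-add compromise described above, "r0 = r0 + 1"
   comes out as "incl r0", "r0 = r0 - 5" as "subl2 $5,r0" (the negated
   constant is a short literal), and "r0 = r1 + 300" as "movab 300(r1),r0"
   since 300 is too big for a literal but fits a word displacement.  */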
1505 const char *
1506 vax_output_int_subtract (rtx insn, rtx *operands, enum machine_mode mode)
1508 switch (mode)
1510 case DImode:
1512 rtx low[3];
1513 const char *pattern;
1514 int carry = 1;
1516 if (TARGET_QMATH && 0)
1517 debug_rtx (insn);
1519 split_quadword_operands (insn, MINUS, operands, low, 3);
1521 if (TARGET_QMATH)
1523 if (operands[1] == const0_rtx && low[1] == const0_rtx)
1525 /* Negation is tricky. It's basically complement and increment.
1526 Negate hi, then lo, and subtract the carry back. */
1527 if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
1528 || (MEM_P (operands[0])
1529 && GET_CODE (XEXP (operands[0], 0)) == POST_INC))
1530 fatal_insn ("illegal operand detected", insn);
1531 output_asm_insn ("mnegl %2,%0", operands);
1532 output_asm_insn ("mnegl %2,%0", low);
1533 return "sbwc $0,%0";
1535 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1536 gcc_assert (rtx_equal_p (low[0], low[1]));
1537 if (low[2] == const1_rtx)
1538 output_asm_insn ("decl %0", low);
1539 else
1540 output_asm_insn ("subl2 %2,%0", low);
1541 return "sbwc %2,%0";
1544 /* Subtract low parts. */
1545 if (rtx_equal_p (operands[0], operands[1]))
1547 if (low[2] == const0_rtx)
1548 pattern = 0, carry = 0;
1549 else if (low[2] == constm1_rtx)
1550 pattern = "decl %0";
1551 else
1552 pattern = "subl2 %2,%0";
1554 else
1556 if (low[2] == constm1_rtx)
1557 pattern = "decl %0";
1558 else if (low[2] == const0_rtx)
1559 pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
1560 else
1561 pattern = "subl3 %2,%1,%0";
1563 if (pattern)
1564 output_asm_insn (pattern, low);
1565 if (carry)
1567 if (!rtx_equal_p (operands[0], operands[1]))
1568 return "movl %1,%0;sbwc %2,%0";
1569 return "sbwc %2,%0";
1570 /* %0 = %2 - %1 - C */
1572 return get_insn_template (CODE_FOR_subsi3, insn);
1575 default:
1576 gcc_unreachable ();
1580 /* True if X is an rtx for a constant that is a valid address. */
1582 bool
1583 legitimate_constant_address_p (rtx x)
1585 if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1586 || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1587 return true;
1588 if (GET_CODE (x) != CONST)
1589 return false;
1590 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1591 if (flag_pic
1592 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1593 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1594 return false;
1595 #endif
1596 return true;
1599 /* True if the constant value X is a legitimate general operand.
1600 It is given that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
1602 bool
1603 legitimate_constant_p (rtx x ATTRIBUTE_UNUSED)
1605 return true;
1608 /* The other macros defined here are used only in legitimate_address_p (). */
1610 /* Nonzero if X is a hard reg that can be used as an index
1611 or, if not strict, if it is a pseudo reg. */
1612 #define INDEX_REGISTER_P(X, STRICT) \
1613 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))
1615 /* Nonzero if X is a hard reg that can be used as a base reg
1616 or, if not strict, if it is a pseudo reg. */
1617 #define BASE_REGISTER_P(X, STRICT) \
1618 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1620 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1622 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1623 are no SYMBOL_REFs for external symbols present. */
1625 static bool
1626 indirectable_constant_address_p (rtx x, bool indirect)
1628 if (GET_CODE (x) == SYMBOL_REF)
1629 return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;
1631 if (GET_CODE (x) == CONST)
1632 return !flag_pic
1633 || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1634 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));
1636 return CONSTANT_ADDRESS_P (x);
1639 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1641 static bool
1642 indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
1644 return CONSTANT_ADDRESS_P (x);
1647 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1649 /* True if X is an address which can be indirected. External symbols
1650 could be in a sharable image library, so we disallow those. */
1652 static bool
1653 indirectable_address_p (rtx x, bool strict, bool indirect)
1655 if (indirectable_constant_address_p (x, indirect)
1656 || BASE_REGISTER_P (x, strict))
1657 return true;
1658 if (GET_CODE (x) != PLUS
1659 || !BASE_REGISTER_P (XEXP (x, 0), strict)
1660 || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1661 return false;
1662 return indirectable_constant_address_p (XEXP (x, 1), indirect);
1665 /* Return true if x is a valid address not using indexing.
1666 (This much is the easy part.) */
1667 static bool
1668 nonindexed_address_p (rtx x, bool strict)
1670 rtx xfoo0;
1671 if (REG_P (x))
1673 extern rtx *reg_equiv_mem;
1674 if (! reload_in_progress
1675 || reg_equiv_mem[REGNO (x)] == 0
1676 || indirectable_address_p (reg_equiv_mem[REGNO (x)], strict, false))
1677 return true;
1679 if (indirectable_constant_address_p (x, false))
1680 return true;
1681 if (indirectable_address_p (x, strict, false))
1682 return true;
1683 xfoo0 = XEXP (x, 0);
1684 if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
1685 return true;
1686 if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1687 && BASE_REGISTER_P (xfoo0, strict))
1688 return true;
1689 return false;
1692 /* True if PROD is either a reg times the size of mode MODE (with MODE no
1693 larger than 8 bytes), or just a reg if MODE is one byte. */
1695 static bool
1696 index_term_p (rtx prod, enum machine_mode mode, bool strict)
1698 rtx xfoo0, xfoo1;
1700 if (GET_MODE_SIZE (mode) == 1)
1701 return BASE_REGISTER_P (prod, strict);
1703 if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1704 return false;
1706 xfoo0 = XEXP (prod, 0);
1707 xfoo1 = XEXP (prod, 1);
1709 if (CONST_INT_P (xfoo0)
1710 && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1711 && INDEX_REGISTER_P (xfoo1, strict))
1712 return true;
1714 if (CONST_INT_P (xfoo1)
1715 && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1716 && INDEX_REGISTER_P (xfoo0, strict))
1717 return true;
1719 return false;
1722 /* Return true if X is the sum of a register
1723 and a valid index term for mode MODE. */
1724 static bool
1725 reg_plus_index_p (rtx x, enum machine_mode mode, bool strict)
1727 rtx xfoo0, xfoo1;
1729 if (GET_CODE (x) != PLUS)
1730 return false;
1732 xfoo0 = XEXP (x, 0);
1733 xfoo1 = XEXP (x, 1);
1735 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1736 return true;
1738 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1739 return true;
1741 return false;
1744 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1745 static bool
1746 indexable_address_p (rtx xfoo0, rtx xfoo1, enum machine_mode mode, bool strict)
1748 if (!CONSTANT_ADDRESS_P (xfoo0))
1749 return false;
1750 if (BASE_REGISTER_P (xfoo1, strict))
1751 return !flag_pic || mode == QImode;
1752 if (flag_pic && symbolic_operand (xfoo0, SImode))
1753 return false;
1754 return reg_plus_index_p (xfoo1, mode, strict);
1757 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1758 that is a valid memory address for an instruction.
1759 The MODE argument is the machine mode for the MEM expression
1760 that wants to use this address. */
1761 bool
1762 vax_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1764 rtx xfoo0, xfoo1;
1766 if (nonindexed_address_p (x, strict))
1767 return true;
1769 if (GET_CODE (x) != PLUS)
1770 return false;
1772 /* Handle <address>[index] represented with index-sum outermost */
1774 xfoo0 = XEXP (x, 0);
1775 xfoo1 = XEXP (x, 1);
1777 if (index_term_p (xfoo0, mode, strict)
1778 && nonindexed_address_p (xfoo1, strict))
1779 return true;
1781 if (index_term_p (xfoo1, mode, strict)
1782 && nonindexed_address_p (xfoo0, strict))
1783 return true;
1785 /* Handle offset(reg)[index] with offset added outermost */
1787 if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1788 || indexable_address_p (xfoo1, xfoo0, mode, strict))
1789 return true;
1791 return false;
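/* Under these predicates an SImode operand may use, for example, (r1),
   4(r1), (r1)+, -(r1), a symbolic address, the indexed form 4(r1)[r2],
   or (in non-PIC code) symbol[r2]; the index register is implicitly
   scaled by the operand size, so the MULT factor in the RTL must equal
   GET_MODE_SIZE of the mode.  */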
1794 /* Return true if x (a legitimate address expression) has an effect that
1795 depends on the machine mode it is used for. On the VAX, predecrement
1796 and postincrement addresses depend thus (the amount of decrement or
1797 increment being the length of the operand), and all indexed addresses
1798 depend thus (because the index scale factor is the length of the operand). */
1800 bool
1801 vax_mode_dependent_address_p (rtx x)
1803 rtx xfoo0, xfoo1;
1805 /* Auto-increment cases are now dealt with generically in recog.c. */
1806 if (GET_CODE (x) != PLUS)
1807 return false;
1809 xfoo0 = XEXP (x, 0);
1810 xfoo1 = XEXP (x, 1);
1812 if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1813 return false;
1814 if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1815 return false;
1816 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1817 return false;
1818 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
1819 return false;
1821 return true;
1824 static rtx
1825 fixup_mathdi_operand (rtx x, enum machine_mode mode)
1827 if (illegal_addsub_di_memory_operand (x, mode))
1829 rtx addr = XEXP (x, 0);
1830 rtx temp = gen_reg_rtx (Pmode);
1831 rtx offset = 0;
1832 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1833 if (GET_CODE (addr) == CONST && flag_pic)
1835 offset = XEXP (XEXP (addr, 0), 1);
1836 addr = XEXP (XEXP (addr, 0), 0);
1838 #endif
1839 emit_move_insn (temp, addr);
1840 if (offset)
1841 temp = gen_rtx_PLUS (Pmode, temp, offset);
1842 x = gen_rtx_MEM (DImode, temp);
1844 return x;
1847 void
1848 vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
1850 int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
1851 rtx temp;
1853 rtx (*gen_old_insn)(rtx, rtx, rtx);
1854 rtx (*gen_si_insn)(rtx, rtx, rtx);
1855 rtx (*gen_insn)(rtx, rtx, rtx);
1857 if (code == PLUS)
1859 gen_old_insn = gen_adddi3_old;
1860 gen_si_insn = gen_addsi3;
1861 gen_insn = gen_adcdi3;
1863 else if (code == MINUS)
1865 gen_old_insn = gen_subdi3_old;
1866 gen_si_insn = gen_subsi3;
1867 gen_insn = gen_sbcdi3;
1869 else
1870 gcc_unreachable ();
1872 /* If this is addition (thus operands are commutative) and if there is one
1873 addend that duplicates the destination, we want that addend to be the
1874 first addend. */
1875 if (code == PLUS
1876 && rtx_equal_p (operands[0], operands[2])
1877 && !rtx_equal_p (operands[1], operands[2]))
1879 temp = operands[2];
1880 operands[2] = operands[1];
1881 operands[1] = temp;
1884 if (!TARGET_QMATH)
1886 emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
1888 else if (hi_only)
1890 if (!rtx_equal_p (operands[0], operands[1])
1891 && (REG_P (operands[0]) && MEM_P (operands[1])))
1893 emit_move_insn (operands[0], operands[1]);
1894 operands[1] = operands[0];
1897 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1898 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1899 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1901 if (!rtx_equal_p (operands[0], operands[1]))
1902 emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
1903 operand_subword (operands[1], 0, 0, DImode));
1905 emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
1906 operand_subword (operands[1], 1, 0, DImode),
1907 operand_subword (operands[2], 1, 0, DImode)));
1909 else
1911 /* If we are adding the same value together, that's really a multiply by 2,
1912 and that's just a left shift of 1. */
1913 if (rtx_equal_p (operands[1], operands[2]))
1915 gcc_assert (code != MINUS);
1916 emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
1917 return;
1920 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1922 /* If an operand is the same as operands[0], use the operands[0] rtx
1923 because fixup will return an equivalent rtx but not an equal one. */
1925 if (rtx_equal_p (operands[0], operands[1]))
1926 operands[1] = operands[0];
1927 else
1928 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1930 if (rtx_equal_p (operands[0], operands[2]))
1931 operands[2] = operands[0];
1932 else
1933 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1935 /* If we are subtracting not from ourselves [d = a - b], then because the
1936 carry ops are two-operand only, we would need to do a move prior to
1937 the subtract. And if d == b, we would need a temp; otherwise
1938 [d = a, d -= d] and we end up with 0. Instead we rewrite d = a - b
1939 into d = -b, d += a. Since -b can never overflow, even if b == d,
1940 no temp is needed.
1942 If we are doing addition, since the carry ops are two operand, if
1943 we aren't adding to ourselves, move the first addend to the
1944 destination first. */
1946 gcc_assert (operands[1] != const0_rtx || code == MINUS);
1947 if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
1949 if (code == MINUS && CONSTANT_P (operands[1]))
1951 temp = gen_reg_rtx (DImode);
1952 emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
1953 code = PLUS;
1954 gen_insn = gen_adcdi3;
1955 operands[2] = operands[1];
1956 operands[1] = operands[0];
1958 else
1959 emit_move_insn (operands[0], operands[1]);
1962 /* Subtracting a constant will have been rewritten to an addition of the
1963 negative of that constant before we get here. */
1964 gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
1965 emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
1969 bool
1970 adjacent_operands_p (rtx lo, rtx hi, enum machine_mode mode)
1972 HOST_WIDE_INT lo_offset;
1973 HOST_WIDE_INT hi_offset;
1975 if (GET_CODE (lo) != GET_CODE (hi))
1976 return false;
1978 if (REG_P (lo))
1979 return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
1980 if (CONST_INT_P (lo))
1981 return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
1982 if (CONST_INT_P (lo))
1983 return mode != SImode;
1985 if (!MEM_P (lo))
1986 return false;
1988 if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
1989 return false;
1991 lo = XEXP (lo, 0);
1992 hi = XEXP (hi, 0);
1994 if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
1995 return rtx_equal_p (lo, hi);
1997 switch (GET_CODE (lo))
1999 case REG:
2000 case SYMBOL_REF:
2001 lo_offset = 0;
2002 break;
2003 case CONST:
2004 lo = XEXP (lo, 0);
2005 /* FALLTHROUGH */
2006 case PLUS:
2007 if (!CONST_INT_P (XEXP (lo, 1)))
2008 return false;
2009 lo_offset = INTVAL (XEXP (lo, 1));
2010 lo = XEXP (lo, 0);
2011 break;
2012 default:
2013 return false;
2016 switch (GET_CODE (hi))
2018 case REG:
2019 case SYMBOL_REF:
2020 hi_offset = 0;
2021 break;
2022 case CONST:
2023 hi = XEXP (hi, 0);
2024 /* FALLTHROUGH */
2025 case PLUS:
2026 if (!CONST_INT_P (XEXP (hi, 1)))
2027 return false;
2028 hi_offset = INTVAL (XEXP (hi, 1));
2029 hi = XEXP (hi, 0);
2030 break;
2031 default:
2032 return false;
2035 if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
2036 return false;
2038 return rtx_equal_p (lo, hi)
2039 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
2042 /* Output assembler code for a block containing the constant parts
2043 of a trampoline, leaving space for the variable parts. */
2045 /* On the VAX, the trampoline contains an entry mask and two instructions:
2046 .word NN
2047 movl $STATIC,r0 (store the function's static chain)
2048 jmp *$FUNCTION (jump to function code at address FUNCTION) */
2050 static void
2051 vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
2053 assemble_aligned_integer (2, const0_rtx);
2054 assemble_aligned_integer (2, GEN_INT (0x8fd0));
2055 assemble_aligned_integer (4, const0_rtx);
2056 assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
2057 assemble_aligned_integer (2, GEN_INT (0x9f17));
2058 assemble_aligned_integer (4, const0_rtx);
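/* Decoded (bytes are stored little-endian), the words above are: a zero
   entry mask, the bytes d0 8f (roughly, "movl" with an immediate source),
   a 4-byte hole for the static chain value, the register operand
   specifier 0x50 + STATIC_CHAIN_REGNUM, the bytes 17 9f ("jmp" through an
   absolute @# operand), and a 4-byte hole for the target address, which
   is the layout vax_trampoline_init fills in at offsets 0, 4 and 11.  */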
2061 /* We copy the register-mask from the function's pure code
2062 to the start of the trampoline. */
2064 static void
2065 vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
2067 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2068 rtx mem;
2070 emit_block_move (m_tramp, assemble_trampoline_template (),
2071 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2073 mem = adjust_address (m_tramp, HImode, 0);
2074 emit_move_insn (mem, gen_const_mem (HImode, fnaddr));
2076 mem = adjust_address (m_tramp, SImode, 4);
2077 emit_move_insn (mem, cxt);
2078 mem = adjust_address (m_tramp, SImode, 11);
2079 emit_move_insn (mem, plus_constant (fnaddr, 2));
2080 emit_insn (gen_sync_istream ());
2083 /* Value is the number of bytes of arguments automatically
2084 popped when returning from a subroutine call.
2085 FUNDECL is the declaration node of the function (as a tree),
2086 FUNTYPE is the data type of the function (as a tree),
2087 or for a library call it is an identifier node for the subroutine name.
2088 SIZE is the number of bytes of arguments passed on the stack.
2090 On the VAX, the RET insn pops a maximum of 255 args for any function. */
2092 static int
2093 vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2094 tree funtype ATTRIBUTE_UNUSED, int size)
2096 return size > 255 * 4 ? 0 : size;
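/* For instance, a callee taking 12 bytes of stack arguments lets RET pop
   those 12 bytes itself, while one taking 2048 bytes (more than 255
   longwords) returns 0 here and leaves the popping to the caller.  */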