1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "coretypes.h"
30 #include "hard-reg-set.h"
32 #include "insn-config.h"
33 #include "conditions.h"
35 #include "insn-attr.h"
42 #include "integrate.h"
45 #include "target-def.h"
/* True iff the NUL-terminated strings A and B compare equal.  */
48 #define streq(a,b) (strcmp (a, b) == 0)
51 /* Function prototypes for stupid compilers: */
52 static void const_double_split (rtx
, HOST_WIDE_INT
*, HOST_WIDE_INT
*);
53 static int const_costs_int (HOST_WIDE_INT
, int);
54 static int const_costs (rtx
, enum rtx_code
);
55 static bool v850_rtx_costs (rtx
, int, int, int *);
56 static void substitute_ep_register (rtx
, rtx
, int, int, rtx
*, rtx
*);
57 static void v850_reorg (void);
58 static int ep_memory_offset (enum machine_mode
, int);
59 static void v850_set_data_area (tree
, v850_data_area
);
60 const struct attribute_spec v850_attribute_table
[];
61 static tree
v850_handle_interrupt_attribute (tree
*, tree
, tree
, int, bool *);
62 static tree
v850_handle_data_area_attribute (tree
*, tree
, tree
, int, bool *);
63 static void v850_insert_attributes (tree
, tree
*);
64 static void v850_select_section (tree
, int, unsigned HOST_WIDE_INT
);
65 static void v850_encode_data_area (tree
, rtx
);
66 static void v850_encode_section_info (tree
, rtx
, int);
67 static bool v850_return_in_memory (tree
, tree
);
68 static void v850_setup_incoming_varargs (CUMULATIVE_ARGS
*, enum machine_mode
,
70 static bool v850_pass_by_reference (CUMULATIVE_ARGS
*, enum machine_mode
,
72 static int v850_arg_partial_bytes (CUMULATIVE_ARGS
*, enum machine_mode
,
75 /* Information about the various small memory areas. */
76 struct small_memory_info small_memory
[ (int)SMALL_MEMORY_max
] =
78 /* name value max physical max */
79 { "tda", (char *)0, 0, 256 },
80 { "sda", (char *)0, 0, 65536 },
81 { "zda", (char *)0, 0, 32768 },
84 /* Names of the various data areas used on the v850. */
/* Per-kind default section name, indexed by the GHS section-kind
   enumeration (COUNT_OF_GHS_SECTION_KINDS entries).  */
85 tree GHS_default_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
/* Section names currently in effect — same indexing as
   GHS_default_section_names; presumably updated by a section pragma
   (TODO: confirm against the pragma handlers, not visible here).  */
86 tree GHS_current_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
88 /* Track the current data area set by the data area pragma (which
89 can be nested). Tested by check_default_data_area. */
/* Top of the (nestable) data-area pragma stack; NULL when no
   data-area pragma is currently active.  */
90 data_area_stack_element
* data_area_stack
= NULL
;
92 /* True if we don't need to check any more if the current
93 function is an interrupt handler. */
/* Cache-validity flag: FALSE until the interrupt-handler status of the
   current function has been computed and stored in v850_interrupt_p
   (presumably by v850_interrupt_function_p — TODO confirm; its body is
   not visible in this chunk).  */
94 static int v850_interrupt_cache_p
= FALSE
;
96 /* Whether current function is an interrupt handler. */
/* Valid only while v850_interrupt_cache_p is TRUE.  */
97 static int v850_interrupt_p
= FALSE
;
99 /* Initialize the GCC target structure. */
/* Emit 16-bit data with ".hword"; together with
   v850_output_addr_const_extra (below) this lets switch-table offsets
   be emitted as plain label differences, the assembler handling the
   truncation.  */
100 #undef TARGET_ASM_ALIGNED_HI_OP
101 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
103 #undef TARGET_ATTRIBUTE_TABLE
104 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
106 #undef TARGET_INSERT_ATTRIBUTES
107 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
109 #undef TARGET_ASM_SELECT_SECTION
110 #define TARGET_ASM_SELECT_SECTION v850_select_section
112 #undef TARGET_ENCODE_SECTION_INFO
113 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
115 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
116 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
118 #undef TARGET_RTX_COSTS
119 #define TARGET_RTX_COSTS v850_rtx_costs
/* Addresses all cost the same: use the generic hook returning 0.  */
121 #undef TARGET_ADDRESS_COST
122 #define TARGET_ADDRESS_COST hook_int_rtx_0
/* Machine-dependent reorg pass: implements the -mep optimization
   (copying heavily-used pointers into the ep register) — see
   v850_reorg/substitute_ep_register below.  */
124 #undef TARGET_MACHINE_DEPENDENT_REORG
125 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
127 #undef TARGET_PROMOTE_PROTOTYPES
128 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
130 #undef TARGET_RETURN_IN_MEMORY
131 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
133 #undef TARGET_PASS_BY_REFERENCE
134 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
/* Callee always copies by-reference arguments on this target.  */
136 #undef TARGET_CALLEE_COPIES
137 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
139 #undef TARGET_SETUP_INCOMING_VARARGS
140 #define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs
142 #undef TARGET_ARG_PARTIAL_BYTES
143 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
/* The target vector itself, built from the macros above.  */
145 struct gcc_target targetm
= TARGET_INITIALIZER
;
147 /* Sometimes certain combinations of command options do not make
148 sense on a particular target machine. You can define a macro
149 `OVERRIDE_OPTIONS' to take account of this. This macro, if
150 defined, is executed once just after all the command options have
153 Don't use this macro to turn on various extra optimizations for
154 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
157 override_options (void)
160 extern int atoi (const char *);
162 /* Parse -m{s,t,z}da=nnn switches */
163 for (i
= 0; i
< (int)SMALL_MEMORY_max
; i
++)
165 if (small_memory
[i
].value
)
167 if (!ISDIGIT (*small_memory
[i
].value
))
168 error ("%s=%s is not numeric",
169 small_memory
[i
].name
,
170 small_memory
[i
].value
);
173 small_memory
[i
].max
= atoi (small_memory
[i
].value
);
174 if (small_memory
[i
].max
> small_memory
[i
].physical_max
)
175 error ("%s=%s is too large",
176 small_memory
[i
].name
,
177 small_memory
[i
].value
);
182 /* Make sure that the US_BIT_SET mask has been correctly initialized. */
183 if ((target_flags
& MASK_US_MASK_SET
) == 0)
185 target_flags
|= MASK_US_MASK_SET
;
186 target_flags
&= ~MASK_US_BIT_SET
;
192 v850_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
193 enum machine_mode mode
, tree type
,
194 bool named ATTRIBUTE_UNUSED
)
196 unsigned HOST_WIDE_INT size
;
199 size
= int_size_in_bytes (type
);
201 size
= GET_MODE_SIZE (mode
);
206 /* Return an RTX to represent where a value with mode MODE will be returned
207 from a function. If the result is 0, the argument is pushed. */
210 function_arg (CUMULATIVE_ARGS
* cum
,
211 enum machine_mode mode
,
218 if (TARGET_GHS
&& !named
)
222 size
= int_size_in_bytes (type
);
224 size
= GET_MODE_SIZE (mode
);
230 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
234 cum
->nbytes
= (cum
->nbytes
+ align
- 1) &~(align
- 1);
236 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
239 if (type
== NULL_TREE
240 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
243 switch (cum
->nbytes
/ UNITS_PER_WORD
)
246 result
= gen_rtx_REG (mode
, 6);
249 result
= gen_rtx_REG (mode
, 7);
252 result
= gen_rtx_REG (mode
, 8);
255 result
= gen_rtx_REG (mode
, 9);
265 /* Return the number of bytes which must be put into registers
266 for values which are part in registers and part in memory. */
269 v850_arg_partial_bytes (CUMULATIVE_ARGS
* cum
, enum machine_mode mode
,
270 tree type
, bool named
)
274 if (TARGET_GHS
&& !named
)
278 size
= int_size_in_bytes (type
);
280 size
= GET_MODE_SIZE (mode
);
283 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
287 cum
->nbytes
= (cum
->nbytes
+ align
- 1) &~(align
- 1);
289 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
292 if (cum
->nbytes
+ size
<= 4 * UNITS_PER_WORD
)
295 if (type
== NULL_TREE
296 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
299 return 4 * UNITS_PER_WORD
- cum
->nbytes
;
303 /* Return the high and low words of a CONST_DOUBLE */
306 const_double_split (rtx x
, HOST_WIDE_INT
* p_high
, HOST_WIDE_INT
* p_low
)
308 if (GET_CODE (x
) == CONST_DOUBLE
)
313 switch (GET_MODE (x
))
316 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
317 REAL_VALUE_TO_TARGET_DOUBLE (rv
, t
);
318 *p_high
= t
[1]; /* since v850 is little endian */
319 *p_low
= t
[0]; /* high is second word */
323 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
324 REAL_VALUE_TO_TARGET_SINGLE (rv
, *p_high
);
330 *p_high
= CONST_DOUBLE_HIGH (x
);
331 *p_low
= CONST_DOUBLE_LOW (x
);
339 fatal_insn ("const_double_split got a bad insn:", x
);
343 /* Return the cost of the rtx R with code CODE. */
346 const_costs_int (HOST_WIDE_INT value
, int zero_cost
)
348 if (CONST_OK_FOR_I (value
))
350 else if (CONST_OK_FOR_J (value
))
352 else if (CONST_OK_FOR_K (value
))
359 const_costs (rtx r
, enum rtx_code c
)
361 HOST_WIDE_INT high
, low
;
366 return const_costs_int (INTVAL (r
), 0);
369 const_double_split (r
, &high
, &low
);
370 if (GET_MODE (r
) == SFmode
)
371 return const_costs_int (high
, 1);
373 return const_costs_int (high
, 1) + const_costs_int (low
, 1);
389 v850_rtx_costs (rtx x
,
391 int outer_code ATTRIBUTE_UNUSED
,
401 *total
= COSTS_N_INSNS (const_costs (x
, code
));
408 if (TARGET_V850E
&& optimize_size
)
416 && ( GET_MODE (x
) == SImode
417 || GET_MODE (x
) == HImode
418 || GET_MODE (x
) == QImode
))
420 if (GET_CODE (XEXP (x
, 1)) == REG
)
422 else if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
424 if (CONST_OK_FOR_O (INTVAL (XEXP (x
, 1))))
426 else if (CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))))
439 /* Print operand X using operand code CODE to assembly language output file
443 print_operand (FILE * file
, rtx x
, int code
)
445 HOST_WIDE_INT high
, low
;
450 /* We use 'c' operands with symbols for .vtinherit */
451 if (GET_CODE (x
) == SYMBOL_REF
)
453 output_addr_const(file
, x
);
460 switch ((code
== 'B' || code
== 'C')
461 ? reverse_condition (GET_CODE (x
)) : GET_CODE (x
))
464 if (code
== 'c' || code
== 'C')
465 fprintf (file
, "nz");
467 fprintf (file
, "ne");
470 if (code
== 'c' || code
== 'C')
476 fprintf (file
, "ge");
479 fprintf (file
, "gt");
482 fprintf (file
, "le");
485 fprintf (file
, "lt");
488 fprintf (file
, "nl");
494 fprintf (file
, "nh");
503 case 'F': /* high word of CONST_DOUBLE */
504 if (GET_CODE (x
) == CONST_INT
)
505 fprintf (file
, "%d", (INTVAL (x
) >= 0) ? 0 : -1);
506 else if (GET_CODE (x
) == CONST_DOUBLE
)
508 const_double_split (x
, &high
, &low
);
509 fprintf (file
, "%ld", (long) high
);
514 case 'G': /* low word of CONST_DOUBLE */
515 if (GET_CODE (x
) == CONST_INT
)
516 fprintf (file
, "%ld", (long) INTVAL (x
));
517 else if (GET_CODE (x
) == CONST_DOUBLE
)
519 const_double_split (x
, &high
, &low
);
520 fprintf (file
, "%ld", (long) low
);
526 fprintf (file
, "%d\n", (int)(INTVAL (x
) & 0xffff));
529 fprintf (file
, "%d", exact_log2 (INTVAL (x
)));
532 if (special_symbolref_operand (x
, VOIDmode
))
534 if (GET_CODE (x
) == SYMBOL_REF
)
536 else if (GET_CODE (x
) == CONST
)
537 x
= XEXP (XEXP (x
, 0), 0);
541 if (SYMBOL_REF_ZDA_P (x
))
542 fprintf (file
, "zdaoff");
543 else if (SYMBOL_REF_SDA_P (x
))
544 fprintf (file
, "sdaoff");
545 else if (SYMBOL_REF_TDA_P (x
))
546 fprintf (file
, "tdaoff");
554 if (special_symbolref_operand (x
, VOIDmode
))
555 output_addr_const (file
, x
);
560 if (special_symbolref_operand (x
, VOIDmode
))
562 if (GET_CODE (x
) == SYMBOL_REF
)
564 else if (GET_CODE (x
) == CONST
)
565 x
= XEXP (XEXP (x
, 0), 0);
569 if (SYMBOL_REF_ZDA_P (x
))
570 fprintf (file
, "r0");
571 else if (SYMBOL_REF_SDA_P (x
))
572 fprintf (file
, "gp");
573 else if (SYMBOL_REF_TDA_P (x
))
574 fprintf (file
, "ep");
581 case 'R': /* 2nd word of a double. */
582 switch (GET_CODE (x
))
585 fprintf (file
, reg_names
[REGNO (x
) + 1]);
588 x
= XEXP (adjust_address (x
, SImode
, 4), 0);
589 print_operand_address (file
, x
);
590 if (GET_CODE (x
) == CONST_INT
)
591 fprintf (file
, "[r0]");
600 /* if it's a reference to a TDA variable, use sst/sld vs. st/ld */
601 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), FALSE
))
608 /* Like an 'S' operand above, but for unsigned loads only. */
609 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), TRUE
))
614 case 'W': /* print the instruction suffix */
615 switch (GET_MODE (x
))
620 case QImode
: fputs (".b", file
); break;
621 case HImode
: fputs (".h", file
); break;
622 case SImode
: fputs (".w", file
); break;
623 case SFmode
: fputs (".w", file
); break;
626 case '.': /* register r0 */
627 fputs (reg_names
[0], file
);
629 case 'z': /* reg or zero */
631 fputs (reg_names
[0], file
);
632 else if (GET_CODE (x
) == REG
)
633 fputs (reg_names
[REGNO (x
)], file
);
638 switch (GET_CODE (x
))
641 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
642 output_address (gen_rtx_PLUS (SImode
, gen_rtx_REG (SImode
, 0),
645 output_address (XEXP (x
, 0));
649 fputs (reg_names
[REGNO (x
)], file
);
652 fputs (reg_names
[subreg_regno (x
)], file
);
659 print_operand_address (file
, x
);
670 /* Output assembly language output for the address ADDR to FILE. */
673 print_operand_address (FILE * file
, rtx addr
)
675 switch (GET_CODE (addr
))
678 fprintf (file
, "0[");
679 print_operand (file
, addr
, 0);
683 if (GET_CODE (XEXP (addr
, 0)) == REG
)
686 fprintf (file
, "lo(");
687 print_operand (file
, XEXP (addr
, 1), 0);
688 fprintf (file
, ")[");
689 print_operand (file
, XEXP (addr
, 0), 0);
694 if (GET_CODE (XEXP (addr
, 0)) == REG
695 || GET_CODE (XEXP (addr
, 0)) == SUBREG
)
698 print_operand (file
, XEXP (addr
, 1), 0);
700 print_operand (file
, XEXP (addr
, 0), 0);
705 print_operand (file
, XEXP (addr
, 0), 0);
707 print_operand (file
, XEXP (addr
, 1), 0);
712 const char *off_name
= NULL
;
713 const char *reg_name
= NULL
;
715 if (SYMBOL_REF_ZDA_P (addr
))
720 else if (SYMBOL_REF_SDA_P (addr
))
725 else if (SYMBOL_REF_TDA_P (addr
))
732 fprintf (file
, "%s(", off_name
);
733 output_addr_const (file
, addr
);
735 fprintf (file
, ")[%s]", reg_name
);
739 if (special_symbolref_operand (addr
, VOIDmode
))
741 rtx x
= XEXP (XEXP (addr
, 0), 0);
742 const char *off_name
;
743 const char *reg_name
;
745 if (SYMBOL_REF_ZDA_P (x
))
750 else if (SYMBOL_REF_SDA_P (x
))
755 else if (SYMBOL_REF_TDA_P (x
))
763 fprintf (file
, "%s(", off_name
);
764 output_addr_const (file
, addr
);
765 fprintf (file
, ")[%s]", reg_name
);
768 output_addr_const (file
, addr
);
771 output_addr_const (file
, addr
);
776 /* When assemble_integer is used to emit the offsets for a switch
777 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
778 output_addr_const will normally barf at this, but it is OK to omit
779 the truncate and just emit the difference of the two labels. The
780 .hword directive will automatically handle the truncation for us.
782 Returns 1 if rtx was handled, 0 otherwise. */
785 v850_output_addr_const_extra (FILE * file
, rtx x
)
787 if (GET_CODE (x
) != TRUNCATE
)
792 /* We must also handle the case where the switch table was passed a
793 constant value and so has been collapsed. In this case the first
794 label will have been deleted. In such a case it is OK to emit
795 nothing, since the table will not be used.
796 (cf gcc.c-torture/compile/990801-1.c). */
797 if (GET_CODE (x
) == MINUS
798 && GET_CODE (XEXP (x
, 0)) == LABEL_REF
799 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == CODE_LABEL
800 && INSN_DELETED_P (XEXP (XEXP (x
, 0), 0)))
803 output_addr_const (file
, x
);
807 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
811 output_move_single (rtx
* operands
)
813 rtx dst
= operands
[0];
814 rtx src
= operands
[1];
821 else if (GET_CODE (src
) == CONST_INT
)
823 HOST_WIDE_INT value
= INTVAL (src
);
825 if (CONST_OK_FOR_J (value
)) /* Signed 5 bit immediate. */
828 else if (CONST_OK_FOR_K (value
)) /* Signed 16 bit immediate. */
829 return "movea lo(%1),%.,%0";
831 else if (CONST_OK_FOR_L (value
)) /* Upper 16 bits were set. */
832 return "movhi hi(%1),%.,%0";
834 /* A random constant. */
835 else if (TARGET_V850E
)
838 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
841 else if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
843 HOST_WIDE_INT high
, low
;
845 const_double_split (src
, &high
, &low
);
847 if (CONST_OK_FOR_J (high
)) /* Signed 5 bit immediate. */
850 else if (CONST_OK_FOR_K (high
)) /* Signed 16 bit immediate. */
851 return "movea lo(%F1),%.,%0";
853 else if (CONST_OK_FOR_L (high
)) /* Upper 16 bits were set. */
854 return "movhi hi(%F1),%.,%0";
856 /* A random constant. */
857 else if (TARGET_V850E
)
861 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
864 else if (GET_CODE (src
) == MEM
)
865 return "%S1ld%W1 %1,%0";
867 else if (special_symbolref_operand (src
, VOIDmode
))
868 return "movea %O1(%P1),%Q1,%0";
870 else if (GET_CODE (src
) == LABEL_REF
871 || GET_CODE (src
) == SYMBOL_REF
872 || GET_CODE (src
) == CONST
)
875 return "mov hilo(%1),%0";
877 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
880 else if (GET_CODE (src
) == HIGH
)
881 return "movhi hi(%1),%.,%0";
883 else if (GET_CODE (src
) == LO_SUM
)
885 operands
[2] = XEXP (src
, 0);
886 operands
[3] = XEXP (src
, 1);
887 return "movea lo(%3),%2,%0";
891 else if (GET_CODE (dst
) == MEM
)
894 return "%S0st%W0 %1,%0";
896 else if (GET_CODE (src
) == CONST_INT
&& INTVAL (src
) == 0)
897 return "%S0st%W0 %.,%0";
899 else if (GET_CODE (src
) == CONST_DOUBLE
900 && CONST0_RTX (GET_MODE (dst
)) == src
)
901 return "%S0st%W0 %.,%0";
904 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode
, dst
, src
));
909 /* Return appropriate code to load up an 8 byte integer or
910 floating point value */
913 output_move_double (rtx
* operands
)
915 enum machine_mode mode
= GET_MODE (operands
[0]);
916 rtx dst
= operands
[0];
917 rtx src
= operands
[1];
919 if (register_operand (dst
, mode
)
920 && register_operand (src
, mode
))
922 if (REGNO (src
) + 1 == REGNO (dst
))
923 return "mov %R1,%R0\n\tmov %1,%0";
925 return "mov %1,%0\n\tmov %R1,%R0";
929 if (GET_CODE (dst
) == MEM
930 && ((GET_CODE (src
) == CONST_INT
&& INTVAL (src
) == 0)
931 || (GET_CODE (src
) == CONST_DOUBLE
&& CONST_DOUBLE_OK_FOR_G (src
))))
932 return "st.w %.,%0\n\tst.w %.,%R0";
934 if (GET_CODE (src
) == CONST_INT
|| GET_CODE (src
) == CONST_DOUBLE
)
936 HOST_WIDE_INT high_low
[2];
940 if (GET_CODE (src
) == CONST_DOUBLE
)
941 const_double_split (src
, &high_low
[1], &high_low
[0]);
944 high_low
[0] = INTVAL (src
);
945 high_low
[1] = (INTVAL (src
) >= 0) ? 0 : -1;
948 for (i
= 0; i
< 2; i
++)
950 xop
[0] = gen_rtx_REG (SImode
, REGNO (dst
)+i
);
951 xop
[1] = GEN_INT (high_low
[i
]);
952 output_asm_insn (output_move_single (xop
), xop
);
958 if (GET_CODE (src
) == MEM
)
961 int dreg
= REGNO (dst
);
962 rtx inside
= XEXP (src
, 0);
964 if (GET_CODE (inside
) == REG
)
965 ptrreg
= REGNO (inside
);
966 else if (GET_CODE (inside
) == SUBREG
)
967 ptrreg
= subreg_regno (inside
);
968 else if (GET_CODE (inside
) == PLUS
)
969 ptrreg
= REGNO (XEXP (inside
, 0));
970 else if (GET_CODE (inside
) == LO_SUM
)
971 ptrreg
= REGNO (XEXP (inside
, 0));
974 return "ld.w %R1,%R0\n\tld.w %1,%0";
977 if (GET_CODE (src
) == MEM
)
978 return "ld.w %1,%0\n\tld.w %R1,%R0";
980 if (GET_CODE (dst
) == MEM
)
981 return "st.w %1,%0\n\tst.w %R1,%R0";
983 return "mov %1,%0\n\tmov %R1,%R0";
987 /* Return maximum offset supported for a short EP memory reference of mode
988 MODE and signedness UNSIGNEDP. */
991 ep_memory_offset (enum machine_mode mode
, int unsignedp ATTRIBUTE_UNUSED
)
998 if (TARGET_SMALL_SLD
)
999 max_offset
= (1 << 4);
1000 else if (TARGET_V850E
1001 && ( ( unsignedp
&& ! TARGET_US_BIT_SET
)
1002 || (! unsignedp
&& TARGET_US_BIT_SET
)))
1003 max_offset
= (1 << 4);
1005 max_offset
= (1 << 7);
1009 if (TARGET_SMALL_SLD
)
1010 max_offset
= (1 << 5);
1011 else if (TARGET_V850E
1012 && ( ( unsignedp
&& ! TARGET_US_BIT_SET
)
1013 || (! unsignedp
&& TARGET_US_BIT_SET
)))
1014 max_offset
= (1 << 5);
1016 max_offset
= (1 << 8);
1021 max_offset
= (1 << 8);
1031 /* Return true if OP is a valid short EP memory reference */
1034 ep_memory_operand (rtx op
, enum machine_mode mode
, int unsigned_load
)
1040 if (GET_CODE (op
) != MEM
)
1043 max_offset
= ep_memory_offset (mode
, unsigned_load
);
1045 mask
= GET_MODE_SIZE (mode
) - 1;
1047 addr
= XEXP (op
, 0);
1048 if (GET_CODE (addr
) == CONST
)
1049 addr
= XEXP (addr
, 0);
1051 switch (GET_CODE (addr
))
1057 return SYMBOL_REF_TDA_P (addr
);
1060 return REGNO (addr
) == EP_REGNUM
;
1063 op0
= XEXP (addr
, 0);
1064 op1
= XEXP (addr
, 1);
1065 if (GET_CODE (op1
) == CONST_INT
1066 && INTVAL (op1
) < max_offset
1067 && INTVAL (op1
) >= 0
1068 && (INTVAL (op1
) & mask
) == 0)
1070 if (GET_CODE (op0
) == REG
&& REGNO (op0
) == EP_REGNUM
)
1073 if (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_TDA_P (op0
))
1082 /* Return true if OP is either a register or 0 */
1085 reg_or_0_operand (rtx op
, enum machine_mode mode
)
1087 if (GET_CODE (op
) == CONST_INT
)
1088 return INTVAL (op
) == 0;
1090 else if (GET_CODE (op
) == CONST_DOUBLE
)
1091 return CONST_DOUBLE_OK_FOR_G (op
);
1094 return register_operand (op
, mode
);
1097 /* Return true if OP is either a register or a signed five bit integer */
1100 reg_or_int5_operand (rtx op
, enum machine_mode mode
)
1102 if (GET_CODE (op
) == CONST_INT
)
1103 return CONST_OK_FOR_J (INTVAL (op
));
1106 return register_operand (op
, mode
);
1109 /* Return true if OP is either a register or a signed nine bit integer. */
1112 reg_or_int9_operand (rtx op
, enum machine_mode mode
)
1114 if (GET_CODE (op
) == CONST_INT
)
1115 return CONST_OK_FOR_O (INTVAL (op
));
1117 return register_operand (op
, mode
);
1120 /* Return true if OP is either a register or a const integer. */
1123 reg_or_const_operand (rtx op
, enum machine_mode mode
)
1125 if (GET_CODE (op
) == CONST_INT
)
1128 return register_operand (op
, mode
);
1131 /* Return true if OP is a valid call operand. */
1134 call_address_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1136 /* Only registers are valid call operands if TARGET_LONG_CALLS. */
1137 if (TARGET_LONG_CALLS
)
1138 return GET_CODE (op
) == REG
;
1139 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == REG
);
1143 special_symbolref_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1145 if (GET_CODE (op
) == CONST
1146 && GET_CODE (XEXP (op
, 0)) == PLUS
1147 && GET_CODE (XEXP (XEXP (op
, 0), 1)) == CONST_INT
1148 && CONST_OK_FOR_K (INTVAL (XEXP (XEXP (op
, 0), 1))))
1149 op
= XEXP (XEXP (op
, 0), 0);
1151 if (GET_CODE (op
) == SYMBOL_REF
)
1152 return (SYMBOL_REF_FLAGS (op
)
1153 & (SYMBOL_FLAG_ZDA
| SYMBOL_FLAG_TDA
| SYMBOL_FLAG_SDA
)) != 0;
1159 movsi_source_operand (rtx op
, enum machine_mode mode
)
1161 /* Some constants, as well as symbolic operands
1162 must be done with HIGH & LO_SUM patterns. */
1164 && GET_CODE (op
) != HIGH
1165 && !(GET_CODE (op
) == CONST_INT
1166 && (CONST_OK_FOR_J (INTVAL (op
))
1167 || CONST_OK_FOR_K (INTVAL (op
))
1168 || CONST_OK_FOR_L (INTVAL (op
)))))
1169 return special_symbolref_operand (op
, mode
);
1171 return general_operand (op
, mode
);
1175 power_of_two_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1177 if (GET_CODE (op
) != CONST_INT
)
1180 if (exact_log2 (INTVAL (op
)) == -1)
1186 not_power_of_two_operand (rtx op
, enum machine_mode mode
)
1192 else if (mode
== HImode
)
1194 else if (mode
== SImode
)
1199 if (GET_CODE (op
) != CONST_INT
)
1202 if (exact_log2 (~INTVAL (op
) & mask
) == -1)
1208 /* Substitute memory references involving a pointer, to use the ep pointer,
1209 taking care to save and preserve the ep. */
1212 substitute_ep_register (rtx first_insn
,
1219 rtx reg
= gen_rtx_REG (Pmode
, regno
);
1224 regs_ever_live
[1] = 1;
1225 *p_r1
= gen_rtx_REG (Pmode
, 1);
1226 *p_ep
= gen_rtx_REG (Pmode
, 30);
1231 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1232 2 * (uses
- 3), uses
, reg_names
[regno
],
1233 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)),
1234 INSN_UID (first_insn
), INSN_UID (last_insn
));
1236 if (GET_CODE (first_insn
) == NOTE
)
1237 first_insn
= next_nonnote_insn (first_insn
);
1239 last_insn
= next_nonnote_insn (last_insn
);
1240 for (insn
= first_insn
; insn
&& insn
!= last_insn
; insn
= NEXT_INSN (insn
))
1242 if (GET_CODE (insn
) == INSN
)
1244 rtx pattern
= single_set (insn
);
1246 /* Replace the memory references. */
1250 /* Memory operands are signed by default. */
1251 int unsignedp
= FALSE
;
1253 if (GET_CODE (SET_DEST (pattern
)) == MEM
1254 && GET_CODE (SET_SRC (pattern
)) == MEM
)
1257 else if (GET_CODE (SET_DEST (pattern
)) == MEM
)
1258 p_mem
= &SET_DEST (pattern
);
1260 else if (GET_CODE (SET_SRC (pattern
)) == MEM
)
1261 p_mem
= &SET_SRC (pattern
);
1263 else if (GET_CODE (SET_SRC (pattern
)) == SIGN_EXTEND
1264 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1265 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1267 else if (GET_CODE (SET_SRC (pattern
)) == ZERO_EXTEND
1268 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1270 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1278 rtx addr
= XEXP (*p_mem
, 0);
1280 if (GET_CODE (addr
) == REG
&& REGNO (addr
) == (unsigned) regno
)
1281 *p_mem
= change_address (*p_mem
, VOIDmode
, *p_ep
);
1283 else if (GET_CODE (addr
) == PLUS
1284 && GET_CODE (XEXP (addr
, 0)) == REG
1285 && REGNO (XEXP (addr
, 0)) == (unsigned) regno
1286 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1287 && ((INTVAL (XEXP (addr
, 1)))
1288 < ep_memory_offset (GET_MODE (*p_mem
),
1290 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1291 *p_mem
= change_address (*p_mem
, VOIDmode
,
1292 gen_rtx_PLUS (Pmode
,
1300 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1301 insn
= prev_nonnote_insn (first_insn
);
1302 if (insn
&& GET_CODE (insn
) == INSN
1303 && GET_CODE (PATTERN (insn
)) == SET
1304 && SET_DEST (PATTERN (insn
)) == *p_ep
1305 && SET_SRC (PATTERN (insn
)) == *p_r1
)
1308 emit_insn_before (gen_rtx_SET (Pmode
, *p_r1
, *p_ep
), first_insn
);
1310 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, reg
), first_insn
);
1311 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, *p_r1
), last_insn
);
1315 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1316 the -mep mode to copy heavily used pointers to ep to use the implicit
1328 regs
[FIRST_PSEUDO_REGISTER
];
1337 /* If not ep mode, just return now. */
1341 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1344 regs
[i
].first_insn
= NULL_RTX
;
1345 regs
[i
].last_insn
= NULL_RTX
;
1348 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1350 switch (GET_CODE (insn
))
1352 /* End of basic block */
1359 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1361 if (max_uses
< regs
[i
].uses
)
1363 max_uses
= regs
[i
].uses
;
1369 substitute_ep_register (regs
[max_regno
].first_insn
,
1370 regs
[max_regno
].last_insn
,
1371 max_uses
, max_regno
, &r1
, &ep
);
1375 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1378 regs
[i
].first_insn
= NULL_RTX
;
1379 regs
[i
].last_insn
= NULL_RTX
;
1387 pattern
= single_set (insn
);
1389 /* See if there are any memory references we can shorten */
1392 rtx src
= SET_SRC (pattern
);
1393 rtx dest
= SET_DEST (pattern
);
1395 /* Memory operands are signed by default. */
1396 int unsignedp
= FALSE
;
1398 /* We might have (SUBREG (MEM)) here, so just get rid of the
1399 subregs to make this code simpler. */
1400 if (GET_CODE (dest
) == SUBREG
1401 && (GET_CODE (SUBREG_REG (dest
)) == MEM
1402 || GET_CODE (SUBREG_REG (dest
)) == REG
))
1403 alter_subreg (&dest
);
1404 if (GET_CODE (src
) == SUBREG
1405 && (GET_CODE (SUBREG_REG (src
)) == MEM
1406 || GET_CODE (SUBREG_REG (src
)) == REG
))
1407 alter_subreg (&src
);
1409 if (GET_CODE (dest
) == MEM
&& GET_CODE (src
) == MEM
)
1412 else if (GET_CODE (dest
) == MEM
)
1415 else if (GET_CODE (src
) == MEM
)
1418 else if (GET_CODE (src
) == SIGN_EXTEND
1419 && GET_CODE (XEXP (src
, 0)) == MEM
)
1420 mem
= XEXP (src
, 0);
1422 else if (GET_CODE (src
) == ZERO_EXTEND
1423 && GET_CODE (XEXP (src
, 0)) == MEM
)
1425 mem
= XEXP (src
, 0);
1431 if (mem
&& ep_memory_operand (mem
, GET_MODE (mem
), unsignedp
))
1434 else if (!use_ep
&& mem
1435 && GET_MODE_SIZE (GET_MODE (mem
)) <= UNITS_PER_WORD
)
1437 rtx addr
= XEXP (mem
, 0);
1441 if (GET_CODE (addr
) == REG
)
1444 regno
= REGNO (addr
);
1447 else if (GET_CODE (addr
) == PLUS
1448 && GET_CODE (XEXP (addr
, 0)) == REG
1449 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1450 && ((INTVAL (XEXP (addr
, 1)))
1451 < ep_memory_offset (GET_MODE (mem
), unsignedp
))
1452 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1455 regno
= REGNO (XEXP (addr
, 0));
1464 regs
[regno
].last_insn
= insn
;
1465 if (!regs
[regno
].first_insn
)
1466 regs
[regno
].first_insn
= insn
;
1470 /* Loading up a register in the basic block zaps any savings
1472 if (GET_CODE (dest
) == REG
)
1474 enum machine_mode mode
= GET_MODE (dest
);
1478 regno
= REGNO (dest
);
1479 endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
1483 /* See if we can use the pointer before this
1488 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1490 if (max_uses
< regs
[i
].uses
)
1492 max_uses
= regs
[i
].uses
;
1498 && max_regno
>= regno
1499 && max_regno
< endregno
)
1501 substitute_ep_register (regs
[max_regno
].first_insn
,
1502 regs
[max_regno
].last_insn
,
1503 max_uses
, max_regno
, &r1
,
1506 /* Since we made a substitution, zap all remembered
1508 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1511 regs
[i
].first_insn
= NULL_RTX
;
1512 regs
[i
].last_insn
= NULL_RTX
;
1517 for (i
= regno
; i
< endregno
; i
++)
1520 regs
[i
].first_insn
= NULL_RTX
;
1521 regs
[i
].last_insn
= NULL_RTX
;
1530 /* # of registers saved by the interrupt handler. */
1531 #define INTERRUPT_FIXED_NUM 4
1533 /* # of bytes for registers saved by the interrupt handler. */
/* 4 bytes per register.  */
1534 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1536 /* # of registers saved in register parameter area. */
1537 #define INTERRUPT_REGPARM_NUM 4
1538 /* # of words saved for other registers. */
1539 #define INTERRUPT_ALL_SAVE_NUM \
1540 (30 - INTERRUPT_FIXED_NUM + INTERRUPT_REGPARM_NUM)
/* Byte size of the "all registers" interrupt save area (4 bytes per
   word counted above).  */
1542 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1545 compute_register_save_size (long * p_reg_saved
)
1549 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1550 int call_p
= regs_ever_live
[LINK_POINTER_REGNUM
];
1553 /* Count the return pointer if we need to save it. */
1554 if (current_function_profile
&& !call_p
)
1555 regs_ever_live
[LINK_POINTER_REGNUM
] = call_p
= 1;
1557 /* Count space for the register saves. */
1558 if (interrupt_handler
)
1560 for (i
= 0; i
<= 31; i
++)
1564 if (regs_ever_live
[i
] || call_p
)
1567 reg_saved
|= 1L << i
;
1571 /* We don't save/restore r0 or the stack pointer */
1573 case STACK_POINTER_REGNUM
:
1576 /* For registers with fixed use, we save them, set them to the
1577 appropriate value, and then restore them.
1578 These registers are handled specially, so don't list them
1579 on the list of registers to save in the prologue. */
1580 case 1: /* temp used to hold ep */
1582 case 10: /* temp used to call interrupt save/restore */
1583 case EP_REGNUM
: /* ep */
1590 /* Find the first register that needs to be saved. */
1591 for (i
= 0; i
<= 31; i
++)
1592 if (regs_ever_live
[i
] && ((! call_used_regs
[i
])
1593 || i
== LINK_POINTER_REGNUM
))
1596 /* If it is possible that an out-of-line helper function might be
1597 used to generate the prologue for the current function, then we
1598 need to cover the possibility that such a helper function will
1599 be used, despite the fact that there might be gaps in the list of
1600 registers that need to be saved. To detect this we note that the
1601 helper functions always push at least register r29 (provided
1602 that the function is not an interrupt handler). */
1604 if (TARGET_PROLOG_FUNCTION
1605 && (i
== 2 || ((i
>= 20) && (i
< 30))))
1610 reg_saved
|= 1L << i
;
1615 /* Helper functions save all registers between the starting
1616 register and the last register, regardless of whether they
1617 are actually used by the function or not. */
1618 for (; i
<= 29; i
++)
1621 reg_saved
|= 1L << i
;
1624 if (regs_ever_live
[LINK_POINTER_REGNUM
])
1627 reg_saved
|= 1L << LINK_POINTER_REGNUM
;
1632 for (; i
<= 31; i
++)
1633 if (regs_ever_live
[i
] && ((! call_used_regs
[i
])
1634 || i
== LINK_POINTER_REGNUM
))
1637 reg_saved
|= 1L << i
;
1643 *p_reg_saved
= reg_saved
;
1649 compute_frame_size (int size
, long * p_reg_saved
)
1652 + compute_register_save_size (p_reg_saved
)
1653 + current_function_outgoing_args_size
);
1658 expand_prologue (void)
1662 unsigned int size
= get_frame_size ();
1663 unsigned int actual_fsize
;
1664 unsigned int init_stack_alloc
= 0;
1667 unsigned int num_save
;
1668 unsigned int default_stack
;
1670 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1673 actual_fsize
= compute_frame_size (size
, ®_saved
);
1675 /* Save/setup global registers for interrupt functions right now. */
1676 if (interrupt_handler
)
1678 if (TARGET_V850E
&& ! TARGET_DISABLE_CALLT
)
1679 emit_insn (gen_callt_save_interrupt ());
1681 emit_insn (gen_save_interrupt ());
1683 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1685 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1686 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1689 /* Save arg registers to the stack if necessary. */
1690 else if (current_function_args_info
.anonymous_args
)
1692 if (TARGET_PROLOG_FUNCTION
&& TARGET_V850E
&& !TARGET_DISABLE_CALLT
)
1693 emit_insn (gen_save_r6_r9_v850e ());
1694 else if (TARGET_PROLOG_FUNCTION
&& ! TARGET_LONG_CALLS
)
1695 emit_insn (gen_save_r6_r9 ());
1699 for (i
= 6; i
< 10; i
++)
1701 emit_move_insn (gen_rtx_MEM (SImode
,
1702 plus_constant (stack_pointer_rtx
,
1704 gen_rtx_REG (SImode
, i
));
1710 /* Identify all of the saved registers. */
1713 for (i
= 1; i
< 31; i
++)
1715 if (((1L << i
) & reg_saved
) != 0)
1716 save_regs
[num_save
++] = gen_rtx_REG (Pmode
, i
);
1719 /* If the return pointer is saved, the helper functions also allocate
1720 16 bytes of stack for arguments to be saved in. */
1721 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1723 save_regs
[num_save
++] = gen_rtx_REG (Pmode
, LINK_POINTER_REGNUM
);
1727 /* See if we have an insn that allocates stack space and saves the particular
1728 registers we want to. */
1729 save_all
= NULL_RTX
;
1730 if (TARGET_PROLOG_FUNCTION
&& num_save
> 0 && actual_fsize
>= default_stack
)
1732 int alloc_stack
= (4 * num_save
) + default_stack
;
1733 int unalloc_stack
= actual_fsize
- alloc_stack
;
1734 int save_func_len
= 4;
1735 int save_normal_len
;
1738 save_func_len
+= CONST_OK_FOR_J (unalloc_stack
) ? 2 : 4;
1740 /* see if we would have used ep to save the stack */
1741 if (TARGET_EP
&& num_save
> 3 && (unsigned)actual_fsize
< 255)
1742 save_normal_len
= (3 * 2) + (2 * num_save
);
1744 save_normal_len
= 4 * num_save
;
1746 save_normal_len
+= CONST_OK_FOR_J (actual_fsize
) ? 2 : 4;
1748 /* Don't bother checking if we don't actually save any space.
1749 This happens for instance if one register is saved and additional
1750 stack space is allocated. */
1751 if (save_func_len
< save_normal_len
)
1753 save_all
= gen_rtx_PARALLEL
1755 rtvec_alloc (num_save
+ 1
1756 + (TARGET_V850
? (TARGET_LONG_CALLS
? 2 : 1) : 0)));
1758 XVECEXP (save_all
, 0, 0)
1759 = gen_rtx_SET (VOIDmode
,
1761 plus_constant (stack_pointer_rtx
, -alloc_stack
));
1763 offset
= - default_stack
;
1764 for (i
= 0; i
< num_save
; i
++)
1766 XVECEXP (save_all
, 0, i
+1)
1767 = gen_rtx_SET (VOIDmode
,
1769 plus_constant (stack_pointer_rtx
,
1777 XVECEXP (save_all
, 0, num_save
+ 1)
1778 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 10));
1780 if (TARGET_LONG_CALLS
)
1781 XVECEXP (save_all
, 0, num_save
+ 2)
1782 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
1785 code
= recog (save_all
, NULL_RTX
, NULL
);
1788 rtx insn
= emit_insn (save_all
);
1789 INSN_CODE (insn
) = code
;
1790 actual_fsize
-= alloc_stack
;
1794 Saved %d bytes via prologue function (%d vs. %d) for function %s\n",
1795 save_normal_len
- save_func_len
,
1796 save_normal_len
, save_func_len
,
1797 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)));
1800 save_all
= NULL_RTX
;
1804 /* If no prolog save function is available, store the registers the old
1805 fashioned way (one by one). */
1808 /* Special case interrupt functions that save all registers for a call. */
1809 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1811 if (TARGET_V850E
&& ! TARGET_DISABLE_CALLT
)
1812 emit_insn (gen_callt_save_all_interrupt ());
1814 emit_insn (gen_save_all_interrupt ());
1818 /* If the stack is too big, allocate it in chunks so we can do the
1819 register saves. We use the register save size so we use the ep
1821 if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1822 init_stack_alloc
= compute_register_save_size (NULL
);
1824 init_stack_alloc
= actual_fsize
;
1826 /* Save registers at the beginning of the stack frame. */
1827 offset
= init_stack_alloc
- 4;
1829 if (init_stack_alloc
)
1830 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1832 GEN_INT (-init_stack_alloc
)));
1834 /* Save the return pointer first. */
1835 if (num_save
> 0 && REGNO (save_regs
[num_save
-1]) == LINK_POINTER_REGNUM
)
1837 emit_move_insn (gen_rtx_MEM (SImode
,
1838 plus_constant (stack_pointer_rtx
,
1840 save_regs
[--num_save
]);
1844 for (i
= 0; i
< num_save
; i
++)
1846 emit_move_insn (gen_rtx_MEM (SImode
,
1847 plus_constant (stack_pointer_rtx
,
1855 /* Allocate the rest of the stack that was not allocated above (either it is
1856 > 32K or we just called a function to save the registers and needed more
1858 if (actual_fsize
> init_stack_alloc
)
1860 int diff
= actual_fsize
- init_stack_alloc
;
1861 if (CONST_OK_FOR_K (diff
))
1862 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1867 rtx reg
= gen_rtx_REG (Pmode
, 12);
1868 emit_move_insn (reg
, GEN_INT (-diff
));
1869 emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
, reg
));
1873 /* If we need a frame pointer, set it up now. */
1874 if (frame_pointer_needed
)
1875 emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
1880 expand_epilogue (void)
1884 unsigned int size
= get_frame_size ();
1886 unsigned int actual_fsize
= compute_frame_size (size
, ®_saved
);
1887 unsigned int init_stack_free
= 0;
1888 rtx restore_regs
[32];
1890 unsigned int num_restore
;
1891 unsigned int default_stack
;
1893 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1895 /* Eliminate the initial stack stored by interrupt functions. */
1896 if (interrupt_handler
)
1898 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1899 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1900 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1903 /* Cut off any dynamic stack created. */
1904 if (frame_pointer_needed
)
1905 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1907 /* Identify all of the saved registers. */
1910 for (i
= 1; i
< 31; i
++)
1912 if (((1L << i
) & reg_saved
) != 0)
1913 restore_regs
[num_restore
++] = gen_rtx_REG (Pmode
, i
);
1916 /* If the return pointer is saved, the helper functions also allocate
1917 16 bytes of stack for arguments to be saved in. */
1918 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1920 restore_regs
[num_restore
++] = gen_rtx_REG (Pmode
, LINK_POINTER_REGNUM
);
1924 /* See if we have an insn that restores the particular registers we
1926 restore_all
= NULL_RTX
;
1928 if (TARGET_PROLOG_FUNCTION
1930 && actual_fsize
>= default_stack
1931 && !interrupt_handler
)
1933 int alloc_stack
= (4 * num_restore
) + default_stack
;
1934 int unalloc_stack
= actual_fsize
- alloc_stack
;
1935 int restore_func_len
= 4;
1936 int restore_normal_len
;
1939 restore_func_len
+= CONST_OK_FOR_J (unalloc_stack
) ? 2 : 4;
1941 /* See if we would have used ep to restore the registers. */
1942 if (TARGET_EP
&& num_restore
> 3 && (unsigned)actual_fsize
< 255)
1943 restore_normal_len
= (3 * 2) + (2 * num_restore
);
1945 restore_normal_len
= 4 * num_restore
;
1947 restore_normal_len
+= (CONST_OK_FOR_J (actual_fsize
) ? 2 : 4) + 2;
1949 /* Don't bother checking if we don't actually save any space. */
1950 if (restore_func_len
< restore_normal_len
)
1952 restore_all
= gen_rtx_PARALLEL (VOIDmode
,
1953 rtvec_alloc (num_restore
+ 2));
1954 XVECEXP (restore_all
, 0, 0) = gen_rtx_RETURN (VOIDmode
);
1955 XVECEXP (restore_all
, 0, 1)
1956 = gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1957 gen_rtx_PLUS (Pmode
,
1959 GEN_INT (alloc_stack
)));
1961 offset
= alloc_stack
- 4;
1962 for (i
= 0; i
< num_restore
; i
++)
1964 XVECEXP (restore_all
, 0, i
+2)
1965 = gen_rtx_SET (VOIDmode
,
1968 plus_constant (stack_pointer_rtx
,
1973 code
= recog (restore_all
, NULL_RTX
, NULL
);
1979 actual_fsize
-= alloc_stack
;
1982 if (CONST_OK_FOR_K (actual_fsize
))
1983 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1985 GEN_INT (actual_fsize
)));
1988 rtx reg
= gen_rtx_REG (Pmode
, 12);
1989 emit_move_insn (reg
, GEN_INT (actual_fsize
));
1990 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1996 insn
= emit_jump_insn (restore_all
);
1997 INSN_CODE (insn
) = code
;
2001 Saved %d bytes via epilogue function (%d vs. %d) in function %s\n",
2002 restore_normal_len
- restore_func_len
,
2003 restore_normal_len
, restore_func_len
,
2004 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)));
2007 restore_all
= NULL_RTX
;
2011 /* If no epilog save function is available, restore the registers the
2012 old fashioned way (one by one). */
2015 /* If the stack is large, we need to cut it down in 2 pieces. */
2016 if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
2017 init_stack_free
= 4 * num_restore
;
2019 init_stack_free
= actual_fsize
;
2021 /* Deallocate the rest of the stack if it is > 32K. */
2022 if (actual_fsize
> init_stack_free
)
2026 diff
= actual_fsize
- ((interrupt_handler
) ? 0 : init_stack_free
);
2028 if (CONST_OK_FOR_K (diff
))
2029 emit_insn (gen_addsi3 (stack_pointer_rtx
,
2034 rtx reg
= gen_rtx_REG (Pmode
, 12);
2035 emit_move_insn (reg
, GEN_INT (diff
));
2036 emit_insn (gen_addsi3 (stack_pointer_rtx
,
2042 /* Special case interrupt functions that save all registers
2044 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
2046 if (TARGET_V850E
&& ! TARGET_DISABLE_CALLT
)
2047 emit_insn (gen_callt_restore_all_interrupt ());
2049 emit_insn (gen_restore_all_interrupt ());
2053 /* Restore registers from the beginning of the stack frame. */
2054 offset
= init_stack_free
- 4;
2056 /* Restore the return pointer first. */
2058 && REGNO (restore_regs
[num_restore
- 1]) == LINK_POINTER_REGNUM
)
2060 emit_move_insn (restore_regs
[--num_restore
],
2061 gen_rtx_MEM (SImode
,
2062 plus_constant (stack_pointer_rtx
,
2067 for (i
= 0; i
< num_restore
; i
++)
2069 emit_move_insn (restore_regs
[i
],
2070 gen_rtx_MEM (SImode
,
2071 plus_constant (stack_pointer_rtx
,
2074 emit_insn (gen_rtx_USE (VOIDmode
, restore_regs
[i
]));
2078 /* Cut back the remainder of the stack. */
2079 if (init_stack_free
)
2080 emit_insn (gen_addsi3 (stack_pointer_rtx
,
2082 GEN_INT (init_stack_free
)));
2085 /* And return or use reti for interrupt handlers. */
2086 if (interrupt_handler
)
2088 if (TARGET_V850E
&& ! TARGET_DISABLE_CALLT
)
2089 emit_insn (gen_callt_return_interrupt ());
2091 emit_jump_insn (gen_return_interrupt ());
2093 else if (actual_fsize
)
2094 emit_jump_insn (gen_return_internal ());
2096 emit_jump_insn (gen_return ());
2099 v850_interrupt_cache_p
= FALSE
;
2100 v850_interrupt_p
= FALSE
;
2104 /* Update the condition code from the insn. */
2107 notice_update_cc (rtx body
, rtx insn
)
2109 switch (get_attr_cc (insn
))
2112 /* Insn does not affect CC at all. */
2116 /* Insn does not change CC, but the 0'th operand has been changed. */
2117 if (cc_status
.value1
!= 0
2118 && reg_overlap_mentioned_p (recog_data
.operand
[0], cc_status
.value1
))
2119 cc_status
.value1
= 0;
2123 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2124 V,C is in an unusable state. */
2126 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
| CC_NO_CARRY
;
2127 cc_status
.value1
= recog_data
.operand
[0];
2131 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2132 C is in an unusable state. */
2134 cc_status
.flags
|= CC_NO_CARRY
;
2135 cc_status
.value1
= recog_data
.operand
[0];
2139 /* The insn is a compare instruction. */
2141 cc_status
.value1
= SET_SRC (body
);
2145 /* Insn doesn't leave CC in a usable state. */
2151 /* Retrieve the data area that has been chosen for the given decl. */
2154 v850_get_data_area (tree decl
)
2156 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2157 return DATA_AREA_SDA
;
2159 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2160 return DATA_AREA_TDA
;
2162 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2163 return DATA_AREA_ZDA
;
2165 return DATA_AREA_NORMAL
;
2168 /* Store the indicated data area in the decl's attributes. */
2171 v850_set_data_area (tree decl
, v850_data_area data_area
)
2177 case DATA_AREA_SDA
: name
= get_identifier ("sda"); break;
2178 case DATA_AREA_TDA
: name
= get_identifier ("tda"); break;
2179 case DATA_AREA_ZDA
: name
= get_identifier ("zda"); break;
2184 DECL_ATTRIBUTES (decl
) = tree_cons
2185 (name
, NULL
, DECL_ATTRIBUTES (decl
));
2188 const struct attribute_spec v850_attribute_table
[] =
2190 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2191 { "interrupt_handler", 0, 0, true, false, false, v850_handle_interrupt_attribute
},
2192 { "interrupt", 0, 0, true, false, false, v850_handle_interrupt_attribute
},
2193 { "sda", 0, 0, true, false, false, v850_handle_data_area_attribute
},
2194 { "tda", 0, 0, true, false, false, v850_handle_data_area_attribute
},
2195 { "zda", 0, 0, true, false, false, v850_handle_data_area_attribute
},
2196 { NULL
, 0, 0, false, false, false, NULL
}
2199 /* Handle an "interrupt" attribute; arguments as in
2200 struct attribute_spec.handler. */
2202 v850_handle_interrupt_attribute (tree
* node
,
2204 tree args ATTRIBUTE_UNUSED
,
2205 int flags ATTRIBUTE_UNUSED
,
2206 bool * no_add_attrs
)
2208 if (TREE_CODE (*node
) != FUNCTION_DECL
)
2210 warning ("%qs attribute only applies to functions",
2211 IDENTIFIER_POINTER (name
));
2212 *no_add_attrs
= true;
2218 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2219 struct attribute_spec.handler. */
2221 v850_handle_data_area_attribute (tree
* node
,
2223 tree args ATTRIBUTE_UNUSED
,
2224 int flags ATTRIBUTE_UNUSED
,
2225 bool * no_add_attrs
)
2227 v850_data_area data_area
;
2228 v850_data_area area
;
2231 /* Implement data area attribute. */
2232 if (is_attribute_p ("sda", name
))
2233 data_area
= DATA_AREA_SDA
;
2234 else if (is_attribute_p ("tda", name
))
2235 data_area
= DATA_AREA_TDA
;
2236 else if (is_attribute_p ("zda", name
))
2237 data_area
= DATA_AREA_ZDA
;
2241 switch (TREE_CODE (decl
))
2244 if (current_function_decl
!= NULL_TREE
)
2246 error ("%Jdata area attributes cannot be specified for "
2247 "local variables", decl
, decl
);
2248 *no_add_attrs
= true;
2254 area
= v850_get_data_area (decl
);
2255 if (area
!= DATA_AREA_NORMAL
&& data_area
!= area
)
2257 error ("%Jdata area of '%D' conflicts with previous declaration",
2259 *no_add_attrs
= true;
2271 /* Return nonzero if FUNC is an interrupt function as specified
2272 by the "interrupt" attribute. */
2275 v850_interrupt_function_p (tree func
)
2280 if (v850_interrupt_cache_p
)
2281 return v850_interrupt_p
;
2283 if (TREE_CODE (func
) != FUNCTION_DECL
)
2286 a
= lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func
));
2292 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
2293 ret
= a
!= NULL_TREE
;
2296 /* Its not safe to trust global variables until after function inlining has
2298 if (reload_completed
| reload_in_progress
)
2299 v850_interrupt_p
= ret
;
2306 v850_encode_data_area (tree decl
, rtx symbol
)
2310 /* Map explicit sections into the appropriate attribute */
2311 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2313 if (DECL_SECTION_NAME (decl
))
2315 const char *name
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
2317 if (streq (name
, ".zdata") || streq (name
, ".zbss"))
2318 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2320 else if (streq (name
, ".sdata") || streq (name
, ".sbss"))
2321 v850_set_data_area (decl
, DATA_AREA_SDA
);
2323 else if (streq (name
, ".tdata"))
2324 v850_set_data_area (decl
, DATA_AREA_TDA
);
2327 /* If no attribute, support -m{zda,sda,tda}=n */
2330 int size
= int_size_in_bytes (TREE_TYPE (decl
));
2334 else if (size
<= small_memory
[(int) SMALL_MEMORY_TDA
].max
)
2335 v850_set_data_area (decl
, DATA_AREA_TDA
);
2337 else if (size
<= small_memory
[(int) SMALL_MEMORY_SDA
].max
)
2338 v850_set_data_area (decl
, DATA_AREA_SDA
);
2340 else if (size
<= small_memory
[(int) SMALL_MEMORY_ZDA
].max
)
2341 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2344 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2348 flags
= SYMBOL_REF_FLAGS (symbol
);
2349 switch (v850_get_data_area (decl
))
2351 case DATA_AREA_ZDA
: flags
|= SYMBOL_FLAG_ZDA
; break;
2352 case DATA_AREA_TDA
: flags
|= SYMBOL_FLAG_TDA
; break;
2353 case DATA_AREA_SDA
: flags
|= SYMBOL_FLAG_SDA
; break;
2356 SYMBOL_REF_FLAGS (symbol
) = flags
;
2360 v850_encode_section_info (tree decl
, rtx rtl
, int first
)
2362 default_encode_section_info (decl
, rtl
, first
);
2364 if (TREE_CODE (decl
) == VAR_DECL
2365 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2366 v850_encode_data_area (decl
, XEXP (rtl
, 0));
2369 /* Return true if the given RTX is a register which can be restored
2370 by a function epilogue. */
2372 register_is_ok_for_epilogue (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2374 /* The save/restore routines can only cope with registers 20 - 31. */
2375 return ((GET_CODE (op
) == REG
)
2376 && (((REGNO (op
) >= 20) && REGNO (op
) <= 31)));
2379 /* Return nonzero if the given RTX is suitable for collapsing into
2380 jump to a function epilogue. */
2382 pattern_is_ok_for_epilogue (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2384 int count
= XVECLEN (op
, 0);
2387 /* If there are no registers to restore then the function epilogue
2392 /* The pattern matching has already established that we are performing a
2393 function epilogue and that we are popping at least one register. We must
2394 now check the remaining entries in the vector to make sure that they are
2395 also register pops. There is no good reason why there should ever be
2396 anything else in this vector, but being paranoid always helps...
2398 The test below performs the C equivalent of this machine description
2401 (set (match_operand:SI n "register_is_ok_for_epilogue" "r")
2402 (mem:SI (plus:SI (reg:SI 3) (match_operand:SI n "immediate_operand" "i"))))
2405 for (i
= 3; i
< count
; i
++)
2407 rtx vector_element
= XVECEXP (op
, 0, i
);
2412 if (GET_CODE (vector_element
) != SET
)
2415 dest
= SET_DEST (vector_element
);
2416 src
= SET_SRC (vector_element
);
2418 if (GET_CODE (dest
) != REG
2419 || GET_MODE (dest
) != SImode
2420 || ! register_is_ok_for_epilogue (dest
, SImode
)
2421 || GET_CODE (src
) != MEM
2422 || GET_MODE (src
) != SImode
)
2425 plus
= XEXP (src
, 0);
2427 if (GET_CODE (plus
) != PLUS
2428 || GET_CODE (XEXP (plus
, 0)) != REG
2429 || GET_MODE (XEXP (plus
, 0)) != SImode
2430 || REGNO (XEXP (plus
, 0)) != STACK_POINTER_REGNUM
2431 || GET_CODE (XEXP (plus
, 1)) != CONST_INT
)
2438 /* Construct a JR instruction to a routine that will perform the equivalent of
2439 the RTL passed in as an argument. This RTL is a function epilogue that
2440 pops registers off the stack and possibly releases some extra stack space
2441 as well. The code has already verified that the RTL matches these
2444 construct_restore_jr (rtx op
)
2446 int count
= XVECLEN (op
, 0);
2448 unsigned long int mask
;
2449 unsigned long int first
;
2450 unsigned long int last
;
2452 static char buff
[100]; /* XXX */
2456 error ("bogus JR construction: %d\n", count
);
2460 /* Work out how many bytes to pop off the stack before retrieving
2462 if (GET_CODE (XVECEXP (op
, 0, 1)) != SET
)
2464 if (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) != PLUS
)
2466 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) != CONST_INT
)
2469 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2471 /* Each pop will remove 4 bytes from the stack.... */
2472 stack_bytes
-= (count
- 2) * 4;
2474 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2475 if (stack_bytes
!= 0 && stack_bytes
!= 16)
2477 error ("bad amount of stack space removal: %d", stack_bytes
);
2481 /* Now compute the bit mask of registers to push. */
2483 for (i
= 2; i
< count
; i
++)
2485 rtx vector_element
= XVECEXP (op
, 0, i
);
2487 if (GET_CODE (vector_element
) != SET
)
2489 if (GET_CODE (SET_DEST (vector_element
)) != REG
)
2491 if (! register_is_ok_for_epilogue (SET_DEST (vector_element
), SImode
))
2494 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2497 /* Scan for the first register to pop. */
2498 for (first
= 0; first
< 32; first
++)
2500 if (mask
& (1 << first
))
2507 /* Discover the last register to pop. */
2508 if (mask
& (1 << LINK_POINTER_REGNUM
))
2510 if (stack_bytes
!= 16)
2513 last
= LINK_POINTER_REGNUM
;
2517 if (stack_bytes
!= 0)
2520 if ((mask
& (1 << 29)) == 0)
2526 /* Note, it is possible to have gaps in the register mask.
2527 We ignore this here, and generate a JR anyway. We will
2528 be popping more registers than is strictly necessary, but
2529 it does save code space. */
2531 if (TARGET_LONG_CALLS
)
2536 sprintf (name
, "__return_%s", reg_names
[first
]);
2538 sprintf (name
, "__return_%s_%s", reg_names
[first
], reg_names
[last
]);
2540 sprintf (buff
, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2546 sprintf (buff
, "jr __return_%s", reg_names
[first
]);
2548 sprintf (buff
, "jr __return_%s_%s", reg_names
[first
], reg_names
[last
]);
2555 /* Return nonzero if the given RTX is suitable for collapsing into
2556 a jump to a function prologue. */
2558 pattern_is_ok_for_prologue (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2560 int count
= XVECLEN (op
, 0);
2564 /* If there are no registers to save then the function prologue
2569 /* The pattern matching has already established that we are adjusting the
2570 stack and pushing at least one register. We must now check that the
2571 remaining entries in the vector to make sure that they are also register
2572 pushes, except for the last entry which should be a CLOBBER of r10.
2574 The test below performs the C equivalent of this machine description
2577 (set (mem:SI (plus:SI (reg:SI 3)
2578 (match_operand:SI 2 "immediate_operand" "i")))
2579 (match_operand:SI 3 "register_is_ok_for_epilogue" "r"))
2583 for (i
= 2; i
< count
- (TARGET_LONG_CALLS
? 2: 1); i
++)
2589 vector_element
= XVECEXP (op
, 0, i
);
2591 if (GET_CODE (vector_element
) != SET
)
2594 dest
= SET_DEST (vector_element
);
2595 src
= SET_SRC (vector_element
);
2597 if (GET_CODE (dest
) != MEM
2598 || GET_MODE (dest
) != SImode
2599 || GET_CODE (src
) != REG
2600 || GET_MODE (src
) != SImode
2601 || ! register_is_ok_for_epilogue (src
, SImode
))
2604 plus
= XEXP (dest
, 0);
2606 if ( GET_CODE (plus
) != PLUS
2607 || GET_CODE (XEXP (plus
, 0)) != REG
2608 || GET_MODE (XEXP (plus
, 0)) != SImode
2609 || REGNO (XEXP (plus
, 0)) != STACK_POINTER_REGNUM
2610 || GET_CODE (XEXP (plus
, 1)) != CONST_INT
)
2613 /* If the register is being pushed somewhere other than the stack
2614 space just acquired by the first operand then abandon this quest.
2615 Note: the test is <= because both values are negative. */
2616 if (INTVAL (XEXP (plus
, 1))
2617 <= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)))
2623 /* Make sure that the last entries in the vector are clobbers. */
2624 for (; i
< count
; i
++)
2626 vector_element
= XVECEXP (op
, 0, i
);
2628 if (GET_CODE (vector_element
) != CLOBBER
2629 || GET_CODE (XEXP (vector_element
, 0)) != REG
2630 || !(REGNO (XEXP (vector_element
, 0)) == 10
2631 || (TARGET_LONG_CALLS
? (REGNO (XEXP (vector_element
, 0)) == 11) : 0 )))
2638 /* Construct a JARL instruction to a routine that will perform the equivalent
2639 of the RTL passed as a parameter. This RTL is a function prologue that
2640 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2641 some stack space as well. The code has already verified that the RTL
2642 matches these requirements. */
2644 construct_save_jarl (rtx op
)
2646 int count
= XVECLEN (op
, 0);
2648 unsigned long int mask
;
2649 unsigned long int first
;
2650 unsigned long int last
;
2652 static char buff
[100]; /* XXX */
2656 error ("bogus JARL construction: %d\n", count
);
2661 if (GET_CODE (XVECEXP (op
, 0, 0)) != SET
)
2663 if (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != PLUS
)
2665 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0)) != REG
)
2667 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) != CONST_INT
)
2670 /* Work out how many bytes to push onto the stack after storing the
2672 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2674 /* Each push will put 4 bytes from the stack.... */
2675 stack_bytes
+= (count
- (TARGET_LONG_CALLS
? 3 : 2)) * 4;
2677 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2678 if (stack_bytes
!= 0 && stack_bytes
!= -16)
2680 error ("bad amount of stack space removal: %d", stack_bytes
);
2684 /* Now compute the bit mask of registers to push. */
2686 for (i
= 1; i
< count
- (TARGET_LONG_CALLS
? 2 : 1); i
++)
2688 rtx vector_element
= XVECEXP (op
, 0, i
);
2690 if (GET_CODE (vector_element
) != SET
)
2692 if (GET_CODE (SET_SRC (vector_element
)) != REG
)
2694 if (! register_is_ok_for_epilogue (SET_SRC (vector_element
), SImode
))
2697 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2700 /* Scan for the first register to push. */
2701 for (first
= 0; first
< 32; first
++)
2703 if (mask
& (1 << first
))
2710 /* Discover the last register to push. */
2711 if (mask
& (1 << LINK_POINTER_REGNUM
))
2713 if (stack_bytes
!= -16)
2716 last
= LINK_POINTER_REGNUM
;
2720 if (stack_bytes
!= 0)
2722 if ((mask
& (1 << 29)) == 0)
2728 /* Note, it is possible to have gaps in the register mask.
2729 We ignore this here, and generate a JARL anyway. We will
2730 be pushing more registers than is strictly necessary, but
2731 it does save code space. */
2733 if (TARGET_LONG_CALLS
)
2738 sprintf (name
, "__save_%s", reg_names
[first
]);
2740 sprintf (name
, "__save_%s_%s", reg_names
[first
], reg_names
[last
]);
2742 sprintf (buff
, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2748 sprintf (buff
, "jarl __save_%s, r10", reg_names
[first
]);
2750 sprintf (buff
, "jarl __save_%s_%s, r10", reg_names
[first
],
2757 extern tree last_assemble_variable_decl
;
2758 extern int size_directive_output
;
2760 /* A version of asm_output_aligned_bss() that copes with the special
2761 data areas of the v850. */
2763 v850_output_aligned_bss (FILE * file
,
2769 switch (v850_get_data_area (decl
))
2787 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
2788 #ifdef ASM_DECLARE_OBJECT_NAME
2789 last_assemble_variable_decl
= decl
;
2790 ASM_DECLARE_OBJECT_NAME (file
, name
, decl
);
2792 /* Standard thing is just output label for the object. */
2793 ASM_OUTPUT_LABEL (file
, name
);
2794 #endif /* ASM_DECLARE_OBJECT_NAME */
2795 ASM_OUTPUT_SKIP (file
, size
? size
: 1);
2798 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2800 v850_output_common (FILE * file
,
2806 if (decl
== NULL_TREE
)
2808 fprintf (file
, "%s", COMMON_ASM_OP
);
2812 switch (v850_get_data_area (decl
))
2815 fprintf (file
, "%s", ZCOMMON_ASM_OP
);
2819 fprintf (file
, "%s", SCOMMON_ASM_OP
);
2823 fprintf (file
, "%s", TCOMMON_ASM_OP
);
2827 fprintf (file
, "%s", COMMON_ASM_OP
);
2832 assemble_name (file
, name
);
2833 fprintf (file
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
2836 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2838 v850_output_local (FILE * file
,
2844 fprintf (file
, "%s", LOCAL_ASM_OP
);
2845 assemble_name (file
, name
);
2846 fprintf (file
, "\n");
2848 ASM_OUTPUT_ALIGNED_DECL_COMMON (file
, decl
, name
, size
, align
);
2851 /* Add data area to the given declaration if a ghs data area pragma is
2852 currently in effect (#pragma ghs startXXX/endXXX). */
2854 v850_insert_attributes (tree decl
, tree
* attr_ptr ATTRIBUTE_UNUSED
)
2857 && data_area_stack
->data_area
2858 && current_function_decl
== NULL_TREE
2859 && (TREE_CODE (decl
) == VAR_DECL
|| TREE_CODE (decl
) == CONST_DECL
)
2860 && v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2861 v850_set_data_area (decl
, data_area_stack
->data_area
);
2863 /* Initialize the default names of the v850 specific sections,
2864 if this has not been done before. */
2866 if (GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
] == NULL
)
2868 GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
]
2869 = build_string (sizeof (".sdata")-1, ".sdata");
2871 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROSDATA
]
2872 = build_string (sizeof (".rosdata")-1, ".rosdata");
2874 GHS_default_section_names
[(int) GHS_SECTION_KIND_TDATA
]
2875 = build_string (sizeof (".tdata")-1, ".tdata");
2877 GHS_default_section_names
[(int) GHS_SECTION_KIND_ZDATA
]
2878 = build_string (sizeof (".zdata")-1, ".zdata");
2880 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROZDATA
]
2881 = build_string (sizeof (".rozdata")-1, ".rozdata");
2884 if (current_function_decl
== NULL_TREE
2885 && (TREE_CODE (decl
) == VAR_DECL
2886 || TREE_CODE (decl
) == CONST_DECL
2887 || TREE_CODE (decl
) == FUNCTION_DECL
)
2888 && (!DECL_EXTERNAL (decl
) || DECL_INITIAL (decl
))
2889 && !DECL_SECTION_NAME (decl
))
2891 enum GHS_section_kind kind
= GHS_SECTION_KIND_DEFAULT
;
2892 tree chosen_section
;
2894 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2895 kind
= GHS_SECTION_KIND_TEXT
;
2898 /* First choose a section kind based on the data area of the decl. */
2899 switch (v850_get_data_area (decl
))
2905 kind
= ((TREE_READONLY (decl
))
2906 ? GHS_SECTION_KIND_ROSDATA
2907 : GHS_SECTION_KIND_SDATA
);
2911 kind
= GHS_SECTION_KIND_TDATA
;
2915 kind
= ((TREE_READONLY (decl
))
2916 ? GHS_SECTION_KIND_ROZDATA
2917 : GHS_SECTION_KIND_ZDATA
);
2920 case DATA_AREA_NORMAL
: /* default data area */
2921 if (TREE_READONLY (decl
))
2922 kind
= GHS_SECTION_KIND_RODATA
;
2923 else if (DECL_INITIAL (decl
))
2924 kind
= GHS_SECTION_KIND_DATA
;
2926 kind
= GHS_SECTION_KIND_BSS
;
2930 /* Now, if the section kind has been explicitly renamed,
2931 then attach a section attribute. */
2932 chosen_section
= GHS_current_section_names
[(int) kind
];
2934 /* Otherwise, if this kind of section needs an explicit section
2935 attribute, then also attach one. */
2936 if (chosen_section
== NULL
)
2937 chosen_section
= GHS_default_section_names
[(int) kind
];
2941 /* Only set the section name if specified by a pragma, because
2942 otherwise it will force those variables to get allocated storage
2943 in this module, rather than by the linker. */
2944 DECL_SECTION_NAME (decl
) = chosen_section
;
2949 /* Return nonzero if the given RTX is suitable
2950 for collapsing into a DISPOSE instruction. */
2953 pattern_is_ok_for_dispose (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2955 int count
= XVECLEN (op
, 0);
2958 /* If there are no registers to restore then
2959 the dispose instruction is not suitable. */
2963 /* The pattern matching has already established that we are performing a
2964 function epilogue and that we are popping at least one register. We must
2965 now check the remaining entries in the vector to make sure that they are
2966 also register pops. There is no good reason why there should ever be
2967 anything else in this vector, but being paranoid always helps...
2969 The test below performs the C equivalent of this machine description
2972 (set (match_operand:SI n "register_is_ok_for_epilogue" "r")
2973 (mem:SI (plus:SI (reg:SI 3)
2974 (match_operand:SI n "immediate_operand" "i"))))
2977 for (i
= 3; i
< count
; i
++)
2979 rtx vector_element
= XVECEXP (op
, 0, i
);
2984 if (GET_CODE (vector_element
) != SET
)
2987 dest
= SET_DEST (vector_element
);
2988 src
= SET_SRC (vector_element
);
2990 if ( GET_CODE (dest
) != REG
2991 || GET_MODE (dest
) != SImode
2992 || ! register_is_ok_for_epilogue (dest
, SImode
)
2993 || GET_CODE (src
) != MEM
2994 || GET_MODE (src
) != SImode
)
2997 plus
= XEXP (src
, 0);
2999 if ( GET_CODE (plus
) != PLUS
3000 || GET_CODE (XEXP (plus
, 0)) != REG
3001 || GET_MODE (XEXP (plus
, 0)) != SImode
3002 || REGNO (XEXP (plus
, 0)) != STACK_POINTER_REGNUM
3003 || GET_CODE (XEXP (plus
, 1)) != CONST_INT
)
3010 /* Construct a DISPOSE instruction that is the equivalent of
3011 the given RTX. We have already verified that this should
3015 construct_dispose_instruction (rtx op
)
3017 int count
= XVECLEN (op
, 0);
3019 unsigned long int mask
;
3021 static char buff
[ 100 ]; /* XXX */
3026 error ("Bogus DISPOSE construction: %d\n", count
);
3030 /* Work out how many bytes to pop off the
3031 stack before retrieving registers. */
3032 if (GET_CODE (XVECEXP (op
, 0, 1)) != SET
)
3034 if (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) != PLUS
)
3036 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) != CONST_INT
)
3039 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
3041 /* Each pop will remove 4 bytes from the stack.... */
3042 stack_bytes
-= (count
- 2) * 4;
3044 /* Make sure that the amount we are popping
3045 will fit into the DISPOSE instruction. */
3046 if (stack_bytes
> 128)
3048 error ("Too much stack space to dispose of: %d", stack_bytes
);
3052 /* Now compute the bit mask of registers to push. */
3055 for (i
= 2; i
< count
; i
++)
3057 rtx vector_element
= XVECEXP (op
, 0, i
);
3059 if (GET_CODE (vector_element
) != SET
)
3061 if (GET_CODE (SET_DEST (vector_element
)) != REG
)
3063 if (! register_is_ok_for_epilogue (SET_DEST (vector_element
), SImode
))
3066 if (REGNO (SET_DEST (vector_element
)) == 2)
3069 mask
|= 1 << REGNO (SET_DEST (vector_element
));
3072 if (! TARGET_DISABLE_CALLT
3073 && (use_callt
|| stack_bytes
== 0 || stack_bytes
== 16))
3077 sprintf (buff
, "callt ctoff(__callt_return_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29);
3082 for (i
= 20; i
< 32; i
++)
3083 if (mask
& (1 << i
))
3087 sprintf (buff
, "callt ctoff(__callt_return_r31c)");
3089 sprintf (buff
, "callt ctoff(__callt_return_r%d_r%d%s)",
3090 i
, (mask
& (1 << 31)) ? 31 : 29, stack_bytes
? "c" : "");
3095 static char regs
[100]; /* XXX */
3098 /* Generate the DISPOSE instruction. Note we could just issue the
3099 bit mask as a number as the assembler can cope with this, but for
3100 the sake of our readers we turn it into a textual description. */
3104 for (i
= 20; i
< 32; i
++)
3106 if (mask
& (1 << i
))
3111 strcat (regs
, ", ");
3116 strcat (regs
, reg_names
[ first
]);
3118 for (i
++; i
< 32; i
++)
3119 if ((mask
& (1 << i
)) == 0)
3124 strcat (regs
, " - ");
3125 strcat (regs
, reg_names
[ i
- 1 ] );
3130 sprintf (buff
, "dispose %d {%s}, r31", stack_bytes
/ 4, regs
);
3136 /* Return nonzero if the given RTX is suitable
3137 for collapsing into a PREPARE instruction. */
3140 pattern_is_ok_for_prepare (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
3142 int count
= XVECLEN (op
, 0);
3145 /* If there are no registers to restore then the prepare instruction
3150 /* The pattern matching has already established that we are adjusting the
3151 stack and pushing at least one register. We must now check that the
3152 remaining entries in the vector to make sure that they are also register
3155 The test below performs the C equivalent of this machine description
3158 (set (mem:SI (plus:SI (reg:SI 3)
3159 (match_operand:SI 2 "immediate_operand" "i")))
3160 (match_operand:SI 3 "register_is_ok_for_epilogue" "r"))
3164 for (i
= 2; i
< count
; i
++)
3166 rtx vector_element
= XVECEXP (op
, 0, i
);
3171 if (GET_CODE (vector_element
) != SET
)
3174 dest
= SET_DEST (vector_element
);
3175 src
= SET_SRC (vector_element
);
3177 if ( GET_CODE (dest
) != MEM
3178 || GET_MODE (dest
) != SImode
3179 || GET_CODE (src
) != REG
3180 || GET_MODE (src
) != SImode
3181 || ! register_is_ok_for_epilogue (src
, SImode
)
3185 plus
= XEXP (dest
, 0);
3187 if ( GET_CODE (plus
) != PLUS
3188 || GET_CODE (XEXP (plus
, 0)) != REG
3189 || GET_MODE (XEXP (plus
, 0)) != SImode
3190 || REGNO (XEXP (plus
, 0)) != STACK_POINTER_REGNUM
3191 || GET_CODE (XEXP (plus
, 1)) != CONST_INT
)
3194 /* If the register is being pushed somewhere other than the stack
3195 space just acquired by the first operand then abandon this quest.
3196 Note: the test is <= because both values are negative. */
3197 if (INTVAL (XEXP (plus
, 1))
3198 <= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)))
3205 /* Construct a PREPARE instruction that is the equivalent of
3206 the given RTL. We have already verified that this should
3210 construct_prepare_instruction (rtx op
)
3212 int count
= XVECLEN (op
, 0);
3214 unsigned long int mask
;
3216 static char buff
[ 100 ]; /* XXX */
3221 error ("Bogus PREPEARE construction: %d\n", count
);
3225 /* Work out how many bytes to push onto
3226 the stack after storing the registers. */
3227 if (GET_CODE (XVECEXP (op
, 0, 0)) != SET
)
3229 if (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != PLUS
)
3231 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) != CONST_INT
)
3234 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
3236 /* Each push will put 4 bytes from the stack. */
3237 stack_bytes
+= (count
- 1) * 4;
3239 /* Make sure that the amount we are popping
3240 will fit into the DISPOSE instruction. */
3241 if (stack_bytes
< -128)
3243 error ("Too much stack space to prepare: %d", stack_bytes
);
3247 /* Now compute the bit mask of registers to push. */
3249 for (i
= 1; i
< count
; i
++)
3251 rtx vector_element
= XVECEXP (op
, 0, i
);
3253 if (GET_CODE (vector_element
) != SET
)
3255 if (GET_CODE (SET_SRC (vector_element
)) != REG
)
3257 if (! register_is_ok_for_epilogue (SET_SRC (vector_element
), SImode
))
3260 if (REGNO (SET_SRC (vector_element
)) == 2)
3263 mask
|= 1 << REGNO (SET_SRC (vector_element
));
3266 if ((! TARGET_DISABLE_CALLT
)
3267 && (use_callt
|| stack_bytes
== 0 || stack_bytes
== -16))
3271 sprintf (buff
, "callt ctoff(__callt_save_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29 );
3275 for (i
= 20; i
< 32; i
++)
3276 if (mask
& (1 << i
))
3280 sprintf (buff
, "callt ctoff(__callt_save_r31c)");
3282 sprintf (buff
, "callt ctoff(__callt_save_r%d_r%d%s)",
3283 i
, (mask
& (1 << 31)) ? 31 : 29, stack_bytes
? "c" : "");
3287 static char regs
[100]; /* XXX */
3291 /* Generate the PREPARE instruction. Note we could just issue the
3292 bit mask as a number as the assembler can cope with this, but for
3293 the sake of our readers we turn it into a textual description. */
3297 for (i
= 20; i
< 32; i
++)
3299 if (mask
& (1 << i
))
3304 strcat (regs
, ", ");
3309 strcat (regs
, reg_names
[ first
]);
3311 for (i
++; i
< 32; i
++)
3312 if ((mask
& (1 << i
)) == 0)
3317 strcat (regs
, " - ");
3318 strcat (regs
, reg_names
[ i
- 1 ] );
3323 sprintf (buff
, "prepare {%s}, %d", regs
, (- stack_bytes
) / 4);
3329 /* Return an RTX indicating where the return address to the
3330 calling function can be found. */
3333 v850_return_addr (int count
)
3338 return get_hard_reg_initial_val (Pmode
, LINK_POINTER_REGNUM
);
3342 v850_select_section (tree exp
,
3343 int reloc ATTRIBUTE_UNUSED
,
3344 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
3346 if (TREE_CODE (exp
) == VAR_DECL
)
3349 if (!TREE_READONLY (exp
)
3350 || TREE_SIDE_EFFECTS (exp
)
3351 || !DECL_INITIAL (exp
)
3352 || (DECL_INITIAL (exp
) != error_mark_node
3353 && !TREE_CONSTANT (DECL_INITIAL (exp
))))
3358 switch (v850_get_data_area (exp
))
3380 readonly_data_section ();
3387 readonly_data_section ();
3390 /* Worker function for TARGET_RETURN_IN_MEMORY. */
3393 v850_return_in_memory (tree type
, tree fntype ATTRIBUTE_UNUSED
)
3395 /* Return values > 8 bytes in length in memory. */
3396 return int_size_in_bytes (type
) > 8 || TYPE_MODE (type
) == BLKmode
;
3399 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
3402 v850_setup_incoming_varargs (CUMULATIVE_ARGS
*ca
,
3403 enum machine_mode mode ATTRIBUTE_UNUSED
,
3404 tree type ATTRIBUTE_UNUSED
,
3405 int *pretend_arg_size ATTRIBUTE_UNUSED
,
3406 int second_time ATTRIBUTE_UNUSED
)
3408 ca
->anonymous_args
= (!TARGET_GHS
? 1 : 0);