1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "stringpool.h"
29 #include "stor-layout.h"
34 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "conditions.h"
38 #include "insn-attr.h"
48 #include "diagnostic-core.h"
51 #include "dominance.h"
57 #include "cfgcleanup.h"
59 #include "basic-block.h"
64 #include "target-def.h"
67 #define streq(a,b) (strcmp (a, b) == 0)
70 static void v850_print_operand_address (FILE *, rtx
);
/* NOTE(review): lossily-extracted fragment of gcc/config/v850/v850.c —
   original line numbers are embedded in the text and some source lines
   are missing.  Code tokens below are byte-identical to the extract;
   only comments have been added.  */
72 /* Names of the various data areas used on the v850. */
/* Section-name tables indexed by GHS section kind; "default" holds the
   initial names, "current" the names after any #pragma overrides.  */
73 const char * GHS_default_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
74 const char * GHS_current_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
76 /* Track the current data area set by the data area pragma (which
77 can be nested). Tested by check_default_data_area. */
78 data_area_stack_element
* data_area_stack
= NULL
;
80 /* True if we don't need to check any more if the current
81 function is an interrupt handler. */
82 static int v850_interrupt_cache_p
= FALSE
;
/* Operands of the pending comparison — presumably set by the compare
   expanders elsewhere in this file; TODO confirm against full source.  */
84 rtx v850_compare_op0
, v850_compare_op1
;
86 /* Whether current function is an interrupt handler. */
87 static int v850_interrupt_p
= FALSE
;
/* GC-rooted (GTY) section handles for the v850 data areas:
   read-only sdata/zdata, tiny data, zero data and zero bss.  */
89 static GTY(()) section
* rosdata_section
;
90 static GTY(()) section
* rozdata_section
;
91 static GTY(()) section
* tdata_section
;
92 static GTY(()) section
* zdata_section
;
93 static GTY(()) section
* zbss_section
;
95 /* We use this to wrap all emitted insns in the prologue. */
99 if (GET_CODE (x
) != CLOBBER
)
100 RTX_FRAME_RELATED_P (x
) = 1;
104 /* Mark all the subexpressions of the PARALLEL rtx PAR as
105 frame-related. Return PAR.
107 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
108 PARALLEL rtx other than the first if they do not have the
109 FRAME_RELATED flag set on them. */
/* NOTE(review): extraction dropped interior lines here (return type,
   braces, and the declaration of `i`); tokens kept byte-identical.  */
112 v850_all_frame_related (rtx par
)
114 int len
= XVECLEN (par
, 0);
/* Only PARALLELs are meaningful here.  */
117 gcc_assert (GET_CODE (par
) == PARALLEL
);
/* F() presumably sets RTX_FRAME_RELATED_P on each element — see the
   wrapper near the top of this file.  */
118 for (i
= 0; i
< len
; i
++)
119 F (XVECEXP (par
, 0, i
));
124 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
125 Specify whether to pass the argument by reference. */
/* NOTE(review): fragment — the return type, braces and the final
   size-based decision (orig lines 131-143) are missing from this view.
   Visible logic: SIZE is taken from TYPE when available, else from
   MODE.  Tokens kept byte-identical.  */
128 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
129 machine_mode mode
, const_tree type
,
130 bool named ATTRIBUTE_UNUSED
)
132 unsigned HOST_WIDE_INT size
;
138 size
= int_size_in_bytes (type
);
140 size
= GET_MODE_SIZE (mode
);
145 /* Return an RTX to represent where an argument with mode MODE
146 and type TYPE will be passed to a function. If the result
147 is NULL_RTX, the argument will be pushed. */
150 v850_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
151 const_tree type
, bool named
)
153 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
154 rtx result
= NULL_RTX
;
161 size
= int_size_in_bytes (type
);
163 size
= GET_MODE_SIZE (mode
);
165 size
= (size
+ UNITS_PER_WORD
-1) & ~(UNITS_PER_WORD
-1);
169 /* Once we have stopped using argument registers, do not start up again. */
170 cum
->nbytes
= 4 * UNITS_PER_WORD
;
175 align
= UNITS_PER_WORD
;
176 else if (size
<= UNITS_PER_WORD
&& type
)
177 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
181 cum
->nbytes
= (cum
->nbytes
+ align
- 1) &~(align
- 1);
183 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
186 if (type
== NULL_TREE
187 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
190 switch (cum
->nbytes
/ UNITS_PER_WORD
)
193 result
= gen_rtx_REG (mode
, 6);
196 result
= gen_rtx_REG (mode
, 7);
199 result
= gen_rtx_REG (mode
, 8);
202 result
= gen_rtx_REG (mode
, 9);
211 /* Return the number of bytes which must be put into registers
212 for values which are part in registers and part in memory. */
/* NOTE(review): fragment with interior lines missing (return type,
   braces, declarations of `size`/`align`, and several early returns).
   Tokens kept byte-identical; comments only.  */
214 v850_arg_partial_bytes (cumulative_args_t cum_v
, machine_mode mode
,
215 tree type
, bool named
)
217 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
/* Argument size comes from TYPE when known, else from MODE.  */
224 size
= int_size_in_bytes (type
);
226 size
= GET_MODE_SIZE (mode
);
232 align
= UNITS_PER_WORD
;
234 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
/* Round the running byte count up to the argument's alignment.  */
238 cum
->nbytes
= (cum
->nbytes
+ align
- 1) & ~ (align
- 1);
/* Four words (r6-r9, presumably) are available for arguments; past
   that everything goes on the stack — TODO confirm register set.  */
240 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
243 if (cum
->nbytes
+ size
<= 4 * UNITS_PER_WORD
)
246 if (type
== NULL_TREE
247 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
/* Partial case: the bytes that still fit in the register block.  */
250 return 4 * UNITS_PER_WORD
- cum
->nbytes
;
253 /* Update the data in CUM to advance over an argument
254 of mode MODE and data type TYPE.
255 (TYPE is null for libcalls where that information may not be available.) */
/* NOTE(review): fragment — the branch condition selecting between the
   two `cum->nbytes +=` forms and the trailing rounding (orig lines
   262-263, 267-268, 271, 274-275) are missing.  The second form adds
   pointer size for large (>8 byte) aggregates, suggesting they are
   passed by reference — TODO confirm against full source.  */
258 v850_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
259 const_tree type
, bool named ATTRIBUTE_UNUSED
)
261 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
264 cum
->nbytes
+= (((mode
!= BLKmode
265 ? GET_MODE_SIZE (mode
)
266 : int_size_in_bytes (type
)) + UNITS_PER_WORD
- 1)
269 cum
->nbytes
+= (((type
&& int_size_in_bytes (type
) > 8
270 ? GET_MODE_SIZE (Pmode
)
272 ? GET_MODE_SIZE (mode
)
273 : int_size_in_bytes (type
))) + UNITS_PER_WORD
- 1)
277 /* Return the high and low words of a CONST_DOUBLE */
/* NOTE(review): fragment — return type, braces, the declarations of
   `rv`/`t`, and the switch case labels (DFmode/SFmode/VOIDmode,
   presumably) are missing.  Tokens kept byte-identical.  */
280 const_double_split (rtx x
, HOST_WIDE_INT
* p_high
, HOST_WIDE_INT
* p_low
)
282 if (GET_CODE (x
) == CONST_DOUBLE
)
287 switch (GET_MODE (x
))
/* Double-float case: split into two 32-bit target words.  */
290 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
291 REAL_VALUE_TO_TARGET_DOUBLE (rv
, t
);
292 *p_high
= t
[1]; /* since v850 is little endian */
293 *p_low
= t
[0]; /* high is second word */
/* Single-float case: the whole value fits in *p_high.  */
297 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
298 REAL_VALUE_TO_TARGET_SINGLE (rv
, *p_high
);
/* Integer CONST_DOUBLE: take the two halves directly.  */
304 *p_high
= CONST_DOUBLE_HIGH (x
);
305 *p_low
= CONST_DOUBLE_LOW (x
);
/* Anything else is a caller bug.  */
313 fatal_insn ("const_double_split got a bad insn:", x
);
317 /* Return the cost of the rtx R with code CODE. */
/* NOTE(review): fragment — return type, braces and the `return`
   statements for each constraint tier are missing.  Visible structure:
   cost tiers keyed on the I/J/K constant constraints (smallest
   immediates first).  ZERO_COST is presumably the cost returned for
   CONST_OK_FOR_I values — TODO confirm.  */
320 const_costs_int (HOST_WIDE_INT value
, int zero_cost
)
322 if (CONST_OK_FOR_I (value
))
324 else if (CONST_OK_FOR_J (value
))
326 else if (CONST_OK_FOR_K (value
))
/* Cost of constant R (CONST_INT, CONST_DOUBLE, or symbolic — the case
   labels are missing from this extract).  NOTE(review): return type,
   braces and switch labels dropped by extraction; tokens byte-identical.  */
333 const_costs (rtx r
, enum rtx_code c
)
335 HOST_WIDE_INT high
, low
;
/* CONST_INT: delegate with zero_cost == 0.  */
340 return const_costs_int (INTVAL (r
), 0);
/* CONST_DOUBLE: cost each 32-bit half; SFmode needs only one word.  */
343 const_double_split (r
, &high
, &low
);
344 if (GET_MODE (r
) == SFmode
)
345 return const_costs_int (high
, 1);
347 return const_costs_int (high
, 1) + const_costs_int (low
, 1);
363 v850_rtx_costs (rtx x
,
365 int outer_code ATTRIBUTE_UNUSED
,
366 int opno ATTRIBUTE_UNUSED
,
367 int * total
, bool speed
)
369 enum rtx_code code
= (enum rtx_code
) codearg
;
378 *total
= COSTS_N_INSNS (const_costs (x
, code
));
385 if (TARGET_V850E
&& !speed
)
393 && ( GET_MODE (x
) == SImode
394 || GET_MODE (x
) == HImode
395 || GET_MODE (x
) == QImode
))
397 if (GET_CODE (XEXP (x
, 1)) == REG
)
399 else if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
401 if (CONST_OK_FOR_O (INTVAL (XEXP (x
, 1))))
403 else if (CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))))
412 if (outer_code
== COMPARE
)
421 /* Print operand X using operand code CODE to assembly language output file
425 v850_print_operand (FILE * file
, rtx x
, int code
)
427 HOST_WIDE_INT high
, low
;
432 /* We use 'c' operands with symbols for .vtinherit. */
433 if (GET_CODE (x
) == SYMBOL_REF
)
435 output_addr_const(file
, x
);
442 switch ((code
== 'B' || code
== 'C')
443 ? reverse_condition (GET_CODE (x
)) : GET_CODE (x
))
446 if (code
== 'c' || code
== 'C')
447 fprintf (file
, "nz");
449 fprintf (file
, "ne");
452 if (code
== 'c' || code
== 'C')
458 fprintf (file
, "ge");
461 fprintf (file
, "gt");
464 fprintf (file
, "le");
467 fprintf (file
, "lt");
470 fprintf (file
, "nl");
476 fprintf (file
, "nh");
485 case 'F': /* High word of CONST_DOUBLE. */
486 switch (GET_CODE (x
))
489 fprintf (file
, "%d", (INTVAL (x
) >= 0) ? 0 : -1);
493 const_double_split (x
, &high
, &low
);
494 fprintf (file
, "%ld", (long) high
);
501 case 'G': /* Low word of CONST_DOUBLE. */
502 switch (GET_CODE (x
))
505 fprintf (file
, "%ld", (long) INTVAL (x
));
509 const_double_split (x
, &high
, &low
);
510 fprintf (file
, "%ld", (long) low
);
518 fprintf (file
, "%d\n", (int)(INTVAL (x
) & 0xffff));
521 fprintf (file
, "%d", exact_log2 (INTVAL (x
)));
524 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
526 if (GET_CODE (x
) == CONST
)
527 x
= XEXP (XEXP (x
, 0), 0);
529 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
531 if (SYMBOL_REF_ZDA_P (x
))
532 fprintf (file
, "zdaoff");
533 else if (SYMBOL_REF_SDA_P (x
))
534 fprintf (file
, "sdaoff");
535 else if (SYMBOL_REF_TDA_P (x
))
536 fprintf (file
, "tdaoff");
541 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
542 output_addr_const (file
, x
);
545 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
547 if (GET_CODE (x
) == CONST
)
548 x
= XEXP (XEXP (x
, 0), 0);
550 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
552 if (SYMBOL_REF_ZDA_P (x
))
553 fprintf (file
, "r0");
554 else if (SYMBOL_REF_SDA_P (x
))
555 fprintf (file
, "gp");
556 else if (SYMBOL_REF_TDA_P (x
))
557 fprintf (file
, "ep");
561 case 'R': /* 2nd word of a double. */
562 switch (GET_CODE (x
))
565 fprintf (file
, reg_names
[REGNO (x
) + 1]);
568 x
= XEXP (adjust_address (x
, SImode
, 4), 0);
569 v850_print_operand_address (file
, x
);
570 if (GET_CODE (x
) == CONST_INT
)
571 fprintf (file
, "[r0]");
576 unsigned HOST_WIDE_INT v
= INTVAL (x
);
578 /* Trickery to avoid problems with shifting
579 32-bits at a time on a 32-bit host. */
582 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, v
);
587 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_HIGH (x
));
597 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
598 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), FALSE
))
605 /* Like an 'S' operand above, but for unsigned loads only. */
606 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), TRUE
))
611 case 'W': /* Print the instruction suffix. */
612 switch (GET_MODE (x
))
617 case QImode
: fputs (".b", file
); break;
618 case HImode
: fputs (".h", file
); break;
619 case SImode
: fputs (".w", file
); break;
620 case SFmode
: fputs (".w", file
); break;
623 case '.': /* Register r0. */
624 fputs (reg_names
[0], file
);
626 case 'z': /* Reg or zero. */
628 fputs (reg_names
[REGNO (x
)], file
);
629 else if ((GET_MODE(x
) == SImode
630 || GET_MODE(x
) == DFmode
631 || GET_MODE(x
) == SFmode
)
632 && x
== CONST0_RTX(GET_MODE(x
)))
633 fputs (reg_names
[0], file
);
636 gcc_assert (x
== const0_rtx
);
637 fputs (reg_names
[0], file
);
641 switch (GET_CODE (x
))
644 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
645 output_address (gen_rtx_PLUS (SImode
, gen_rtx_REG (SImode
, 0),
648 output_address (XEXP (x
, 0));
652 fputs (reg_names
[REGNO (x
)], file
);
655 fputs (reg_names
[subreg_regno (x
)], file
);
658 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_LOW (x
));
666 v850_print_operand_address (file
, x
);
677 /* Output assembly language output for the address ADDR to FILE. */
680 v850_print_operand_address (FILE * file
, rtx addr
)
682 switch (GET_CODE (addr
))
685 fprintf (file
, "0[");
686 v850_print_operand (file
, addr
, 0);
690 if (GET_CODE (XEXP (addr
, 0)) == REG
)
693 fprintf (file
, "lo(");
694 v850_print_operand (file
, XEXP (addr
, 1), 0);
695 fprintf (file
, ")[");
696 v850_print_operand (file
, XEXP (addr
, 0), 0);
701 if (GET_CODE (XEXP (addr
, 0)) == REG
702 || GET_CODE (XEXP (addr
, 0)) == SUBREG
)
705 v850_print_operand (file
, XEXP (addr
, 1), 0);
707 v850_print_operand (file
, XEXP (addr
, 0), 0);
712 v850_print_operand (file
, XEXP (addr
, 0), 0);
714 v850_print_operand (file
, XEXP (addr
, 1), 0);
719 const char *off_name
= NULL
;
720 const char *reg_name
= NULL
;
722 if (SYMBOL_REF_ZDA_P (addr
))
727 else if (SYMBOL_REF_SDA_P (addr
))
732 else if (SYMBOL_REF_TDA_P (addr
))
739 fprintf (file
, "%s(", off_name
);
740 output_addr_const (file
, addr
);
742 fprintf (file
, ")[%s]", reg_name
);
746 if (special_symbolref_operand (addr
, VOIDmode
))
748 rtx x
= XEXP (XEXP (addr
, 0), 0);
749 const char *off_name
;
750 const char *reg_name
;
752 if (SYMBOL_REF_ZDA_P (x
))
757 else if (SYMBOL_REF_SDA_P (x
))
762 else if (SYMBOL_REF_TDA_P (x
))
770 fprintf (file
, "%s(", off_name
);
771 output_addr_const (file
, addr
);
772 fprintf (file
, ")[%s]", reg_name
);
775 output_addr_const (file
, addr
);
778 output_addr_const (file
, addr
);
784 v850_print_operand_punct_valid_p (unsigned char code
)
789 /* When assemble_integer is used to emit the offsets for a switch
790 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
791 output_addr_const will normally barf at this, but it is OK to omit
792 the truncate and just emit the difference of the two labels. The
793 .hword directive will automatically handle the truncation for us.
795 Returns true if rtx was handled, false otherwise. */
/* NOTE(review): fragment — return type, braces, the strip of the
   TRUNCATE operand, and the early/late `return` statements are
   missing.  Tokens kept byte-identical.  */
798 v850_output_addr_const_extra (FILE * file
, rtx x
)
800 if (GET_CODE (x
) != TRUNCATE
)
805 /* We must also handle the case where the switch table was passed a
806 constant value and so has been collapsed. In this case the first
807 label will have been deleted. In such a case it is OK to emit
808 nothing, since the table will not be used.
809 (cf gcc.c-torture/compile/990801-1.c). */
810 if (GET_CODE (x
) == MINUS
811 && GET_CODE (XEXP (x
, 0)) == LABEL_REF
)
813 rtx_code_label
*label
814 = dyn_cast
<rtx_code_label
*> (XEXP (XEXP (x
, 0), 0));
815 if (label
&& label
->deleted ())
/* Normal case: emit the (de-truncated) expression.  */
819 output_addr_const (file
, x
);
823 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
827 output_move_single (rtx
* operands
)
829 rtx dst
= operands
[0];
830 rtx src
= operands
[1];
837 else if (GET_CODE (src
) == CONST_INT
)
839 HOST_WIDE_INT value
= INTVAL (src
);
841 if (CONST_OK_FOR_J (value
)) /* Signed 5-bit immediate. */
844 else if (CONST_OK_FOR_K (value
)) /* Signed 16-bit immediate. */
845 return "movea %1,%.,%0";
847 else if (CONST_OK_FOR_L (value
)) /* Upper 16 bits were set. */
848 return "movhi hi0(%1),%.,%0";
850 /* A random constant. */
851 else if (TARGET_V850E_UP
)
854 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
857 else if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
859 HOST_WIDE_INT high
, low
;
861 const_double_split (src
, &high
, &low
);
863 if (CONST_OK_FOR_J (high
)) /* Signed 5-bit immediate. */
866 else if (CONST_OK_FOR_K (high
)) /* Signed 16-bit immediate. */
867 return "movea %F1,%.,%0";
869 else if (CONST_OK_FOR_L (high
)) /* Upper 16 bits were set. */
870 return "movhi hi0(%F1),%.,%0";
872 /* A random constant. */
873 else if (TARGET_V850E_UP
)
877 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
880 else if (GET_CODE (src
) == MEM
)
881 return "%S1ld%W1 %1,%0";
883 else if (special_symbolref_operand (src
, VOIDmode
))
884 return "movea %O1(%P1),%Q1,%0";
886 else if (GET_CODE (src
) == LABEL_REF
887 || GET_CODE (src
) == SYMBOL_REF
888 || GET_CODE (src
) == CONST
)
891 return "mov hilo(%1),%0";
893 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
896 else if (GET_CODE (src
) == HIGH
)
897 return "movhi hi(%1),%.,%0";
899 else if (GET_CODE (src
) == LO_SUM
)
901 operands
[2] = XEXP (src
, 0);
902 operands
[3] = XEXP (src
, 1);
903 return "movea lo(%3),%2,%0";
907 else if (GET_CODE (dst
) == MEM
)
910 return "%S0st%W0 %1,%0";
912 else if (GET_CODE (src
) == CONST_INT
&& INTVAL (src
) == 0)
913 return "%S0st%W0 %.,%0";
915 else if (GET_CODE (src
) == CONST_DOUBLE
916 && CONST0_RTX (GET_MODE (dst
)) == src
)
917 return "%S0st%W0 %.,%0";
920 fatal_insn ("output_move_single:", gen_rtx_SET (dst
, src
));
/* Select the condition-code mode for comparison COND on OP0/OP1.
   NOTE(review): fragment — return type, braces, the switch over COND
   (its case labels are missing) and the non-float fallback return are
   absent.  For floating-point operands each comparison gets its own
   CC_FPU_* mode.  Tokens kept byte-identical.  */
925 v850_select_cc_mode (enum rtx_code cond
, rtx op0
, rtx op1 ATTRIBUTE_UNUSED
)
927 if (GET_MODE_CLASS (GET_MODE (op0
)) == MODE_FLOAT
)
932 return CC_FPU_LEmode
;
934 return CC_FPU_GEmode
;
936 return CC_FPU_LTmode
;
938 return CC_FPU_GTmode
;
940 return CC_FPU_EQmode
;
942 return CC_FPU_NEmode
;
/* Emit the FPU compare insn for COND on OP0/OP1 and return the CC mode
   (via v850_select_cc_mode).  NOTE(review): fragment — return type,
   braces and the switch case labels (LE/GE/LT/GT/EQ/NE, inferred from
   the gen_* names) are missing.  Also note the SFmode test reads the
   global v850_compare_op0 rather than OP0 — preserved verbatim; flag
   for review against the full source.  */
951 v850_gen_float_compare (enum rtx_code cond
, machine_mode mode ATTRIBUTE_UNUSED
, rtx op0
, rtx op1
)
953 if (GET_MODE (op0
) == DFmode
)
958 emit_insn (gen_cmpdf_le_insn (op0
, op1
));
961 emit_insn (gen_cmpdf_ge_insn (op0
, op1
));
964 emit_insn (gen_cmpdf_lt_insn (op0
, op1
));
967 emit_insn (gen_cmpdf_gt_insn (op0
, op1
));
970 /* Note: There is no NE comparison operator. So we
971 perform an EQ comparison and invert the branch.
972 See v850_float_nz_comparison for how this is done. */
974 emit_insn (gen_cmpdf_eq_insn (op0
, op1
));
980 else if (GET_MODE (v850_compare_op0
) == SFmode
)
985 emit_insn (gen_cmpsf_le_insn(op0
, op1
));
988 emit_insn (gen_cmpsf_ge_insn(op0
, op1
));
991 emit_insn (gen_cmpsf_lt_insn(op0
, op1
));
994 emit_insn (gen_cmpsf_gt_insn(op0
, op1
));
997 /* Note: There is no NE comparison operator. So we
998 perform an EQ comparison and invert the branch.
999 See v850_float_nz_comparison for how this is done. */
1001 emit_insn (gen_cmpsf_eq_insn(op0
, op1
));
1010 return v850_select_cc_mode (cond
, op0
, op1
);
/* Emit a comparison of OP0/OP1 and return the COND test rtx against
   the resulting condition-code register.  NOTE(review): fragment —
   return type, braces and the declaration of `cc_reg` are missing.
   Integer path: plain cmpsi against CCmode CC_REGNUM.  Float path:
   run the FPU compare, then copy FCC_REGNUM into CC_REGNUM in the
   mode chosen by v850_gen_float_compare.  Tokens byte-identical.  */
1014 v850_gen_compare (enum rtx_code cond
, machine_mode mode
, rtx op0
, rtx op1
)
1016 if (GET_MODE_CLASS(GET_MODE (op0
)) != MODE_FLOAT
)
1018 emit_insn (gen_cmpsi_insn (op0
, op1
));
1019 return gen_rtx_fmt_ee (cond
, mode
, gen_rtx_REG(CCmode
, CC_REGNUM
), const0_rtx
);
1024 mode
= v850_gen_float_compare (cond
, mode
, op0
, op1
);
1025 cc_reg
= gen_rtx_REG (mode
, CC_REGNUM
);
1026 emit_insn (gen_rtx_SET (cc_reg
, gen_rtx_REG (mode
, FCC_REGNUM
)));
1028 return gen_rtx_fmt_ee (cond
, mode
, cc_reg
, const0_rtx
);
1032 /* Return maximum offset supported for a short EP memory reference of mode
1033 MODE and signedness UNSIGNEDP. */
/* NOTE(review): fragment — return type, braces, the switch over MODE
   (QImode/HImode/SImode case labels, inferred from the 1<<4 / 1<<5 /
   1<<8 progression) and parts of the else-if conditions are missing.
   TARGET_SMALL_SLD restricts the short-load displacement range.
   Tokens kept byte-identical.  */
1036 ep_memory_offset (machine_mode mode
, int unsignedp ATTRIBUTE_UNUSED
)
1043 if (TARGET_SMALL_SLD
)
1044 max_offset
= (1 << 4);
1045 else if ((TARGET_V850E_UP
)
1047 max_offset
= (1 << 4);
1049 max_offset
= (1 << 7);
1053 if (TARGET_SMALL_SLD
)
1054 max_offset
= (1 << 5);
1055 else if ((TARGET_V850E_UP
)
1057 max_offset
= (1 << 5);
1059 max_offset
= (1 << 8);
1064 max_offset
= (1 << 8);
1074 /* Return true if OP is a valid short EP memory reference */
/* NOTE(review): fragment — return type, braces, local declarations
   (max_offset, mask, addr, op0, op1), switch case labels and the
   `return` results of several tests are missing.  Visible logic:
   accept a MEM whose address is a TDA symbol, the EP register, or
   EP/TDA-symbol plus a small, aligned, non-negative constant.
   Tokens kept byte-identical.  */
1077 ep_memory_operand (rtx op
, machine_mode mode
, int unsigned_load
)
1083 /* If we are not using the EP register on a per-function basis
1084 then do not allow this optimization at all. This is to
1085 prevent the use of the SLD/SST instructions which cannot be
1086 guaranteed to work properly due to a hardware bug. */
1090 if (GET_CODE (op
) != MEM
)
1093 max_offset
= ep_memory_offset (mode
, unsigned_load
);
/* Offsets must be a multiple of the access size.  */
1095 mask
= GET_MODE_SIZE (mode
) - 1;
1097 addr
= XEXP (op
, 0);
1098 if (GET_CODE (addr
) == CONST
)
1099 addr
= XEXP (addr
, 0);
1101 switch (GET_CODE (addr
))
/* Bare symbol: only tiny-data symbols qualify.  */
1107 return SYMBOL_REF_TDA_P (addr
);
/* Bare register: must be the EP register itself.  */
1110 return REGNO (addr
) == EP_REGNUM
;
/* PLUS: base + small aligned constant offset.  */
1113 op0
= XEXP (addr
, 0);
1114 op1
= XEXP (addr
, 1);
1115 if (GET_CODE (op1
) == CONST_INT
1116 && INTVAL (op1
) < max_offset
1117 && INTVAL (op1
) >= 0
1118 && (INTVAL (op1
) & mask
) == 0)
1120 if (GET_CODE (op0
) == REG
&& REGNO (op0
) == EP_REGNUM
)
1123 if (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_TDA_P (op0
))
1132 /* Substitute memory references involving a pointer, to use the ep pointer,
1133 taking care to save and preserve the ep. */
1136 substitute_ep_register (rtx_insn
*first_insn
,
1137 rtx_insn
*last_insn
,
1143 rtx reg
= gen_rtx_REG (Pmode
, regno
);
1148 df_set_regs_ever_live (1, true);
1149 *p_r1
= gen_rtx_REG (Pmode
, 1);
1150 *p_ep
= gen_rtx_REG (Pmode
, 30);
1155 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1156 2 * (uses
- 3), uses
, reg_names
[regno
],
1157 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)),
1158 INSN_UID (first_insn
), INSN_UID (last_insn
));
1160 if (NOTE_P (first_insn
))
1161 first_insn
= next_nonnote_insn (first_insn
);
1163 last_insn
= next_nonnote_insn (last_insn
);
1164 for (insn
= first_insn
; insn
&& insn
!= last_insn
; insn
= NEXT_INSN (insn
))
1166 if (NONJUMP_INSN_P (insn
))
1168 rtx pattern
= single_set (insn
);
1170 /* Replace the memory references. */
1174 /* Memory operands are signed by default. */
1175 int unsignedp
= FALSE
;
1177 if (GET_CODE (SET_DEST (pattern
)) == MEM
1178 && GET_CODE (SET_SRC (pattern
)) == MEM
)
1181 else if (GET_CODE (SET_DEST (pattern
)) == MEM
)
1182 p_mem
= &SET_DEST (pattern
);
1184 else if (GET_CODE (SET_SRC (pattern
)) == MEM
)
1185 p_mem
= &SET_SRC (pattern
);
1187 else if (GET_CODE (SET_SRC (pattern
)) == SIGN_EXTEND
1188 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1189 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1191 else if (GET_CODE (SET_SRC (pattern
)) == ZERO_EXTEND
1192 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1194 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1202 rtx addr
= XEXP (*p_mem
, 0);
1204 if (GET_CODE (addr
) == REG
&& REGNO (addr
) == (unsigned) regno
)
1205 *p_mem
= change_address (*p_mem
, VOIDmode
, *p_ep
);
1207 else if (GET_CODE (addr
) == PLUS
1208 && GET_CODE (XEXP (addr
, 0)) == REG
1209 && REGNO (XEXP (addr
, 0)) == (unsigned) regno
1210 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1211 && ((INTVAL (XEXP (addr
, 1)))
1212 < ep_memory_offset (GET_MODE (*p_mem
),
1214 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1215 *p_mem
= change_address (*p_mem
, VOIDmode
,
1216 gen_rtx_PLUS (Pmode
,
1224 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1225 insn
= prev_nonnote_insn (first_insn
);
1226 if (insn
&& NONJUMP_INSN_P (insn
)
1227 && GET_CODE (PATTERN (insn
)) == SET
1228 && SET_DEST (PATTERN (insn
)) == *p_ep
1229 && SET_SRC (PATTERN (insn
)) == *p_r1
)
1232 emit_insn_before (gen_rtx_SET (*p_r1
, *p_ep
), first_insn
);
1234 emit_insn_before (gen_rtx_SET (*p_ep
, reg
), first_insn
);
1235 emit_insn_before (gen_rtx_SET (*p_ep
, *p_r1
), last_insn
);
1239 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1240 the -mep mode to copy heavily used pointers to ep to use the implicit
1249 rtx_insn
*first_insn
;
1250 rtx_insn
*last_insn
;
1252 regs
[FIRST_PSEUDO_REGISTER
];
1261 /* If not ep mode, just return now. */
1265 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1268 regs
[i
].first_insn
= NULL
;
1269 regs
[i
].last_insn
= NULL
;
1272 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1274 switch (GET_CODE (insn
))
1276 /* End of basic block */
1283 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1285 if (max_uses
< regs
[i
].uses
)
1287 max_uses
= regs
[i
].uses
;
1293 substitute_ep_register (regs
[max_regno
].first_insn
,
1294 regs
[max_regno
].last_insn
,
1295 max_uses
, max_regno
, &r1
, &ep
);
1299 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1302 regs
[i
].first_insn
= NULL
;
1303 regs
[i
].last_insn
= NULL
;
1311 pattern
= single_set (insn
);
1313 /* See if there are any memory references we can shorten. */
1316 rtx src
= SET_SRC (pattern
);
1317 rtx dest
= SET_DEST (pattern
);
1319 /* Memory operands are signed by default. */
1320 int unsignedp
= FALSE
;
1322 /* We might have (SUBREG (MEM)) here, so just get rid of the
1323 subregs to make this code simpler. */
1324 if (GET_CODE (dest
) == SUBREG
1325 && (GET_CODE (SUBREG_REG (dest
)) == MEM
1326 || GET_CODE (SUBREG_REG (dest
)) == REG
))
1327 alter_subreg (&dest
, false);
1328 if (GET_CODE (src
) == SUBREG
1329 && (GET_CODE (SUBREG_REG (src
)) == MEM
1330 || GET_CODE (SUBREG_REG (src
)) == REG
))
1331 alter_subreg (&src
, false);
1333 if (GET_CODE (dest
) == MEM
&& GET_CODE (src
) == MEM
)
1336 else if (GET_CODE (dest
) == MEM
)
1339 else if (GET_CODE (src
) == MEM
)
1342 else if (GET_CODE (src
) == SIGN_EXTEND
1343 && GET_CODE (XEXP (src
, 0)) == MEM
)
1344 mem
= XEXP (src
, 0);
1346 else if (GET_CODE (src
) == ZERO_EXTEND
1347 && GET_CODE (XEXP (src
, 0)) == MEM
)
1349 mem
= XEXP (src
, 0);
1355 if (mem
&& ep_memory_operand (mem
, GET_MODE (mem
), unsignedp
))
1358 else if (!use_ep
&& mem
1359 && GET_MODE_SIZE (GET_MODE (mem
)) <= UNITS_PER_WORD
)
1361 rtx addr
= XEXP (mem
, 0);
1365 if (GET_CODE (addr
) == REG
)
1368 regno
= REGNO (addr
);
1371 else if (GET_CODE (addr
) == PLUS
1372 && GET_CODE (XEXP (addr
, 0)) == REG
1373 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1374 && ((INTVAL (XEXP (addr
, 1)))
1375 < ep_memory_offset (GET_MODE (mem
), unsignedp
))
1376 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1379 regno
= REGNO (XEXP (addr
, 0));
1388 regs
[regno
].last_insn
= insn
;
1389 if (!regs
[regno
].first_insn
)
1390 regs
[regno
].first_insn
= insn
;
1394 /* Loading up a register in the basic block zaps any savings
1396 if (GET_CODE (dest
) == REG
)
1398 machine_mode mode
= GET_MODE (dest
);
1402 regno
= REGNO (dest
);
1403 endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
1407 /* See if we can use the pointer before this
1412 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1414 if (max_uses
< regs
[i
].uses
)
1416 max_uses
= regs
[i
].uses
;
1422 && max_regno
>= regno
1423 && max_regno
< endregno
)
1425 substitute_ep_register (regs
[max_regno
].first_insn
,
1426 regs
[max_regno
].last_insn
,
1427 max_uses
, max_regno
, &r1
,
1430 /* Since we made a substitution, zap all remembered
1432 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1435 regs
[i
].first_insn
= NULL
;
1436 regs
[i
].last_insn
= NULL
;
1441 for (i
= regno
; i
< endregno
; i
++)
1444 regs
[i
].first_insn
= NULL
;
1445 regs
[i
].last_insn
= NULL
;
1453 /* # of registers saved by the interrupt handler. */
1454 #define INTERRUPT_FIXED_NUM 5
1456 /* # of bytes for registers saved by the interrupt handler. */
1457 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1459 /* # of words saved for other registers. */
1460 #define INTERRUPT_ALL_SAVE_NUM \
1461 (30 - INTERRUPT_FIXED_NUM)
/* Byte size of the "save everything" interrupt frame (4 bytes/reg).  */
1463 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1466 compute_register_save_size (long * p_reg_saved
)
1470 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1471 int call_p
= df_regs_ever_live_p (LINK_POINTER_REGNUM
);
1474 /* Count space for the register saves. */
1475 if (interrupt_handler
)
1477 for (i
= 0; i
<= 31; i
++)
1481 if (df_regs_ever_live_p (i
) || call_p
)
1484 reg_saved
|= 1L << i
;
1488 /* We don't save/restore r0 or the stack pointer */
1490 case STACK_POINTER_REGNUM
:
1493 /* For registers with fixed use, we save them, set them to the
1494 appropriate value, and then restore them.
1495 These registers are handled specially, so don't list them
1496 on the list of registers to save in the prologue. */
1497 case 1: /* temp used to hold ep */
1499 case 10: /* temp used to call interrupt save/restore */
1500 case 11: /* temp used to call interrupt save/restore (long call) */
1501 case EP_REGNUM
: /* ep */
1508 /* Find the first register that needs to be saved. */
1509 for (i
= 0; i
<= 31; i
++)
1510 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1511 || i
== LINK_POINTER_REGNUM
))
1514 /* If it is possible that an out-of-line helper function might be
1515 used to generate the prologue for the current function, then we
1516 need to cover the possibility that such a helper function will
1517 be used, despite the fact that there might be gaps in the list of
1518 registers that need to be saved. To detect this we note that the
1519 helper functions always push at least register r29 (provided
1520 that the function is not an interrupt handler). */
1522 if (TARGET_PROLOG_FUNCTION
1523 && (i
== 2 || ((i
>= 20) && (i
< 30))))
1528 reg_saved
|= 1L << i
;
1533 /* Helper functions save all registers between the starting
1534 register and the last register, regardless of whether they
1535 are actually used by the function or not. */
1536 for (; i
<= 29; i
++)
1539 reg_saved
|= 1L << i
;
1542 if (df_regs_ever_live_p (LINK_POINTER_REGNUM
))
1545 reg_saved
|= 1L << LINK_POINTER_REGNUM
;
1550 for (; i
<= 31; i
++)
1551 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1552 || i
== LINK_POINTER_REGNUM
))
1555 reg_saved
|= 1L << i
;
1561 *p_reg_saved
= reg_saved
;
1566 /* Typical stack layout should looks like this after the function's prologue:
1571 | | arguments saved | Increasing
1572 | | on the stack | addresses
1573 PARENT arg pointer -> | | /
1574 -------------------------- ---- -------------------
1575 | | - space for argument split between regs & stack
1577 CHILD | | \ <-- (return address here)
1582 frame pointer -> | | \ ___
1589 | | arguments | | Decreasing
1590 (hard) frame pointer | | / | | addresses
1591 and stack pointer -> | | / _|_ |
1592 -------------------------- ---- ------------------ V */
/* Total frame size = local SIZE + saved registers + outgoing args.
   NOTE(review): fragment — return type, braces and the start of the
   return expression (presumably `return (size ...` on orig line
   1596-1597) are missing.  *P_REG_SAVED receives the saved-register
   mask via compute_register_save_size.  */
1595 compute_frame_size (int size
, long * p_reg_saved
)
1598 + compute_register_save_size (p_reg_saved
)
1599 + crtl
->outgoing_args_size
);
/* Decide whether calling an out-of-line prologue/restore helper is
   smaller than saving NUM_SAVE registers inline with FRAME_SIZE bytes
   of stack.  Compares estimated byte lengths of the two sequences.
   NOTE(review): fragment — return type, braces and some guard
   conditions are missing; tokens kept byte-identical.  */
1603 use_prolog_function (int num_save
, int frame_size
)
1605 int alloc_stack
= (4 * num_save
);
1606 int unalloc_stack
= frame_size
- alloc_stack
;
1607 int save_func_len
, restore_func_len
;
1608 int save_normal_len
, restore_normal_len
;
/* callt-based helpers are 2 bytes; otherwise a (possibly long) call.  */
1610 if (! TARGET_DISABLE_CALLT
)
1611 save_func_len
= restore_func_len
= 2;
1613 save_func_len
= restore_func_len
= TARGET_LONG_CALLS
? (4+4+4+2+2) : 4;
/* Extra add needed if the helper does not allocate the whole frame.  */
1617 save_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1618 restore_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1621 /* See if we would have used ep to save the stack. */
1622 if (TARGET_EP
&& num_save
> 3 && (unsigned)frame_size
< 255)
1623 save_normal_len
= restore_normal_len
= (3 * 2) + (2 * num_save
);
1625 save_normal_len
= restore_normal_len
= 4 * num_save
;
1627 save_normal_len
+= CONST_OK_FOR_J (-frame_size
) ? 2 : 4;
1628 restore_normal_len
+= (CONST_OK_FOR_J (frame_size
) ? 2 : 4) + 2;
1630 /* Don't bother checking if we don't actually save any space.
1631 This happens for instance if one register is saved and additional
1632 stack space is allocated. */
1633 return ((save_func_len
+ restore_func_len
) < (save_normal_len
+ restore_normal_len
));
/* Adjust the stack pointer by AMOUNT (signed; negative allocates).
   NOTE(review): fragment — return type, braces, the declaration of
   `inc`, and whatever IN_PROLOGUE controls (presumably frame-related
   marking of the emitted insns — TODO confirm) are missing.  When
   AMOUNT does not fit the K constraint it is first loaded into r12.
   Tokens kept byte-identical.  */
1637 increment_stack (signed int amount
, bool in_prologue
)
1644 inc
= GEN_INT (amount
);
1646 if (! CONST_OK_FOR_K (amount
))
1648 rtx reg
= gen_rtx_REG (Pmode
, 12);
1650 inc
= emit_move_insn (reg
, inc
);
1656 inc
= emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
, inc
));
1662 expand_prologue (void)
1665 unsigned int size
= get_frame_size ();
1666 unsigned int actual_fsize
;
1667 unsigned int init_stack_alloc
= 0;
1670 unsigned int num_save
;
1672 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1675 actual_fsize
= compute_frame_size (size
, ®_saved
);
1677 if (flag_stack_usage_info
)
1678 current_function_static_stack_size
= actual_fsize
;
1680 /* Save/setup global registers for interrupt functions right now. */
1681 if (interrupt_handler
)
1683 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1684 emit_insn (gen_callt_save_interrupt ());
1686 emit_insn (gen_save_interrupt ());
1688 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1690 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1691 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1693 /* Interrupt functions are not passed arguments, so no need to
1694 allocate space for split structure arguments. */
1695 gcc_assert (crtl
->args
.pretend_args_size
== 0);
1698 /* Identify all of the saved registers. */
1700 for (i
= 1; i
< 32; i
++)
1702 if (((1L << i
) & reg_saved
) != 0)
1703 save_regs
[num_save
++] = gen_rtx_REG (Pmode
, i
);
1706 if (crtl
->args
.pretend_args_size
)
1710 increment_stack (- (actual_fsize
+ crtl
->args
.pretend_args_size
), true);
1714 increment_stack (- crtl
->args
.pretend_args_size
, true);
1717 /* See if we have an insn that allocates stack space and saves the particular
1718 registers we want to. Note that the helpers won't
1719 allocate additional space for registers GCC saves to complete a
1720 "split" structure argument. */
1721 save_all
= NULL_RTX
;
1722 if (TARGET_PROLOG_FUNCTION
1723 && !crtl
->args
.pretend_args_size
1726 if (use_prolog_function (num_save
, actual_fsize
))
1728 int alloc_stack
= 4 * num_save
;
1731 save_all
= gen_rtx_PARALLEL
1733 rtvec_alloc (num_save
+ 1
1734 + (TARGET_DISABLE_CALLT
? (TARGET_LONG_CALLS
? 2 : 1) : 0)));
1736 XVECEXP (save_all
, 0, 0)
1737 = gen_rtx_SET (stack_pointer_rtx
,
1738 gen_rtx_PLUS (Pmode
,
1740 GEN_INT(-alloc_stack
)));
1741 for (i
= 0; i
< num_save
; i
++)
1744 XVECEXP (save_all
, 0, i
+1)
1745 = gen_rtx_SET (gen_rtx_MEM (Pmode
,
1746 gen_rtx_PLUS (Pmode
,
1752 if (TARGET_DISABLE_CALLT
)
1754 XVECEXP (save_all
, 0, num_save
+ 1)
1755 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 10));
1757 if (TARGET_LONG_CALLS
)
1758 XVECEXP (save_all
, 0, num_save
+ 2)
1759 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
1762 v850_all_frame_related (save_all
);
1764 code
= recog (save_all
, NULL_RTX
, NULL
);
1767 rtx insn
= emit_insn (save_all
);
1768 INSN_CODE (insn
) = code
;
1769 actual_fsize
-= alloc_stack
;
1773 save_all
= NULL_RTX
;
1777 /* If no prolog save function is available, store the registers the old
1778 fashioned way (one by one). */
1781 /* Special case interrupt functions that save all registers for a call. */
1782 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1784 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1785 emit_insn (gen_callt_save_all_interrupt ());
1787 emit_insn (gen_save_all_interrupt ());
1792 /* If the stack is too big, allocate it in chunks so we can do the
1793 register saves. We use the register save size so we use the ep
1795 if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1796 init_stack_alloc
= compute_register_save_size (NULL
);
1798 init_stack_alloc
= actual_fsize
;
1800 /* Save registers at the beginning of the stack frame. */
1801 offset
= init_stack_alloc
- 4;
1803 if (init_stack_alloc
)
1804 increment_stack (- (signed) init_stack_alloc
, true);
1806 /* Save the return pointer first. */
1807 if (num_save
> 0 && REGNO (save_regs
[num_save
-1]) == LINK_POINTER_REGNUM
)
1809 F (emit_move_insn (gen_rtx_MEM (SImode
,
1810 plus_constant (Pmode
,
1813 save_regs
[--num_save
]));
1817 for (i
= 0; i
< num_save
; i
++)
1819 F (emit_move_insn (gen_rtx_MEM (SImode
,
1820 plus_constant (Pmode
,
1829 /* Allocate the rest of the stack that was not allocated above (either it is
1830 > 32K or we just called a function to save the registers and needed more
1832 if (actual_fsize
> init_stack_alloc
)
1833 increment_stack (init_stack_alloc
- actual_fsize
, true);
1835 /* If we need a frame pointer, set it up now. */
1836 if (frame_pointer_needed
)
1837 F (emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
));
1842 expand_epilogue (void)
1845 unsigned int size
= get_frame_size ();
1847 int actual_fsize
= compute_frame_size (size
, ®_saved
);
1848 rtx restore_regs
[32];
1850 unsigned int num_restore
;
1852 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1854 /* Eliminate the initial stack stored by interrupt functions. */
1855 if (interrupt_handler
)
1857 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1858 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1859 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1862 /* Cut off any dynamic stack created. */
1863 if (frame_pointer_needed
)
1864 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1866 /* Identify all of the saved registers. */
1868 for (i
= 1; i
< 32; i
++)
1870 if (((1L << i
) & reg_saved
) != 0)
1871 restore_regs
[num_restore
++] = gen_rtx_REG (Pmode
, i
);
1874 /* See if we have an insn that restores the particular registers we
1876 restore_all
= NULL_RTX
;
1878 if (TARGET_PROLOG_FUNCTION
1880 && !crtl
->args
.pretend_args_size
1881 && !interrupt_handler
)
1883 int alloc_stack
= (4 * num_restore
);
1885 /* Don't bother checking if we don't actually save any space. */
1886 if (use_prolog_function (num_restore
, actual_fsize
))
1889 restore_all
= gen_rtx_PARALLEL (VOIDmode
,
1890 rtvec_alloc (num_restore
+ 2));
1891 XVECEXP (restore_all
, 0, 0) = ret_rtx
;
1892 XVECEXP (restore_all
, 0, 1)
1893 = gen_rtx_SET (stack_pointer_rtx
,
1894 gen_rtx_PLUS (Pmode
,
1896 GEN_INT (alloc_stack
)));
1898 offset
= alloc_stack
- 4;
1899 for (i
= 0; i
< num_restore
; i
++)
1901 XVECEXP (restore_all
, 0, i
+2)
1902 = gen_rtx_SET (restore_regs
[i
],
1904 gen_rtx_PLUS (Pmode
,
1910 code
= recog (restore_all
, NULL_RTX
, NULL
);
1916 actual_fsize
-= alloc_stack
;
1917 increment_stack (actual_fsize
, false);
1919 insn
= emit_jump_insn (restore_all
);
1920 INSN_CODE (insn
) = code
;
1923 restore_all
= NULL_RTX
;
1927 /* If no epilogue save function is available, restore the registers the
1928 old fashioned way (one by one). */
1931 unsigned int init_stack_free
;
1933 /* If the stack is large, we need to cut it down in 2 pieces. */
1934 if (interrupt_handler
)
1935 init_stack_free
= 0;
1936 else if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1937 init_stack_free
= 4 * num_restore
;
1939 init_stack_free
= (signed) actual_fsize
;
1941 /* Deallocate the rest of the stack if it is > 32K. */
1942 if ((unsigned int) actual_fsize
> init_stack_free
)
1943 increment_stack (actual_fsize
- init_stack_free
, false);
1945 /* Special case interrupt functions that save all registers
1947 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1949 if (! TARGET_DISABLE_CALLT
)
1950 emit_insn (gen_callt_restore_all_interrupt ());
1952 emit_insn (gen_restore_all_interrupt ());
1956 /* Restore registers from the beginning of the stack frame. */
1957 int offset
= init_stack_free
- 4;
1959 /* Restore the return pointer first. */
1961 && REGNO (restore_regs
[num_restore
- 1]) == LINK_POINTER_REGNUM
)
1963 emit_move_insn (restore_regs
[--num_restore
],
1964 gen_rtx_MEM (SImode
,
1965 plus_constant (Pmode
,
1971 for (i
= 0; i
< num_restore
; i
++)
1973 emit_move_insn (restore_regs
[i
],
1974 gen_rtx_MEM (SImode
,
1975 plus_constant (Pmode
,
1979 emit_use (restore_regs
[i
]);
1983 /* Cut back the remainder of the stack. */
1984 increment_stack (init_stack_free
+ crtl
->args
.pretend_args_size
,
1988 /* And return or use reti for interrupt handlers. */
1989 if (interrupt_handler
)
1991 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1992 emit_insn (gen_callt_return_interrupt ());
1994 emit_jump_insn (gen_return_interrupt ());
1996 else if (actual_fsize
)
1997 emit_jump_insn (gen_return_internal ());
1999 emit_jump_insn (gen_return_simple ());
2002 v850_interrupt_cache_p
= FALSE
;
2003 v850_interrupt_p
= FALSE
;
2006 /* Update the condition code from the insn. */
2008 notice_update_cc (rtx body
, rtx_insn
*insn
)
2010 switch (get_attr_cc (insn
))
2013 /* Insn does not affect CC at all. */
2017 /* Insn does not change CC, but the 0'th operand has been changed. */
2018 if (cc_status
.value1
!= 0
2019 && reg_overlap_mentioned_p (recog_data
.operand
[0], cc_status
.value1
))
2020 cc_status
.value1
= 0;
2024 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2025 V,C is in an unusable state. */
2027 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
| CC_NO_CARRY
;
2028 cc_status
.value1
= recog_data
.operand
[0];
2032 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2033 C is in an unusable state. */
2035 cc_status
.flags
|= CC_NO_CARRY
;
2036 cc_status
.value1
= recog_data
.operand
[0];
2040 /* The insn is a compare instruction. */
2042 cc_status
.value1
= SET_SRC (body
);
2046 /* Insn doesn't leave CC in a usable state. */
2055 /* Retrieve the data area that has been chosen for the given decl. */
2058 v850_get_data_area (tree decl
)
2060 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2061 return DATA_AREA_SDA
;
2063 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2064 return DATA_AREA_TDA
;
2066 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2067 return DATA_AREA_ZDA
;
2069 return DATA_AREA_NORMAL
;
2072 /* Store the indicated data area in the decl's attributes. */
2075 v850_set_data_area (tree decl
, v850_data_area data_area
)
2081 case DATA_AREA_SDA
: name
= get_identifier ("sda"); break;
2082 case DATA_AREA_TDA
: name
= get_identifier ("tda"); break;
2083 case DATA_AREA_ZDA
: name
= get_identifier ("zda"); break;
2088 DECL_ATTRIBUTES (decl
) = tree_cons
2089 (name
, NULL
, DECL_ATTRIBUTES (decl
));
2092 /* Handle an "interrupt" attribute; arguments as in
2093 struct attribute_spec.handler. */
2095 v850_handle_interrupt_attribute (tree
* node
,
2097 tree args ATTRIBUTE_UNUSED
,
2098 int flags ATTRIBUTE_UNUSED
,
2099 bool * no_add_attrs
)
2101 if (TREE_CODE (*node
) != FUNCTION_DECL
)
2103 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2105 *no_add_attrs
= true;
2111 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2112 struct attribute_spec.handler. */
2114 v850_handle_data_area_attribute (tree
* node
,
2116 tree args ATTRIBUTE_UNUSED
,
2117 int flags ATTRIBUTE_UNUSED
,
2118 bool * no_add_attrs
)
2120 v850_data_area data_area
;
2121 v850_data_area area
;
2124 /* Implement data area attribute. */
2125 if (is_attribute_p ("sda", name
))
2126 data_area
= DATA_AREA_SDA
;
2127 else if (is_attribute_p ("tda", name
))
2128 data_area
= DATA_AREA_TDA
;
2129 else if (is_attribute_p ("zda", name
))
2130 data_area
= DATA_AREA_ZDA
;
2134 switch (TREE_CODE (decl
))
2137 if (current_function_decl
!= NULL_TREE
)
2139 error_at (DECL_SOURCE_LOCATION (decl
),
2140 "data area attributes cannot be specified for "
2142 *no_add_attrs
= true;
2148 area
= v850_get_data_area (decl
);
2149 if (area
!= DATA_AREA_NORMAL
&& data_area
!= area
)
2151 error ("data area of %q+D conflicts with previous declaration",
2153 *no_add_attrs
= true;
2165 /* Return nonzero if FUNC is an interrupt function as specified
2166 by the "interrupt" attribute. */
2169 v850_interrupt_function_p (tree func
)
2174 if (v850_interrupt_cache_p
)
2175 return v850_interrupt_p
;
2177 if (TREE_CODE (func
) != FUNCTION_DECL
)
2180 a
= lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func
));
2186 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
2187 ret
= a
!= NULL_TREE
;
2190 /* Its not safe to trust global variables until after function inlining has
2192 if (reload_completed
| reload_in_progress
)
2193 v850_interrupt_p
= ret
;
2200 v850_encode_data_area (tree decl
, rtx symbol
)
2204 /* Map explicit sections into the appropriate attribute */
2205 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2207 if (DECL_SECTION_NAME (decl
))
2209 const char *name
= DECL_SECTION_NAME (decl
);
2211 if (streq (name
, ".zdata") || streq (name
, ".zbss"))
2212 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2214 else if (streq (name
, ".sdata") || streq (name
, ".sbss"))
2215 v850_set_data_area (decl
, DATA_AREA_SDA
);
2217 else if (streq (name
, ".tdata"))
2218 v850_set_data_area (decl
, DATA_AREA_TDA
);
2221 /* If no attribute, support -m{zda,sda,tda}=n */
2224 int size
= int_size_in_bytes (TREE_TYPE (decl
));
2228 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_TDA
])
2229 v850_set_data_area (decl
, DATA_AREA_TDA
);
2231 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_SDA
])
2232 v850_set_data_area (decl
, DATA_AREA_SDA
);
2234 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_ZDA
])
2235 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2238 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2242 flags
= SYMBOL_REF_FLAGS (symbol
);
2243 switch (v850_get_data_area (decl
))
2245 case DATA_AREA_ZDA
: flags
|= SYMBOL_FLAG_ZDA
; break;
2246 case DATA_AREA_TDA
: flags
|= SYMBOL_FLAG_TDA
; break;
2247 case DATA_AREA_SDA
: flags
|= SYMBOL_FLAG_SDA
; break;
2248 default: gcc_unreachable ();
2250 SYMBOL_REF_FLAGS (symbol
) = flags
;
2254 v850_encode_section_info (tree decl
, rtx rtl
, int first
)
2256 default_encode_section_info (decl
, rtl
, first
);
2258 if (TREE_CODE (decl
) == VAR_DECL
2259 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2260 v850_encode_data_area (decl
, XEXP (rtl
, 0));
2263 /* Construct a JR instruction to a routine that will perform the equivalent of
2264 the RTL passed in as an argument. This RTL is a function epilogue that
2265 pops registers off the stack and possibly releases some extra stack space
2266 as well. The code has already verified that the RTL matches these
2270 construct_restore_jr (rtx op
)
2272 int count
= XVECLEN (op
, 0);
2274 unsigned long int mask
;
2275 unsigned long int first
;
2276 unsigned long int last
;
2278 static char buff
[100]; /* XXX */
2282 error ("bogus JR construction: %d", count
);
2286 /* Work out how many bytes to pop off the stack before retrieving
2288 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2289 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2290 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2292 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2294 /* Each pop will remove 4 bytes from the stack.... */
2295 stack_bytes
-= (count
- 2) * 4;
2297 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2298 if (stack_bytes
!= 0)
2300 error ("bad amount of stack space removal: %d", stack_bytes
);
2304 /* Now compute the bit mask of registers to push. */
2306 for (i
= 2; i
< count
; i
++)
2308 rtx vector_element
= XVECEXP (op
, 0, i
);
2310 gcc_assert (GET_CODE (vector_element
) == SET
);
2311 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2312 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2315 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2318 /* Scan for the first register to pop. */
2319 for (first
= 0; first
< 32; first
++)
2321 if (mask
& (1 << first
))
2325 gcc_assert (first
< 32);
2327 /* Discover the last register to pop. */
2328 if (mask
& (1 << LINK_POINTER_REGNUM
))
2330 last
= LINK_POINTER_REGNUM
;
2334 gcc_assert (!stack_bytes
);
2335 gcc_assert (mask
& (1 << 29));
2340 /* Note, it is possible to have gaps in the register mask.
2341 We ignore this here, and generate a JR anyway. We will
2342 be popping more registers than is strictly necessary, but
2343 it does save code space. */
2345 if (TARGET_LONG_CALLS
)
2350 sprintf (name
, "__return_%s", reg_names
[first
]);
2352 sprintf (name
, "__return_%s_%s", reg_names
[first
], reg_names
[last
]);
2354 sprintf (buff
, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2360 sprintf (buff
, "jr __return_%s", reg_names
[first
]);
2362 sprintf (buff
, "jr __return_%s_%s", reg_names
[first
], reg_names
[last
]);
2369 /* Construct a JARL instruction to a routine that will perform the equivalent
2370 of the RTL passed as a parameter. This RTL is a function prologue that
2371 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2372 some stack space as well. The code has already verified that the RTL
2373 matches these requirements. */
2375 construct_save_jarl (rtx op
)
2377 int count
= XVECLEN (op
, 0);
2379 unsigned long int mask
;
2380 unsigned long int first
;
2381 unsigned long int last
;
2383 static char buff
[100]; /* XXX */
2385 if (count
<= (TARGET_LONG_CALLS
? 3 : 2))
2387 error ("bogus JARL construction: %d", count
);
2392 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2393 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2394 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0)) == REG
);
2395 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2397 /* Work out how many bytes to push onto the stack after storing the
2399 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2401 /* Each push will put 4 bytes from the stack.... */
2402 stack_bytes
+= (count
- (TARGET_LONG_CALLS
? 3 : 2)) * 4;
2404 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2405 if (stack_bytes
!= 0)
2407 error ("bad amount of stack space removal: %d", stack_bytes
);
2411 /* Now compute the bit mask of registers to push. */
2413 for (i
= 1; i
< count
- (TARGET_LONG_CALLS
? 2 : 1); i
++)
2415 rtx vector_element
= XVECEXP (op
, 0, i
);
2417 gcc_assert (GET_CODE (vector_element
) == SET
);
2418 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2419 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2422 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2425 /* Scan for the first register to push. */
2426 for (first
= 0; first
< 32; first
++)
2428 if (mask
& (1 << first
))
2432 gcc_assert (first
< 32);
2434 /* Discover the last register to push. */
2435 if (mask
& (1 << LINK_POINTER_REGNUM
))
2437 last
= LINK_POINTER_REGNUM
;
2441 gcc_assert (!stack_bytes
);
2442 gcc_assert (mask
& (1 << 29));
2447 /* Note, it is possible to have gaps in the register mask.
2448 We ignore this here, and generate a JARL anyway. We will
2449 be pushing more registers than is strictly necessary, but
2450 it does save code space. */
2452 if (TARGET_LONG_CALLS
)
2457 sprintf (name
, "__save_%s", reg_names
[first
]);
2459 sprintf (name
, "__save_%s_%s", reg_names
[first
], reg_names
[last
]);
2461 if (TARGET_V850E3V5_UP
)
2462 sprintf (buff
, "mov hilo(%s), r11\n\tjarl [r11], r10", name
);
2464 sprintf (buff
, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2470 sprintf (buff
, "jarl __save_%s, r10", reg_names
[first
]);
2472 sprintf (buff
, "jarl __save_%s_%s, r10", reg_names
[first
],
2479 /* A version of asm_output_aligned_bss() that copes with the special
2480 data areas of the v850. */
2482 v850_output_aligned_bss (FILE * file
,
2485 unsigned HOST_WIDE_INT size
,
2488 switch (v850_get_data_area (decl
))
2491 switch_to_section (zbss_section
);
2495 switch_to_section (sbss_section
);
2499 switch_to_section (tdata_section
);
2502 switch_to_section (bss_section
);
2506 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
2507 #ifdef ASM_DECLARE_OBJECT_NAME
2508 last_assemble_variable_decl
= decl
;
2509 ASM_DECLARE_OBJECT_NAME (file
, name
, decl
);
2511 /* Standard thing is just output label for the object. */
2512 ASM_OUTPUT_LABEL (file
, name
);
2513 #endif /* ASM_DECLARE_OBJECT_NAME */
2514 ASM_OUTPUT_SKIP (file
, size
? size
: 1);
2517 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2519 v850_output_common (FILE * file
,
2525 if (decl
== NULL_TREE
)
2527 fprintf (file
, "%s", COMMON_ASM_OP
);
2531 switch (v850_get_data_area (decl
))
2534 fprintf (file
, "%s", ZCOMMON_ASM_OP
);
2538 fprintf (file
, "%s", SCOMMON_ASM_OP
);
2542 fprintf (file
, "%s", TCOMMON_ASM_OP
);
2546 fprintf (file
, "%s", COMMON_ASM_OP
);
2551 assemble_name (file
, name
);
2552 fprintf (file
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
2555 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2557 v850_output_local (FILE * file
,
2563 fprintf (file
, "%s", LOCAL_ASM_OP
);
2564 assemble_name (file
, name
);
2565 fprintf (file
, "\n");
2567 ASM_OUTPUT_ALIGNED_DECL_COMMON (file
, decl
, name
, size
, align
);
2570 /* Add data area to the given declaration if a ghs data area pragma is
2571 currently in effect (#pragma ghs startXXX/endXXX). */
2573 v850_insert_attributes (tree decl
, tree
* attr_ptr ATTRIBUTE_UNUSED
)
2576 && data_area_stack
->data_area
2577 && current_function_decl
== NULL_TREE
2578 && (TREE_CODE (decl
) == VAR_DECL
|| TREE_CODE (decl
) == CONST_DECL
)
2579 && v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2580 v850_set_data_area (decl
, data_area_stack
->data_area
);
2582 /* Initialize the default names of the v850 specific sections,
2583 if this has not been done before. */
2585 if (GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
] == NULL
)
2587 GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
]
2590 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROSDATA
]
2593 GHS_default_section_names
[(int) GHS_SECTION_KIND_TDATA
]
2596 GHS_default_section_names
[(int) GHS_SECTION_KIND_ZDATA
]
2599 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROZDATA
]
2603 if (current_function_decl
== NULL_TREE
2604 && (TREE_CODE (decl
) == VAR_DECL
2605 || TREE_CODE (decl
) == CONST_DECL
2606 || TREE_CODE (decl
) == FUNCTION_DECL
)
2607 && (!DECL_EXTERNAL (decl
) || DECL_INITIAL (decl
))
2608 && !DECL_SECTION_NAME (decl
))
2610 enum GHS_section_kind kind
= GHS_SECTION_KIND_DEFAULT
;
2611 const char * chosen_section
;
2613 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2614 kind
= GHS_SECTION_KIND_TEXT
;
2617 /* First choose a section kind based on the data area of the decl. */
2618 switch (v850_get_data_area (decl
))
2624 kind
= ((TREE_READONLY (decl
))
2625 ? GHS_SECTION_KIND_ROSDATA
2626 : GHS_SECTION_KIND_SDATA
);
2630 kind
= GHS_SECTION_KIND_TDATA
;
2634 kind
= ((TREE_READONLY (decl
))
2635 ? GHS_SECTION_KIND_ROZDATA
2636 : GHS_SECTION_KIND_ZDATA
);
2639 case DATA_AREA_NORMAL
: /* default data area */
2640 if (TREE_READONLY (decl
))
2641 kind
= GHS_SECTION_KIND_RODATA
;
2642 else if (DECL_INITIAL (decl
))
2643 kind
= GHS_SECTION_KIND_DATA
;
2645 kind
= GHS_SECTION_KIND_BSS
;
2649 /* Now, if the section kind has been explicitly renamed,
2650 then attach a section attribute. */
2651 chosen_section
= GHS_current_section_names
[(int) kind
];
2653 /* Otherwise, if this kind of section needs an explicit section
2654 attribute, then also attach one. */
2655 if (chosen_section
== NULL
)
2656 chosen_section
= GHS_default_section_names
[(int) kind
];
2660 /* Only set the section name if specified by a pragma, because
2661 otherwise it will force those variables to get allocated storage
2662 in this module, rather than by the linker. */
2663 set_decl_section_name (decl
, chosen_section
);
2668 /* Construct a DISPOSE instruction that is the equivalent of
2669 the given RTX. We have already verified that this should
2673 construct_dispose_instruction (rtx op
)
2675 int count
= XVECLEN (op
, 0);
2677 unsigned long int mask
;
2679 static char buff
[ 100 ]; /* XXX */
2684 error ("bogus DISPOSE construction: %d", count
);
2688 /* Work out how many bytes to pop off the
2689 stack before retrieving registers. */
2690 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2691 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2692 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2694 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2696 /* Each pop will remove 4 bytes from the stack.... */
2697 stack_bytes
-= (count
- 2) * 4;
2699 /* Make sure that the amount we are popping
2700 will fit into the DISPOSE instruction. */
2701 if (stack_bytes
> 128)
2703 error ("too much stack space to dispose of: %d", stack_bytes
);
2707 /* Now compute the bit mask of registers to push. */
2710 for (i
= 2; i
< count
; i
++)
2712 rtx vector_element
= XVECEXP (op
, 0, i
);
2714 gcc_assert (GET_CODE (vector_element
) == SET
);
2715 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2716 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2719 if (REGNO (SET_DEST (vector_element
)) == 2)
2722 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2725 if (! TARGET_DISABLE_CALLT
2726 && (use_callt
|| stack_bytes
== 0))
2730 sprintf (buff
, "callt ctoff(__callt_return_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29);
2735 for (i
= 20; i
< 32; i
++)
2736 if (mask
& (1 << i
))
2740 sprintf (buff
, "callt ctoff(__callt_return_r31c)");
2742 sprintf (buff
, "callt ctoff(__callt_return_r%d_r%s)",
2743 i
, (mask
& (1 << 31)) ? "31c" : "29");
2748 static char regs
[100]; /* XXX */
2751 /* Generate the DISPOSE instruction. Note we could just issue the
2752 bit mask as a number as the assembler can cope with this, but for
2753 the sake of our readers we turn it into a textual description. */
2757 for (i
= 20; i
< 32; i
++)
2759 if (mask
& (1 << i
))
2764 strcat (regs
, ", ");
2769 strcat (regs
, reg_names
[ first
]);
2771 for (i
++; i
< 32; i
++)
2772 if ((mask
& (1 << i
)) == 0)
2777 strcat (regs
, " - ");
2778 strcat (regs
, reg_names
[ i
- 1 ] );
2783 sprintf (buff
, "dispose %d {%s}, r31", stack_bytes
/ 4, regs
);
2789 /* Construct a PREPARE instruction that is the equivalent of
2790 the given RTL. We have already verified that this should
2794 construct_prepare_instruction (rtx op
)
2798 unsigned long int mask
;
2800 static char buff
[ 100 ]; /* XXX */
2803 if (XVECLEN (op
, 0) <= 1)
2805 error ("bogus PREPEARE construction: %d", XVECLEN (op
, 0));
2809 /* Work out how many bytes to push onto
2810 the stack after storing the registers. */
2811 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2812 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2813 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2815 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2818 /* Make sure that the amount we are popping
2819 will fit into the DISPOSE instruction. */
2820 if (stack_bytes
< -128)
2822 error ("too much stack space to prepare: %d", stack_bytes
);
2826 /* Now compute the bit mask of registers to push. */
2829 for (i
= 1; i
< XVECLEN (op
, 0); i
++)
2831 rtx vector_element
= XVECEXP (op
, 0, i
);
2833 if (GET_CODE (vector_element
) == CLOBBER
)
2836 gcc_assert (GET_CODE (vector_element
) == SET
);
2837 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2838 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2841 if (REGNO (SET_SRC (vector_element
)) == 2)
2844 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2848 stack_bytes
+= count
* 4;
2850 if ((! TARGET_DISABLE_CALLT
)
2851 && (use_callt
|| stack_bytes
== 0))
2855 sprintf (buff
, "callt ctoff(__callt_save_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29 );
2859 for (i
= 20; i
< 32; i
++)
2860 if (mask
& (1 << i
))
2864 sprintf (buff
, "callt ctoff(__callt_save_r31c)");
2866 sprintf (buff
, "callt ctoff(__callt_save_r%d_r%s)",
2867 i
, (mask
& (1 << 31)) ? "31c" : "29");
2871 static char regs
[100]; /* XXX */
2875 /* Generate the PREPARE instruction. Note we could just issue the
2876 bit mask as a number as the assembler can cope with this, but for
2877 the sake of our readers we turn it into a textual description. */
2881 for (i
= 20; i
< 32; i
++)
2883 if (mask
& (1 << i
))
2888 strcat (regs
, ", ");
2893 strcat (regs
, reg_names
[ first
]);
2895 for (i
++; i
< 32; i
++)
2896 if ((mask
& (1 << i
)) == 0)
2901 strcat (regs
, " - ");
2902 strcat (regs
, reg_names
[ i
- 1 ] );
2907 sprintf (buff
, "prepare {%s}, %d", regs
, (- stack_bytes
) / 4);
2913 /* Return an RTX indicating where the return address to the
2914 calling function can be found. */
2917 v850_return_addr (int count
)
2922 return get_hard_reg_initial_val (Pmode
, LINK_POINTER_REGNUM
);
2925 /* Implement TARGET_ASM_INIT_SECTIONS. */
2928 v850_asm_init_sections (void)
2931 = get_unnamed_section (0, output_section_asm_op
,
2932 "\t.section .rosdata,\"a\"");
2935 = get_unnamed_section (0, output_section_asm_op
,
2936 "\t.section .rozdata,\"a\"");
2939 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2940 "\t.section .tdata,\"aw\"");
2943 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2944 "\t.section .zdata,\"aw\"");
2947 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
2948 output_section_asm_op
,
2949 "\t.section .zbss,\"aw\"");
2953 v850_select_section (tree exp
,
2954 int reloc ATTRIBUTE_UNUSED
,
2955 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
2957 if (TREE_CODE (exp
) == VAR_DECL
)
2960 if (!TREE_READONLY (exp
)
2961 || TREE_SIDE_EFFECTS (exp
)
2962 || !DECL_INITIAL (exp
)
2963 || (DECL_INITIAL (exp
) != error_mark_node
2964 && !TREE_CONSTANT (DECL_INITIAL (exp
))))
2969 switch (v850_get_data_area (exp
))
2972 return is_const
? rozdata_section
: zdata_section
;
2975 return tdata_section
;
2978 return is_const
? rosdata_section
: sdata_section
;
2981 return is_const
? readonly_data_section
: data_section
;
2984 return readonly_data_section
;
2987 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2990 v850_function_value_regno_p (const unsigned int regno
)
2992 return (regno
== 10);
2995 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2998 v850_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
3000 /* Return values > 8 bytes in length in memory. */
3001 return int_size_in_bytes (type
) > 8
3002 || TYPE_MODE (type
) == BLKmode
3003 /* With the rh850 ABI return all aggregates in memory. */
3004 || ((! TARGET_GCC_ABI
) && AGGREGATE_TYPE_P (type
))
3008 /* Worker function for TARGET_FUNCTION_VALUE. */
3011 v850_function_value (const_tree valtype
,
3012 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
3013 bool outgoing ATTRIBUTE_UNUSED
)
3015 return gen_rtx_REG (TYPE_MODE (valtype
), 10);
3019 /* Worker function for TARGET_CAN_ELIMINATE. */
3022 v850_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
3024 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
3027 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
3029 If TARGET_APP_REGS is not defined then add r2 and r5 to
3030 the pool of fixed registers. See PR 14505. */
3033 v850_conditional_register_usage (void)
3035 if (TARGET_APP_REGS
)
3037 fixed_regs
[2] = 0; call_used_regs
[2] = 0;
3038 fixed_regs
[5] = 0; call_used_regs
[5] = 1;
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE: emit the
   trampoline code with two trailing slots for the static chain and
   target address, filled in by v850_trampoline_init.  */

static void
v850_asm_trampoline_template (FILE *f)
{
  fprintf (f, "\tjarl .+4,r12\n");
  fprintf (f, "\tld.w 12[r12],r20\n");
  fprintf (f, "\tld.w 16[r12],r12\n");
  fprintf (f, "\tjmp [r12]\n");
  fprintf (f, "\tnop\n");
  fprintf (f, "\t.long 0\n");
  fprintf (f, "\t.long 0\n");
}
3056 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3059 v850_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
3061 rtx mem
, fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
3063 emit_block_move (m_tramp
, assemble_trampoline_template (),
3064 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
3066 mem
= adjust_address (m_tramp
, SImode
, 16);
3067 emit_move_insn (mem
, chain_value
);
3068 mem
= adjust_address (m_tramp
, SImode
, 20);
3069 emit_move_insn (mem
, fnaddr
);
3073 v850_issue_rate (void)
3075 return (TARGET_V850E2_UP
? 2 : 1);
3078 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3081 v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
3083 return (GET_CODE (x
) == CONST_DOUBLE
3084 || !(GET_CODE (x
) == CONST
3085 && GET_CODE (XEXP (x
, 0)) == PLUS
3086 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
3087 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3088 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x
, 0), 1)))));
3092 v850_memory_move_cost (machine_mode mode
,
3093 reg_class_t reg_class ATTRIBUTE_UNUSED
,
3096 switch (GET_MODE_SIZE (mode
))
3106 return (GET_MODE_SIZE (mode
) / 2) * (in
? 3 : 1);
3111 v850_adjust_insn_length (rtx_insn
*insn
, int length
)
3113 if (TARGET_V850E3V5_UP
)
3117 if (TARGET_LONG_CALLS
)
3119 /* call_internal_long, call_value_internal_long. */
3127 /* call_internal_short, call_value_internal_short. */
3136 /* V850 specific attributes. */
3138 static const struct attribute_spec v850_attribute_table
[] =
3140 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3141 affects_type_identity } */
3142 { "interrupt_handler", 0, 0, true, false, false,
3143 v850_handle_interrupt_attribute
, false },
3144 { "interrupt", 0, 0, true, false, false,
3145 v850_handle_interrupt_attribute
, false },
3146 { "sda", 0, 0, true, false, false,
3147 v850_handle_data_area_attribute
, false },
3148 { "tda", 0, 0, true, false, false,
3149 v850_handle_data_area_attribute
, false },
3150 { "zda", 0, 0, true, false, false,
3151 v850_handle_data_area_attribute
, false },
3152 { NULL
, 0, 0, false, false, false, NULL
, false }
3156 v850_option_override (void)
3158 if (flag_exceptions
|| flag_non_call_exceptions
)
3159 flag_omit_frame_pointer
= 0;
3161 /* The RH850 ABI does not (currently) support the use of the CALLT instruction. */
3162 if (! TARGET_GCC_ABI
)
3163 target_flags
|= MASK_DISABLE_CALLT
;
3167 v850_gen_movdi (rtx
* operands
)
3169 if (REG_P (operands
[0]))
3171 if (REG_P (operands
[1]))
3173 if (REGNO (operands
[0]) == (REGNO (operands
[1]) - 1))
3174 return "mov %1, %0; mov %R1, %R0";
3176 return "mov %R1, %R0; mov %1, %0";
3179 if (MEM_P (operands
[1]))
3181 if (REGNO (operands
[0]) & 1)
3182 /* Use two load word instructions to synthesise a load double. */
3183 return "ld.w %1, %0 ; ld.w %R1, %R0" ;
3185 return "ld.dw %1, %0";
3188 return "mov %1, %0; mov %R1, %R0";
3191 gcc_assert (REG_P (operands
[1]));
3193 if (REGNO (operands
[1]) & 1)
3194 /* Use two store word instructions to synthesise a store double. */
3195 return "st.w %1, %0 ; st.w %R1, %R0 ";
3197 return "st.dw %1, %0";
/* Initialize the GCC target structure. */

/* Option handling and memory cost hooks.  */
#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE v850_option_override

#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST v850_memory_move_cost

/* Assembly output hooks.  */
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef  TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

/* Attribute and section handling.  */
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them. */
#undef  TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef  TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

/* Cost, reorg and scheduling hooks.  */
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

/* Calling convention hooks.  */
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value

#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef  TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

/* Trampoline hooks.  */
#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p

#undef  TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

/* Instantiate the target hook vector from the macros defined above.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collection roots generated by gengtype for this file.  */
#include "gt-v850.h"