1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "double-int.h"
35 #include "stringpool.h"
36 #include "stor-layout.h"
41 #include "hard-reg-set.h"
42 #include "insn-config.h"
43 #include "conditions.h"
45 #include "insn-attr.h"
50 #include "statistics.h"
52 #include "fixed-value.h"
59 #include "diagnostic-core.h"
63 #include "target-def.h"
64 #include "dominance.h"
70 #include "cfgcleanup.h"
72 #include "basic-block.h"
78 #define streq(a,b) (strcmp (a, b) == 0)
81 static void v850_print_operand_address (FILE *, rtx
);
83 /* Names of the various data areas used on the v850. */
84 const char * GHS_default_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
85 const char * GHS_current_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
87 /* Track the current data area set by the data area pragma (which
88 can be nested). Tested by check_default_data_area. */
89 data_area_stack_element
* data_area_stack
= NULL
;
91 /* True if we don't need to check any more if the current
92 function is an interrupt handler. */
93 static int v850_interrupt_cache_p
= FALSE
;
95 rtx v850_compare_op0
, v850_compare_op1
;
97 /* Whether current function is an interrupt handler. */
98 static int v850_interrupt_p
= FALSE
;
100 static GTY(()) section
* rosdata_section
;
101 static GTY(()) section
* rozdata_section
;
102 static GTY(()) section
* tdata_section
;
103 static GTY(()) section
* zdata_section
;
104 static GTY(()) section
* zbss_section
;
106 /* We use this to wrap all emitted insns in the prologue. */
110 if (GET_CODE (x
) != CLOBBER
)
111 RTX_FRAME_RELATED_P (x
) = 1;
/* NOTE(review): lossy extract of v850.c -- interior source lines (return
   type, braces, declaration of `i`, final `return`) are missing and the
   leading numerals are residue of the original line numbering.  The
   comments below describe only what the visible fragment shows.  */
115 /* Mark all the subexpressions of the PARALLEL rtx PAR as
116 frame-related. Return PAR.
118 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
119 PARALLEL rtx other than the first if they do not have the
120 FRAME_RELATED flag set on them. */
123 v850_all_frame_related (rtx par
)
125 int len
= XVECLEN (par
, 0);
/* Visible logic: assert PAR is a PARALLEL, then apply F() (which is seen
   earlier in the file setting RTX_FRAME_RELATED_P) to each element.  */
128 gcc_assert (GET_CODE (par
) == PARALLEL
);
129 for (i
= 0; i
< len
; i
++)
130 F (XVECEXP (par
, 0, i
));
/* NOTE(review): lossy extract -- the return-type line, braces, and the
   final return expression of this hook are missing from this view; only
   the size computation is visible.  */
135 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
136 Specify whether to pass the argument by reference. */
139 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
140 machine_mode mode
, const_tree type
,
141 bool named ATTRIBUTE_UNUSED
)
143 unsigned HOST_WIDE_INT size
;
/* Visible logic: size comes from the tree type when one is supplied,
   otherwise from the machine mode (the guarding if/else lines were
   dropped by the extraction -- TODO confirm against upstream v850.c).  */
149 size
= int_size_in_bytes (type
);
151 size
= GET_MODE_SIZE (mode
);
/* NOTE(review): lossy extract -- return type, braces, case labels of the
   switch, `align` declaration, and the final `return result;` are missing.
   Comments describe only the visible fragment.  */
156 /* Return an RTX to represent where an argument with mode MODE
157 and type TYPE will be passed to a function. If the result
158 is NULL_RTX, the argument will be pushed. */
161 v850_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
162 const_tree type
, bool named
)
164 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
165 rtx result
= NULL_RTX
;
/* Argument size: from TYPE when present, else from MODE; then rounded
   up to a whole number of words.  */
172 size
= int_size_in_bytes (type
);
174 size
= GET_MODE_SIZE (mode
);
176 size
= (size
+ UNITS_PER_WORD
-1) & ~(UNITS_PER_WORD
-1);
180 /* Once we have stopped using argument registers, do not start up again. */
181 cum
->nbytes
= 4 * UNITS_PER_WORD
;
/* Alignment of the slot: a full word unless the argument is smaller and
   typed, in which case the type's own alignment is used.  */
186 align
= UNITS_PER_WORD
;
187 else if (size
<= UNITS_PER_WORD
&& type
)
188 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
192 cum
->nbytes
= (cum
->nbytes
+ align
- 1) &~(align
- 1);
/* Only the first 4 words (r6-r9) carry arguments; past that the
   argument is pushed (result stays NULL_RTX).  */
194 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
197 if (type
== NULL_TREE
198 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
/* Select argument register r6..r9 by word offset consumed so far
   (the switch's case labels were dropped by the extraction).  */
201 switch (cum
->nbytes
/ UNITS_PER_WORD
)
204 result
= gen_rtx_REG (mode
, 6);
207 result
= gen_rtx_REG (mode
, 7);
210 result
= gen_rtx_REG (mode
, 8);
213 result
= gen_rtx_REG (mode
, 9);
/* NOTE(review): lossy extract -- return type, braces, `size`/`align`
   declarations, and several early-return lines are missing from view.  */
222 /* Return the number of bytes which must be put into registers
223 for values which are part in registers and part in memory. */
225 v850_arg_partial_bytes (cumulative_args_t cum_v
, machine_mode mode
,
226 tree type
, bool named
)
228 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
/* Size from TYPE when present, otherwise from MODE (guards dropped).  */
235 size
= int_size_in_bytes (type
);
237 size
= GET_MODE_SIZE (mode
);
/* Slot alignment, mirroring v850_function_arg.  */
243 align
= UNITS_PER_WORD
;
245 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
249 cum
->nbytes
= (cum
->nbytes
+ align
- 1) & ~ (align
- 1);
/* All-in-memory and all-in-register cases return early; the split case
   returns the register-resident remainder of the 4 argument words.  */
251 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
254 if (cum
->nbytes
+ size
<= 4 * UNITS_PER_WORD
)
257 if (type
== NULL_TREE
258 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
261 return 4 * UNITS_PER_WORD
- cum
->nbytes
;
/* NOTE(review): lossy extract -- the surrounding if/else deciding between
   the two `cum->nbytes +=` expressions, the `& ~(UNITS_PER_WORD - 1)`
   tails, and the braces are missing from this view.  */
264 /* Update the data in CUM to advance over an argument
265 of mode MODE and data type TYPE.
266 (TYPE is null for libcalls where that information may not be available.) */
269 v850_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
270 const_tree type
, bool named ATTRIBUTE_UNUSED
)
272 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
/* First visible variant: advance by the mode size (or type size for
   BLKmode), rounded up to a word.  */
275 cum
->nbytes
+= (((mode
!= BLKmode
276 ? GET_MODE_SIZE (mode
)
277 : int_size_in_bytes (type
)) + UNITS_PER_WORD
- 1)
/* Second visible variant: large (>8 byte) typed aggregates advance by a
   pointer's size -- presumably the pass-by-reference path; TODO confirm
   against upstream v850.c, the selecting condition was dropped.  */
280 cum
->nbytes
+= (((type
&& int_size_in_bytes (type
) > 8
281 ? GET_MODE_SIZE (Pmode
)
283 ? GET_MODE_SIZE (mode
)
284 : int_size_in_bytes (type
))) + UNITS_PER_WORD
- 1)
/* NOTE(review): lossy extract -- return type, braces, the `rv`/`t` local
   declarations, the switch's case labels (DFmode/SFmode/VOIDmode from
   context) and `break`s are missing from this view.  */
288 /* Return the high and low words of a CONST_DOUBLE */
291 const_double_split (rtx x
, HOST_WIDE_INT
* p_high
, HOST_WIDE_INT
* p_low
)
293 if (GET_CODE (x
) == CONST_DOUBLE
)
298 switch (GET_MODE (x
))
/* Double-precision case: convert to target layout, split into two
   words.  */
301 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
302 REAL_VALUE_TO_TARGET_DOUBLE (rv
, t
);
303 *p_high
= t
[1]; /* since v850 is little endian */
304 *p_low
= t
[0]; /* high is second word */
/* Single-precision case: the whole value lands in *p_high.  */
308 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
309 REAL_VALUE_TO_TARGET_SINGLE (rv
, *p_high
);
/* Integer CONST_DOUBLE case: take the two halves directly.  */
315 *p_high
= CONST_DOUBLE_HIGH (x
);
316 *p_low
= CONST_DOUBLE_LOW (x
);
324 fatal_insn ("const_double_split got a bad insn:", x
);
/* NOTE(review): lossy extract -- the return-type line, braces, and every
   `return` value of this cost ladder are missing; only the constraint
   tests (I, then J, then K ranges) are visible.  The `zero_cost`
   parameter's use is not visible -- TODO confirm against upstream.  */
328 /* Return the cost of the rtx R with code CODE. */
331 const_costs_int (HOST_WIDE_INT value
, int zero_cost
)
333 if (CONST_OK_FOR_I (value
))
335 else if (CONST_OK_FOR_J (value
))
337 else if (CONST_OK_FOR_K (value
))
/* NOTE(review): lossy extract -- return type, braces, and the switch on
   GET_CODE (r) that selects between these branches are missing.  Visible
   logic: CONST_INT costs via const_costs_int; CONST_DOUBLE is split into
   high/low words, SFmode costs only the high word, otherwise both.  */
344 const_costs (rtx r
, enum rtx_code c
)
346 HOST_WIDE_INT high
, low
;
351 return const_costs_int (INTVAL (r
), 0);
354 const_double_split (r
, &high
, &low
);
355 if (GET_MODE (r
) == SFmode
)
356 return const_costs_int (high
, 1);
358 return const_costs_int (high
, 1) + const_costs_int (low
, 1);
/* NOTE(review): lossy extract -- this TARGET_RTX_COSTS hook is missing its
   return type, the `codearg` parameter line, the case labels of the outer
   switch, and most `*total = ...` assignments; only scattered conditions
   survive.  Comments below are limited to what those show.  */
374 v850_rtx_costs (rtx x
,
376 int outer_code ATTRIBUTE_UNUSED
,
377 int opno ATTRIBUTE_UNUSED
,
378 int * total
, bool speed
)
/* `codearg` is received as a plain int elsewhere in the (missing)
   signature and recovered as an rtx_code here.  */
380 enum rtx_code code
= (enum rtx_code
) codearg
;
389 *total
= COSTS_N_INSNS (const_costs (x
, code
));
/* Size-vs-speed costing for (presumably) multiply/divide on narrow
   modes -- the selecting case labels were dropped; TODO confirm.  */
396 if (TARGET_V850E
&& !speed
)
404 && ( GET_MODE (x
) == SImode
405 || GET_MODE (x
) == HImode
406 || GET_MODE (x
) == QImode
))
408 if (GET_CODE (XEXP (x
, 1)) == REG
)
410 else if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
412 if (CONST_OK_FOR_O (INTVAL (XEXP (x
, 1))))
414 else if (CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))))
423 if (outer_code
== COMPARE
)
432 /* Print operand X using operand code CODE to assembly language output file
436 v850_print_operand (FILE * file
, rtx x
, int code
)
438 HOST_WIDE_INT high
, low
;
443 /* We use 'c' operands with symbols for .vtinherit. */
444 if (GET_CODE (x
) == SYMBOL_REF
)
446 output_addr_const(file
, x
);
453 switch ((code
== 'B' || code
== 'C')
454 ? reverse_condition (GET_CODE (x
)) : GET_CODE (x
))
457 if (code
== 'c' || code
== 'C')
458 fprintf (file
, "nz");
460 fprintf (file
, "ne");
463 if (code
== 'c' || code
== 'C')
469 fprintf (file
, "ge");
472 fprintf (file
, "gt");
475 fprintf (file
, "le");
478 fprintf (file
, "lt");
481 fprintf (file
, "nl");
487 fprintf (file
, "nh");
496 case 'F': /* High word of CONST_DOUBLE. */
497 switch (GET_CODE (x
))
500 fprintf (file
, "%d", (INTVAL (x
) >= 0) ? 0 : -1);
504 const_double_split (x
, &high
, &low
);
505 fprintf (file
, "%ld", (long) high
);
512 case 'G': /* Low word of CONST_DOUBLE. */
513 switch (GET_CODE (x
))
516 fprintf (file
, "%ld", (long) INTVAL (x
));
520 const_double_split (x
, &high
, &low
);
521 fprintf (file
, "%ld", (long) low
);
529 fprintf (file
, "%d\n", (int)(INTVAL (x
) & 0xffff));
532 fprintf (file
, "%d", exact_log2 (INTVAL (x
)));
535 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
537 if (GET_CODE (x
) == CONST
)
538 x
= XEXP (XEXP (x
, 0), 0);
540 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
542 if (SYMBOL_REF_ZDA_P (x
))
543 fprintf (file
, "zdaoff");
544 else if (SYMBOL_REF_SDA_P (x
))
545 fprintf (file
, "sdaoff");
546 else if (SYMBOL_REF_TDA_P (x
))
547 fprintf (file
, "tdaoff");
552 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
553 output_addr_const (file
, x
);
556 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
558 if (GET_CODE (x
) == CONST
)
559 x
= XEXP (XEXP (x
, 0), 0);
561 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
563 if (SYMBOL_REF_ZDA_P (x
))
564 fprintf (file
, "r0");
565 else if (SYMBOL_REF_SDA_P (x
))
566 fprintf (file
, "gp");
567 else if (SYMBOL_REF_TDA_P (x
))
568 fprintf (file
, "ep");
572 case 'R': /* 2nd word of a double. */
573 switch (GET_CODE (x
))
576 fprintf (file
, reg_names
[REGNO (x
) + 1]);
579 x
= XEXP (adjust_address (x
, SImode
, 4), 0);
580 v850_print_operand_address (file
, x
);
581 if (GET_CODE (x
) == CONST_INT
)
582 fprintf (file
, "[r0]");
587 unsigned HOST_WIDE_INT v
= INTVAL (x
);
589 /* Trickery to avoid problems with shifting
590 32-bits at a time on a 32-bit host. */
593 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, v
);
598 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_HIGH (x
));
608 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
609 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), FALSE
))
616 /* Like an 'S' operand above, but for unsigned loads only. */
617 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), TRUE
))
622 case 'W': /* Print the instruction suffix. */
623 switch (GET_MODE (x
))
628 case QImode
: fputs (".b", file
); break;
629 case HImode
: fputs (".h", file
); break;
630 case SImode
: fputs (".w", file
); break;
631 case SFmode
: fputs (".w", file
); break;
634 case '.': /* Register r0. */
635 fputs (reg_names
[0], file
);
637 case 'z': /* Reg or zero. */
639 fputs (reg_names
[REGNO (x
)], file
);
640 else if ((GET_MODE(x
) == SImode
641 || GET_MODE(x
) == DFmode
642 || GET_MODE(x
) == SFmode
)
643 && x
== CONST0_RTX(GET_MODE(x
)))
644 fputs (reg_names
[0], file
);
647 gcc_assert (x
== const0_rtx
);
648 fputs (reg_names
[0], file
);
652 switch (GET_CODE (x
))
655 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
656 output_address (gen_rtx_PLUS (SImode
, gen_rtx_REG (SImode
, 0),
659 output_address (XEXP (x
, 0));
663 fputs (reg_names
[REGNO (x
)], file
);
666 fputs (reg_names
[subreg_regno (x
)], file
);
669 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_LOW (x
));
677 v850_print_operand_address (file
, x
);
/* NOTE(review): lossy extract -- the `static void` line, the switch's case
   labels (REG/LO_SUM/PLUS/SYMBOL_REF/CONST from context), braces,
   `break`s, and the assignments filling off_name/reg_name are missing.
   Visible logic only: each address shape is printed as either
   "<off>[<reg>]" or a bare constant via output_addr_const.  */
688 /* Output assembly language output for the address ADDR to FILE. */
691 v850_print_operand_address (FILE * file
, rtx addr
)
693 switch (GET_CODE (addr
))
/* Bare register: printed as "0[<reg>]".  */
696 fprintf (file
, "0[");
697 v850_print_operand (file
, addr
, 0);
/* LO_SUM-style address: "lo(<sym>)[<reg>]".  */
701 if (GET_CODE (XEXP (addr
, 0)) == REG
)
704 fprintf (file
, "lo(");
705 v850_print_operand (file
, XEXP (addr
, 1), 0);
706 fprintf (file
, ")[");
707 v850_print_operand (file
, XEXP (addr
, 0), 0);
/* reg+offset: offset first, then "[reg]" (bracket prints dropped).  */
712 if (GET_CODE (XEXP (addr
, 0)) == REG
713 || GET_CODE (XEXP (addr
, 0)) == SUBREG
)
716 v850_print_operand (file
, XEXP (addr
, 1), 0);
718 v850_print_operand (file
, XEXP (addr
, 0), 0);
723 v850_print_operand (file
, XEXP (addr
, 0), 0);
725 v850_print_operand (file
, XEXP (addr
, 1), 0);
/* SYMBOL_REF in a small-data area: "<area>off(<sym>)[<base reg>]".
   The assignments of off_name/reg_name per area were dropped.  */
730 const char *off_name
= NULL
;
731 const char *reg_name
= NULL
;
733 if (SYMBOL_REF_ZDA_P (addr
))
738 else if (SYMBOL_REF_SDA_P (addr
))
743 else if (SYMBOL_REF_TDA_P (addr
))
750 fprintf (file
, "%s(", off_name
);
751 output_addr_const (file
, addr
);
753 fprintf (file
, ")[%s]", reg_name
);
/* CONST wrapping a special symbol ref: same "<off>(...)[<reg>]" form.  */
757 if (special_symbolref_operand (addr
, VOIDmode
))
759 rtx x
= XEXP (XEXP (addr
, 0), 0);
760 const char *off_name
;
761 const char *reg_name
;
763 if (SYMBOL_REF_ZDA_P (x
))
768 else if (SYMBOL_REF_SDA_P (x
))
773 else if (SYMBOL_REF_TDA_P (x
))
781 fprintf (file
, "%s(", off_name
);
782 output_addr_const (file
, addr
);
783 fprintf (file
, ")[%s]", reg_name
);
786 output_addr_const (file
, addr
);
/* Default: print the address as a plain constant expression.  */
789 output_addr_const (file
, addr
);
795 v850_print_operand_punct_valid_p (unsigned char code
)
/* NOTE(review): lossy extract -- the `static bool` line, braces, the
   `return false`/`return true` lines, and the reassignment of x after the
   TRUNCATE check are missing from this view.  */
800 /* When assemble_integer is used to emit the offsets for a switch
801 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
802 output_addr_const will normally barf at this, but it is OK to omit
803 the truncate and just emit the difference of the two labels. The
804 .hword directive will automatically handle the truncation for us.
806 Returns true if rtx was handled, false otherwise. */
809 v850_output_addr_const_extra (FILE * file
, rtx x
)
811 if (GET_CODE (x
) != TRUNCATE
)
816 /* We must also handle the case where the switch table was passed a
817 constant value and so has been collapsed. In this case the first
818 label will have been deleted. In such a case it is OK to emit
819 nothing, since the table will not be used.
820 (cf gcc.c-torture/compile/990801-1.c). */
821 if (GET_CODE (x
) == MINUS
822 && GET_CODE (XEXP (x
, 0)) == LABEL_REF
)
824 rtx_code_label
*label
825 = dyn_cast
<rtx_code_label
*> (XEXP (XEXP (x
, 0), 0));
/* A deleted first label means the table is dead: emit nothing.  */
826 if (label
&& label
->deleted ())
830 output_addr_const (file
, x
);
/* NOTE(review): lossy extract -- return type, braces, the register/
   register `mov` case, and several `return`s are missing.  Visible logic:
   pick the assembler template for a single move based on the source and
   destination rtx shapes.  The template strings below are runtime output
   and are preserved byte-for-byte.  */
834 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
838 output_move_single (rtx
* operands
)
840 rtx dst
= operands
[0];
841 rtx src
= operands
[1];
/* Integer constants: choose mov/movea/movhi by which constraint range
   (J = 5-bit, K = 16-bit, L = high-half) the value fits.  */
848 else if (GET_CODE (src
) == CONST_INT
)
850 HOST_WIDE_INT value
= INTVAL (src
);
852 if (CONST_OK_FOR_J (value
)) /* Signed 5-bit immediate. */
855 else if (CONST_OK_FOR_K (value
)) /* Signed 16-bit immediate. */
856 return "movea %1,%.,%0";
858 else if (CONST_OK_FOR_L (value
)) /* Upper 16 bits were set. */
859 return "movhi hi0(%1),%.,%0";
861 /* A random constant. */
862 else if (TARGET_V850E_UP
)
865 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
/* SFmode CONST_DOUBLE: same ladder on the split high word, printed
   through the %F operand code.  */
868 else if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
870 HOST_WIDE_INT high
, low
;
872 const_double_split (src
, &high
, &low
);
874 if (CONST_OK_FOR_J (high
)) /* Signed 5-bit immediate. */
877 else if (CONST_OK_FOR_K (high
)) /* Signed 16-bit immediate. */
878 return "movea %F1,%.,%0";
880 else if (CONST_OK_FOR_L (high
)) /* Upper 16 bits were set. */
881 return "movhi hi0(%F1),%.,%0";
883 /* A random constant. */
884 else if (TARGET_V850E_UP
)
888 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
/* Loads, small-data references, and symbolic constants.  */
891 else if (GET_CODE (src
) == MEM
)
892 return "%S1ld%W1 %1,%0";
894 else if (special_symbolref_operand (src
, VOIDmode
))
895 return "movea %O1(%P1),%Q1,%0";
897 else if (GET_CODE (src
) == LABEL_REF
898 || GET_CODE (src
) == SYMBOL_REF
899 || GET_CODE (src
) == CONST
)
902 return "mov hilo(%1),%0";
904 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
907 else if (GET_CODE (src
) == HIGH
)
908 return "movhi hi(%1),%.,%0";
910 else if (GET_CODE (src
) == LO_SUM
)
912 operands
[2] = XEXP (src
, 0);
913 operands
[3] = XEXP (src
, 1);
914 return "movea lo(%3),%2,%0";
/* Stores: plain store, or store of zero via r0 (the %. operand).  */
918 else if (GET_CODE (dst
) == MEM
)
921 return "%S0st%W0 %1,%0";
923 else if (GET_CODE (src
) == CONST_INT
&& INTVAL (src
) == 0)
924 return "%S0st%W0 %.,%0";
926 else if (GET_CODE (src
) == CONST_DOUBLE
927 && CONST0_RTX (GET_MODE (dst
)) == src
)
928 return "%S0st%W0 %.,%0";
931 fatal_insn ("output_move_single:", gen_rtx_SET (dst
, src
));
/* NOTE(review): lossy extract -- return type, braces, the switch on
   `cond` with its case labels (LE/GE/LT/GT/EQ/NE, inferred from the
   returned mode names -- TODO confirm), and the non-float fallback
   return are missing from this view.  Selects the FPU condition-code
   mode matching a float comparison.  */
936 v850_select_cc_mode (enum rtx_code cond
, rtx op0
, rtx op1 ATTRIBUTE_UNUSED
)
938 if (GET_MODE_CLASS (GET_MODE (op0
)) == MODE_FLOAT
)
943 return CC_FPU_LEmode
;
945 return CC_FPU_GEmode
;
947 return CC_FPU_LTmode
;
949 return CC_FPU_GTmode
;
951 return CC_FPU_EQmode
;
953 return CC_FPU_NEmode
;
/* NOTE(review): lossy extract -- return type, braces, and the switch
   case labels selecting each emit are missing.  Emits the DF/SF FPU
   compare insn for COND, then returns the matching CC mode.
   NOTE(review): the DF branch tests GET_MODE (op0) but the SF branch
   tests the global v850_compare_op0 rather than op0 -- this asymmetry
   looks suspicious; verify against callers before relying on it.  */
962 v850_gen_float_compare (enum rtx_code cond
, machine_mode mode ATTRIBUTE_UNUSED
, rtx op0
, rtx op1
)
964 if (GET_MODE (op0
) == DFmode
)
969 emit_insn (gen_cmpdf_le_insn (op0
, op1
));
972 emit_insn (gen_cmpdf_ge_insn (op0
, op1
));
975 emit_insn (gen_cmpdf_lt_insn (op0
, op1
));
978 emit_insn (gen_cmpdf_gt_insn (op0
, op1
));
981 /* Note: There is no NE comparison operator. So we
982 perform an EQ comparison and invert the branch.
983 See v850_float_nz_comparison for how this is done. */
985 emit_insn (gen_cmpdf_eq_insn (op0
, op1
));
991 else if (GET_MODE (v850_compare_op0
) == SFmode
)
996 emit_insn (gen_cmpsf_le_insn(op0
, op1
));
999 emit_insn (gen_cmpsf_ge_insn(op0
, op1
));
1002 emit_insn (gen_cmpsf_lt_insn(op0
, op1
));
1005 emit_insn (gen_cmpsf_gt_insn(op0
, op1
));
1008 /* Note: There is no NE comparison operator. So we
1009 perform an EQ comparison and invert the branch.
1010 See v850_float_nz_comparison for how this is done. */
1012 emit_insn (gen_cmpsf_eq_insn(op0
, op1
));
1021 return v850_select_cc_mode (cond
, op0
, op1
);
/* NOTE(review): lossy extract -- return type, braces, and the `cc_reg`
   declaration are missing.  Visible logic: integer comparisons emit the
   CC-setting cmpsi insn and return (cond CC_REG 0); float comparisons go
   through v850_gen_float_compare, then the FPU CC (FCC_REGNUM) is copied
   into CC_REGNUM in the selected CC_FPU_* mode.  */
1025 v850_gen_compare (enum rtx_code cond
, machine_mode mode
, rtx op0
, rtx op1
)
1027 if (GET_MODE_CLASS(GET_MODE (op0
)) != MODE_FLOAT
)
1029 emit_insn (gen_cmpsi_insn (op0
, op1
));
1030 return gen_rtx_fmt_ee (cond
, mode
, gen_rtx_REG(CCmode
, CC_REGNUM
), const0_rtx
);
1035 mode
= v850_gen_float_compare (cond
, mode
, op0
, op1
);
1036 cc_reg
= gen_rtx_REG (mode
, CC_REGNUM
);
1037 emit_insn (gen_rtx_SET (cc_reg
, gen_rtx_REG (mode
, FCC_REGNUM
)));
1039 return gen_rtx_fmt_ee (cond
, mode
, cc_reg
, const0_rtx
);
/* NOTE(review): lossy extract -- return type, the `max_offset`
   declaration, the switch on MODE with its case labels (presumably
   QImode/HImode/SImode-or-SFmode -- TODO confirm), extra condition lines
   after the TARGET_V850E_UP tests, and the final `return max_offset;`
   are all missing.  Only the per-branch limit values survive: 1<<4 /
   1<<7 in the first group, 1<<5 / 1<<8 in the second, 1<<8 last.  */
1043 /* Return maximum offset supported for a short EP memory reference of mode
1044 MODE and signedness UNSIGNEDP. */
1047 ep_memory_offset (machine_mode mode
, int unsignedp ATTRIBUTE_UNUSED
)
1054 if (TARGET_SMALL_SLD
)
1055 max_offset
= (1 << 4);
1056 else if ((TARGET_V850E_UP
)
1058 max_offset
= (1 << 4);
1060 max_offset
= (1 << 7);
1064 if (TARGET_SMALL_SLD
)
1065 max_offset
= (1 << 5);
1066 else if ((TARGET_V850E_UP
)
1068 max_offset
= (1 << 5);
1070 max_offset
= (1 << 8);
1075 max_offset
= (1 << 8);
/* NOTE(review): lossy extract -- return type, braces, local declarations
   (max_offset, mask, addr, op0, op1), the switch's case labels
   (SYMBOL_REF/REG/PLUS from the visible uses), and the TRUE/FALSE
   returns are missing from this view.  */
1085 /* Return true if OP is a valid short EP memory reference */
1088 ep_memory_operand (rtx op
, machine_mode mode
, int unsigned_load
)
1094 /* If we are not using the EP register on a per-function basis
1095 then do not allow this optimization at all. This is to
1096 prevent the use of the SLD/SST instructions which cannot be
1097 guaranteed to work properly due to a hardware bug. */
1101 if (GET_CODE (op
) != MEM
)
1104 max_offset
= ep_memory_offset (mode
, unsigned_load
);
/* mask enforces natural alignment of the offset below.  */
1106 mask
= GET_MODE_SIZE (mode
) - 1;
1108 addr
= XEXP (op
, 0);
1109 if (GET_CODE (addr
) == CONST
)
1110 addr
= XEXP (addr
, 0);
1112 switch (GET_CODE (addr
))
/* Tiny-data symbol, bare ep register, or ep/tda-symbol plus a small
   aligned non-negative offset all qualify.  */
1118 return SYMBOL_REF_TDA_P (addr
);
1121 return REGNO (addr
) == EP_REGNUM
;
1124 op0
= XEXP (addr
, 0);
1125 op1
= XEXP (addr
, 1);
1126 if (GET_CODE (op1
) == CONST_INT
1127 && INTVAL (op1
) < max_offset
1128 && INTVAL (op1
) >= 0
1129 && (INTVAL (op1
) & mask
) == 0)
1131 if (GET_CODE (op0
) == REG
&& REGNO (op0
) == EP_REGNUM
)
1134 if (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_TDA_P (op0
))
1143 /* Substitute memory references involving a pointer, to use the ep pointer,
1144 taking care to save and preserve the ep. */
1147 substitute_ep_register (rtx_insn
*first_insn
,
1148 rtx_insn
*last_insn
,
1154 rtx reg
= gen_rtx_REG (Pmode
, regno
);
1159 df_set_regs_ever_live (1, true);
1160 *p_r1
= gen_rtx_REG (Pmode
, 1);
1161 *p_ep
= gen_rtx_REG (Pmode
, 30);
1166 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1167 2 * (uses
- 3), uses
, reg_names
[regno
],
1168 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)),
1169 INSN_UID (first_insn
), INSN_UID (last_insn
));
1171 if (NOTE_P (first_insn
))
1172 first_insn
= next_nonnote_insn (first_insn
);
1174 last_insn
= next_nonnote_insn (last_insn
);
1175 for (insn
= first_insn
; insn
&& insn
!= last_insn
; insn
= NEXT_INSN (insn
))
1177 if (NONJUMP_INSN_P (insn
))
1179 rtx pattern
= single_set (insn
);
1181 /* Replace the memory references. */
1185 /* Memory operands are signed by default. */
1186 int unsignedp
= FALSE
;
1188 if (GET_CODE (SET_DEST (pattern
)) == MEM
1189 && GET_CODE (SET_SRC (pattern
)) == MEM
)
1192 else if (GET_CODE (SET_DEST (pattern
)) == MEM
)
1193 p_mem
= &SET_DEST (pattern
);
1195 else if (GET_CODE (SET_SRC (pattern
)) == MEM
)
1196 p_mem
= &SET_SRC (pattern
);
1198 else if (GET_CODE (SET_SRC (pattern
)) == SIGN_EXTEND
1199 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1200 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1202 else if (GET_CODE (SET_SRC (pattern
)) == ZERO_EXTEND
1203 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1205 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1213 rtx addr
= XEXP (*p_mem
, 0);
1215 if (GET_CODE (addr
) == REG
&& REGNO (addr
) == (unsigned) regno
)
1216 *p_mem
= change_address (*p_mem
, VOIDmode
, *p_ep
);
1218 else if (GET_CODE (addr
) == PLUS
1219 && GET_CODE (XEXP (addr
, 0)) == REG
1220 && REGNO (XEXP (addr
, 0)) == (unsigned) regno
1221 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1222 && ((INTVAL (XEXP (addr
, 1)))
1223 < ep_memory_offset (GET_MODE (*p_mem
),
1225 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1226 *p_mem
= change_address (*p_mem
, VOIDmode
,
1227 gen_rtx_PLUS (Pmode
,
1235 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1236 insn
= prev_nonnote_insn (first_insn
);
1237 if (insn
&& NONJUMP_INSN_P (insn
)
1238 && GET_CODE (PATTERN (insn
)) == SET
1239 && SET_DEST (PATTERN (insn
)) == *p_ep
1240 && SET_SRC (PATTERN (insn
)) == *p_r1
)
1243 emit_insn_before (gen_rtx_SET (*p_r1
, *p_ep
), first_insn
);
1245 emit_insn_before (gen_rtx_SET (*p_ep
, reg
), first_insn
);
1246 emit_insn_before (gen_rtx_SET (*p_ep
, *p_r1
), last_insn
);
1250 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1251 the -mep mode to copy heavily used pointers to ep to use the implicit
1260 rtx_insn
*first_insn
;
1261 rtx_insn
*last_insn
;
1263 regs
[FIRST_PSEUDO_REGISTER
];
1272 /* If not ep mode, just return now. */
1276 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1279 regs
[i
].first_insn
= NULL
;
1280 regs
[i
].last_insn
= NULL
;
1283 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1285 switch (GET_CODE (insn
))
1287 /* End of basic block */
1294 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1296 if (max_uses
< regs
[i
].uses
)
1298 max_uses
= regs
[i
].uses
;
1304 substitute_ep_register (regs
[max_regno
].first_insn
,
1305 regs
[max_regno
].last_insn
,
1306 max_uses
, max_regno
, &r1
, &ep
);
1310 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1313 regs
[i
].first_insn
= NULL
;
1314 regs
[i
].last_insn
= NULL
;
1322 pattern
= single_set (insn
);
1324 /* See if there are any memory references we can shorten. */
1327 rtx src
= SET_SRC (pattern
);
1328 rtx dest
= SET_DEST (pattern
);
1330 /* Memory operands are signed by default. */
1331 int unsignedp
= FALSE
;
1333 /* We might have (SUBREG (MEM)) here, so just get rid of the
1334 subregs to make this code simpler. */
1335 if (GET_CODE (dest
) == SUBREG
1336 && (GET_CODE (SUBREG_REG (dest
)) == MEM
1337 || GET_CODE (SUBREG_REG (dest
)) == REG
))
1338 alter_subreg (&dest
, false);
1339 if (GET_CODE (src
) == SUBREG
1340 && (GET_CODE (SUBREG_REG (src
)) == MEM
1341 || GET_CODE (SUBREG_REG (src
)) == REG
))
1342 alter_subreg (&src
, false);
1344 if (GET_CODE (dest
) == MEM
&& GET_CODE (src
) == MEM
)
1347 else if (GET_CODE (dest
) == MEM
)
1350 else if (GET_CODE (src
) == MEM
)
1353 else if (GET_CODE (src
) == SIGN_EXTEND
1354 && GET_CODE (XEXP (src
, 0)) == MEM
)
1355 mem
= XEXP (src
, 0);
1357 else if (GET_CODE (src
) == ZERO_EXTEND
1358 && GET_CODE (XEXP (src
, 0)) == MEM
)
1360 mem
= XEXP (src
, 0);
1366 if (mem
&& ep_memory_operand (mem
, GET_MODE (mem
), unsignedp
))
1369 else if (!use_ep
&& mem
1370 && GET_MODE_SIZE (GET_MODE (mem
)) <= UNITS_PER_WORD
)
1372 rtx addr
= XEXP (mem
, 0);
1376 if (GET_CODE (addr
) == REG
)
1379 regno
= REGNO (addr
);
1382 else if (GET_CODE (addr
) == PLUS
1383 && GET_CODE (XEXP (addr
, 0)) == REG
1384 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1385 && ((INTVAL (XEXP (addr
, 1)))
1386 < ep_memory_offset (GET_MODE (mem
), unsignedp
))
1387 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1390 regno
= REGNO (XEXP (addr
, 0));
1399 regs
[regno
].last_insn
= insn
;
1400 if (!regs
[regno
].first_insn
)
1401 regs
[regno
].first_insn
= insn
;
1405 /* Loading up a register in the basic block zaps any savings
1407 if (GET_CODE (dest
) == REG
)
1409 machine_mode mode
= GET_MODE (dest
);
1413 regno
= REGNO (dest
);
1414 endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
1418 /* See if we can use the pointer before this
1423 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1425 if (max_uses
< regs
[i
].uses
)
1427 max_uses
= regs
[i
].uses
;
1433 && max_regno
>= regno
1434 && max_regno
< endregno
)
1436 substitute_ep_register (regs
[max_regno
].first_insn
,
1437 regs
[max_regno
].last_insn
,
1438 max_uses
, max_regno
, &r1
,
1441 /* Since we made a substitution, zap all remembered
1443 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1446 regs
[i
].first_insn
= NULL
;
1447 regs
[i
].last_insn
= NULL
;
1452 for (i
= regno
; i
< endregno
; i
++)
1455 regs
[i
].first_insn
= NULL
;
1456 regs
[i
].last_insn
= NULL
;
1464 /* # of registers saved by the interrupt handler. */
1465 #define INTERRUPT_FIXED_NUM 5
1467 /* # of bytes for registers saved by the interrupt handler. */
1468 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1470 /* # of words saved for other registers. */
1471 #define INTERRUPT_ALL_SAVE_NUM \
1472 (30 - INTERRUPT_FIXED_NUM)
1474 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
/* NOTE(review): lossy extract -- the `static long` line, braces, the
   `size`/`i`/`reg_saved` declarations, the switch wrapping the case
   labels, the size accumulation statements, and the final return are
   missing.  Visible logic: build a bitmask of registers r0-r31 that the
   prologue must save (interrupt handlers save everything live or, when
   the handler makes calls, everything except the specially-handled fixed
   registers; normal functions save live call-saved registers, widened to
   a contiguous run when an out-of-line prologue helper may be used).
   The mask is returned through *p_reg_saved.  */
1477 compute_register_save_size (long * p_reg_saved
)
1481 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1482 int call_p
= df_regs_ever_live_p (LINK_POINTER_REGNUM
);
1485 /* Count space for the register saves. */
1486 if (interrupt_handler
)
1488 for (i
= 0; i
<= 31; i
++)
1492 if (df_regs_ever_live_p (i
) || call_p
)
1495 reg_saved
|= 1L << i
;
1499 /* We don't save/restore r0 or the stack pointer */
1501 case STACK_POINTER_REGNUM
:
1504 /* For registers with fixed use, we save them, set them to the
1505 appropriate value, and then restore them.
1506 These registers are handled specially, so don't list them
1507 on the list of registers to save in the prologue. */
1508 case 1: /* temp used to hold ep */
1510 case 10: /* temp used to call interrupt save/restore */
1511 case 11: /* temp used to call interrupt save/restore (long call) */
1512 case EP_REGNUM
: /* ep */
1519 /* Find the first register that needs to be saved. */
1520 for (i
= 0; i
<= 31; i
++)
1521 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1522 || i
== LINK_POINTER_REGNUM
))
1525 /* If it is possible that an out-of-line helper function might be
1526 used to generate the prologue for the current function, then we
1527 need to cover the possibility that such a helper function will
1528 be used, despite the fact that there might be gaps in the list of
1529 registers that need to be saved. To detect this we note that the
1530 helper functions always push at least register r29 (provided
1531 that the function is not an interrupt handler). */
1533 if (TARGET_PROLOG_FUNCTION
1534 && (i
== 2 || ((i
>= 20) && (i
< 30))))
1539 reg_saved
|= 1L << i
;
1544 /* Helper functions save all registers between the starting
1545 register and the last register, regardless of whether they
1546 are actually used by the function or not. */
1547 for (; i
<= 29; i
++)
1550 reg_saved
|= 1L << i
;
1553 if (df_regs_ever_live_p (LINK_POINTER_REGNUM
))
1556 reg_saved
|= 1L << LINK_POINTER_REGNUM
;
1561 for (; i
<= 31; i
++)
1562 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1563 || i
== LINK_POINTER_REGNUM
))
1566 reg_saved
|= 1L << i
;
1572 *p_reg_saved
= reg_saved
;
/* NOTE(review): lossy extract -- several lines of the layout diagram, the
   `static int` line, braces, and the start of the return expression
   (presumably rounding SIZE before adding the two visible terms -- TODO
   confirm) are missing.  Total frame = local size + saved registers +
   outgoing argument area.  */
1577 /* Typical stack layout should looks like this after the function's prologue:
1582 | | arguments saved | Increasing
1583 | | on the stack | addresses
1584 PARENT arg pointer -> | | /
1585 -------------------------- ---- -------------------
1586 | | - space for argument split between regs & stack
1588 CHILD | | \ <-- (return address here)
1593 frame pointer -> | | \ ___
1600 | | arguments | | Decreasing
1601 (hard) frame pointer | | / | | addresses
1602 and stack pointer -> | | / _|_ |
1603 -------------------------- ---- ------------------ V */
1606 compute_frame_size (int size
, long * p_reg_saved
)
1609 + compute_register_save_size (p_reg_saved
)
1610 + crtl
->outgoing_args_size
);
/* NOTE(review): lossy extract -- the `static bool` line, braces, and a
   few guard lines are missing.  Heuristic: estimate the byte length of
   calling the out-of-line prologue/epilogue helpers versus emitting
   inline saves/restores, and prefer the helpers only when strictly
   shorter.  */
1614 use_prolog_function (int num_save
, int frame_size
)
1616 int alloc_stack
= (4 * num_save
);
1617 int unalloc_stack
= frame_size
- alloc_stack
;
1618 int save_func_len
, restore_func_len
;
1619 int save_normal_len
, restore_normal_len
;
/* Helper-call cost: 2 bytes with CALLT, else 4 (or a long-call
   sequence), plus extra stack adjustment when the helper does not
   allocate everything.  */
1621 if (! TARGET_DISABLE_CALLT
)
1622 save_func_len
= restore_func_len
= 2;
1624 save_func_len
= restore_func_len
= TARGET_LONG_CALLS
? (4+4+4+2+2) : 4;
1628 save_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1629 restore_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1632 /* See if we would have used ep to save the stack. */
1633 if (TARGET_EP
&& num_save
> 3 && (unsigned)frame_size
< 255)
1634 save_normal_len
= restore_normal_len
= (3 * 2) + (2 * num_save
);
1636 save_normal_len
= restore_normal_len
= 4 * num_save
;
/* Inline sequences also pay for the stack-pointer adjustment.  */
1638 save_normal_len
+= CONST_OK_FOR_J (-frame_size
) ? 2 : 4;
1639 restore_normal_len
+= (CONST_OK_FOR_J (frame_size
) ? 2 : 4) + 2;
1641 /* Don't bother checking if we don't actually save any space.
1642 This happens for instance if one register is saved and additional
1643 stack space is allocated. */
1644 return ((save_func_len
+ restore_func_len
) < (save_normal_len
+ restore_normal_len
));
/* NOTE(review): lossy extract -- the `static void` line, braces, the
   `inc` declaration, the zero-amount early return, and the use of
   `in_prologue` (presumably to mark the insn frame-related -- TODO
   confirm) are missing.  Adjusts the stack pointer by AMOUNT, staging
   out-of-range constants through scratch register r12 first.  */
1648 increment_stack (signed int amount
, bool in_prologue
)
1655 inc
= GEN_INT (amount
);
/* Amounts outside the 16-bit `addi` range must be materialized in a
   register (r12) before the add.  */
1657 if (! CONST_OK_FOR_K (amount
))
1659 rtx reg
= gen_rtx_REG (Pmode
, 12);
1661 inc
= emit_move_insn (reg
, inc
);
1667 inc
= emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
, inc
));
1673 expand_prologue (void)
1676 unsigned int size
= get_frame_size ();
1677 unsigned int actual_fsize
;
1678 unsigned int init_stack_alloc
= 0;
1681 unsigned int num_save
;
1683 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1686 actual_fsize
= compute_frame_size (size
, ®_saved
);
1688 if (flag_stack_usage_info
)
1689 current_function_static_stack_size
= actual_fsize
;
1691 /* Save/setup global registers for interrupt functions right now. */
1692 if (interrupt_handler
)
1694 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1695 emit_insn (gen_callt_save_interrupt ());
1697 emit_insn (gen_save_interrupt ());
1699 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1701 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1702 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1704 /* Interrupt functions are not passed arguments, so no need to
1705 allocate space for split structure arguments. */
1706 gcc_assert (crtl
->args
.pretend_args_size
== 0);
1709 /* Identify all of the saved registers. */
1711 for (i
= 1; i
< 32; i
++)
1713 if (((1L << i
) & reg_saved
) != 0)
1714 save_regs
[num_save
++] = gen_rtx_REG (Pmode
, i
);
1717 if (crtl
->args
.pretend_args_size
)
1721 increment_stack (- (actual_fsize
+ crtl
->args
.pretend_args_size
), true);
1725 increment_stack (- crtl
->args
.pretend_args_size
, true);
1728 /* See if we have an insn that allocates stack space and saves the particular
1729 registers we want to. Note that the helpers won't
1730 allocate additional space for registers GCC saves to complete a
1731 "split" structure argument. */
1732 save_all
= NULL_RTX
;
1733 if (TARGET_PROLOG_FUNCTION
1734 && !crtl
->args
.pretend_args_size
1737 if (use_prolog_function (num_save
, actual_fsize
))
1739 int alloc_stack
= 4 * num_save
;
1742 save_all
= gen_rtx_PARALLEL
1744 rtvec_alloc (num_save
+ 1
1745 + (TARGET_DISABLE_CALLT
? (TARGET_LONG_CALLS
? 2 : 1) : 0)));
1747 XVECEXP (save_all
, 0, 0)
1748 = gen_rtx_SET (stack_pointer_rtx
,
1749 gen_rtx_PLUS (Pmode
,
1751 GEN_INT(-alloc_stack
)));
1752 for (i
= 0; i
< num_save
; i
++)
1755 XVECEXP (save_all
, 0, i
+1)
1756 = gen_rtx_SET (gen_rtx_MEM (Pmode
,
1757 gen_rtx_PLUS (Pmode
,
1763 if (TARGET_DISABLE_CALLT
)
1765 XVECEXP (save_all
, 0, num_save
+ 1)
1766 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 10));
1768 if (TARGET_LONG_CALLS
)
1769 XVECEXP (save_all
, 0, num_save
+ 2)
1770 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
1773 v850_all_frame_related (save_all
);
1775 code
= recog (save_all
, NULL_RTX
, NULL
);
1778 rtx insn
= emit_insn (save_all
);
1779 INSN_CODE (insn
) = code
;
1780 actual_fsize
-= alloc_stack
;
1784 save_all
= NULL_RTX
;
1788 /* If no prolog save function is available, store the registers the old
1789 fashioned way (one by one). */
1792 /* Special case interrupt functions that save all registers for a call. */
1793 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1795 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1796 emit_insn (gen_callt_save_all_interrupt ());
1798 emit_insn (gen_save_all_interrupt ());
1803 /* If the stack is too big, allocate it in chunks so we can do the
1804 register saves. We use the register save size so we use the ep
1806 if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1807 init_stack_alloc
= compute_register_save_size (NULL
);
1809 init_stack_alloc
= actual_fsize
;
1811 /* Save registers at the beginning of the stack frame. */
1812 offset
= init_stack_alloc
- 4;
1814 if (init_stack_alloc
)
1815 increment_stack (- (signed) init_stack_alloc
, true);
1817 /* Save the return pointer first. */
1818 if (num_save
> 0 && REGNO (save_regs
[num_save
-1]) == LINK_POINTER_REGNUM
)
1820 F (emit_move_insn (gen_rtx_MEM (SImode
,
1821 plus_constant (Pmode
,
1824 save_regs
[--num_save
]));
1828 for (i
= 0; i
< num_save
; i
++)
1830 F (emit_move_insn (gen_rtx_MEM (SImode
,
1831 plus_constant (Pmode
,
1840 /* Allocate the rest of the stack that was not allocated above (either it is
1841 > 32K or we just called a function to save the registers and needed more
1843 if (actual_fsize
> init_stack_alloc
)
1844 increment_stack (init_stack_alloc
- actual_fsize
, true);
1846 /* If we need a frame pointer, set it up now. */
1847 if (frame_pointer_needed
)
1848 F (emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
));
1853 expand_epilogue (void)
1856 unsigned int size
= get_frame_size ();
1858 int actual_fsize
= compute_frame_size (size
, ®_saved
);
1859 rtx restore_regs
[32];
1861 unsigned int num_restore
;
1863 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1865 /* Eliminate the initial stack stored by interrupt functions. */
1866 if (interrupt_handler
)
1868 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1869 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1870 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1873 /* Cut off any dynamic stack created. */
1874 if (frame_pointer_needed
)
1875 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1877 /* Identify all of the saved registers. */
1879 for (i
= 1; i
< 32; i
++)
1881 if (((1L << i
) & reg_saved
) != 0)
1882 restore_regs
[num_restore
++] = gen_rtx_REG (Pmode
, i
);
1885 /* See if we have an insn that restores the particular registers we
1887 restore_all
= NULL_RTX
;
1889 if (TARGET_PROLOG_FUNCTION
1891 && !crtl
->args
.pretend_args_size
1892 && !interrupt_handler
)
1894 int alloc_stack
= (4 * num_restore
);
1896 /* Don't bother checking if we don't actually save any space. */
1897 if (use_prolog_function (num_restore
, actual_fsize
))
1900 restore_all
= gen_rtx_PARALLEL (VOIDmode
,
1901 rtvec_alloc (num_restore
+ 2));
1902 XVECEXP (restore_all
, 0, 0) = ret_rtx
;
1903 XVECEXP (restore_all
, 0, 1)
1904 = gen_rtx_SET (stack_pointer_rtx
,
1905 gen_rtx_PLUS (Pmode
,
1907 GEN_INT (alloc_stack
)));
1909 offset
= alloc_stack
- 4;
1910 for (i
= 0; i
< num_restore
; i
++)
1912 XVECEXP (restore_all
, 0, i
+2)
1913 = gen_rtx_SET (restore_regs
[i
],
1915 gen_rtx_PLUS (Pmode
,
1921 code
= recog (restore_all
, NULL_RTX
, NULL
);
1927 actual_fsize
-= alloc_stack
;
1928 increment_stack (actual_fsize
, false);
1930 insn
= emit_jump_insn (restore_all
);
1931 INSN_CODE (insn
) = code
;
1934 restore_all
= NULL_RTX
;
1938 /* If no epilogue save function is available, restore the registers the
1939 old fashioned way (one by one). */
1942 unsigned int init_stack_free
;
1944 /* If the stack is large, we need to cut it down in 2 pieces. */
1945 if (interrupt_handler
)
1946 init_stack_free
= 0;
1947 else if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1948 init_stack_free
= 4 * num_restore
;
1950 init_stack_free
= (signed) actual_fsize
;
1952 /* Deallocate the rest of the stack if it is > 32K. */
1953 if ((unsigned int) actual_fsize
> init_stack_free
)
1954 increment_stack (actual_fsize
- init_stack_free
, false);
1956 /* Special case interrupt functions that save all registers
1958 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1960 if (! TARGET_DISABLE_CALLT
)
1961 emit_insn (gen_callt_restore_all_interrupt ());
1963 emit_insn (gen_restore_all_interrupt ());
1967 /* Restore registers from the beginning of the stack frame. */
1968 int offset
= init_stack_free
- 4;
1970 /* Restore the return pointer first. */
1972 && REGNO (restore_regs
[num_restore
- 1]) == LINK_POINTER_REGNUM
)
1974 emit_move_insn (restore_regs
[--num_restore
],
1975 gen_rtx_MEM (SImode
,
1976 plus_constant (Pmode
,
1982 for (i
= 0; i
< num_restore
; i
++)
1984 emit_move_insn (restore_regs
[i
],
1985 gen_rtx_MEM (SImode
,
1986 plus_constant (Pmode
,
1990 emit_use (restore_regs
[i
]);
1994 /* Cut back the remainder of the stack. */
1995 increment_stack (init_stack_free
+ crtl
->args
.pretend_args_size
,
1999 /* And return or use reti for interrupt handlers. */
2000 if (interrupt_handler
)
2002 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
2003 emit_insn (gen_callt_return_interrupt ());
2005 emit_jump_insn (gen_return_interrupt ());
2007 else if (actual_fsize
)
2008 emit_jump_insn (gen_return_internal ());
2010 emit_jump_insn (gen_return_simple ());
2013 v850_interrupt_cache_p
= FALSE
;
2014 v850_interrupt_p
= FALSE
;
2017 /* Update the condition code from the insn. */
2019 notice_update_cc (rtx body
, rtx_insn
*insn
)
2021 switch (get_attr_cc (insn
))
2024 /* Insn does not affect CC at all. */
2028 /* Insn does not change CC, but the 0'th operand has been changed. */
2029 if (cc_status
.value1
!= 0
2030 && reg_overlap_mentioned_p (recog_data
.operand
[0], cc_status
.value1
))
2031 cc_status
.value1
= 0;
2035 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2036 V,C is in an unusable state. */
2038 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
| CC_NO_CARRY
;
2039 cc_status
.value1
= recog_data
.operand
[0];
2043 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2044 C is in an unusable state. */
2046 cc_status
.flags
|= CC_NO_CARRY
;
2047 cc_status
.value1
= recog_data
.operand
[0];
2051 /* The insn is a compare instruction. */
2053 cc_status
.value1
= SET_SRC (body
);
2057 /* Insn doesn't leave CC in a usable state. */
2066 /* Retrieve the data area that has been chosen for the given decl. */
2069 v850_get_data_area (tree decl
)
2071 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2072 return DATA_AREA_SDA
;
2074 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2075 return DATA_AREA_TDA
;
2077 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2078 return DATA_AREA_ZDA
;
2080 return DATA_AREA_NORMAL
;
2083 /* Store the indicated data area in the decl's attributes. */
2086 v850_set_data_area (tree decl
, v850_data_area data_area
)
2092 case DATA_AREA_SDA
: name
= get_identifier ("sda"); break;
2093 case DATA_AREA_TDA
: name
= get_identifier ("tda"); break;
2094 case DATA_AREA_ZDA
: name
= get_identifier ("zda"); break;
2099 DECL_ATTRIBUTES (decl
) = tree_cons
2100 (name
, NULL
, DECL_ATTRIBUTES (decl
));
2103 /* Handle an "interrupt" attribute; arguments as in
2104 struct attribute_spec.handler. */
2106 v850_handle_interrupt_attribute (tree
* node
,
2108 tree args ATTRIBUTE_UNUSED
,
2109 int flags ATTRIBUTE_UNUSED
,
2110 bool * no_add_attrs
)
2112 if (TREE_CODE (*node
) != FUNCTION_DECL
)
2114 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2116 *no_add_attrs
= true;
2122 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2123 struct attribute_spec.handler. */
2125 v850_handle_data_area_attribute (tree
* node
,
2127 tree args ATTRIBUTE_UNUSED
,
2128 int flags ATTRIBUTE_UNUSED
,
2129 bool * no_add_attrs
)
2131 v850_data_area data_area
;
2132 v850_data_area area
;
2135 /* Implement data area attribute. */
2136 if (is_attribute_p ("sda", name
))
2137 data_area
= DATA_AREA_SDA
;
2138 else if (is_attribute_p ("tda", name
))
2139 data_area
= DATA_AREA_TDA
;
2140 else if (is_attribute_p ("zda", name
))
2141 data_area
= DATA_AREA_ZDA
;
2145 switch (TREE_CODE (decl
))
2148 if (current_function_decl
!= NULL_TREE
)
2150 error_at (DECL_SOURCE_LOCATION (decl
),
2151 "data area attributes cannot be specified for "
2153 *no_add_attrs
= true;
2159 area
= v850_get_data_area (decl
);
2160 if (area
!= DATA_AREA_NORMAL
&& data_area
!= area
)
2162 error ("data area of %q+D conflicts with previous declaration",
2164 *no_add_attrs
= true;
2176 /* Return nonzero if FUNC is an interrupt function as specified
2177 by the "interrupt" attribute. */
2180 v850_interrupt_function_p (tree func
)
2185 if (v850_interrupt_cache_p
)
2186 return v850_interrupt_p
;
2188 if (TREE_CODE (func
) != FUNCTION_DECL
)
2191 a
= lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func
));
2197 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
2198 ret
= a
!= NULL_TREE
;
2201 /* Its not safe to trust global variables until after function inlining has
2203 if (reload_completed
| reload_in_progress
)
2204 v850_interrupt_p
= ret
;
2211 v850_encode_data_area (tree decl
, rtx symbol
)
2215 /* Map explicit sections into the appropriate attribute */
2216 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2218 if (DECL_SECTION_NAME (decl
))
2220 const char *name
= DECL_SECTION_NAME (decl
);
2222 if (streq (name
, ".zdata") || streq (name
, ".zbss"))
2223 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2225 else if (streq (name
, ".sdata") || streq (name
, ".sbss"))
2226 v850_set_data_area (decl
, DATA_AREA_SDA
);
2228 else if (streq (name
, ".tdata"))
2229 v850_set_data_area (decl
, DATA_AREA_TDA
);
2232 /* If no attribute, support -m{zda,sda,tda}=n */
2235 int size
= int_size_in_bytes (TREE_TYPE (decl
));
2239 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_TDA
])
2240 v850_set_data_area (decl
, DATA_AREA_TDA
);
2242 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_SDA
])
2243 v850_set_data_area (decl
, DATA_AREA_SDA
);
2245 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_ZDA
])
2246 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2249 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2253 flags
= SYMBOL_REF_FLAGS (symbol
);
2254 switch (v850_get_data_area (decl
))
2256 case DATA_AREA_ZDA
: flags
|= SYMBOL_FLAG_ZDA
; break;
2257 case DATA_AREA_TDA
: flags
|= SYMBOL_FLAG_TDA
; break;
2258 case DATA_AREA_SDA
: flags
|= SYMBOL_FLAG_SDA
; break;
2259 default: gcc_unreachable ();
2261 SYMBOL_REF_FLAGS (symbol
) = flags
;
2265 v850_encode_section_info (tree decl
, rtx rtl
, int first
)
2267 default_encode_section_info (decl
, rtl
, first
);
2269 if (TREE_CODE (decl
) == VAR_DECL
2270 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2271 v850_encode_data_area (decl
, XEXP (rtl
, 0));
2274 /* Construct a JR instruction to a routine that will perform the equivalent of
2275 the RTL passed in as an argument. This RTL is a function epilogue that
2276 pops registers off the stack and possibly releases some extra stack space
2277 as well. The code has already verified that the RTL matches these
2281 construct_restore_jr (rtx op
)
2283 int count
= XVECLEN (op
, 0);
2285 unsigned long int mask
;
2286 unsigned long int first
;
2287 unsigned long int last
;
2289 static char buff
[100]; /* XXX */
2293 error ("bogus JR construction: %d", count
);
2297 /* Work out how many bytes to pop off the stack before retrieving
2299 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2300 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2301 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2303 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2305 /* Each pop will remove 4 bytes from the stack.... */
2306 stack_bytes
-= (count
- 2) * 4;
2308 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2309 if (stack_bytes
!= 0)
2311 error ("bad amount of stack space removal: %d", stack_bytes
);
2315 /* Now compute the bit mask of registers to push. */
2317 for (i
= 2; i
< count
; i
++)
2319 rtx vector_element
= XVECEXP (op
, 0, i
);
2321 gcc_assert (GET_CODE (vector_element
) == SET
);
2322 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2323 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2326 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2329 /* Scan for the first register to pop. */
2330 for (first
= 0; first
< 32; first
++)
2332 if (mask
& (1 << first
))
2336 gcc_assert (first
< 32);
2338 /* Discover the last register to pop. */
2339 if (mask
& (1 << LINK_POINTER_REGNUM
))
2341 last
= LINK_POINTER_REGNUM
;
2345 gcc_assert (!stack_bytes
);
2346 gcc_assert (mask
& (1 << 29));
2351 /* Note, it is possible to have gaps in the register mask.
2352 We ignore this here, and generate a JR anyway. We will
2353 be popping more registers than is strictly necessary, but
2354 it does save code space. */
2356 if (TARGET_LONG_CALLS
)
2361 sprintf (name
, "__return_%s", reg_names
[first
]);
2363 sprintf (name
, "__return_%s_%s", reg_names
[first
], reg_names
[last
]);
2365 sprintf (buff
, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2371 sprintf (buff
, "jr __return_%s", reg_names
[first
]);
2373 sprintf (buff
, "jr __return_%s_%s", reg_names
[first
], reg_names
[last
]);
2380 /* Construct a JARL instruction to a routine that will perform the equivalent
2381 of the RTL passed as a parameter. This RTL is a function prologue that
2382 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2383 some stack space as well. The code has already verified that the RTL
2384 matches these requirements. */
2386 construct_save_jarl (rtx op
)
2388 int count
= XVECLEN (op
, 0);
2390 unsigned long int mask
;
2391 unsigned long int first
;
2392 unsigned long int last
;
2394 static char buff
[100]; /* XXX */
2396 if (count
<= (TARGET_LONG_CALLS
? 3 : 2))
2398 error ("bogus JARL construction: %d", count
);
2403 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2404 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2405 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0)) == REG
);
2406 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2408 /* Work out how many bytes to push onto the stack after storing the
2410 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2412 /* Each push will put 4 bytes from the stack.... */
2413 stack_bytes
+= (count
- (TARGET_LONG_CALLS
? 3 : 2)) * 4;
2415 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2416 if (stack_bytes
!= 0)
2418 error ("bad amount of stack space removal: %d", stack_bytes
);
2422 /* Now compute the bit mask of registers to push. */
2424 for (i
= 1; i
< count
- (TARGET_LONG_CALLS
? 2 : 1); i
++)
2426 rtx vector_element
= XVECEXP (op
, 0, i
);
2428 gcc_assert (GET_CODE (vector_element
) == SET
);
2429 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2430 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2433 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2436 /* Scan for the first register to push. */
2437 for (first
= 0; first
< 32; first
++)
2439 if (mask
& (1 << first
))
2443 gcc_assert (first
< 32);
2445 /* Discover the last register to push. */
2446 if (mask
& (1 << LINK_POINTER_REGNUM
))
2448 last
= LINK_POINTER_REGNUM
;
2452 gcc_assert (!stack_bytes
);
2453 gcc_assert (mask
& (1 << 29));
2458 /* Note, it is possible to have gaps in the register mask.
2459 We ignore this here, and generate a JARL anyway. We will
2460 be pushing more registers than is strictly necessary, but
2461 it does save code space. */
2463 if (TARGET_LONG_CALLS
)
2468 sprintf (name
, "__save_%s", reg_names
[first
]);
2470 sprintf (name
, "__save_%s_%s", reg_names
[first
], reg_names
[last
]);
2472 if (TARGET_V850E3V5_UP
)
2473 sprintf (buff
, "mov hilo(%s), r11\n\tjarl [r11], r10", name
);
2475 sprintf (buff
, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2481 sprintf (buff
, "jarl __save_%s, r10", reg_names
[first
]);
2483 sprintf (buff
, "jarl __save_%s_%s, r10", reg_names
[first
],
2490 /* A version of asm_output_aligned_bss() that copes with the special
2491 data areas of the v850. */
2493 v850_output_aligned_bss (FILE * file
,
2496 unsigned HOST_WIDE_INT size
,
2499 switch (v850_get_data_area (decl
))
2502 switch_to_section (zbss_section
);
2506 switch_to_section (sbss_section
);
2510 switch_to_section (tdata_section
);
2513 switch_to_section (bss_section
);
2517 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
2518 #ifdef ASM_DECLARE_OBJECT_NAME
2519 last_assemble_variable_decl
= decl
;
2520 ASM_DECLARE_OBJECT_NAME (file
, name
, decl
);
2522 /* Standard thing is just output label for the object. */
2523 ASM_OUTPUT_LABEL (file
, name
);
2524 #endif /* ASM_DECLARE_OBJECT_NAME */
2525 ASM_OUTPUT_SKIP (file
, size
? size
: 1);
2528 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2530 v850_output_common (FILE * file
,
2536 if (decl
== NULL_TREE
)
2538 fprintf (file
, "%s", COMMON_ASM_OP
);
2542 switch (v850_get_data_area (decl
))
2545 fprintf (file
, "%s", ZCOMMON_ASM_OP
);
2549 fprintf (file
, "%s", SCOMMON_ASM_OP
);
2553 fprintf (file
, "%s", TCOMMON_ASM_OP
);
2557 fprintf (file
, "%s", COMMON_ASM_OP
);
2562 assemble_name (file
, name
);
2563 fprintf (file
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
2566 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2568 v850_output_local (FILE * file
,
2574 fprintf (file
, "%s", LOCAL_ASM_OP
);
2575 assemble_name (file
, name
);
2576 fprintf (file
, "\n");
2578 ASM_OUTPUT_ALIGNED_DECL_COMMON (file
, decl
, name
, size
, align
);
2581 /* Add data area to the given declaration if a ghs data area pragma is
2582 currently in effect (#pragma ghs startXXX/endXXX). */
2584 v850_insert_attributes (tree decl
, tree
* attr_ptr ATTRIBUTE_UNUSED
)
2587 && data_area_stack
->data_area
2588 && current_function_decl
== NULL_TREE
2589 && (TREE_CODE (decl
) == VAR_DECL
|| TREE_CODE (decl
) == CONST_DECL
)
2590 && v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2591 v850_set_data_area (decl
, data_area_stack
->data_area
);
2593 /* Initialize the default names of the v850 specific sections,
2594 if this has not been done before. */
2596 if (GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
] == NULL
)
2598 GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
]
2601 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROSDATA
]
2604 GHS_default_section_names
[(int) GHS_SECTION_KIND_TDATA
]
2607 GHS_default_section_names
[(int) GHS_SECTION_KIND_ZDATA
]
2610 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROZDATA
]
2614 if (current_function_decl
== NULL_TREE
2615 && (TREE_CODE (decl
) == VAR_DECL
2616 || TREE_CODE (decl
) == CONST_DECL
2617 || TREE_CODE (decl
) == FUNCTION_DECL
)
2618 && (!DECL_EXTERNAL (decl
) || DECL_INITIAL (decl
))
2619 && !DECL_SECTION_NAME (decl
))
2621 enum GHS_section_kind kind
= GHS_SECTION_KIND_DEFAULT
;
2622 const char * chosen_section
;
2624 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2625 kind
= GHS_SECTION_KIND_TEXT
;
2628 /* First choose a section kind based on the data area of the decl. */
2629 switch (v850_get_data_area (decl
))
2635 kind
= ((TREE_READONLY (decl
))
2636 ? GHS_SECTION_KIND_ROSDATA
2637 : GHS_SECTION_KIND_SDATA
);
2641 kind
= GHS_SECTION_KIND_TDATA
;
2645 kind
= ((TREE_READONLY (decl
))
2646 ? GHS_SECTION_KIND_ROZDATA
2647 : GHS_SECTION_KIND_ZDATA
);
2650 case DATA_AREA_NORMAL
: /* default data area */
2651 if (TREE_READONLY (decl
))
2652 kind
= GHS_SECTION_KIND_RODATA
;
2653 else if (DECL_INITIAL (decl
))
2654 kind
= GHS_SECTION_KIND_DATA
;
2656 kind
= GHS_SECTION_KIND_BSS
;
2660 /* Now, if the section kind has been explicitly renamed,
2661 then attach a section attribute. */
2662 chosen_section
= GHS_current_section_names
[(int) kind
];
2664 /* Otherwise, if this kind of section needs an explicit section
2665 attribute, then also attach one. */
2666 if (chosen_section
== NULL
)
2667 chosen_section
= GHS_default_section_names
[(int) kind
];
2671 /* Only set the section name if specified by a pragma, because
2672 otherwise it will force those variables to get allocated storage
2673 in this module, rather than by the linker. */
2674 set_decl_section_name (decl
, chosen_section
);
2679 /* Construct a DISPOSE instruction that is the equivalent of
2680 the given RTX. We have already verified that this should
2684 construct_dispose_instruction (rtx op
)
2686 int count
= XVECLEN (op
, 0);
2688 unsigned long int mask
;
2690 static char buff
[ 100 ]; /* XXX */
2695 error ("bogus DISPOSE construction: %d", count
);
2699 /* Work out how many bytes to pop off the
2700 stack before retrieving registers. */
2701 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2702 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2703 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2705 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2707 /* Each pop will remove 4 bytes from the stack.... */
2708 stack_bytes
-= (count
- 2) * 4;
2710 /* Make sure that the amount we are popping
2711 will fit into the DISPOSE instruction. */
2712 if (stack_bytes
> 128)
2714 error ("too much stack space to dispose of: %d", stack_bytes
);
2718 /* Now compute the bit mask of registers to push. */
2721 for (i
= 2; i
< count
; i
++)
2723 rtx vector_element
= XVECEXP (op
, 0, i
);
2725 gcc_assert (GET_CODE (vector_element
) == SET
);
2726 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2727 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2730 if (REGNO (SET_DEST (vector_element
)) == 2)
2733 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2736 if (! TARGET_DISABLE_CALLT
2737 && (use_callt
|| stack_bytes
== 0))
2741 sprintf (buff
, "callt ctoff(__callt_return_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29);
2746 for (i
= 20; i
< 32; i
++)
2747 if (mask
& (1 << i
))
2751 sprintf (buff
, "callt ctoff(__callt_return_r31c)");
2753 sprintf (buff
, "callt ctoff(__callt_return_r%d_r%s)",
2754 i
, (mask
& (1 << 31)) ? "31c" : "29");
2759 static char regs
[100]; /* XXX */
2762 /* Generate the DISPOSE instruction. Note we could just issue the
2763 bit mask as a number as the assembler can cope with this, but for
2764 the sake of our readers we turn it into a textual description. */
2768 for (i
= 20; i
< 32; i
++)
2770 if (mask
& (1 << i
))
2775 strcat (regs
, ", ");
2780 strcat (regs
, reg_names
[ first
]);
2782 for (i
++; i
< 32; i
++)
2783 if ((mask
& (1 << i
)) == 0)
2788 strcat (regs
, " - ");
2789 strcat (regs
, reg_names
[ i
- 1 ] );
2794 sprintf (buff
, "dispose %d {%s}, r31", stack_bytes
/ 4, regs
);
2800 /* Construct a PREPARE instruction that is the equivalent of
2801 the given RTL. We have already verified that this should
2805 construct_prepare_instruction (rtx op
)
2809 unsigned long int mask
;
2811 static char buff
[ 100 ]; /* XXX */
2814 if (XVECLEN (op
, 0) <= 1)
2816 error ("bogus PREPEARE construction: %d", XVECLEN (op
, 0));
2820 /* Work out how many bytes to push onto
2821 the stack after storing the registers. */
2822 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2823 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2824 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2826 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2829 /* Make sure that the amount we are popping
2830 will fit into the DISPOSE instruction. */
2831 if (stack_bytes
< -128)
2833 error ("too much stack space to prepare: %d", stack_bytes
);
2837 /* Now compute the bit mask of registers to push. */
2840 for (i
= 1; i
< XVECLEN (op
, 0); i
++)
2842 rtx vector_element
= XVECEXP (op
, 0, i
);
2844 if (GET_CODE (vector_element
) == CLOBBER
)
2847 gcc_assert (GET_CODE (vector_element
) == SET
);
2848 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2849 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2852 if (REGNO (SET_SRC (vector_element
)) == 2)
2855 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2859 stack_bytes
+= count
* 4;
2861 if ((! TARGET_DISABLE_CALLT
)
2862 && (use_callt
|| stack_bytes
== 0))
2866 sprintf (buff
, "callt ctoff(__callt_save_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29 );
2870 for (i
= 20; i
< 32; i
++)
2871 if (mask
& (1 << i
))
2875 sprintf (buff
, "callt ctoff(__callt_save_r31c)");
2877 sprintf (buff
, "callt ctoff(__callt_save_r%d_r%s)",
2878 i
, (mask
& (1 << 31)) ? "31c" : "29");
2882 static char regs
[100]; /* XXX */
2886 /* Generate the PREPARE instruction. Note we could just issue the
2887 bit mask as a number as the assembler can cope with this, but for
2888 the sake of our readers we turn it into a textual description. */
2892 for (i
= 20; i
< 32; i
++)
2894 if (mask
& (1 << i
))
2899 strcat (regs
, ", ");
2904 strcat (regs
, reg_names
[ first
]);
2906 for (i
++; i
< 32; i
++)
2907 if ((mask
& (1 << i
)) == 0)
2912 strcat (regs
, " - ");
2913 strcat (regs
, reg_names
[ i
- 1 ] );
2918 sprintf (buff
, "prepare {%s}, %d", regs
, (- stack_bytes
) / 4);
2924 /* Return an RTX indicating where the return address to the
2925 calling function can be found. */
2928 v850_return_addr (int count
)
2933 return get_hard_reg_initial_val (Pmode
, LINK_POINTER_REGNUM
);
2936 /* Implement TARGET_ASM_INIT_SECTIONS. */
2939 v850_asm_init_sections (void)
2942 = get_unnamed_section (0, output_section_asm_op
,
2943 "\t.section .rosdata,\"a\"");
2946 = get_unnamed_section (0, output_section_asm_op
,
2947 "\t.section .rozdata,\"a\"");
2950 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2951 "\t.section .tdata,\"aw\"");
2954 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2955 "\t.section .zdata,\"aw\"");
2958 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
2959 output_section_asm_op
,
2960 "\t.section .zbss,\"aw\"");
2964 v850_select_section (tree exp
,
2965 int reloc ATTRIBUTE_UNUSED
,
2966 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
2968 if (TREE_CODE (exp
) == VAR_DECL
)
2971 if (!TREE_READONLY (exp
)
2972 || TREE_SIDE_EFFECTS (exp
)
2973 || !DECL_INITIAL (exp
)
2974 || (DECL_INITIAL (exp
) != error_mark_node
2975 && !TREE_CONSTANT (DECL_INITIAL (exp
))))
2980 switch (v850_get_data_area (exp
))
2983 return is_const
? rozdata_section
: zdata_section
;
2986 return tdata_section
;
2989 return is_const
? rosdata_section
: sdata_section
;
2992 return is_const
? readonly_data_section
: data_section
;
2995 return readonly_data_section
;
2998 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
3001 v850_function_value_regno_p (const unsigned int regno
)
3003 return (regno
== 10);
3006 /* Worker function for TARGET_RETURN_IN_MEMORY. */
3009 v850_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
3011 /* Return values > 8 bytes in length in memory. */
3012 return int_size_in_bytes (type
) > 8
3013 || TYPE_MODE (type
) == BLKmode
3014 /* With the rh850 ABI return all aggregates in memory. */
3015 || ((! TARGET_GCC_ABI
) && AGGREGATE_TYPE_P (type
))
3019 /* Worker function for TARGET_FUNCTION_VALUE. */
3022 v850_function_value (const_tree valtype
,
3023 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
3024 bool outgoing ATTRIBUTE_UNUSED
)
3026 return gen_rtx_REG (TYPE_MODE (valtype
), 10);
3030 /* Worker function for TARGET_CAN_ELIMINATE. */
3033 v850_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
3035 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
3038 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
3040 If TARGET_APP_REGS is not defined then add r2 and r5 to
3041 the pool of fixed registers. See PR 14505. */
3044 v850_conditional_register_usage (void)
3046 if (TARGET_APP_REGS
)
3048 fixed_regs
[2] = 0; call_used_regs
[2] = 0;
3049 fixed_regs
[5] = 0; call_used_regs
[5] = 1;
3053 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE. */
3056 v850_asm_trampoline_template (FILE *f
)
3058 fprintf (f
, "\tjarl .+4,r12\n");
3059 fprintf (f
, "\tld.w 12[r12],r20\n");
3060 fprintf (f
, "\tld.w 16[r12],r12\n");
3061 fprintf (f
, "\tjmp [r12]\n");
3062 fprintf (f
, "\tnop\n");
3063 fprintf (f
, "\t.long 0\n");
3064 fprintf (f
, "\t.long 0\n");
3067 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3070 v850_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
3072 rtx mem
, fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
3074 emit_block_move (m_tramp
, assemble_trampoline_template (),
3075 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
3077 mem
= adjust_address (m_tramp
, SImode
, 16);
3078 emit_move_insn (mem
, chain_value
);
3079 mem
= adjust_address (m_tramp
, SImode
, 20);
3080 emit_move_insn (mem
, fnaddr
);
3084 v850_issue_rate (void)
3086 return (TARGET_V850E2_UP
? 2 : 1);
3089 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3092 v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
3094 return (GET_CODE (x
) == CONST_DOUBLE
3095 || !(GET_CODE (x
) == CONST
3096 && GET_CODE (XEXP (x
, 0)) == PLUS
3097 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
3098 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3099 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x
, 0), 1)))));
3103 v850_memory_move_cost (machine_mode mode
,
3104 reg_class_t reg_class ATTRIBUTE_UNUSED
,
3107 switch (GET_MODE_SIZE (mode
))
3117 return (GET_MODE_SIZE (mode
) / 2) * (in
? 3 : 1);
3122 v850_adjust_insn_length (rtx_insn
*insn
, int length
)
3124 if (TARGET_V850E3V5_UP
)
3128 if (TARGET_LONG_CALLS
)
3130 /* call_internal_long, call_value_internal_long. */
3138 /* call_internal_short, call_value_internal_short. */
3147 /* V850 specific attributes. */
3149 static const struct attribute_spec v850_attribute_table
[] =
3151 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3152 affects_type_identity } */
3153 { "interrupt_handler", 0, 0, true, false, false,
3154 v850_handle_interrupt_attribute
, false },
3155 { "interrupt", 0, 0, true, false, false,
3156 v850_handle_interrupt_attribute
, false },
3157 { "sda", 0, 0, true, false, false,
3158 v850_handle_data_area_attribute
, false },
3159 { "tda", 0, 0, true, false, false,
3160 v850_handle_data_area_attribute
, false },
3161 { "zda", 0, 0, true, false, false,
3162 v850_handle_data_area_attribute
, false },
3163 { NULL
, 0, 0, false, false, false, NULL
, false }
3167 v850_option_override (void)
3169 if (flag_exceptions
|| flag_non_call_exceptions
)
3170 flag_omit_frame_pointer
= 0;
3172 /* The RH850 ABI does not (currently) support the use of the CALLT instruction. */
3173 if (! TARGET_GCC_ABI
)
3174 target_flags
|= MASK_DISABLE_CALLT
;
3178 v850_gen_movdi (rtx
* operands
)
3180 if (REG_P (operands
[0]))
3182 if (REG_P (operands
[1]))
3184 if (REGNO (operands
[0]) == (REGNO (operands
[1]) - 1))
3185 return "mov %1, %0; mov %R1, %R0";
3187 return "mov %R1, %R0; mov %1, %0";
3190 if (MEM_P (operands
[1]))
3192 if (REGNO (operands
[0]) & 1)
3193 /* Use two load word instructions to synthesise a load double. */
3194 return "ld.w %1, %0 ; ld.w %R1, %R0" ;
3196 return "ld.dw %1, %0";
3199 return "mov %1, %0; mov %R1, %R0";
3202 gcc_assert (REG_P (operands
[1]));
3204 if (REGNO (operands
[1]) & 1)
3205 /* Use two store word instructions to synthesise a store double. */
3206 return "st.w %1, %0 ; st.w %R1, %R0 ";
3208 return "st.dw %1, %0";
3211 /* Initialize the GCC target structure. */
3213 #undef TARGET_OPTION_OVERRIDE
3214 #define TARGET_OPTION_OVERRIDE v850_option_override
3216 #undef TARGET_MEMORY_MOVE_COST
3217 #define TARGET_MEMORY_MOVE_COST v850_memory_move_cost
3219 #undef TARGET_ASM_ALIGNED_HI_OP
3220 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
3222 #undef TARGET_PRINT_OPERAND
3223 #define TARGET_PRINT_OPERAND v850_print_operand
3224 #undef TARGET_PRINT_OPERAND_ADDRESS
3225 #define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
3226 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
3227 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p
3229 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3230 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra
3232 #undef TARGET_ATTRIBUTE_TABLE
3233 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
3235 #undef TARGET_INSERT_ATTRIBUTES
3236 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
3238 #undef TARGET_ASM_SELECT_SECTION
3239 #define TARGET_ASM_SELECT_SECTION v850_select_section
3241 /* The assembler supports switchable .bss sections, but
3242 v850_select_section doesn't yet make use of them. */
3243 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
3244 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
3246 #undef TARGET_ENCODE_SECTION_INFO
3247 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
3249 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
3250 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
3252 #undef TARGET_RTX_COSTS
3253 #define TARGET_RTX_COSTS v850_rtx_costs
3255 #undef TARGET_ADDRESS_COST
3256 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
3258 #undef TARGET_MACHINE_DEPENDENT_REORG
3259 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
3261 #undef TARGET_SCHED_ISSUE_RATE
3262 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
3264 #undef TARGET_FUNCTION_VALUE_REGNO_P
3265 #define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
3266 #undef TARGET_FUNCTION_VALUE
3267 #define TARGET_FUNCTION_VALUE v850_function_value
3269 #undef TARGET_PROMOTE_PROTOTYPES
3270 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3272 #undef TARGET_RETURN_IN_MEMORY
3273 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
3275 #undef TARGET_PASS_BY_REFERENCE
3276 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
3278 #undef TARGET_CALLEE_COPIES
3279 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
3281 #undef TARGET_ARG_PARTIAL_BYTES
3282 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
3284 #undef TARGET_FUNCTION_ARG
3285 #define TARGET_FUNCTION_ARG v850_function_arg
3287 #undef TARGET_FUNCTION_ARG_ADVANCE
3288 #define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance
3290 #undef TARGET_CAN_ELIMINATE
3291 #define TARGET_CAN_ELIMINATE v850_can_eliminate
3293 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3294 #define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage
3296 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3297 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
3298 #undef TARGET_TRAMPOLINE_INIT
3299 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
3301 #undef TARGET_LEGITIMATE_CONSTANT_P
3302 #define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p
3304 #undef TARGET_CAN_USE_DOLOOP_P
3305 #define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
3307 struct gcc_target targetm
= TARGET_INITIALIZER
;
3309 #include "gt-v850.h"