/* Subroutines for insn-output.c for NEC V850 series
   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2006, 2007, 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
   Contributed by Jeff Law (law@cygnus.com).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
33 #include "insn-attr.h"
38 #include "diagnostic-core.h"
42 #include "target-def.h"
47 #define streq(a,b) (strcmp (a, b) == 0)
50 static void v850_print_operand_address (FILE *, rtx
);
52 /* Names of the various data areas used on the v850. */
53 tree GHS_default_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
54 tree GHS_current_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
56 /* Track the current data area set by the data area pragma (which
57 can be nested). Tested by check_default_data_area. */
58 data_area_stack_element
* data_area_stack
= NULL
;
60 /* True if we don't need to check any more if the current
61 function is an interrupt handler. */
62 static int v850_interrupt_cache_p
= FALSE
;
64 rtx v850_compare_op0
, v850_compare_op1
;
66 /* Whether current function is an interrupt handler. */
67 static int v850_interrupt_p
= FALSE
;
69 static GTY(()) section
* rosdata_section
;
70 static GTY(()) section
* rozdata_section
;
71 static GTY(()) section
* tdata_section
;
72 static GTY(()) section
* zdata_section
;
73 static GTY(()) section
* zbss_section
;
75 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
76 Specify whether to pass the argument by reference. */
79 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
80 enum machine_mode mode
, const_tree type
,
81 bool named ATTRIBUTE_UNUSED
)
83 unsigned HOST_WIDE_INT size
;
86 size
= int_size_in_bytes (type
);
88 size
= GET_MODE_SIZE (mode
);
93 /* Implementing the Varargs Macros. */
96 v850_strict_argument_naming (cumulative_args_t ca ATTRIBUTE_UNUSED
)
98 return !TARGET_GHS
? true : false;
101 /* Return an RTX to represent where an argument with mode MODE
102 and type TYPE will be passed to a function. If the result
103 is NULL_RTX, the argument will be pushed. */
106 v850_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
107 const_tree type
, bool named
)
109 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
110 rtx result
= NULL_RTX
;
117 size
= int_size_in_bytes (type
);
119 size
= GET_MODE_SIZE (mode
);
121 size
= (size
+ UNITS_PER_WORD
-1) & ~(UNITS_PER_WORD
-1);
125 /* Once we have stopped using argument registers, do not start up again. */
126 cum
->nbytes
= 4 * UNITS_PER_WORD
;
130 if (size
<= UNITS_PER_WORD
&& type
)
131 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
135 cum
->nbytes
= (cum
->nbytes
+ align
- 1) &~(align
- 1);
137 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
140 if (type
== NULL_TREE
141 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
144 switch (cum
->nbytes
/ UNITS_PER_WORD
)
147 result
= gen_rtx_REG (mode
, 6);
150 result
= gen_rtx_REG (mode
, 7);
153 result
= gen_rtx_REG (mode
, 8);
156 result
= gen_rtx_REG (mode
, 9);
165 /* Return the number of bytes which must be put into registers
166 for values which are part in registers and part in memory. */
168 v850_arg_partial_bytes (cumulative_args_t cum_v
, enum machine_mode mode
,
169 tree type
, bool named
)
171 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
174 if (TARGET_GHS
&& !named
)
178 size
= int_size_in_bytes (type
);
180 size
= GET_MODE_SIZE (mode
);
186 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
190 cum
->nbytes
= (cum
->nbytes
+ align
- 1) & ~ (align
- 1);
192 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
195 if (cum
->nbytes
+ size
<= 4 * UNITS_PER_WORD
)
198 if (type
== NULL_TREE
199 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
202 return 4 * UNITS_PER_WORD
- cum
->nbytes
;
205 /* Update the data in CUM to advance over an argument
206 of mode MODE and data type TYPE.
207 (TYPE is null for libcalls where that information may not be available.) */
210 v850_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
211 const_tree type
, bool named ATTRIBUTE_UNUSED
)
213 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
215 cum
->nbytes
+= (((type
&& int_size_in_bytes (type
) > 8
216 ? GET_MODE_SIZE (Pmode
)
218 ? GET_MODE_SIZE (mode
)
219 : int_size_in_bytes (type
))) + UNITS_PER_WORD
- 1)
223 /* Return the high and low words of a CONST_DOUBLE */
226 const_double_split (rtx x
, HOST_WIDE_INT
* p_high
, HOST_WIDE_INT
* p_low
)
228 if (GET_CODE (x
) == CONST_DOUBLE
)
233 switch (GET_MODE (x
))
236 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
237 REAL_VALUE_TO_TARGET_DOUBLE (rv
, t
);
238 *p_high
= t
[1]; /* since v850 is little endian */
239 *p_low
= t
[0]; /* high is second word */
243 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
244 REAL_VALUE_TO_TARGET_SINGLE (rv
, *p_high
);
250 *p_high
= CONST_DOUBLE_HIGH (x
);
251 *p_low
= CONST_DOUBLE_LOW (x
);
259 fatal_insn ("const_double_split got a bad insn:", x
);
263 /* Return the cost of the rtx R with code CODE. */
266 const_costs_int (HOST_WIDE_INT value
, int zero_cost
)
268 if (CONST_OK_FOR_I (value
))
270 else if (CONST_OK_FOR_J (value
))
272 else if (CONST_OK_FOR_K (value
))
279 const_costs (rtx r
, enum rtx_code c
)
281 HOST_WIDE_INT high
, low
;
286 return const_costs_int (INTVAL (r
), 0);
289 const_double_split (r
, &high
, &low
);
290 if (GET_MODE (r
) == SFmode
)
291 return const_costs_int (high
, 1);
293 return const_costs_int (high
, 1) + const_costs_int (low
, 1);
309 v850_rtx_costs (rtx x
,
311 int outer_code ATTRIBUTE_UNUSED
,
312 int opno ATTRIBUTE_UNUSED
,
313 int * total
, bool speed
)
315 enum rtx_code code
= (enum rtx_code
) codearg
;
324 *total
= COSTS_N_INSNS (const_costs (x
, code
));
331 if (TARGET_V850E
&& !speed
)
339 && ( GET_MODE (x
) == SImode
340 || GET_MODE (x
) == HImode
341 || GET_MODE (x
) == QImode
))
343 if (GET_CODE (XEXP (x
, 1)) == REG
)
345 else if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
347 if (CONST_OK_FOR_O (INTVAL (XEXP (x
, 1))))
349 else if (CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))))
358 if (outer_code
== COMPARE
)
367 /* Print operand X using operand code CODE to assembly language output file
371 v850_print_operand (FILE * file
, rtx x
, int code
)
373 HOST_WIDE_INT high
, low
;
378 /* We use 'c' operands with symbols for .vtinherit. */
379 if (GET_CODE (x
) == SYMBOL_REF
)
381 output_addr_const(file
, x
);
388 switch ((code
== 'B' || code
== 'C')
389 ? reverse_condition (GET_CODE (x
)) : GET_CODE (x
))
392 if (code
== 'c' || code
== 'C')
393 fprintf (file
, "nz");
395 fprintf (file
, "ne");
398 if (code
== 'c' || code
== 'C')
404 fprintf (file
, "ge");
407 fprintf (file
, "gt");
410 fprintf (file
, "le");
413 fprintf (file
, "lt");
416 fprintf (file
, "nl");
422 fprintf (file
, "nh");
431 case 'F': /* High word of CONST_DOUBLE. */
432 switch (GET_CODE (x
))
435 fprintf (file
, "%d", (INTVAL (x
) >= 0) ? 0 : -1);
439 const_double_split (x
, &high
, &low
);
440 fprintf (file
, "%ld", (long) high
);
447 case 'G': /* Low word of CONST_DOUBLE. */
448 switch (GET_CODE (x
))
451 fprintf (file
, "%ld", (long) INTVAL (x
));
455 const_double_split (x
, &high
, &low
);
456 fprintf (file
, "%ld", (long) low
);
464 fprintf (file
, "%d\n", (int)(INTVAL (x
) & 0xffff));
467 fprintf (file
, "%d", exact_log2 (INTVAL (x
)));
470 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
472 if (GET_CODE (x
) == CONST
)
473 x
= XEXP (XEXP (x
, 0), 0);
475 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
477 if (SYMBOL_REF_ZDA_P (x
))
478 fprintf (file
, "zdaoff");
479 else if (SYMBOL_REF_SDA_P (x
))
480 fprintf (file
, "sdaoff");
481 else if (SYMBOL_REF_TDA_P (x
))
482 fprintf (file
, "tdaoff");
487 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
488 output_addr_const (file
, x
);
491 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
493 if (GET_CODE (x
) == CONST
)
494 x
= XEXP (XEXP (x
, 0), 0);
496 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
498 if (SYMBOL_REF_ZDA_P (x
))
499 fprintf (file
, "r0");
500 else if (SYMBOL_REF_SDA_P (x
))
501 fprintf (file
, "gp");
502 else if (SYMBOL_REF_TDA_P (x
))
503 fprintf (file
, "ep");
507 case 'R': /* 2nd word of a double. */
508 switch (GET_CODE (x
))
511 fprintf (file
, reg_names
[REGNO (x
) + 1]);
514 x
= XEXP (adjust_address (x
, SImode
, 4), 0);
515 v850_print_operand_address (file
, x
);
516 if (GET_CODE (x
) == CONST_INT
)
517 fprintf (file
, "[r0]");
526 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
527 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), FALSE
))
534 /* Like an 'S' operand above, but for unsigned loads only. */
535 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), TRUE
))
540 case 'W': /* Print the instruction suffix. */
541 switch (GET_MODE (x
))
546 case QImode
: fputs (".b", file
); break;
547 case HImode
: fputs (".h", file
); break;
548 case SImode
: fputs (".w", file
); break;
549 case SFmode
: fputs (".w", file
); break;
552 case '.': /* Register r0. */
553 fputs (reg_names
[0], file
);
555 case 'z': /* Reg or zero. */
557 fputs (reg_names
[REGNO (x
)], file
);
558 else if ((GET_MODE(x
) == SImode
559 || GET_MODE(x
) == DFmode
560 || GET_MODE(x
) == SFmode
)
561 && x
== CONST0_RTX(GET_MODE(x
)))
562 fputs (reg_names
[0], file
);
565 gcc_assert (x
== const0_rtx
);
566 fputs (reg_names
[0], file
);
570 switch (GET_CODE (x
))
573 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
574 output_address (gen_rtx_PLUS (SImode
, gen_rtx_REG (SImode
, 0),
577 output_address (XEXP (x
, 0));
581 fputs (reg_names
[REGNO (x
)], file
);
584 fputs (reg_names
[subreg_regno (x
)], file
);
591 v850_print_operand_address (file
, x
);
602 /* Output assembly language output for the address ADDR to FILE. */
605 v850_print_operand_address (FILE * file
, rtx addr
)
607 switch (GET_CODE (addr
))
610 fprintf (file
, "0[");
611 v850_print_operand (file
, addr
, 0);
615 if (GET_CODE (XEXP (addr
, 0)) == REG
)
618 fprintf (file
, "lo(");
619 v850_print_operand (file
, XEXP (addr
, 1), 0);
620 fprintf (file
, ")[");
621 v850_print_operand (file
, XEXP (addr
, 0), 0);
626 if (GET_CODE (XEXP (addr
, 0)) == REG
627 || GET_CODE (XEXP (addr
, 0)) == SUBREG
)
630 v850_print_operand (file
, XEXP (addr
, 1), 0);
632 v850_print_operand (file
, XEXP (addr
, 0), 0);
637 v850_print_operand (file
, XEXP (addr
, 0), 0);
639 v850_print_operand (file
, XEXP (addr
, 1), 0);
644 const char *off_name
= NULL
;
645 const char *reg_name
= NULL
;
647 if (SYMBOL_REF_ZDA_P (addr
))
652 else if (SYMBOL_REF_SDA_P (addr
))
657 else if (SYMBOL_REF_TDA_P (addr
))
664 fprintf (file
, "%s(", off_name
);
665 output_addr_const (file
, addr
);
667 fprintf (file
, ")[%s]", reg_name
);
671 if (special_symbolref_operand (addr
, VOIDmode
))
673 rtx x
= XEXP (XEXP (addr
, 0), 0);
674 const char *off_name
;
675 const char *reg_name
;
677 if (SYMBOL_REF_ZDA_P (x
))
682 else if (SYMBOL_REF_SDA_P (x
))
687 else if (SYMBOL_REF_TDA_P (x
))
695 fprintf (file
, "%s(", off_name
);
696 output_addr_const (file
, addr
);
697 fprintf (file
, ")[%s]", reg_name
);
700 output_addr_const (file
, addr
);
703 output_addr_const (file
, addr
);
709 v850_print_operand_punct_valid_p (unsigned char code
)
/* When assemble_integer is used to emit the offsets for a switch
   table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
   output_addr_const will normally barf at this, but it is OK to omit
   the truncate and just emit the difference of the two labels.  The
   .hword directive will automatically handle the truncation for us.

   Returns true if rtx was handled, false otherwise.  */
723 v850_output_addr_const_extra (FILE * file
, rtx x
)
725 if (GET_CODE (x
) != TRUNCATE
)
730 /* We must also handle the case where the switch table was passed a
731 constant value and so has been collapsed. In this case the first
732 label will have been deleted. In such a case it is OK to emit
733 nothing, since the table will not be used.
734 (cf gcc.c-torture/compile/990801-1.c). */
735 if (GET_CODE (x
) == MINUS
736 && GET_CODE (XEXP (x
, 0)) == LABEL_REF
737 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == CODE_LABEL
738 && INSN_DELETED_P (XEXP (XEXP (x
, 0), 0)))
741 output_addr_const (file
, x
);
745 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
749 output_move_single (rtx
* operands
)
751 rtx dst
= operands
[0];
752 rtx src
= operands
[1];
759 else if (GET_CODE (src
) == CONST_INT
)
761 HOST_WIDE_INT value
= INTVAL (src
);
763 if (CONST_OK_FOR_J (value
)) /* Signed 5-bit immediate. */
766 else if (CONST_OK_FOR_K (value
)) /* Signed 16-bit immediate. */
767 return "movea %1,%.,%0";
769 else if (CONST_OK_FOR_L (value
)) /* Upper 16 bits were set. */
770 return "movhi hi0(%1),%.,%0";
772 /* A random constant. */
773 else if (TARGET_V850E
|| TARGET_V850E2_ALL
)
776 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
779 else if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
781 HOST_WIDE_INT high
, low
;
783 const_double_split (src
, &high
, &low
);
785 if (CONST_OK_FOR_J (high
)) /* Signed 5-bit immediate. */
788 else if (CONST_OK_FOR_K (high
)) /* Signed 16-bit immediate. */
789 return "movea %F1,%.,%0";
791 else if (CONST_OK_FOR_L (high
)) /* Upper 16 bits were set. */
792 return "movhi hi0(%F1),%.,%0";
794 /* A random constant. */
795 else if (TARGET_V850E
|| TARGET_V850E2_ALL
)
799 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
802 else if (GET_CODE (src
) == MEM
)
803 return "%S1ld%W1 %1,%0";
805 else if (special_symbolref_operand (src
, VOIDmode
))
806 return "movea %O1(%P1),%Q1,%0";
808 else if (GET_CODE (src
) == LABEL_REF
809 || GET_CODE (src
) == SYMBOL_REF
810 || GET_CODE (src
) == CONST
)
812 if (TARGET_V850E
|| TARGET_V850E2_ALL
)
813 return "mov hilo(%1),%0";
815 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
818 else if (GET_CODE (src
) == HIGH
)
819 return "movhi hi(%1),%.,%0";
821 else if (GET_CODE (src
) == LO_SUM
)
823 operands
[2] = XEXP (src
, 0);
824 operands
[3] = XEXP (src
, 1);
825 return "movea lo(%3),%2,%0";
829 else if (GET_CODE (dst
) == MEM
)
832 return "%S0st%W0 %1,%0";
834 else if (GET_CODE (src
) == CONST_INT
&& INTVAL (src
) == 0)
835 return "%S0st%W0 %.,%0";
837 else if (GET_CODE (src
) == CONST_DOUBLE
838 && CONST0_RTX (GET_MODE (dst
)) == src
)
839 return "%S0st%W0 %.,%0";
842 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode
, dst
, src
));
846 /* Generate comparison code. */
848 v850_float_z_comparison_operator (rtx op
, enum machine_mode mode
)
850 enum rtx_code code
= GET_CODE (op
);
852 if (GET_RTX_CLASS (code
) != RTX_COMPARE
853 && GET_RTX_CLASS (code
) != RTX_COMM_COMPARE
)
856 if (mode
!= GET_MODE (op
) && mode
!= VOIDmode
)
859 if ((GET_CODE (XEXP (op
, 0)) != REG
860 || REGNO (XEXP (op
, 0)) != CC_REGNUM
)
861 || XEXP (op
, 1) != const0_rtx
)
864 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_LTmode
)
866 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_LEmode
)
868 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_EQmode
)
875 v850_float_nz_comparison_operator (rtx op
, enum machine_mode mode
)
877 enum rtx_code code
= GET_CODE (op
);
879 if (GET_RTX_CLASS (code
) != RTX_COMPARE
880 && GET_RTX_CLASS (code
) != RTX_COMM_COMPARE
)
883 if (mode
!= GET_MODE (op
) && mode
!= VOIDmode
)
886 if ((GET_CODE (XEXP (op
, 0)) != REG
887 || REGNO (XEXP (op
, 0)) != CC_REGNUM
)
888 || XEXP (op
, 1) != const0_rtx
)
891 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_GTmode
)
893 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_GEmode
)
895 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_NEmode
)
902 v850_select_cc_mode (enum rtx_code cond
, rtx op0
, rtx op1 ATTRIBUTE_UNUSED
)
904 if (GET_MODE_CLASS (GET_MODE (op0
)) == MODE_FLOAT
)
909 return CC_FPU_LEmode
;
911 return CC_FPU_GEmode
;
913 return CC_FPU_LTmode
;
915 return CC_FPU_GTmode
;
917 return CC_FPU_EQmode
;
919 return CC_FPU_NEmode
;
928 v850_gen_float_compare (enum rtx_code cond
, enum machine_mode mode ATTRIBUTE_UNUSED
, rtx op0
, rtx op1
)
930 if (GET_MODE(op0
) == DFmode
)
935 emit_insn (gen_cmpdf_le_insn (op0
, op1
));
938 emit_insn (gen_cmpdf_ge_insn (op0
, op1
));
941 emit_insn (gen_cmpdf_lt_insn (op0
, op1
));
944 emit_insn (gen_cmpdf_gt_insn (op0
, op1
));
947 emit_insn (gen_cmpdf_eq_insn (op0
, op1
));
950 emit_insn (gen_cmpdf_ne_insn (op0
, op1
));
956 else if (GET_MODE(v850_compare_op0
) == SFmode
)
961 emit_insn (gen_cmpsf_le_insn(op0
, op1
));
964 emit_insn (gen_cmpsf_ge_insn(op0
, op1
));
967 emit_insn (gen_cmpsf_lt_insn(op0
, op1
));
970 emit_insn (gen_cmpsf_gt_insn(op0
, op1
));
973 emit_insn (gen_cmpsf_eq_insn(op0
, op1
));
976 emit_insn (gen_cmpsf_ne_insn(op0
, op1
));
987 return v850_select_cc_mode (cond
, op0
, op1
);
991 v850_gen_compare (enum rtx_code cond
, enum machine_mode mode
, rtx op0
, rtx op1
)
993 if (GET_MODE_CLASS(GET_MODE (op0
)) != MODE_FLOAT
)
995 emit_insn (gen_cmpsi_insn (op0
, op1
));
996 return gen_rtx_fmt_ee (cond
, mode
, gen_rtx_REG(CCmode
, CC_REGNUM
), const0_rtx
);
1001 mode
= v850_gen_float_compare (cond
, mode
, op0
, op1
);
1002 cc_reg
= gen_rtx_REG (mode
, CC_REGNUM
);
1003 emit_insn (gen_rtx_SET(mode
, cc_reg
, gen_rtx_REG (mode
, FCC_REGNUM
)));
1005 return gen_rtx_fmt_ee (cond
, mode
, cc_reg
, const0_rtx
);
1009 /* Return maximum offset supported for a short EP memory reference of mode
1010 MODE and signedness UNSIGNEDP. */
1013 ep_memory_offset (enum machine_mode mode
, int unsignedp ATTRIBUTE_UNUSED
)
1020 if (TARGET_SMALL_SLD
)
1021 max_offset
= (1 << 4);
1022 else if ((TARGET_V850E
|| TARGET_V850E2_ALL
)
1024 max_offset
= (1 << 4);
1026 max_offset
= (1 << 7);
1030 if (TARGET_SMALL_SLD
)
1031 max_offset
= (1 << 5);
1032 else if ((TARGET_V850E
|| TARGET_V850E2_ALL
)
1034 max_offset
= (1 << 5);
1036 max_offset
= (1 << 8);
1041 max_offset
= (1 << 8);
1051 /* Return true if OP is a valid short EP memory reference */
1054 ep_memory_operand (rtx op
, enum machine_mode mode
, int unsigned_load
)
1060 /* If we are not using the EP register on a per-function basis
1061 then do not allow this optimization at all. This is to
1062 prevent the use of the SLD/SST instructions which cannot be
1063 guaranteed to work properly due to a hardware bug. */
1067 if (GET_CODE (op
) != MEM
)
1070 max_offset
= ep_memory_offset (mode
, unsigned_load
);
1072 mask
= GET_MODE_SIZE (mode
) - 1;
1074 addr
= XEXP (op
, 0);
1075 if (GET_CODE (addr
) == CONST
)
1076 addr
= XEXP (addr
, 0);
1078 switch (GET_CODE (addr
))
1084 return SYMBOL_REF_TDA_P (addr
);
1087 return REGNO (addr
) == EP_REGNUM
;
1090 op0
= XEXP (addr
, 0);
1091 op1
= XEXP (addr
, 1);
1092 if (GET_CODE (op1
) == CONST_INT
1093 && INTVAL (op1
) < max_offset
1094 && INTVAL (op1
) >= 0
1095 && (INTVAL (op1
) & mask
) == 0)
1097 if (GET_CODE (op0
) == REG
&& REGNO (op0
) == EP_REGNUM
)
1100 if (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_TDA_P (op0
))
1109 /* Substitute memory references involving a pointer, to use the ep pointer,
1110 taking care to save and preserve the ep. */
1113 substitute_ep_register (rtx first_insn
,
1120 rtx reg
= gen_rtx_REG (Pmode
, regno
);
1125 df_set_regs_ever_live (1, true);
1126 *p_r1
= gen_rtx_REG (Pmode
, 1);
1127 *p_ep
= gen_rtx_REG (Pmode
, 30);
1132 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1133 2 * (uses
- 3), uses
, reg_names
[regno
],
1134 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)),
1135 INSN_UID (first_insn
), INSN_UID (last_insn
));
1137 if (GET_CODE (first_insn
) == NOTE
)
1138 first_insn
= next_nonnote_insn (first_insn
);
1140 last_insn
= next_nonnote_insn (last_insn
);
1141 for (insn
= first_insn
; insn
&& insn
!= last_insn
; insn
= NEXT_INSN (insn
))
1143 if (GET_CODE (insn
) == INSN
)
1145 rtx pattern
= single_set (insn
);
1147 /* Replace the memory references. */
1151 /* Memory operands are signed by default. */
1152 int unsignedp
= FALSE
;
1154 if (GET_CODE (SET_DEST (pattern
)) == MEM
1155 && GET_CODE (SET_SRC (pattern
)) == MEM
)
1158 else if (GET_CODE (SET_DEST (pattern
)) == MEM
)
1159 p_mem
= &SET_DEST (pattern
);
1161 else if (GET_CODE (SET_SRC (pattern
)) == MEM
)
1162 p_mem
= &SET_SRC (pattern
);
1164 else if (GET_CODE (SET_SRC (pattern
)) == SIGN_EXTEND
1165 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1166 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1168 else if (GET_CODE (SET_SRC (pattern
)) == ZERO_EXTEND
1169 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1171 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1179 rtx addr
= XEXP (*p_mem
, 0);
1181 if (GET_CODE (addr
) == REG
&& REGNO (addr
) == (unsigned) regno
)
1182 *p_mem
= change_address (*p_mem
, VOIDmode
, *p_ep
);
1184 else if (GET_CODE (addr
) == PLUS
1185 && GET_CODE (XEXP (addr
, 0)) == REG
1186 && REGNO (XEXP (addr
, 0)) == (unsigned) regno
1187 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1188 && ((INTVAL (XEXP (addr
, 1)))
1189 < ep_memory_offset (GET_MODE (*p_mem
),
1191 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1192 *p_mem
= change_address (*p_mem
, VOIDmode
,
1193 gen_rtx_PLUS (Pmode
,
1201 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1202 insn
= prev_nonnote_insn (first_insn
);
1203 if (insn
&& GET_CODE (insn
) == INSN
1204 && GET_CODE (PATTERN (insn
)) == SET
1205 && SET_DEST (PATTERN (insn
)) == *p_ep
1206 && SET_SRC (PATTERN (insn
)) == *p_r1
)
1209 emit_insn_before (gen_rtx_SET (Pmode
, *p_r1
, *p_ep
), first_insn
);
1211 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, reg
), first_insn
);
1212 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, *p_r1
), last_insn
);
1216 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1217 the -mep mode to copy heavily used pointers to ep to use the implicit
1229 regs
[FIRST_PSEUDO_REGISTER
];
1238 /* If not ep mode, just return now. */
1242 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1245 regs
[i
].first_insn
= NULL_RTX
;
1246 regs
[i
].last_insn
= NULL_RTX
;
1249 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1251 switch (GET_CODE (insn
))
1253 /* End of basic block */
1260 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1262 if (max_uses
< regs
[i
].uses
)
1264 max_uses
= regs
[i
].uses
;
1270 substitute_ep_register (regs
[max_regno
].first_insn
,
1271 regs
[max_regno
].last_insn
,
1272 max_uses
, max_regno
, &r1
, &ep
);
1276 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1279 regs
[i
].first_insn
= NULL_RTX
;
1280 regs
[i
].last_insn
= NULL_RTX
;
1288 pattern
= single_set (insn
);
1290 /* See if there are any memory references we can shorten */
1293 rtx src
= SET_SRC (pattern
);
1294 rtx dest
= SET_DEST (pattern
);
1296 /* Memory operands are signed by default. */
1297 int unsignedp
= FALSE
;
1299 /* We might have (SUBREG (MEM)) here, so just get rid of the
1300 subregs to make this code simpler. */
1301 if (GET_CODE (dest
) == SUBREG
1302 && (GET_CODE (SUBREG_REG (dest
)) == MEM
1303 || GET_CODE (SUBREG_REG (dest
)) == REG
))
1304 alter_subreg (&dest
, true);
1305 if (GET_CODE (src
) == SUBREG
1306 && (GET_CODE (SUBREG_REG (src
)) == MEM
1307 || GET_CODE (SUBREG_REG (src
)) == REG
))
1308 alter_subreg (&src
, true);
1310 if (GET_CODE (dest
) == MEM
&& GET_CODE (src
) == MEM
)
1313 else if (GET_CODE (dest
) == MEM
)
1316 else if (GET_CODE (src
) == MEM
)
1319 else if (GET_CODE (src
) == SIGN_EXTEND
1320 && GET_CODE (XEXP (src
, 0)) == MEM
)
1321 mem
= XEXP (src
, 0);
1323 else if (GET_CODE (src
) == ZERO_EXTEND
1324 && GET_CODE (XEXP (src
, 0)) == MEM
)
1326 mem
= XEXP (src
, 0);
1332 if (mem
&& ep_memory_operand (mem
, GET_MODE (mem
), unsignedp
))
1335 else if (!use_ep
&& mem
1336 && GET_MODE_SIZE (GET_MODE (mem
)) <= UNITS_PER_WORD
)
1338 rtx addr
= XEXP (mem
, 0);
1342 if (GET_CODE (addr
) == REG
)
1345 regno
= REGNO (addr
);
1348 else if (GET_CODE (addr
) == PLUS
1349 && GET_CODE (XEXP (addr
, 0)) == REG
1350 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1351 && ((INTVAL (XEXP (addr
, 1)))
1352 < ep_memory_offset (GET_MODE (mem
), unsignedp
))
1353 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1356 regno
= REGNO (XEXP (addr
, 0));
1365 regs
[regno
].last_insn
= insn
;
1366 if (!regs
[regno
].first_insn
)
1367 regs
[regno
].first_insn
= insn
;
1371 /* Loading up a register in the basic block zaps any savings
1373 if (GET_CODE (dest
) == REG
)
1375 enum machine_mode mode
= GET_MODE (dest
);
1379 regno
= REGNO (dest
);
1380 endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
1384 /* See if we can use the pointer before this
1389 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1391 if (max_uses
< regs
[i
].uses
)
1393 max_uses
= regs
[i
].uses
;
1399 && max_regno
>= regno
1400 && max_regno
< endregno
)
1402 substitute_ep_register (regs
[max_regno
].first_insn
,
1403 regs
[max_regno
].last_insn
,
1404 max_uses
, max_regno
, &r1
,
1407 /* Since we made a substitution, zap all remembered
1409 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1412 regs
[i
].first_insn
= NULL_RTX
;
1413 regs
[i
].last_insn
= NULL_RTX
;
1418 for (i
= regno
; i
< endregno
; i
++)
1421 regs
[i
].first_insn
= NULL_RTX
;
1422 regs
[i
].last_insn
= NULL_RTX
;
1430 /* # of registers saved by the interrupt handler. */
1431 #define INTERRUPT_FIXED_NUM 5
1433 /* # of bytes for registers saved by the interrupt handler. */
1434 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1436 /* # of words saved for other registers. */
1437 #define INTERRUPT_ALL_SAVE_NUM \
1438 (30 - INTERRUPT_FIXED_NUM)
1440 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1443 compute_register_save_size (long * p_reg_saved
)
1447 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1448 int call_p
= df_regs_ever_live_p (LINK_POINTER_REGNUM
);
1451 /* Always save the link pointer - we cannot rely upon df_regs_ever_live_p. */
1454 df_set_regs_ever_live (LINK_POINTER_REGNUM
, true);
1458 /* Count space for the register saves. */
1459 if (interrupt_handler
)
1461 for (i
= 0; i
<= 31; i
++)
1465 if (df_regs_ever_live_p (i
) || call_p
)
1468 reg_saved
|= 1L << i
;
1472 /* We don't save/restore r0 or the stack pointer */
1474 case STACK_POINTER_REGNUM
:
1477 /* For registers with fixed use, we save them, set them to the
1478 appropriate value, and then restore them.
1479 These registers are handled specially, so don't list them
1480 on the list of registers to save in the prologue. */
1481 case 1: /* temp used to hold ep */
1483 case 10: /* temp used to call interrupt save/restore */
1484 case 11: /* temp used to call interrupt save/restore (long call) */
1485 case EP_REGNUM
: /* ep */
1492 /* Find the first register that needs to be saved. */
1493 for (i
= 0; i
<= 31; i
++)
1494 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1495 || i
== LINK_POINTER_REGNUM
))
1498 /* If it is possible that an out-of-line helper function might be
1499 used to generate the prologue for the current function, then we
1500 need to cover the possibility that such a helper function will
1501 be used, despite the fact that there might be gaps in the list of
1502 registers that need to be saved. To detect this we note that the
1503 helper functions always push at least register r29 (provided
1504 that the function is not an interrupt handler). */
1506 if (TARGET_PROLOG_FUNCTION
1507 && (i
== 2 || ((i
>= 20) && (i
< 30))))
1512 reg_saved
|= 1L << i
;
1517 /* Helper functions save all registers between the starting
1518 register and the last register, regardless of whether they
1519 are actually used by the function or not. */
1520 for (; i
<= 29; i
++)
1523 reg_saved
|= 1L << i
;
1526 if (df_regs_ever_live_p (LINK_POINTER_REGNUM
))
1529 reg_saved
|= 1L << LINK_POINTER_REGNUM
;
1534 for (; i
<= 31; i
++)
1535 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1536 || i
== LINK_POINTER_REGNUM
))
1539 reg_saved
|= 1L << i
;
1545 *p_reg_saved
= reg_saved
;
1551 compute_frame_size (int size
, long * p_reg_saved
)
1554 + compute_register_save_size (p_reg_saved
)
1555 + crtl
->outgoing_args_size
);
1559 use_prolog_function (int num_save
, int frame_size
)
1561 int alloc_stack
= (4 * num_save
);
1562 int unalloc_stack
= frame_size
- alloc_stack
;
1563 int save_func_len
, restore_func_len
;
1564 int save_normal_len
, restore_normal_len
;
1566 if (! TARGET_DISABLE_CALLT
)
1567 save_func_len
= restore_func_len
= 2;
1569 save_func_len
= restore_func_len
= TARGET_LONG_CALLS
? (4+4+4+2+2) : 4;
1573 save_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1574 restore_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1577 /* See if we would have used ep to save the stack. */
1578 if (TARGET_EP
&& num_save
> 3 && (unsigned)frame_size
< 255)
1579 save_normal_len
= restore_normal_len
= (3 * 2) + (2 * num_save
);
1581 save_normal_len
= restore_normal_len
= 4 * num_save
;
1583 save_normal_len
+= CONST_OK_FOR_J (-frame_size
) ? 2 : 4;
1584 restore_normal_len
+= (CONST_OK_FOR_J (frame_size
) ? 2 : 4) + 2;
1586 /* Don't bother checking if we don't actually save any space.
1587 This happens for instance if one register is saved and additional
1588 stack space is allocated. */
1589 return ((save_func_len
+ restore_func_len
) < (save_normal_len
+ restore_normal_len
));
1593 increment_stack (unsigned int amount
)
1600 inc
= GEN_INT (amount
);
1602 if (! CONST_OK_FOR_K (amount
))
1604 rtx reg
= gen_rtx_REG (Pmode
, 12);
1606 emit_move_insn (reg
, inc
);
1610 emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
, inc
));
1614 expand_prologue (void)
1617 unsigned int size
= get_frame_size ();
1618 unsigned int actual_fsize
;
1619 unsigned int init_stack_alloc
= 0;
1622 unsigned int num_save
;
1624 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1627 actual_fsize
= compute_frame_size (size
, ®_saved
);
1629 if (flag_stack_usage_info
)
1630 current_function_static_stack_size
= actual_fsize
;
1632 /* Save/setup global registers for interrupt functions right now. */
1633 if (interrupt_handler
)
1635 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E
|| TARGET_V850E2_ALL
))
1636 emit_insn (gen_callt_save_interrupt ());
1638 emit_insn (gen_save_interrupt ());
1640 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1642 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1643 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1646 /* Identify all of the saved registers. */
1648 for (i
= 1; i
< 32; i
++)
1650 if (((1L << i
) & reg_saved
) != 0)
1651 save_regs
[num_save
++] = gen_rtx_REG (Pmode
, i
);
1654 /* See if we have an insn that allocates stack space and saves the particular
1655 registers we want to. */
1656 save_all
= NULL_RTX
;
1657 if (TARGET_PROLOG_FUNCTION
&& num_save
> 0)
1659 if (use_prolog_function (num_save
, actual_fsize
))
1661 int alloc_stack
= 4 * num_save
;
1664 save_all
= gen_rtx_PARALLEL
1666 rtvec_alloc (num_save
+ 1
1667 + (TARGET_DISABLE_CALLT
? (TARGET_LONG_CALLS
? 2 : 1) : 0)));
1669 XVECEXP (save_all
, 0, 0)
1670 = gen_rtx_SET (VOIDmode
,
1672 gen_rtx_PLUS (Pmode
,
1674 GEN_INT(-alloc_stack
)));
1675 for (i
= 0; i
< num_save
; i
++)
1678 XVECEXP (save_all
, 0, i
+1)
1679 = gen_rtx_SET (VOIDmode
,
1681 gen_rtx_PLUS (Pmode
,
1687 if (TARGET_DISABLE_CALLT
)
1689 XVECEXP (save_all
, 0, num_save
+ 1)
1690 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 10));
1692 if (TARGET_LONG_CALLS
)
1693 XVECEXP (save_all
, 0, num_save
+ 2)
1694 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
1697 code
= recog (save_all
, NULL_RTX
, NULL
);
1700 rtx insn
= emit_insn (save_all
);
1701 INSN_CODE (insn
) = code
;
1702 actual_fsize
-= alloc_stack
;
1706 save_all
= NULL_RTX
;
1710 /* If no prolog save function is available, store the registers the old
1711 fashioned way (one by one). */
1714 /* Special case interrupt functions that save all registers for a call. */
1715 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1717 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E
|| TARGET_V850E2_ALL
))
1718 emit_insn (gen_callt_save_all_interrupt ());
1720 emit_insn (gen_save_all_interrupt ());
1725 /* If the stack is too big, allocate it in chunks so we can do the
1726 register saves. We use the register save size so we use the ep
1728 if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1729 init_stack_alloc
= compute_register_save_size (NULL
);
1731 init_stack_alloc
= actual_fsize
;
1733 /* Save registers at the beginning of the stack frame. */
1734 offset
= init_stack_alloc
- 4;
1736 if (init_stack_alloc
)
1737 increment_stack (- (signed) init_stack_alloc
);
1739 /* Save the return pointer first. */
1740 if (num_save
> 0 && REGNO (save_regs
[num_save
-1]) == LINK_POINTER_REGNUM
)
1742 emit_move_insn (gen_rtx_MEM (SImode
,
1743 plus_constant (Pmode
,
1746 save_regs
[--num_save
]);
1750 for (i
= 0; i
< num_save
; i
++)
1752 emit_move_insn (gen_rtx_MEM (SImode
,
1753 plus_constant (Pmode
,
1762 /* Allocate the rest of the stack that was not allocated above (either it is
1763 > 32K or we just called a function to save the registers and needed more
1765 if (actual_fsize
> init_stack_alloc
)
1767 int diff
= actual_fsize
- init_stack_alloc
;
1769 increment_stack (- diff
);
1772 /* If we need a frame pointer, set it up now. */
1773 if (frame_pointer_needed
)
1774 emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
1779 expand_epilogue (void)
1782 unsigned int size
= get_frame_size ();
1784 int actual_fsize
= compute_frame_size (size
, ®_saved
);
1785 rtx restore_regs
[32];
1787 unsigned int num_restore
;
1789 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1791 /* Eliminate the initial stack stored by interrupt functions. */
1792 if (interrupt_handler
)
1794 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1795 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1796 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1799 /* Cut off any dynamic stack created. */
1800 if (frame_pointer_needed
)
1801 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1803 /* Identify all of the saved registers. */
1805 for (i
= 1; i
< 32; i
++)
1807 if (((1L << i
) & reg_saved
) != 0)
1808 restore_regs
[num_restore
++] = gen_rtx_REG (Pmode
, i
);
1811 /* See if we have an insn that restores the particular registers we
1813 restore_all
= NULL_RTX
;
1815 if (TARGET_PROLOG_FUNCTION
1817 && !interrupt_handler
)
1819 int alloc_stack
= (4 * num_restore
);
1821 /* Don't bother checking if we don't actually save any space. */
1822 if (use_prolog_function (num_restore
, actual_fsize
))
1825 restore_all
= gen_rtx_PARALLEL (VOIDmode
,
1826 rtvec_alloc (num_restore
+ 2));
1827 XVECEXP (restore_all
, 0, 0) = ret_rtx
;
1828 XVECEXP (restore_all
, 0, 1)
1829 = gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1830 gen_rtx_PLUS (Pmode
,
1832 GEN_INT (alloc_stack
)));
1834 offset
= alloc_stack
- 4;
1835 for (i
= 0; i
< num_restore
; i
++)
1837 XVECEXP (restore_all
, 0, i
+2)
1838 = gen_rtx_SET (VOIDmode
,
1841 gen_rtx_PLUS (Pmode
,
1847 code
= recog (restore_all
, NULL_RTX
, NULL
);
1853 actual_fsize
-= alloc_stack
;
1854 increment_stack (actual_fsize
);
1856 insn
= emit_jump_insn (restore_all
);
1857 INSN_CODE (insn
) = code
;
1860 restore_all
= NULL_RTX
;
1864 /* If no epilogue save function is available, restore the registers the
1865 old fashioned way (one by one). */
1868 unsigned int init_stack_free
;
1870 /* If the stack is large, we need to cut it down in 2 pieces. */
1871 if (interrupt_handler
)
1872 init_stack_free
= 0;
1873 else if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1874 init_stack_free
= 4 * num_restore
;
1876 init_stack_free
= (signed) actual_fsize
;
1878 /* Deallocate the rest of the stack if it is > 32K. */
1879 if ((unsigned int) actual_fsize
> init_stack_free
)
1880 increment_stack (actual_fsize
- init_stack_free
);
1882 /* Special case interrupt functions that save all registers
1884 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1886 if (! TARGET_DISABLE_CALLT
)
1887 emit_insn (gen_callt_restore_all_interrupt ());
1889 emit_insn (gen_restore_all_interrupt ());
1893 /* Restore registers from the beginning of the stack frame. */
1894 int offset
= init_stack_free
- 4;
1896 /* Restore the return pointer first. */
1898 && REGNO (restore_regs
[num_restore
- 1]) == LINK_POINTER_REGNUM
)
1900 emit_move_insn (restore_regs
[--num_restore
],
1901 gen_rtx_MEM (SImode
,
1902 plus_constant (Pmode
,
1908 for (i
= 0; i
< num_restore
; i
++)
1910 emit_move_insn (restore_regs
[i
],
1911 gen_rtx_MEM (SImode
,
1912 plus_constant (Pmode
,
1916 emit_use (restore_regs
[i
]);
1920 /* Cut back the remainder of the stack. */
1921 increment_stack (init_stack_free
);
1924 /* And return or use reti for interrupt handlers. */
1925 if (interrupt_handler
)
1927 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E
|| TARGET_V850E2_ALL
))
1928 emit_insn (gen_callt_return_interrupt ());
1930 emit_jump_insn (gen_return_interrupt ());
1932 else if (actual_fsize
)
1933 emit_jump_insn (gen_return_internal ());
1935 emit_jump_insn (gen_return_simple ());
1938 v850_interrupt_cache_p
= FALSE
;
1939 v850_interrupt_p
= FALSE
;
1942 /* Update the condition code from the insn. */
1944 notice_update_cc (rtx body
, rtx insn
)
1946 switch (get_attr_cc (insn
))
1949 /* Insn does not affect CC at all. */
1953 /* Insn does not change CC, but the 0'th operand has been changed. */
1954 if (cc_status
.value1
!= 0
1955 && reg_overlap_mentioned_p (recog_data
.operand
[0], cc_status
.value1
))
1956 cc_status
.value1
= 0;
1960 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
1961 V,C is in an unusable state. */
1963 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
| CC_NO_CARRY
;
1964 cc_status
.value1
= recog_data
.operand
[0];
1968 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
1969 C is in an unusable state. */
1971 cc_status
.flags
|= CC_NO_CARRY
;
1972 cc_status
.value1
= recog_data
.operand
[0];
1976 /* The insn is a compare instruction. */
1978 cc_status
.value1
= SET_SRC (body
);
1982 /* Insn doesn't leave CC in a usable state. */
1991 /* Retrieve the data area that has been chosen for the given decl. */
1994 v850_get_data_area (tree decl
)
1996 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
1997 return DATA_AREA_SDA
;
1999 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2000 return DATA_AREA_TDA
;
2002 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2003 return DATA_AREA_ZDA
;
2005 return DATA_AREA_NORMAL
;
2008 /* Store the indicated data area in the decl's attributes. */
2011 v850_set_data_area (tree decl
, v850_data_area data_area
)
2017 case DATA_AREA_SDA
: name
= get_identifier ("sda"); break;
2018 case DATA_AREA_TDA
: name
= get_identifier ("tda"); break;
2019 case DATA_AREA_ZDA
: name
= get_identifier ("zda"); break;
2024 DECL_ATTRIBUTES (decl
) = tree_cons
2025 (name
, NULL
, DECL_ATTRIBUTES (decl
));
2028 /* Handle an "interrupt" attribute; arguments as in
2029 struct attribute_spec.handler. */
2031 v850_handle_interrupt_attribute (tree
* node
,
2033 tree args ATTRIBUTE_UNUSED
,
2034 int flags ATTRIBUTE_UNUSED
,
2035 bool * no_add_attrs
)
2037 if (TREE_CODE (*node
) != FUNCTION_DECL
)
2039 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2041 *no_add_attrs
= true;
2047 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2048 struct attribute_spec.handler. */
2050 v850_handle_data_area_attribute (tree
* node
,
2052 tree args ATTRIBUTE_UNUSED
,
2053 int flags ATTRIBUTE_UNUSED
,
2054 bool * no_add_attrs
)
2056 v850_data_area data_area
;
2057 v850_data_area area
;
2060 /* Implement data area attribute. */
2061 if (is_attribute_p ("sda", name
))
2062 data_area
= DATA_AREA_SDA
;
2063 else if (is_attribute_p ("tda", name
))
2064 data_area
= DATA_AREA_TDA
;
2065 else if (is_attribute_p ("zda", name
))
2066 data_area
= DATA_AREA_ZDA
;
2070 switch (TREE_CODE (decl
))
2073 if (current_function_decl
!= NULL_TREE
)
2075 error_at (DECL_SOURCE_LOCATION (decl
),
2076 "data area attributes cannot be specified for "
2078 *no_add_attrs
= true;
2084 area
= v850_get_data_area (decl
);
2085 if (area
!= DATA_AREA_NORMAL
&& data_area
!= area
)
2087 error ("data area of %q+D conflicts with previous declaration",
2089 *no_add_attrs
= true;
2101 /* Return nonzero if FUNC is an interrupt function as specified
2102 by the "interrupt" attribute. */
2105 v850_interrupt_function_p (tree func
)
2110 if (v850_interrupt_cache_p
)
2111 return v850_interrupt_p
;
2113 if (TREE_CODE (func
) != FUNCTION_DECL
)
2116 a
= lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func
));
2122 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
2123 ret
= a
!= NULL_TREE
;
2126 /* Its not safe to trust global variables until after function inlining has
2128 if (reload_completed
| reload_in_progress
)
2129 v850_interrupt_p
= ret
;
2136 v850_encode_data_area (tree decl
, rtx symbol
)
2140 /* Map explicit sections into the appropriate attribute */
2141 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2143 if (DECL_SECTION_NAME (decl
))
2145 const char *name
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
2147 if (streq (name
, ".zdata") || streq (name
, ".zbss"))
2148 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2150 else if (streq (name
, ".sdata") || streq (name
, ".sbss"))
2151 v850_set_data_area (decl
, DATA_AREA_SDA
);
2153 else if (streq (name
, ".tdata"))
2154 v850_set_data_area (decl
, DATA_AREA_TDA
);
2157 /* If no attribute, support -m{zda,sda,tda}=n */
2160 int size
= int_size_in_bytes (TREE_TYPE (decl
));
2164 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_TDA
])
2165 v850_set_data_area (decl
, DATA_AREA_TDA
);
2167 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_SDA
])
2168 v850_set_data_area (decl
, DATA_AREA_SDA
);
2170 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_ZDA
])
2171 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2174 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2178 flags
= SYMBOL_REF_FLAGS (symbol
);
2179 switch (v850_get_data_area (decl
))
2181 case DATA_AREA_ZDA
: flags
|= SYMBOL_FLAG_ZDA
; break;
2182 case DATA_AREA_TDA
: flags
|= SYMBOL_FLAG_TDA
; break;
2183 case DATA_AREA_SDA
: flags
|= SYMBOL_FLAG_SDA
; break;
2184 default: gcc_unreachable ();
2186 SYMBOL_REF_FLAGS (symbol
) = flags
;
2190 v850_encode_section_info (tree decl
, rtx rtl
, int first
)
2192 default_encode_section_info (decl
, rtl
, first
);
2194 if (TREE_CODE (decl
) == VAR_DECL
2195 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2196 v850_encode_data_area (decl
, XEXP (rtl
, 0));
2199 /* Construct a JR instruction to a routine that will perform the equivalent of
2200 the RTL passed in as an argument. This RTL is a function epilogue that
2201 pops registers off the stack and possibly releases some extra stack space
2202 as well. The code has already verified that the RTL matches these
2206 construct_restore_jr (rtx op
)
2208 int count
= XVECLEN (op
, 0);
2210 unsigned long int mask
;
2211 unsigned long int first
;
2212 unsigned long int last
;
2214 static char buff
[100]; /* XXX */
2218 error ("bogus JR construction: %d", count
);
2222 /* Work out how many bytes to pop off the stack before retrieving
2224 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2225 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2226 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2228 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2230 /* Each pop will remove 4 bytes from the stack.... */
2231 stack_bytes
-= (count
- 2) * 4;
2233 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2234 if (stack_bytes
!= 0)
2236 error ("bad amount of stack space removal: %d", stack_bytes
);
2240 /* Now compute the bit mask of registers to push. */
2242 for (i
= 2; i
< count
; i
++)
2244 rtx vector_element
= XVECEXP (op
, 0, i
);
2246 gcc_assert (GET_CODE (vector_element
) == SET
);
2247 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2248 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2251 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2254 /* Scan for the first register to pop. */
2255 for (first
= 0; first
< 32; first
++)
2257 if (mask
& (1 << first
))
2261 gcc_assert (first
< 32);
2263 /* Discover the last register to pop. */
2264 if (mask
& (1 << LINK_POINTER_REGNUM
))
2266 last
= LINK_POINTER_REGNUM
;
2270 gcc_assert (!stack_bytes
);
2271 gcc_assert (mask
& (1 << 29));
2276 /* Note, it is possible to have gaps in the register mask.
2277 We ignore this here, and generate a JR anyway. We will
2278 be popping more registers than is strictly necessary, but
2279 it does save code space. */
2281 if (TARGET_LONG_CALLS
)
2286 sprintf (name
, "__return_%s", reg_names
[first
]);
2288 sprintf (name
, "__return_%s_%s", reg_names
[first
], reg_names
[last
]);
2290 sprintf (buff
, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2296 sprintf (buff
, "jr __return_%s", reg_names
[first
]);
2298 sprintf (buff
, "jr __return_%s_%s", reg_names
[first
], reg_names
[last
]);
2305 /* Construct a JARL instruction to a routine that will perform the equivalent
2306 of the RTL passed as a parameter. This RTL is a function prologue that
2307 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2308 some stack space as well. The code has already verified that the RTL
2309 matches these requirements. */
2311 construct_save_jarl (rtx op
)
2313 int count
= XVECLEN (op
, 0);
2315 unsigned long int mask
;
2316 unsigned long int first
;
2317 unsigned long int last
;
2319 static char buff
[100]; /* XXX */
2321 if (count
<= (TARGET_LONG_CALLS
? 3 : 2))
2323 error ("bogus JARL construction: %d", count
);
2328 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2329 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2330 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0)) == REG
);
2331 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2333 /* Work out how many bytes to push onto the stack after storing the
2335 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2337 /* Each push will put 4 bytes from the stack.... */
2338 stack_bytes
+= (count
- (TARGET_LONG_CALLS
? 3 : 2)) * 4;
2340 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2341 if (stack_bytes
!= 0)
2343 error ("bad amount of stack space removal: %d", stack_bytes
);
2347 /* Now compute the bit mask of registers to push. */
2349 for (i
= 1; i
< count
- (TARGET_LONG_CALLS
? 2 : 1); i
++)
2351 rtx vector_element
= XVECEXP (op
, 0, i
);
2353 gcc_assert (GET_CODE (vector_element
) == SET
);
2354 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2355 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2358 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2361 /* Scan for the first register to push. */
2362 for (first
= 0; first
< 32; first
++)
2364 if (mask
& (1 << first
))
2368 gcc_assert (first
< 32);
2370 /* Discover the last register to push. */
2371 if (mask
& (1 << LINK_POINTER_REGNUM
))
2373 last
= LINK_POINTER_REGNUM
;
2377 gcc_assert (!stack_bytes
);
2378 gcc_assert (mask
& (1 << 29));
2383 /* Note, it is possible to have gaps in the register mask.
2384 We ignore this here, and generate a JARL anyway. We will
2385 be pushing more registers than is strictly necessary, but
2386 it does save code space. */
2388 if (TARGET_LONG_CALLS
)
2393 sprintf (name
, "__save_%s", reg_names
[first
]);
2395 sprintf (name
, "__save_%s_%s", reg_names
[first
], reg_names
[last
]);
2397 sprintf (buff
, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2403 sprintf (buff
, "jarl __save_%s, r10", reg_names
[first
]);
2405 sprintf (buff
, "jarl __save_%s_%s, r10", reg_names
[first
],
2412 /* A version of asm_output_aligned_bss() that copes with the special
2413 data areas of the v850. */
2415 v850_output_aligned_bss (FILE * file
,
2418 unsigned HOST_WIDE_INT size
,
2421 switch (v850_get_data_area (decl
))
2424 switch_to_section (zbss_section
);
2428 switch_to_section (sbss_section
);
2432 switch_to_section (tdata_section
);
2435 switch_to_section (bss_section
);
2439 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
2440 #ifdef ASM_DECLARE_OBJECT_NAME
2441 last_assemble_variable_decl
= decl
;
2442 ASM_DECLARE_OBJECT_NAME (file
, name
, decl
);
2444 /* Standard thing is just output label for the object. */
2445 ASM_OUTPUT_LABEL (file
, name
);
2446 #endif /* ASM_DECLARE_OBJECT_NAME */
2447 ASM_OUTPUT_SKIP (file
, size
? size
: 1);
2450 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2452 v850_output_common (FILE * file
,
2458 if (decl
== NULL_TREE
)
2460 fprintf (file
, "%s", COMMON_ASM_OP
);
2464 switch (v850_get_data_area (decl
))
2467 fprintf (file
, "%s", ZCOMMON_ASM_OP
);
2471 fprintf (file
, "%s", SCOMMON_ASM_OP
);
2475 fprintf (file
, "%s", TCOMMON_ASM_OP
);
2479 fprintf (file
, "%s", COMMON_ASM_OP
);
2484 assemble_name (file
, name
);
2485 fprintf (file
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
2488 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2490 v850_output_local (FILE * file
,
2496 fprintf (file
, "%s", LOCAL_ASM_OP
);
2497 assemble_name (file
, name
);
2498 fprintf (file
, "\n");
2500 ASM_OUTPUT_ALIGNED_DECL_COMMON (file
, decl
, name
, size
, align
);
2503 /* Add data area to the given declaration if a ghs data area pragma is
2504 currently in effect (#pragma ghs startXXX/endXXX). */
2506 v850_insert_attributes (tree decl
, tree
* attr_ptr ATTRIBUTE_UNUSED
)
2509 && data_area_stack
->data_area
2510 && current_function_decl
== NULL_TREE
2511 && (TREE_CODE (decl
) == VAR_DECL
|| TREE_CODE (decl
) == CONST_DECL
)
2512 && v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2513 v850_set_data_area (decl
, data_area_stack
->data_area
);
2515 /* Initialize the default names of the v850 specific sections,
2516 if this has not been done before. */
2518 if (GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
] == NULL
)
2520 GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
]
2521 = build_string (sizeof (".sdata")-1, ".sdata");
2523 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROSDATA
]
2524 = build_string (sizeof (".rosdata")-1, ".rosdata");
2526 GHS_default_section_names
[(int) GHS_SECTION_KIND_TDATA
]
2527 = build_string (sizeof (".tdata")-1, ".tdata");
2529 GHS_default_section_names
[(int) GHS_SECTION_KIND_ZDATA
]
2530 = build_string (sizeof (".zdata")-1, ".zdata");
2532 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROZDATA
]
2533 = build_string (sizeof (".rozdata")-1, ".rozdata");
2536 if (current_function_decl
== NULL_TREE
2537 && (TREE_CODE (decl
) == VAR_DECL
2538 || TREE_CODE (decl
) == CONST_DECL
2539 || TREE_CODE (decl
) == FUNCTION_DECL
)
2540 && (!DECL_EXTERNAL (decl
) || DECL_INITIAL (decl
))
2541 && !DECL_SECTION_NAME (decl
))
2543 enum GHS_section_kind kind
= GHS_SECTION_KIND_DEFAULT
;
2544 tree chosen_section
;
2546 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2547 kind
= GHS_SECTION_KIND_TEXT
;
2550 /* First choose a section kind based on the data area of the decl. */
2551 switch (v850_get_data_area (decl
))
2557 kind
= ((TREE_READONLY (decl
))
2558 ? GHS_SECTION_KIND_ROSDATA
2559 : GHS_SECTION_KIND_SDATA
);
2563 kind
= GHS_SECTION_KIND_TDATA
;
2567 kind
= ((TREE_READONLY (decl
))
2568 ? GHS_SECTION_KIND_ROZDATA
2569 : GHS_SECTION_KIND_ZDATA
);
2572 case DATA_AREA_NORMAL
: /* default data area */
2573 if (TREE_READONLY (decl
))
2574 kind
= GHS_SECTION_KIND_RODATA
;
2575 else if (DECL_INITIAL (decl
))
2576 kind
= GHS_SECTION_KIND_DATA
;
2578 kind
= GHS_SECTION_KIND_BSS
;
2582 /* Now, if the section kind has been explicitly renamed,
2583 then attach a section attribute. */
2584 chosen_section
= GHS_current_section_names
[(int) kind
];
2586 /* Otherwise, if this kind of section needs an explicit section
2587 attribute, then also attach one. */
2588 if (chosen_section
== NULL
)
2589 chosen_section
= GHS_default_section_names
[(int) kind
];
2593 /* Only set the section name if specified by a pragma, because
2594 otherwise it will force those variables to get allocated storage
2595 in this module, rather than by the linker. */
2596 DECL_SECTION_NAME (decl
) = chosen_section
;
2601 /* Construct a DISPOSE instruction that is the equivalent of
2602 the given RTX. We have already verified that this should
2606 construct_dispose_instruction (rtx op
)
2608 int count
= XVECLEN (op
, 0);
2610 unsigned long int mask
;
2612 static char buff
[ 100 ]; /* XXX */
2617 error ("bogus DISPOSE construction: %d", count
);
2621 /* Work out how many bytes to pop off the
2622 stack before retrieving registers. */
2623 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2624 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2625 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2627 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2629 /* Each pop will remove 4 bytes from the stack.... */
2630 stack_bytes
-= (count
- 2) * 4;
2632 /* Make sure that the amount we are popping
2633 will fit into the DISPOSE instruction. */
2634 if (stack_bytes
> 128)
2636 error ("too much stack space to dispose of: %d", stack_bytes
);
2640 /* Now compute the bit mask of registers to push. */
2643 for (i
= 2; i
< count
; i
++)
2645 rtx vector_element
= XVECEXP (op
, 0, i
);
2647 gcc_assert (GET_CODE (vector_element
) == SET
);
2648 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2649 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2652 if (REGNO (SET_DEST (vector_element
)) == 2)
2655 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2658 if (! TARGET_DISABLE_CALLT
2659 && (use_callt
|| stack_bytes
== 0))
2663 sprintf (buff
, "callt ctoff(__callt_return_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29);
2668 for (i
= 20; i
< 32; i
++)
2669 if (mask
& (1 << i
))
2673 sprintf (buff
, "callt ctoff(__callt_return_r31c)");
2675 sprintf (buff
, "callt ctoff(__callt_return_r%d_r%s)",
2676 i
, (mask
& (1 << 31)) ? "31c" : "29");
2681 static char regs
[100]; /* XXX */
2684 /* Generate the DISPOSE instruction. Note we could just issue the
2685 bit mask as a number as the assembler can cope with this, but for
2686 the sake of our readers we turn it into a textual description. */
2690 for (i
= 20; i
< 32; i
++)
2692 if (mask
& (1 << i
))
2697 strcat (regs
, ", ");
2702 strcat (regs
, reg_names
[ first
]);
2704 for (i
++; i
< 32; i
++)
2705 if ((mask
& (1 << i
)) == 0)
2710 strcat (regs
, " - ");
2711 strcat (regs
, reg_names
[ i
- 1 ] );
2716 sprintf (buff
, "dispose %d {%s}, r31", stack_bytes
/ 4, regs
);
2722 /* Construct a PREPARE instruction that is the equivalent of
2723 the given RTL. We have already verified that this should
2727 construct_prepare_instruction (rtx op
)
2731 unsigned long int mask
;
2733 static char buff
[ 100 ]; /* XXX */
2736 if (XVECLEN (op
, 0) <= 1)
2738 error ("bogus PREPEARE construction: %d", XVECLEN (op
, 0));
2742 /* Work out how many bytes to push onto
2743 the stack after storing the registers. */
2744 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2745 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2746 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2748 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2751 /* Make sure that the amount we are popping
2752 will fit into the DISPOSE instruction. */
2753 if (stack_bytes
< -128)
2755 error ("too much stack space to prepare: %d", stack_bytes
);
2759 /* Now compute the bit mask of registers to push. */
2762 for (i
= 1; i
< XVECLEN (op
, 0); i
++)
2764 rtx vector_element
= XVECEXP (op
, 0, i
);
2766 if (GET_CODE (vector_element
) == CLOBBER
)
2769 gcc_assert (GET_CODE (vector_element
) == SET
);
2770 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2771 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2774 if (REGNO (SET_SRC (vector_element
)) == 2)
2777 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2781 stack_bytes
+= count
* 4;
2783 if ((! TARGET_DISABLE_CALLT
)
2784 && (use_callt
|| stack_bytes
== 0))
2788 sprintf (buff
, "callt ctoff(__callt_save_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29 );
2792 for (i
= 20; i
< 32; i
++)
2793 if (mask
& (1 << i
))
2797 sprintf (buff
, "callt ctoff(__callt_save_r31c)");
2799 sprintf (buff
, "callt ctoff(__callt_save_r%d_r%s)",
2800 i
, (mask
& (1 << 31)) ? "31c" : "29");
2804 static char regs
[100]; /* XXX */
2808 /* Generate the PREPARE instruction. Note we could just issue the
2809 bit mask as a number as the assembler can cope with this, but for
2810 the sake of our readers we turn it into a textual description. */
2814 for (i
= 20; i
< 32; i
++)
2816 if (mask
& (1 << i
))
2821 strcat (regs
, ", ");
2826 strcat (regs
, reg_names
[ first
]);
2828 for (i
++; i
< 32; i
++)
2829 if ((mask
& (1 << i
)) == 0)
2834 strcat (regs
, " - ");
2835 strcat (regs
, reg_names
[ i
- 1 ] );
2840 sprintf (buff
, "prepare {%s}, %d", regs
, (- stack_bytes
) / 4);
2846 /* Return an RTX indicating where the return address to the
2847 calling function can be found. */
2850 v850_return_addr (int count
)
2855 return get_hard_reg_initial_val (Pmode
, LINK_POINTER_REGNUM
);
2858 /* Implement TARGET_ASM_INIT_SECTIONS. */
2861 v850_asm_init_sections (void)
2864 = get_unnamed_section (0, output_section_asm_op
,
2865 "\t.section .rosdata,\"a\"");
2868 = get_unnamed_section (0, output_section_asm_op
,
2869 "\t.section .rozdata,\"a\"");
2872 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2873 "\t.section .tdata,\"aw\"");
2876 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2877 "\t.section .zdata,\"aw\"");
2880 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
2881 output_section_asm_op
,
2882 "\t.section .zbss,\"aw\"");
2886 v850_select_section (tree exp
,
2887 int reloc ATTRIBUTE_UNUSED
,
2888 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
2890 if (TREE_CODE (exp
) == VAR_DECL
)
2893 if (!TREE_READONLY (exp
)
2894 || TREE_SIDE_EFFECTS (exp
)
2895 || !DECL_INITIAL (exp
)
2896 || (DECL_INITIAL (exp
) != error_mark_node
2897 && !TREE_CONSTANT (DECL_INITIAL (exp
))))
2902 switch (v850_get_data_area (exp
))
2905 return is_const
? rozdata_section
: zdata_section
;
2908 return tdata_section
;
2911 return is_const
? rosdata_section
: sdata_section
;
2914 return is_const
? readonly_data_section
: data_section
;
2917 return readonly_data_section
;
2920 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2923 v850_function_value_regno_p (const unsigned int regno
)
2925 return (regno
== 10);
2928 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2931 v850_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2933 /* Return values > 8 bytes in length in memory. */
2934 return int_size_in_bytes (type
) > 8 || TYPE_MODE (type
) == BLKmode
;
2937 /* Worker function for TARGET_FUNCTION_VALUE. */
2940 v850_function_value (const_tree valtype
,
2941 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
2942 bool outgoing ATTRIBUTE_UNUSED
)
2944 return gen_rtx_REG (TYPE_MODE (valtype
), 10);
2948 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2951 v850_setup_incoming_varargs (cumulative_args_t ca
,
2952 enum machine_mode mode ATTRIBUTE_UNUSED
,
2953 tree type ATTRIBUTE_UNUSED
,
2954 int *pretend_arg_size ATTRIBUTE_UNUSED
,
2955 int second_time ATTRIBUTE_UNUSED
)
2957 get_cumulative_args (ca
)->anonymous_args
= (!TARGET_GHS
? 1 : 0);
2960 /* Worker function for TARGET_CAN_ELIMINATE. */
2963 v850_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
2965 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
2968 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
2970 If TARGET_APP_REGS is not defined then add r2 and r5 to
2971 the pool of fixed registers. See PR 14505. */
2974 v850_conditional_register_usage (void)
2976 if (TARGET_APP_REGS
)
2978 fixed_regs
[2] = 0; call_used_regs
[2] = 0;
2979 fixed_regs
[5] = 0; call_used_regs
[5] = 1;
2983 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE. */
2986 v850_asm_trampoline_template (FILE *f
)
2988 fprintf (f
, "\tjarl .+4,r12\n");
2989 fprintf (f
, "\tld.w 12[r12],r20\n");
2990 fprintf (f
, "\tld.w 16[r12],r12\n");
2991 fprintf (f
, "\tjmp [r12]\n");
2992 fprintf (f
, "\tnop\n");
2993 fprintf (f
, "\t.long 0\n");
2994 fprintf (f
, "\t.long 0\n");
2997 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3000 v850_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
3002 rtx mem
, fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
3004 emit_block_move (m_tramp
, assemble_trampoline_template (),
3005 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
3007 mem
= adjust_address (m_tramp
, SImode
, 16);
3008 emit_move_insn (mem
, chain_value
);
3009 mem
= adjust_address (m_tramp
, SImode
, 20);
3010 emit_move_insn (mem
, fnaddr
);
3014 v850_issue_rate (void)
3016 return (TARGET_V850E2_ALL
? 2 : 1);
3019 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3022 v850_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
3024 return (GET_CODE (x
) == CONST_DOUBLE
3025 || !(GET_CODE (x
) == CONST
3026 && GET_CODE (XEXP (x
, 0)) == PLUS
3027 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
3028 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3029 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x
, 0), 1)))));
3033 v850_memory_move_cost (enum machine_mode mode
,
3034 reg_class_t reg_class ATTRIBUTE_UNUSED
,
3037 switch (GET_MODE_SIZE (mode
))
3047 return (GET_MODE_SIZE (mode
) / 2) * (in
? 3 : 1);
3051 /* V850 specific attributes. */
3053 static const struct attribute_spec v850_attribute_table
[] =
3055 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3056 affects_type_identity } */
3057 { "interrupt_handler", 0, 0, true, false, false,
3058 v850_handle_interrupt_attribute
, false },
3059 { "interrupt", 0, 0, true, false, false,
3060 v850_handle_interrupt_attribute
, false },
3061 { "sda", 0, 0, true, false, false,
3062 v850_handle_data_area_attribute
, false },
3063 { "tda", 0, 0, true, false, false,
3064 v850_handle_data_area_attribute
, false },
3065 { "zda", 0, 0, true, false, false,
3066 v850_handle_data_area_attribute
, false },
3067 { NULL
, 0, 0, false, false, false, NULL
, false }
3070 static enum unwind_info_type
3071 v850_debug_unwind_info (void)
#undef  TARGET_DEBUG_UNWIND_INFO
#define TARGET_DEBUG_UNWIND_INFO	v850_debug_unwind_info

/* Initialize the GCC target structure.  */

/* Costs and assembler output.  */
#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST	v850_memory_move_cost

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef  TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

/* Attribute handling and section selection.  */
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION  v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them.  */
#undef  TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef  TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

/* RTL costs, reorg and scheduling.  */
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

/* Function values and argument passing.  */
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value

#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef  TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs

#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

/* Trampolines and miscellaneous hooks.  */
#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING v850_strict_argument_naming

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p
3175 struct gcc_target targetm
= TARGET_INITIALIZER
;
3177 #include "gt-v850.h"