1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
33 #include "insn-attr.h"
38 #include "diagnostic-core.h"
42 #include "target-def.h"
/* Convenience macro: true iff the C strings A and B are equal.  */
47 #define streq(a,b) (strcmp (a, b) == 0)
/* Forward declaration: assembly address-printing helper, defined below
   and used by the operand-printing code.  */
50 static void v850_print_operand_address (FILE *, rtx
);
52 /* Names of the various data areas used on the v850. */
/* Per-kind default and currently-selected section-name trees, indexed by
   GHS section kind (COUNT_OF_GHS_SECTION_KINDS is declared in the target
   headers, not visible in this listing).  */
53 tree GHS_default_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
54 tree GHS_current_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
56 /* Track the current data area set by the data area pragma (which
57 can be nested). Tested by check_default_data_area. */
/* Top of the nested pragma data-area stack; NULL when no data-area
   pragma is active.  */
58 data_area_stack_element
* data_area_stack
= NULL
;
60 /* True if we don't need to check any more if the current
61 function is an interrupt handler. */
/* Cache flag: once set, v850_interrupt_p below is authoritative for the
   current function.  */
62 static int v850_interrupt_cache_p
= FALSE
;
/* NOTE(review): presumably the saved operands of a pending comparison,
   consumed by the compare/branch expanders — confirm against v850.md.  */
64 rtx v850_compare_op0
, v850_compare_op1
;
66 /* Whether current function is an interrupt handler. */
67 static int v850_interrupt_p
= FALSE
;
/* Additional output sections for the v850 data areas (read-only sdata,
   read-only zdata, tiny data, zero data, zero bss).  GTY(()) roots them
   for the garbage collector.  */
69 static GTY(()) section
* rosdata_section
;
70 static GTY(()) section
* rozdata_section
;
71 static GTY(()) section
* tdata_section
;
72 static GTY(()) section
* zdata_section
;
73 static GTY(()) section
* zbss_section
;
75 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
76 Specify whether to pass the argument by reference. */
79 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
80 enum machine_mode mode
, const_tree type
,
81 bool named ATTRIBUTE_UNUSED
)
83 unsigned HOST_WIDE_INT size
;
86 size
= int_size_in_bytes (type
);
88 size
= GET_MODE_SIZE (mode
);
93 /* Implementing the Varargs Macros. */
96 v850_strict_argument_naming (cumulative_args_t ca ATTRIBUTE_UNUSED
)
98 return !TARGET_GHS
? true : false;
101 /* Return an RTX to represent where an argument with mode MODE
102 and type TYPE will be passed to a function. If the result
103 is NULL_RTX, the argument will be pushed. */
106 v850_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
107 const_tree type
, bool named
)
109 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
110 rtx result
= NULL_RTX
;
117 size
= int_size_in_bytes (type
);
119 size
= GET_MODE_SIZE (mode
);
121 size
= (size
+ UNITS_PER_WORD
-1) & ~(UNITS_PER_WORD
-1);
125 /* Once we have stopped using argument registers, do not start up again. */
126 cum
->nbytes
= 4 * UNITS_PER_WORD
;
130 if (size
<= UNITS_PER_WORD
&& type
)
131 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
135 cum
->nbytes
= (cum
->nbytes
+ align
- 1) &~(align
- 1);
137 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
140 if (type
== NULL_TREE
141 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
144 switch (cum
->nbytes
/ UNITS_PER_WORD
)
147 result
= gen_rtx_REG (mode
, 6);
150 result
= gen_rtx_REG (mode
, 7);
153 result
= gen_rtx_REG (mode
, 8);
156 result
= gen_rtx_REG (mode
, 9);
165 /* Return the number of bytes which must be put into registers
166 for values which are part in registers and part in memory. */
168 v850_arg_partial_bytes (cumulative_args_t cum_v
, enum machine_mode mode
,
169 tree type
, bool named
)
171 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
174 if (TARGET_GHS
&& !named
)
178 size
= int_size_in_bytes (type
);
180 size
= GET_MODE_SIZE (mode
);
186 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
190 cum
->nbytes
= (cum
->nbytes
+ align
- 1) & ~ (align
- 1);
192 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
195 if (cum
->nbytes
+ size
<= 4 * UNITS_PER_WORD
)
198 if (type
== NULL_TREE
199 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
202 return 4 * UNITS_PER_WORD
- cum
->nbytes
;
205 /* Update the data in CUM to advance over an argument
206 of mode MODE and data type TYPE.
207 (TYPE is null for libcalls where that information may not be available.) */
210 v850_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
211 const_tree type
, bool named ATTRIBUTE_UNUSED
)
213 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
215 cum
->nbytes
+= (((type
&& int_size_in_bytes (type
) > 8
216 ? GET_MODE_SIZE (Pmode
)
218 ? GET_MODE_SIZE (mode
)
219 : int_size_in_bytes (type
))) + UNITS_PER_WORD
- 1)
223 /* Return the high and low words of a CONST_DOUBLE */
226 const_double_split (rtx x
, HOST_WIDE_INT
* p_high
, HOST_WIDE_INT
* p_low
)
228 if (GET_CODE (x
) == CONST_DOUBLE
)
233 switch (GET_MODE (x
))
236 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
237 REAL_VALUE_TO_TARGET_DOUBLE (rv
, t
);
238 *p_high
= t
[1]; /* since v850 is little endian */
239 *p_low
= t
[0]; /* high is second word */
243 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
244 REAL_VALUE_TO_TARGET_SINGLE (rv
, *p_high
);
250 *p_high
= CONST_DOUBLE_HIGH (x
);
251 *p_low
= CONST_DOUBLE_LOW (x
);
259 fatal_insn ("const_double_split got a bad insn:", x
);
263 /* Return the cost of the rtx R with code CODE. */
266 const_costs_int (HOST_WIDE_INT value
, int zero_cost
)
268 if (CONST_OK_FOR_I (value
))
270 else if (CONST_OK_FOR_J (value
))
272 else if (CONST_OK_FOR_K (value
))
279 const_costs (rtx r
, enum rtx_code c
)
281 HOST_WIDE_INT high
, low
;
286 return const_costs_int (INTVAL (r
), 0);
289 const_double_split (r
, &high
, &low
);
290 if (GET_MODE (r
) == SFmode
)
291 return const_costs_int (high
, 1);
293 return const_costs_int (high
, 1) + const_costs_int (low
, 1);
309 v850_rtx_costs (rtx x
,
311 int outer_code ATTRIBUTE_UNUSED
,
312 int opno ATTRIBUTE_UNUSED
,
313 int * total
, bool speed
)
315 enum rtx_code code
= (enum rtx_code
) codearg
;
324 *total
= COSTS_N_INSNS (const_costs (x
, code
));
331 if (TARGET_V850E
&& !speed
)
339 && ( GET_MODE (x
) == SImode
340 || GET_MODE (x
) == HImode
341 || GET_MODE (x
) == QImode
))
343 if (GET_CODE (XEXP (x
, 1)) == REG
)
345 else if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
347 if (CONST_OK_FOR_O (INTVAL (XEXP (x
, 1))))
349 else if (CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))))
358 if (outer_code
== COMPARE
)
367 /* Print operand X using operand code CODE to assembly language output file
371 v850_print_operand (FILE * file
, rtx x
, int code
)
373 HOST_WIDE_INT high
, low
;
378 /* We use 'c' operands with symbols for .vtinherit */
379 if (GET_CODE (x
) == SYMBOL_REF
)
381 output_addr_const(file
, x
);
388 switch ((code
== 'B' || code
== 'C')
389 ? reverse_condition (GET_CODE (x
)) : GET_CODE (x
))
392 if (code
== 'c' || code
== 'C')
393 fprintf (file
, "nz");
395 fprintf (file
, "ne");
398 if (code
== 'c' || code
== 'C')
404 fprintf (file
, "ge");
407 fprintf (file
, "gt");
410 fprintf (file
, "le");
413 fprintf (file
, "lt");
416 fprintf (file
, "nl");
422 fprintf (file
, "nh");
431 case 'F': /* high word of CONST_DOUBLE */
432 switch (GET_CODE (x
))
435 fprintf (file
, "%d", (INTVAL (x
) >= 0) ? 0 : -1);
439 const_double_split (x
, &high
, &low
);
440 fprintf (file
, "%ld", (long) high
);
447 case 'G': /* low word of CONST_DOUBLE */
448 switch (GET_CODE (x
))
451 fprintf (file
, "%ld", (long) INTVAL (x
));
455 const_double_split (x
, &high
, &low
);
456 fprintf (file
, "%ld", (long) low
);
464 fprintf (file
, "%d\n", (int)(INTVAL (x
) & 0xffff));
467 fprintf (file
, "%d", exact_log2 (INTVAL (x
)));
470 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
472 if (GET_CODE (x
) == CONST
)
473 x
= XEXP (XEXP (x
, 0), 0);
475 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
477 if (SYMBOL_REF_ZDA_P (x
))
478 fprintf (file
, "zdaoff");
479 else if (SYMBOL_REF_SDA_P (x
))
480 fprintf (file
, "sdaoff");
481 else if (SYMBOL_REF_TDA_P (x
))
482 fprintf (file
, "tdaoff");
487 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
488 output_addr_const (file
, x
);
491 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
493 if (GET_CODE (x
) == CONST
)
494 x
= XEXP (XEXP (x
, 0), 0);
496 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
498 if (SYMBOL_REF_ZDA_P (x
))
499 fprintf (file
, "r0");
500 else if (SYMBOL_REF_SDA_P (x
))
501 fprintf (file
, "gp");
502 else if (SYMBOL_REF_TDA_P (x
))
503 fprintf (file
, "ep");
507 case 'R': /* 2nd word of a double. */
508 switch (GET_CODE (x
))
511 fprintf (file
, reg_names
[REGNO (x
) + 1]);
514 x
= XEXP (adjust_address (x
, SImode
, 4), 0);
515 v850_print_operand_address (file
, x
);
516 if (GET_CODE (x
) == CONST_INT
)
517 fprintf (file
, "[r0]");
526 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
527 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), FALSE
))
534 /* Like an 'S' operand above, but for unsigned loads only. */
535 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), TRUE
))
540 case 'W': /* print the instruction suffix */
541 switch (GET_MODE (x
))
546 case QImode
: fputs (".b", file
); break;
547 case HImode
: fputs (".h", file
); break;
548 case SImode
: fputs (".w", file
); break;
549 case SFmode
: fputs (".w", file
); break;
552 case '.': /* register r0 */
553 fputs (reg_names
[0], file
);
555 case 'z': /* reg or zero */
556 if (GET_CODE (x
) == REG
)
557 fputs (reg_names
[REGNO (x
)], file
);
558 else if ((GET_MODE(x
) == SImode
559 || GET_MODE(x
) == DFmode
560 || GET_MODE(x
) == SFmode
)
561 && x
== CONST0_RTX(GET_MODE(x
)))
562 fputs (reg_names
[0], file
);
565 gcc_assert (x
== const0_rtx
);
566 fputs (reg_names
[0], file
);
570 switch (GET_CODE (x
))
573 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
574 output_address (gen_rtx_PLUS (SImode
, gen_rtx_REG (SImode
, 0),
577 output_address (XEXP (x
, 0));
581 fputs (reg_names
[REGNO (x
)], file
);
584 fputs (reg_names
[subreg_regno (x
)], file
);
591 v850_print_operand_address (file
, x
);
602 /* Output assembly language output for the address ADDR to FILE. */
605 v850_print_operand_address (FILE * file
, rtx addr
)
607 switch (GET_CODE (addr
))
610 fprintf (file
, "0[");
611 v850_print_operand (file
, addr
, 0);
615 if (GET_CODE (XEXP (addr
, 0)) == REG
)
618 fprintf (file
, "lo(");
619 v850_print_operand (file
, XEXP (addr
, 1), 0);
620 fprintf (file
, ")[");
621 v850_print_operand (file
, XEXP (addr
, 0), 0);
626 if (GET_CODE (XEXP (addr
, 0)) == REG
627 || GET_CODE (XEXP (addr
, 0)) == SUBREG
)
630 v850_print_operand (file
, XEXP (addr
, 1), 0);
632 v850_print_operand (file
, XEXP (addr
, 0), 0);
637 v850_print_operand (file
, XEXP (addr
, 0), 0);
639 v850_print_operand (file
, XEXP (addr
, 1), 0);
644 const char *off_name
= NULL
;
645 const char *reg_name
= NULL
;
647 if (SYMBOL_REF_ZDA_P (addr
))
652 else if (SYMBOL_REF_SDA_P (addr
))
657 else if (SYMBOL_REF_TDA_P (addr
))
664 fprintf (file
, "%s(", off_name
);
665 output_addr_const (file
, addr
);
667 fprintf (file
, ")[%s]", reg_name
);
671 if (special_symbolref_operand (addr
, VOIDmode
))
673 rtx x
= XEXP (XEXP (addr
, 0), 0);
674 const char *off_name
;
675 const char *reg_name
;
677 if (SYMBOL_REF_ZDA_P (x
))
682 else if (SYMBOL_REF_SDA_P (x
))
687 else if (SYMBOL_REF_TDA_P (x
))
695 fprintf (file
, "%s(", off_name
);
696 output_addr_const (file
, addr
);
697 fprintf (file
, ")[%s]", reg_name
);
700 output_addr_const (file
, addr
);
703 output_addr_const (file
, addr
);
709 v850_print_operand_punct_valid_p (unsigned char code
)
714 /* When assemble_integer is used to emit the offsets for a switch
715 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
716 output_addr_const will normally barf at this, but it is OK to omit
717 the truncate and just emit the difference of the two labels. The
718 .hword directive will automatically handle the truncation for us.
720 Returns true if rtx was handled, false otherwise. */
723 v850_output_addr_const_extra (FILE * file
, rtx x
)
725 if (GET_CODE (x
) != TRUNCATE
)
730 /* We must also handle the case where the switch table was passed a
731 constant value and so has been collapsed. In this case the first
732 label will have been deleted. In such a case it is OK to emit
733 nothing, since the table will not be used.
734 (cf gcc.c-torture/compile/990801-1.c). */
735 if (GET_CODE (x
) == MINUS
736 && GET_CODE (XEXP (x
, 0)) == LABEL_REF
737 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == CODE_LABEL
738 && INSN_DELETED_P (XEXP (XEXP (x
, 0), 0)))
741 output_addr_const (file
, x
);
745 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
749 output_move_single (rtx
* operands
)
751 rtx dst
= operands
[0];
752 rtx src
= operands
[1];
759 else if (GET_CODE (src
) == CONST_INT
)
761 HOST_WIDE_INT value
= INTVAL (src
);
763 if (CONST_OK_FOR_J (value
)) /* Signed 5-bit immediate. */
766 else if (CONST_OK_FOR_K (value
)) /* Signed 16-bit immediate. */
767 return "movea %1,%.,%0";
769 else if (CONST_OK_FOR_L (value
)) /* Upper 16 bits were set. */
770 return "movhi hi0(%1),%.,%0";
772 /* A random constant. */
773 else if (TARGET_V850E
|| TARGET_V850E2_ALL
)
776 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
779 else if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
781 HOST_WIDE_INT high
, low
;
783 const_double_split (src
, &high
, &low
);
785 if (CONST_OK_FOR_J (high
)) /* Signed 5-bit immediate. */
788 else if (CONST_OK_FOR_K (high
)) /* Signed 16-bit immediate. */
789 return "movea %F1,%.,%0";
791 else if (CONST_OK_FOR_L (high
)) /* Upper 16 bits were set. */
792 return "movhi hi0(%F1),%.,%0";
794 /* A random constant. */
795 else if (TARGET_V850E
|| TARGET_V850E2_ALL
)
799 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
802 else if (GET_CODE (src
) == MEM
)
803 return "%S1ld%W1 %1,%0";
805 else if (special_symbolref_operand (src
, VOIDmode
))
806 return "movea %O1(%P1),%Q1,%0";
808 else if (GET_CODE (src
) == LABEL_REF
809 || GET_CODE (src
) == SYMBOL_REF
810 || GET_CODE (src
) == CONST
)
812 if (TARGET_V850E
|| TARGET_V850E2_ALL
)
813 return "mov hilo(%1),%0";
815 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
818 else if (GET_CODE (src
) == HIGH
)
819 return "movhi hi(%1),%.,%0";
821 else if (GET_CODE (src
) == LO_SUM
)
823 operands
[2] = XEXP (src
, 0);
824 operands
[3] = XEXP (src
, 1);
825 return "movea lo(%3),%2,%0";
829 else if (GET_CODE (dst
) == MEM
)
832 return "%S0st%W0 %1,%0";
834 else if (GET_CODE (src
) == CONST_INT
&& INTVAL (src
) == 0)
835 return "%S0st%W0 %.,%0";
837 else if (GET_CODE (src
) == CONST_DOUBLE
838 && CONST0_RTX (GET_MODE (dst
)) == src
)
839 return "%S0st%W0 %.,%0";
842 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode
, dst
, src
));
846 /* Generate comparison code. */
848 v850_float_z_comparison_operator (rtx op
, enum machine_mode mode
)
850 enum rtx_code code
= GET_CODE (op
);
852 if (GET_RTX_CLASS (code
) != RTX_COMPARE
853 && GET_RTX_CLASS (code
) != RTX_COMM_COMPARE
)
856 if (mode
!= GET_MODE (op
) && mode
!= VOIDmode
)
859 if ((GET_CODE (XEXP (op
, 0)) != REG
860 || REGNO (XEXP (op
, 0)) != CC_REGNUM
)
861 || XEXP (op
, 1) != const0_rtx
)
864 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_LTmode
)
866 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_LEmode
)
868 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_EQmode
)
875 v850_float_nz_comparison_operator (rtx op
, enum machine_mode mode
)
877 enum rtx_code code
= GET_CODE (op
);
879 if (GET_RTX_CLASS (code
) != RTX_COMPARE
880 && GET_RTX_CLASS (code
) != RTX_COMM_COMPARE
)
883 if (mode
!= GET_MODE (op
) && mode
!= VOIDmode
)
886 if ((GET_CODE (XEXP (op
, 0)) != REG
887 || REGNO (XEXP (op
, 0)) != CC_REGNUM
)
888 || XEXP (op
, 1) != const0_rtx
)
891 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_GTmode
)
893 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_GEmode
)
895 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_NEmode
)
902 v850_select_cc_mode (enum rtx_code cond
, rtx op0
, rtx op1 ATTRIBUTE_UNUSED
)
904 if (GET_MODE_CLASS (GET_MODE (op0
)) == MODE_FLOAT
)
909 return CC_FPU_LEmode
;
911 return CC_FPU_GEmode
;
913 return CC_FPU_LTmode
;
915 return CC_FPU_GTmode
;
917 return CC_FPU_EQmode
;
919 return CC_FPU_NEmode
;
928 v850_gen_float_compare (enum rtx_code cond
, enum machine_mode mode ATTRIBUTE_UNUSED
, rtx op0
, rtx op1
)
930 if (GET_MODE(op0
) == DFmode
)
935 emit_insn (gen_cmpdf_le_insn (op0
, op1
));
938 emit_insn (gen_cmpdf_ge_insn (op0
, op1
));
941 emit_insn (gen_cmpdf_lt_insn (op0
, op1
));
944 emit_insn (gen_cmpdf_gt_insn (op0
, op1
));
947 emit_insn (gen_cmpdf_eq_insn (op0
, op1
));
950 emit_insn (gen_cmpdf_ne_insn (op0
, op1
));
956 else if (GET_MODE(v850_compare_op0
) == SFmode
)
961 emit_insn (gen_cmpsf_le_insn(op0
, op1
));
964 emit_insn (gen_cmpsf_ge_insn(op0
, op1
));
967 emit_insn (gen_cmpsf_lt_insn(op0
, op1
));
970 emit_insn (gen_cmpsf_gt_insn(op0
, op1
));
973 emit_insn (gen_cmpsf_eq_insn(op0
, op1
));
976 emit_insn (gen_cmpsf_ne_insn(op0
, op1
));
987 return v850_select_cc_mode (cond
, op0
, op1
);
991 v850_gen_compare (enum rtx_code cond
, enum machine_mode mode
, rtx op0
, rtx op1
)
993 if (GET_MODE_CLASS(GET_MODE (op0
)) != MODE_FLOAT
)
995 emit_insn (gen_cmpsi_insn (op0
, op1
));
996 return gen_rtx_fmt_ee (cond
, mode
, gen_rtx_REG(CCmode
, CC_REGNUM
), const0_rtx
);
1001 mode
= v850_gen_float_compare (cond
, mode
, op0
, op1
);
1002 cc_reg
= gen_rtx_REG (mode
, CC_REGNUM
);
1003 emit_insn (gen_rtx_SET(mode
, cc_reg
, gen_rtx_REG (mode
, FCC_REGNUM
)));
1005 return gen_rtx_fmt_ee (cond
, mode
, cc_reg
, const0_rtx
);
1009 /* Return maximum offset supported for a short EP memory reference of mode
1010 MODE and signedness UNSIGNEDP. */
1013 ep_memory_offset (enum machine_mode mode
, int unsignedp ATTRIBUTE_UNUSED
)
1020 if (TARGET_SMALL_SLD
)
1021 max_offset
= (1 << 4);
1022 else if ((TARGET_V850E
|| TARGET_V850E2_ALL
)
1024 max_offset
= (1 << 4);
1026 max_offset
= (1 << 7);
1030 if (TARGET_SMALL_SLD
)
1031 max_offset
= (1 << 5);
1032 else if ((TARGET_V850E
|| TARGET_V850E2_ALL
)
1034 max_offset
= (1 << 5);
1036 max_offset
= (1 << 8);
1041 max_offset
= (1 << 8);
1051 /* Return true if OP is a valid short EP memory reference */
1054 ep_memory_operand (rtx op
, enum machine_mode mode
, int unsigned_load
)
1060 /* If we are not using the EP register on a per-function basis
1061 then do not allow this optimization at all. This is to
1062 prevent the use of the SLD/SST instructions which cannot be
1063 guaranteed to work properly due to a hardware bug. */
1067 if (GET_CODE (op
) != MEM
)
1070 max_offset
= ep_memory_offset (mode
, unsigned_load
);
1072 mask
= GET_MODE_SIZE (mode
) - 1;
1074 addr
= XEXP (op
, 0);
1075 if (GET_CODE (addr
) == CONST
)
1076 addr
= XEXP (addr
, 0);
1078 switch (GET_CODE (addr
))
1084 return SYMBOL_REF_TDA_P (addr
);
1087 return REGNO (addr
) == EP_REGNUM
;
1090 op0
= XEXP (addr
, 0);
1091 op1
= XEXP (addr
, 1);
1092 if (GET_CODE (op1
) == CONST_INT
1093 && INTVAL (op1
) < max_offset
1094 && INTVAL (op1
) >= 0
1095 && (INTVAL (op1
) & mask
) == 0)
1097 if (GET_CODE (op0
) == REG
&& REGNO (op0
) == EP_REGNUM
)
1100 if (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_TDA_P (op0
))
1109 /* Substitute memory references involving a pointer, to use the ep pointer,
1110 taking care to save and preserve the ep. */
1113 substitute_ep_register (rtx first_insn
,
1120 rtx reg
= gen_rtx_REG (Pmode
, regno
);
1125 df_set_regs_ever_live (1, true);
1126 *p_r1
= gen_rtx_REG (Pmode
, 1);
1127 *p_ep
= gen_rtx_REG (Pmode
, 30);
1132 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1133 2 * (uses
- 3), uses
, reg_names
[regno
],
1134 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)),
1135 INSN_UID (first_insn
), INSN_UID (last_insn
));
1137 if (GET_CODE (first_insn
) == NOTE
)
1138 first_insn
= next_nonnote_insn (first_insn
);
1140 last_insn
= next_nonnote_insn (last_insn
);
1141 for (insn
= first_insn
; insn
&& insn
!= last_insn
; insn
= NEXT_INSN (insn
))
1143 if (GET_CODE (insn
) == INSN
)
1145 rtx pattern
= single_set (insn
);
1147 /* Replace the memory references. */
1151 /* Memory operands are signed by default. */
1152 int unsignedp
= FALSE
;
1154 if (GET_CODE (SET_DEST (pattern
)) == MEM
1155 && GET_CODE (SET_SRC (pattern
)) == MEM
)
1158 else if (GET_CODE (SET_DEST (pattern
)) == MEM
)
1159 p_mem
= &SET_DEST (pattern
);
1161 else if (GET_CODE (SET_SRC (pattern
)) == MEM
)
1162 p_mem
= &SET_SRC (pattern
);
1164 else if (GET_CODE (SET_SRC (pattern
)) == SIGN_EXTEND
1165 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1166 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1168 else if (GET_CODE (SET_SRC (pattern
)) == ZERO_EXTEND
1169 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1171 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1179 rtx addr
= XEXP (*p_mem
, 0);
1181 if (GET_CODE (addr
) == REG
&& REGNO (addr
) == (unsigned) regno
)
1182 *p_mem
= change_address (*p_mem
, VOIDmode
, *p_ep
);
1184 else if (GET_CODE (addr
) == PLUS
1185 && GET_CODE (XEXP (addr
, 0)) == REG
1186 && REGNO (XEXP (addr
, 0)) == (unsigned) regno
1187 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1188 && ((INTVAL (XEXP (addr
, 1)))
1189 < ep_memory_offset (GET_MODE (*p_mem
),
1191 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1192 *p_mem
= change_address (*p_mem
, VOIDmode
,
1193 gen_rtx_PLUS (Pmode
,
1201 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1202 insn
= prev_nonnote_insn (first_insn
);
1203 if (insn
&& GET_CODE (insn
) == INSN
1204 && GET_CODE (PATTERN (insn
)) == SET
1205 && SET_DEST (PATTERN (insn
)) == *p_ep
1206 && SET_SRC (PATTERN (insn
)) == *p_r1
)
1209 emit_insn_before (gen_rtx_SET (Pmode
, *p_r1
, *p_ep
), first_insn
);
1211 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, reg
), first_insn
);
1212 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, *p_r1
), last_insn
);
1216 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1217 the -mep mode to copy heavily used pointers to ep to use the implicit
1229 regs
[FIRST_PSEUDO_REGISTER
];
1238 /* If not ep mode, just return now. */
1242 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1245 regs
[i
].first_insn
= NULL_RTX
;
1246 regs
[i
].last_insn
= NULL_RTX
;
1249 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1251 switch (GET_CODE (insn
))
1253 /* End of basic block */
1260 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1262 if (max_uses
< regs
[i
].uses
)
1264 max_uses
= regs
[i
].uses
;
1270 substitute_ep_register (regs
[max_regno
].first_insn
,
1271 regs
[max_regno
].last_insn
,
1272 max_uses
, max_regno
, &r1
, &ep
);
1276 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1279 regs
[i
].first_insn
= NULL_RTX
;
1280 regs
[i
].last_insn
= NULL_RTX
;
1288 pattern
= single_set (insn
);
1290 /* See if there are any memory references we can shorten */
1293 rtx src
= SET_SRC (pattern
);
1294 rtx dest
= SET_DEST (pattern
);
1296 /* Memory operands are signed by default. */
1297 int unsignedp
= FALSE
;
1299 /* We might have (SUBREG (MEM)) here, so just get rid of the
1300 subregs to make this code simpler. */
1301 if (GET_CODE (dest
) == SUBREG
1302 && (GET_CODE (SUBREG_REG (dest
)) == MEM
1303 || GET_CODE (SUBREG_REG (dest
)) == REG
))
1304 alter_subreg (&dest
);
1305 if (GET_CODE (src
) == SUBREG
1306 && (GET_CODE (SUBREG_REG (src
)) == MEM
1307 || GET_CODE (SUBREG_REG (src
)) == REG
))
1308 alter_subreg (&src
);
1310 if (GET_CODE (dest
) == MEM
&& GET_CODE (src
) == MEM
)
1313 else if (GET_CODE (dest
) == MEM
)
1316 else if (GET_CODE (src
) == MEM
)
1319 else if (GET_CODE (src
) == SIGN_EXTEND
1320 && GET_CODE (XEXP (src
, 0)) == MEM
)
1321 mem
= XEXP (src
, 0);
1323 else if (GET_CODE (src
) == ZERO_EXTEND
1324 && GET_CODE (XEXP (src
, 0)) == MEM
)
1326 mem
= XEXP (src
, 0);
1332 if (mem
&& ep_memory_operand (mem
, GET_MODE (mem
), unsignedp
))
1335 else if (!use_ep
&& mem
1336 && GET_MODE_SIZE (GET_MODE (mem
)) <= UNITS_PER_WORD
)
1338 rtx addr
= XEXP (mem
, 0);
1342 if (GET_CODE (addr
) == REG
)
1345 regno
= REGNO (addr
);
1348 else if (GET_CODE (addr
) == PLUS
1349 && GET_CODE (XEXP (addr
, 0)) == REG
1350 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1351 && ((INTVAL (XEXP (addr
, 1)))
1352 < ep_memory_offset (GET_MODE (mem
), unsignedp
))
1353 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1356 regno
= REGNO (XEXP (addr
, 0));
1365 regs
[regno
].last_insn
= insn
;
1366 if (!regs
[regno
].first_insn
)
1367 regs
[regno
].first_insn
= insn
;
1371 /* Loading up a register in the basic block zaps any savings
1373 if (GET_CODE (dest
) == REG
)
1375 enum machine_mode mode
= GET_MODE (dest
);
1379 regno
= REGNO (dest
);
1380 endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
1384 /* See if we can use the pointer before this
1389 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1391 if (max_uses
< regs
[i
].uses
)
1393 max_uses
= regs
[i
].uses
;
1399 && max_regno
>= regno
1400 && max_regno
< endregno
)
1402 substitute_ep_register (regs
[max_regno
].first_insn
,
1403 regs
[max_regno
].last_insn
,
1404 max_uses
, max_regno
, &r1
,
1407 /* Since we made a substitution, zap all remembered
1409 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1412 regs
[i
].first_insn
= NULL_RTX
;
1413 regs
[i
].last_insn
= NULL_RTX
;
1418 for (i
= regno
; i
< endregno
; i
++)
1421 regs
[i
].first_insn
= NULL_RTX
;
1422 regs
[i
].last_insn
= NULL_RTX
;
1430 /* # of registers saved by the interrupt handler. */
1431 #define INTERRUPT_FIXED_NUM 5
1433 /* # of bytes for registers saved by the interrupt handler. */
/* 4 bytes per 32-bit register.  */
1434 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1436 /* # of words saved for other registers. */
/* 30 general registers minus the specially-handled fixed set above.  */
1437 #define INTERRUPT_ALL_SAVE_NUM \
1438 (30 - INTERRUPT_FIXED_NUM)
1440 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1443 compute_register_save_size (long * p_reg_saved
)
1447 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1448 int call_p
= df_regs_ever_live_p (LINK_POINTER_REGNUM
);
1451 /* Count the return pointer if we need to save it. */
1452 if (crtl
->profile
&& !call_p
)
1454 df_set_regs_ever_live (LINK_POINTER_REGNUM
, true);
1458 /* Count space for the register saves. */
1459 if (interrupt_handler
)
1461 for (i
= 0; i
<= 31; i
++)
1465 if (df_regs_ever_live_p (i
) || call_p
)
1468 reg_saved
|= 1L << i
;
1472 /* We don't save/restore r0 or the stack pointer */
1474 case STACK_POINTER_REGNUM
:
1477 /* For registers with fixed use, we save them, set them to the
1478 appropriate value, and then restore them.
1479 These registers are handled specially, so don't list them
1480 on the list of registers to save in the prologue. */
1481 case 1: /* temp used to hold ep */
1483 case 10: /* temp used to call interrupt save/restore */
1484 case 11: /* temp used to call interrupt save/restore (long call) */
1485 case EP_REGNUM
: /* ep */
1492 /* Find the first register that needs to be saved. */
1493 for (i
= 0; i
<= 31; i
++)
1494 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1495 || i
== LINK_POINTER_REGNUM
))
1498 /* If it is possible that an out-of-line helper function might be
1499 used to generate the prologue for the current function, then we
1500 need to cover the possibility that such a helper function will
1501 be used, despite the fact that there might be gaps in the list of
1502 registers that need to be saved. To detect this we note that the
1503 helper functions always push at least register r29 (provided
1504 that the function is not an interrupt handler). */
1506 if (TARGET_PROLOG_FUNCTION
1507 && (i
== 2 || ((i
>= 20) && (i
< 30))))
1512 reg_saved
|= 1L << i
;
1517 /* Helper functions save all registers between the starting
1518 register and the last register, regardless of whether they
1519 are actually used by the function or not. */
1520 for (; i
<= 29; i
++)
1523 reg_saved
|= 1L << i
;
1526 if (df_regs_ever_live_p (LINK_POINTER_REGNUM
))
1529 reg_saved
|= 1L << LINK_POINTER_REGNUM
;
1534 for (; i
<= 31; i
++)
1535 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1536 || i
== LINK_POINTER_REGNUM
))
1539 reg_saved
|= 1L << i
;
1545 *p_reg_saved
= reg_saved
;
1551 compute_frame_size (int size
, long * p_reg_saved
)
1554 + compute_register_save_size (p_reg_saved
)
1555 + crtl
->outgoing_args_size
);
1559 use_prolog_function (int num_save
, int frame_size
)
1561 int alloc_stack
= (4 * num_save
);
1562 int unalloc_stack
= frame_size
- alloc_stack
;
1563 int save_func_len
, restore_func_len
;
1564 int save_normal_len
, restore_normal_len
;
1566 if (! TARGET_DISABLE_CALLT
)
1567 save_func_len
= restore_func_len
= 2;
1569 save_func_len
= restore_func_len
= TARGET_LONG_CALLS
? (4+4+4+2+2) : 4;
1573 save_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1574 restore_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1577 /* See if we would have used ep to save the stack. */
1578 if (TARGET_EP
&& num_save
> 3 && (unsigned)frame_size
< 255)
1579 save_normal_len
= restore_normal_len
= (3 * 2) + (2 * num_save
);
1581 save_normal_len
= restore_normal_len
= 4 * num_save
;
1583 save_normal_len
+= CONST_OK_FOR_J (-frame_size
) ? 2 : 4;
1584 restore_normal_len
+= (CONST_OK_FOR_J (frame_size
) ? 2 : 4) + 2;
1586 /* Don't bother checking if we don't actually save any space.
1587 This happens for instance if one register is saved and additional
1588 stack space is allocated. */
1589 return ((save_func_len
+ restore_func_len
) < (save_normal_len
+ restore_normal_len
));
1593 expand_prologue (void)
1596 unsigned int size
= get_frame_size ();
1597 unsigned int actual_fsize
;
1598 unsigned int init_stack_alloc
= 0;
1601 unsigned int num_save
;
1603 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1606 actual_fsize
= compute_frame_size (size
, ®_saved
);
1608 /* Save/setup global registers for interrupt functions right now. */
1609 if (interrupt_handler
)
1611 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E
|| TARGET_V850E2_ALL
))
1612 emit_insn (gen_callt_save_interrupt ());
1614 emit_insn (gen_save_interrupt ());
1616 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1618 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1619 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1622 /* Identify all of the saved registers. */
1624 for (i
= 1; i
< 32; i
++)
1626 if (((1L << i
) & reg_saved
) != 0)
1627 save_regs
[num_save
++] = gen_rtx_REG (Pmode
, i
);
1630 /* See if we have an insn that allocates stack space and saves the particular
1631 registers we want to. */
1632 save_all
= NULL_RTX
;
1633 if (TARGET_PROLOG_FUNCTION
&& num_save
> 0)
1635 if (use_prolog_function (num_save
, actual_fsize
))
1637 int alloc_stack
= 4 * num_save
;
1640 save_all
= gen_rtx_PARALLEL
1642 rtvec_alloc (num_save
+ 1
1643 + (TARGET_DISABLE_CALLT
? (TARGET_LONG_CALLS
? 2 : 1) : 0)));
1645 XVECEXP (save_all
, 0, 0)
1646 = gen_rtx_SET (VOIDmode
,
1648 gen_rtx_PLUS (Pmode
,
1650 GEN_INT(-alloc_stack
)));
1651 for (i
= 0; i
< num_save
; i
++)
1654 XVECEXP (save_all
, 0, i
+1)
1655 = gen_rtx_SET (VOIDmode
,
1657 gen_rtx_PLUS (Pmode
,
1663 if (TARGET_DISABLE_CALLT
)
1665 XVECEXP (save_all
, 0, num_save
+ 1)
1666 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 10));
1668 if (TARGET_LONG_CALLS
)
1669 XVECEXP (save_all
, 0, num_save
+ 2)
1670 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
1673 code
= recog (save_all
, NULL_RTX
, NULL
);
1676 rtx insn
= emit_insn (save_all
);
1677 INSN_CODE (insn
) = code
;
1678 actual_fsize
-= alloc_stack
;
1682 save_all
= NULL_RTX
;
1686 /* If no prolog save function is available, store the registers the old
1687 fashioned way (one by one). */
1690 /* Special case interrupt functions that save all registers for a call. */
1691 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1693 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E
|| TARGET_V850E2_ALL
))
1694 emit_insn (gen_callt_save_all_interrupt ());
1696 emit_insn (gen_save_all_interrupt ());
1701 /* If the stack is too big, allocate it in chunks so we can do the
1702 register saves. We use the register save size so we use the ep
1704 if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1705 init_stack_alloc
= compute_register_save_size (NULL
);
1707 init_stack_alloc
= actual_fsize
;
1709 /* Save registers at the beginning of the stack frame. */
1710 offset
= init_stack_alloc
- 4;
1712 if (init_stack_alloc
)
1713 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1715 GEN_INT (- (signed) init_stack_alloc
)));
1717 /* Save the return pointer first. */
1718 if (num_save
> 0 && REGNO (save_regs
[num_save
-1]) == LINK_POINTER_REGNUM
)
1720 emit_move_insn (gen_rtx_MEM (SImode
,
1721 plus_constant (Pmode
,
1724 save_regs
[--num_save
]);
1728 for (i
= 0; i
< num_save
; i
++)
1730 emit_move_insn (gen_rtx_MEM (SImode
,
1731 plus_constant (Pmode
,
1740 /* Allocate the rest of the stack that was not allocated above (either it is
1741 > 32K or we just called a function to save the registers and needed more
1743 if (actual_fsize
> init_stack_alloc
)
1745 int diff
= actual_fsize
- init_stack_alloc
;
1746 if (CONST_OK_FOR_K (-diff
))
1747 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1752 rtx reg
= gen_rtx_REG (Pmode
, 12);
1753 emit_move_insn (reg
, GEN_INT (-diff
));
1754 emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
, reg
));
1758 /* If we need a frame pointer, set it up now. */
1759 if (frame_pointer_needed
)
1760 emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
1765 expand_epilogue (void)
1768 unsigned int size
= get_frame_size ();
1770 int actual_fsize
= compute_frame_size (size
, ®_saved
);
1771 rtx restore_regs
[32];
1773 unsigned int num_restore
;
1775 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1777 /* Eliminate the initial stack stored by interrupt functions. */
1778 if (interrupt_handler
)
1780 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1781 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1782 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1785 /* Cut off any dynamic stack created. */
1786 if (frame_pointer_needed
)
1787 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1789 /* Identify all of the saved registers. */
1791 for (i
= 1; i
< 32; i
++)
1793 if (((1L << i
) & reg_saved
) != 0)
1794 restore_regs
[num_restore
++] = gen_rtx_REG (Pmode
, i
);
1797 /* See if we have an insn that restores the particular registers we
1799 restore_all
= NULL_RTX
;
1801 if (TARGET_PROLOG_FUNCTION
1803 && !interrupt_handler
)
1805 int alloc_stack
= (4 * num_restore
);
1807 /* Don't bother checking if we don't actually save any space. */
1808 if (use_prolog_function (num_restore
, actual_fsize
))
1811 restore_all
= gen_rtx_PARALLEL (VOIDmode
,
1812 rtvec_alloc (num_restore
+ 2));
1813 XVECEXP (restore_all
, 0, 0) = ret_rtx
;
1814 XVECEXP (restore_all
, 0, 1)
1815 = gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1816 gen_rtx_PLUS (Pmode
,
1818 GEN_INT (alloc_stack
)));
1820 offset
= alloc_stack
- 4;
1821 for (i
= 0; i
< num_restore
; i
++)
1823 XVECEXP (restore_all
, 0, i
+2)
1824 = gen_rtx_SET (VOIDmode
,
1827 gen_rtx_PLUS (Pmode
,
1833 code
= recog (restore_all
, NULL_RTX
, NULL
);
1839 actual_fsize
-= alloc_stack
;
1842 if (CONST_OK_FOR_K (actual_fsize
))
1843 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1845 GEN_INT (actual_fsize
)));
1848 rtx reg
= gen_rtx_REG (Pmode
, 12);
1849 emit_move_insn (reg
, GEN_INT (actual_fsize
));
1850 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1856 insn
= emit_jump_insn (restore_all
);
1857 INSN_CODE (insn
) = code
;
1861 restore_all
= NULL_RTX
;
1865 /* If no epilogue save function is available, restore the registers the
1866 old fashioned way (one by one). */
1869 unsigned int init_stack_free
;
1871 /* If the stack is large, we need to cut it down in 2 pieces. */
1872 if (interrupt_handler
)
1873 init_stack_free
= 0;
1874 else if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1875 init_stack_free
= 4 * num_restore
;
1877 init_stack_free
= (signed) actual_fsize
;
1879 /* Deallocate the rest of the stack if it is > 32K. */
1880 if ((unsigned int) actual_fsize
> init_stack_free
)
1884 diff
= actual_fsize
- init_stack_free
;
1886 if (CONST_OK_FOR_K (diff
))
1887 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1892 rtx reg
= gen_rtx_REG (Pmode
, 12);
1893 emit_move_insn (reg
, GEN_INT (diff
));
1894 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1900 /* Special case interrupt functions that save all registers
1902 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1904 if (! TARGET_DISABLE_CALLT
)
1905 emit_insn (gen_callt_restore_all_interrupt ());
1907 emit_insn (gen_restore_all_interrupt ());
1911 /* Restore registers from the beginning of the stack frame. */
1912 int offset
= init_stack_free
- 4;
1914 /* Restore the return pointer first. */
1916 && REGNO (restore_regs
[num_restore
- 1]) == LINK_POINTER_REGNUM
)
1918 emit_move_insn (restore_regs
[--num_restore
],
1919 gen_rtx_MEM (SImode
,
1920 plus_constant (Pmode
,
1926 for (i
= 0; i
< num_restore
; i
++)
1928 emit_move_insn (restore_regs
[i
],
1929 gen_rtx_MEM (SImode
,
1930 plus_constant (Pmode
,
1934 emit_use (restore_regs
[i
]);
1938 /* Cut back the remainder of the stack. */
1939 if (init_stack_free
)
1940 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1942 GEN_INT (init_stack_free
)));
1945 /* And return or use reti for interrupt handlers. */
1946 if (interrupt_handler
)
1948 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E
|| TARGET_V850E2_ALL
))
1949 emit_insn (gen_callt_return_interrupt ());
1951 emit_jump_insn (gen_return_interrupt ());
1953 else if (actual_fsize
)
1954 emit_jump_insn (gen_return_internal ());
1956 emit_jump_insn (gen_return_simple ());
1959 v850_interrupt_cache_p
= FALSE
;
1960 v850_interrupt_p
= FALSE
;
1963 /* Update the condition code from the insn. */
1965 notice_update_cc (rtx body
, rtx insn
)
1967 switch (get_attr_cc (insn
))
1970 /* Insn does not affect CC at all. */
1974 /* Insn does not change CC, but the 0'th operand has been changed. */
1975 if (cc_status
.value1
!= 0
1976 && reg_overlap_mentioned_p (recog_data
.operand
[0], cc_status
.value1
))
1977 cc_status
.value1
= 0;
1981 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
1982 V,C is in an unusable state. */
1984 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
| CC_NO_CARRY
;
1985 cc_status
.value1
= recog_data
.operand
[0];
1989 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
1990 C is in an unusable state. */
1992 cc_status
.flags
|= CC_NO_CARRY
;
1993 cc_status
.value1
= recog_data
.operand
[0];
1997 /* The insn is a compare instruction. */
1999 cc_status
.value1
= SET_SRC (body
);
2003 /* Insn doesn't leave CC in a usable state. */
2012 /* Retrieve the data area that has been chosen for the given decl. */
2015 v850_get_data_area (tree decl
)
2017 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2018 return DATA_AREA_SDA
;
2020 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2021 return DATA_AREA_TDA
;
2023 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2024 return DATA_AREA_ZDA
;
2026 return DATA_AREA_NORMAL
;
2029 /* Store the indicated data area in the decl's attributes. */
2032 v850_set_data_area (tree decl
, v850_data_area data_area
)
2038 case DATA_AREA_SDA
: name
= get_identifier ("sda"); break;
2039 case DATA_AREA_TDA
: name
= get_identifier ("tda"); break;
2040 case DATA_AREA_ZDA
: name
= get_identifier ("zda"); break;
2045 DECL_ATTRIBUTES (decl
) = tree_cons
2046 (name
, NULL
, DECL_ATTRIBUTES (decl
));
2049 /* Handle an "interrupt" attribute; arguments as in
2050 struct attribute_spec.handler. */
2052 v850_handle_interrupt_attribute (tree
* node
,
2054 tree args ATTRIBUTE_UNUSED
,
2055 int flags ATTRIBUTE_UNUSED
,
2056 bool * no_add_attrs
)
2058 if (TREE_CODE (*node
) != FUNCTION_DECL
)
2060 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2062 *no_add_attrs
= true;
2068 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2069 struct attribute_spec.handler. */
2071 v850_handle_data_area_attribute (tree
* node
,
2073 tree args ATTRIBUTE_UNUSED
,
2074 int flags ATTRIBUTE_UNUSED
,
2075 bool * no_add_attrs
)
2077 v850_data_area data_area
;
2078 v850_data_area area
;
2081 /* Implement data area attribute. */
2082 if (is_attribute_p ("sda", name
))
2083 data_area
= DATA_AREA_SDA
;
2084 else if (is_attribute_p ("tda", name
))
2085 data_area
= DATA_AREA_TDA
;
2086 else if (is_attribute_p ("zda", name
))
2087 data_area
= DATA_AREA_ZDA
;
2091 switch (TREE_CODE (decl
))
2094 if (current_function_decl
!= NULL_TREE
)
2096 error_at (DECL_SOURCE_LOCATION (decl
),
2097 "data area attributes cannot be specified for "
2099 *no_add_attrs
= true;
2105 area
= v850_get_data_area (decl
);
2106 if (area
!= DATA_AREA_NORMAL
&& data_area
!= area
)
2108 error ("data area of %q+D conflicts with previous declaration",
2110 *no_add_attrs
= true;
2122 /* Return nonzero if FUNC is an interrupt function as specified
2123 by the "interrupt" attribute. */
2126 v850_interrupt_function_p (tree func
)
2131 if (v850_interrupt_cache_p
)
2132 return v850_interrupt_p
;
2134 if (TREE_CODE (func
) != FUNCTION_DECL
)
2137 a
= lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func
));
2143 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
2144 ret
= a
!= NULL_TREE
;
2147 /* Its not safe to trust global variables until after function inlining has
2149 if (reload_completed
| reload_in_progress
)
2150 v850_interrupt_p
= ret
;
2157 v850_encode_data_area (tree decl
, rtx symbol
)
2161 /* Map explicit sections into the appropriate attribute */
2162 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2164 if (DECL_SECTION_NAME (decl
))
2166 const char *name
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
2168 if (streq (name
, ".zdata") || streq (name
, ".zbss"))
2169 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2171 else if (streq (name
, ".sdata") || streq (name
, ".sbss"))
2172 v850_set_data_area (decl
, DATA_AREA_SDA
);
2174 else if (streq (name
, ".tdata"))
2175 v850_set_data_area (decl
, DATA_AREA_TDA
);
2178 /* If no attribute, support -m{zda,sda,tda}=n */
2181 int size
= int_size_in_bytes (TREE_TYPE (decl
));
2185 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_TDA
])
2186 v850_set_data_area (decl
, DATA_AREA_TDA
);
2188 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_SDA
])
2189 v850_set_data_area (decl
, DATA_AREA_SDA
);
2191 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_ZDA
])
2192 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2195 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2199 flags
= SYMBOL_REF_FLAGS (symbol
);
2200 switch (v850_get_data_area (decl
))
2202 case DATA_AREA_ZDA
: flags
|= SYMBOL_FLAG_ZDA
; break;
2203 case DATA_AREA_TDA
: flags
|= SYMBOL_FLAG_TDA
; break;
2204 case DATA_AREA_SDA
: flags
|= SYMBOL_FLAG_SDA
; break;
2205 default: gcc_unreachable ();
2207 SYMBOL_REF_FLAGS (symbol
) = flags
;
2211 v850_encode_section_info (tree decl
, rtx rtl
, int first
)
2213 default_encode_section_info (decl
, rtl
, first
);
2215 if (TREE_CODE (decl
) == VAR_DECL
2216 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2217 v850_encode_data_area (decl
, XEXP (rtl
, 0));
2220 /* Construct a JR instruction to a routine that will perform the equivalent of
2221 the RTL passed in as an argument. This RTL is a function epilogue that
2222 pops registers off the stack and possibly releases some extra stack space
2223 as well. The code has already verified that the RTL matches these
2227 construct_restore_jr (rtx op
)
2229 int count
= XVECLEN (op
, 0);
2231 unsigned long int mask
;
2232 unsigned long int first
;
2233 unsigned long int last
;
2235 static char buff
[100]; /* XXX */
2239 error ("bogus JR construction: %d", count
);
2243 /* Work out how many bytes to pop off the stack before retrieving
2245 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2246 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2247 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2249 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2251 /* Each pop will remove 4 bytes from the stack.... */
2252 stack_bytes
-= (count
- 2) * 4;
2254 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2255 if (stack_bytes
!= 0)
2257 error ("bad amount of stack space removal: %d", stack_bytes
);
2261 /* Now compute the bit mask of registers to push. */
2263 for (i
= 2; i
< count
; i
++)
2265 rtx vector_element
= XVECEXP (op
, 0, i
);
2267 gcc_assert (GET_CODE (vector_element
) == SET
);
2268 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2269 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2272 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2275 /* Scan for the first register to pop. */
2276 for (first
= 0; first
< 32; first
++)
2278 if (mask
& (1 << first
))
2282 gcc_assert (first
< 32);
2284 /* Discover the last register to pop. */
2285 if (mask
& (1 << LINK_POINTER_REGNUM
))
2287 last
= LINK_POINTER_REGNUM
;
2291 gcc_assert (!stack_bytes
);
2292 gcc_assert (mask
& (1 << 29));
2297 /* Note, it is possible to have gaps in the register mask.
2298 We ignore this here, and generate a JR anyway. We will
2299 be popping more registers than is strictly necessary, but
2300 it does save code space. */
2302 if (TARGET_LONG_CALLS
)
2307 sprintf (name
, "__return_%s", reg_names
[first
]);
2309 sprintf (name
, "__return_%s_%s", reg_names
[first
], reg_names
[last
]);
2311 sprintf (buff
, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2317 sprintf (buff
, "jr __return_%s", reg_names
[first
]);
2319 sprintf (buff
, "jr __return_%s_%s", reg_names
[first
], reg_names
[last
]);
2326 /* Construct a JARL instruction to a routine that will perform the equivalent
2327 of the RTL passed as a parameter. This RTL is a function prologue that
2328 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2329 some stack space as well. The code has already verified that the RTL
2330 matches these requirements. */
2332 construct_save_jarl (rtx op
)
2334 int count
= XVECLEN (op
, 0);
2336 unsigned long int mask
;
2337 unsigned long int first
;
2338 unsigned long int last
;
2340 static char buff
[100]; /* XXX */
2342 if (count
<= (TARGET_LONG_CALLS
? 3 : 2))
2344 error ("bogus JARL construction: %d", count
);
2349 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2350 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2351 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0)) == REG
);
2352 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2354 /* Work out how many bytes to push onto the stack after storing the
2356 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2358 /* Each push will put 4 bytes from the stack.... */
2359 stack_bytes
+= (count
- (TARGET_LONG_CALLS
? 3 : 2)) * 4;
2361 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2362 if (stack_bytes
!= 0)
2364 error ("bad amount of stack space removal: %d", stack_bytes
);
2368 /* Now compute the bit mask of registers to push. */
2370 for (i
= 1; i
< count
- (TARGET_LONG_CALLS
? 2 : 1); i
++)
2372 rtx vector_element
= XVECEXP (op
, 0, i
);
2374 gcc_assert (GET_CODE (vector_element
) == SET
);
2375 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2376 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2379 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2382 /* Scan for the first register to push. */
2383 for (first
= 0; first
< 32; first
++)
2385 if (mask
& (1 << first
))
2389 gcc_assert (first
< 32);
2391 /* Discover the last register to push. */
2392 if (mask
& (1 << LINK_POINTER_REGNUM
))
2394 last
= LINK_POINTER_REGNUM
;
2398 gcc_assert (!stack_bytes
);
2399 gcc_assert (mask
& (1 << 29));
2404 /* Note, it is possible to have gaps in the register mask.
2405 We ignore this here, and generate a JARL anyway. We will
2406 be pushing more registers than is strictly necessary, but
2407 it does save code space. */
2409 if (TARGET_LONG_CALLS
)
2414 sprintf (name
, "__save_%s", reg_names
[first
]);
2416 sprintf (name
, "__save_%s_%s", reg_names
[first
], reg_names
[last
]);
2418 sprintf (buff
, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2424 sprintf (buff
, "jarl __save_%s, r10", reg_names
[first
]);
2426 sprintf (buff
, "jarl __save_%s_%s, r10", reg_names
[first
],
2433 extern tree last_assemble_variable_decl
;
2434 extern int size_directive_output
;
2436 /* A version of asm_output_aligned_bss() that copes with the special
2437 data areas of the v850. */
2439 v850_output_aligned_bss (FILE * file
,
2442 unsigned HOST_WIDE_INT size
,
2445 switch (v850_get_data_area (decl
))
2448 switch_to_section (zbss_section
);
2452 switch_to_section (sbss_section
);
2456 switch_to_section (tdata_section
);
2459 switch_to_section (bss_section
);
2463 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
2464 #ifdef ASM_DECLARE_OBJECT_NAME
2465 last_assemble_variable_decl
= decl
;
2466 ASM_DECLARE_OBJECT_NAME (file
, name
, decl
);
2468 /* Standard thing is just output label for the object. */
2469 ASM_OUTPUT_LABEL (file
, name
);
2470 #endif /* ASM_DECLARE_OBJECT_NAME */
2471 ASM_OUTPUT_SKIP (file
, size
? size
: 1);
2474 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2476 v850_output_common (FILE * file
,
2482 if (decl
== NULL_TREE
)
2484 fprintf (file
, "%s", COMMON_ASM_OP
);
2488 switch (v850_get_data_area (decl
))
2491 fprintf (file
, "%s", ZCOMMON_ASM_OP
);
2495 fprintf (file
, "%s", SCOMMON_ASM_OP
);
2499 fprintf (file
, "%s", TCOMMON_ASM_OP
);
2503 fprintf (file
, "%s", COMMON_ASM_OP
);
2508 assemble_name (file
, name
);
2509 fprintf (file
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
2512 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2514 v850_output_local (FILE * file
,
2520 fprintf (file
, "%s", LOCAL_ASM_OP
);
2521 assemble_name (file
, name
);
2522 fprintf (file
, "\n");
2524 ASM_OUTPUT_ALIGNED_DECL_COMMON (file
, decl
, name
, size
, align
);
2527 /* Add data area to the given declaration if a ghs data area pragma is
2528 currently in effect (#pragma ghs startXXX/endXXX). */
2530 v850_insert_attributes (tree decl
, tree
* attr_ptr ATTRIBUTE_UNUSED
)
2533 && data_area_stack
->data_area
2534 && current_function_decl
== NULL_TREE
2535 && (TREE_CODE (decl
) == VAR_DECL
|| TREE_CODE (decl
) == CONST_DECL
)
2536 && v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2537 v850_set_data_area (decl
, data_area_stack
->data_area
);
2539 /* Initialize the default names of the v850 specific sections,
2540 if this has not been done before. */
2542 if (GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
] == NULL
)
2544 GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
]
2545 = build_string (sizeof (".sdata")-1, ".sdata");
2547 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROSDATA
]
2548 = build_string (sizeof (".rosdata")-1, ".rosdata");
2550 GHS_default_section_names
[(int) GHS_SECTION_KIND_TDATA
]
2551 = build_string (sizeof (".tdata")-1, ".tdata");
2553 GHS_default_section_names
[(int) GHS_SECTION_KIND_ZDATA
]
2554 = build_string (sizeof (".zdata")-1, ".zdata");
2556 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROZDATA
]
2557 = build_string (sizeof (".rozdata")-1, ".rozdata");
2560 if (current_function_decl
== NULL_TREE
2561 && (TREE_CODE (decl
) == VAR_DECL
2562 || TREE_CODE (decl
) == CONST_DECL
2563 || TREE_CODE (decl
) == FUNCTION_DECL
)
2564 && (!DECL_EXTERNAL (decl
) || DECL_INITIAL (decl
))
2565 && !DECL_SECTION_NAME (decl
))
2567 enum GHS_section_kind kind
= GHS_SECTION_KIND_DEFAULT
;
2568 tree chosen_section
;
2570 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2571 kind
= GHS_SECTION_KIND_TEXT
;
2574 /* First choose a section kind based on the data area of the decl. */
2575 switch (v850_get_data_area (decl
))
2581 kind
= ((TREE_READONLY (decl
))
2582 ? GHS_SECTION_KIND_ROSDATA
2583 : GHS_SECTION_KIND_SDATA
);
2587 kind
= GHS_SECTION_KIND_TDATA
;
2591 kind
= ((TREE_READONLY (decl
))
2592 ? GHS_SECTION_KIND_ROZDATA
2593 : GHS_SECTION_KIND_ZDATA
);
2596 case DATA_AREA_NORMAL
: /* default data area */
2597 if (TREE_READONLY (decl
))
2598 kind
= GHS_SECTION_KIND_RODATA
;
2599 else if (DECL_INITIAL (decl
))
2600 kind
= GHS_SECTION_KIND_DATA
;
2602 kind
= GHS_SECTION_KIND_BSS
;
2606 /* Now, if the section kind has been explicitly renamed,
2607 then attach a section attribute. */
2608 chosen_section
= GHS_current_section_names
[(int) kind
];
2610 /* Otherwise, if this kind of section needs an explicit section
2611 attribute, then also attach one. */
2612 if (chosen_section
== NULL
)
2613 chosen_section
= GHS_default_section_names
[(int) kind
];
2617 /* Only set the section name if specified by a pragma, because
2618 otherwise it will force those variables to get allocated storage
2619 in this module, rather than by the linker. */
2620 DECL_SECTION_NAME (decl
) = chosen_section
;
2625 /* Construct a DISPOSE instruction that is the equivalent of
2626 the given RTX. We have already verified that this should
2630 construct_dispose_instruction (rtx op
)
2632 int count
= XVECLEN (op
, 0);
2634 unsigned long int mask
;
2636 static char buff
[ 100 ]; /* XXX */
2641 error ("bogus DISPOSE construction: %d", count
);
2645 /* Work out how many bytes to pop off the
2646 stack before retrieving registers. */
2647 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2648 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2649 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2651 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2653 /* Each pop will remove 4 bytes from the stack.... */
2654 stack_bytes
-= (count
- 2) * 4;
2656 /* Make sure that the amount we are popping
2657 will fit into the DISPOSE instruction. */
2658 if (stack_bytes
> 128)
2660 error ("too much stack space to dispose of: %d", stack_bytes
);
2664 /* Now compute the bit mask of registers to push. */
2667 for (i
= 2; i
< count
; i
++)
2669 rtx vector_element
= XVECEXP (op
, 0, i
);
2671 gcc_assert (GET_CODE (vector_element
) == SET
);
2672 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2673 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2676 if (REGNO (SET_DEST (vector_element
)) == 2)
2679 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2682 if (! TARGET_DISABLE_CALLT
2683 && (use_callt
|| stack_bytes
== 0))
2687 sprintf (buff
, "callt ctoff(__callt_return_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29);
2692 for (i
= 20; i
< 32; i
++)
2693 if (mask
& (1 << i
))
2697 sprintf (buff
, "callt ctoff(__callt_return_r31c)");
2699 sprintf (buff
, "callt ctoff(__callt_return_r%d_r%s)",
2700 i
, (mask
& (1 << 31)) ? "31c" : "29");
2705 static char regs
[100]; /* XXX */
2708 /* Generate the DISPOSE instruction. Note we could just issue the
2709 bit mask as a number as the assembler can cope with this, but for
2710 the sake of our readers we turn it into a textual description. */
2714 for (i
= 20; i
< 32; i
++)
2716 if (mask
& (1 << i
))
2721 strcat (regs
, ", ");
2726 strcat (regs
, reg_names
[ first
]);
2728 for (i
++; i
< 32; i
++)
2729 if ((mask
& (1 << i
)) == 0)
2734 strcat (regs
, " - ");
2735 strcat (regs
, reg_names
[ i
- 1 ] );
2740 sprintf (buff
, "dispose %d {%s}, r31", stack_bytes
/ 4, regs
);
2746 /* Construct a PREPARE instruction that is the equivalent of
2747 the given RTL. We have already verified that this should
2751 construct_prepare_instruction (rtx op
)
2755 unsigned long int mask
;
2757 static char buff
[ 100 ]; /* XXX */
2760 if (XVECLEN (op
, 0) <= 1)
2762 error ("bogus PREPEARE construction: %d", XVECLEN (op
, 0));
2766 /* Work out how many bytes to push onto
2767 the stack after storing the registers. */
2768 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2769 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2770 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2772 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2775 /* Make sure that the amount we are popping
2776 will fit into the DISPOSE instruction. */
2777 if (stack_bytes
< -128)
2779 error ("too much stack space to prepare: %d", stack_bytes
);
2783 /* Now compute the bit mask of registers to push. */
2786 for (i
= 1; i
< XVECLEN (op
, 0); i
++)
2788 rtx vector_element
= XVECEXP (op
, 0, i
);
2790 if (GET_CODE (vector_element
) == CLOBBER
)
2793 gcc_assert (GET_CODE (vector_element
) == SET
);
2794 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2795 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2798 if (REGNO (SET_SRC (vector_element
)) == 2)
2801 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2805 stack_bytes
+= count
* 4;
2807 if ((! TARGET_DISABLE_CALLT
)
2808 && (use_callt
|| stack_bytes
== 0))
2812 sprintf (buff
, "callt ctoff(__callt_save_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29 );
2816 for (i
= 20; i
< 32; i
++)
2817 if (mask
& (1 << i
))
2821 sprintf (buff
, "callt ctoff(__callt_save_r31c)");
2823 sprintf (buff
, "callt ctoff(__callt_save_r%d_r%s)",
2824 i
, (mask
& (1 << 31)) ? "31c" : "29");
2828 static char regs
[100]; /* XXX */
2832 /* Generate the PREPARE instruction. Note we could just issue the
2833 bit mask as a number as the assembler can cope with this, but for
2834 the sake of our readers we turn it into a textual description. */
2838 for (i
= 20; i
< 32; i
++)
2840 if (mask
& (1 << i
))
2845 strcat (regs
, ", ");
2850 strcat (regs
, reg_names
[ first
]);
2852 for (i
++; i
< 32; i
++)
2853 if ((mask
& (1 << i
)) == 0)
2858 strcat (regs
, " - ");
2859 strcat (regs
, reg_names
[ i
- 1 ] );
2864 sprintf (buff
, "prepare {%s}, %d", regs
, (- stack_bytes
) / 4);
2870 /* Return an RTX indicating where the return address to the
2871 calling function can be found. */
2874 v850_return_addr (int count
)
2879 return get_hard_reg_initial_val (Pmode
, LINK_POINTER_REGNUM
);
2882 /* Implement TARGET_ASM_INIT_SECTIONS. */
2885 v850_asm_init_sections (void)
2888 = get_unnamed_section (0, output_section_asm_op
,
2889 "\t.section .rosdata,\"a\"");
2892 = get_unnamed_section (0, output_section_asm_op
,
2893 "\t.section .rozdata,\"a\"");
2896 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2897 "\t.section .tdata,\"aw\"");
2900 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2901 "\t.section .zdata,\"aw\"");
2904 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
2905 output_section_asm_op
,
2906 "\t.section .zbss,\"aw\"");
2910 v850_select_section (tree exp
,
2911 int reloc ATTRIBUTE_UNUSED
,
2912 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
2914 if (TREE_CODE (exp
) == VAR_DECL
)
2917 if (!TREE_READONLY (exp
)
2918 || TREE_SIDE_EFFECTS (exp
)
2919 || !DECL_INITIAL (exp
)
2920 || (DECL_INITIAL (exp
) != error_mark_node
2921 && !TREE_CONSTANT (DECL_INITIAL (exp
))))
2926 switch (v850_get_data_area (exp
))
2929 return is_const
? rozdata_section
: zdata_section
;
2932 return tdata_section
;
2935 return is_const
? rosdata_section
: sdata_section
;
2938 return is_const
? readonly_data_section
: data_section
;
2941 return readonly_data_section
;
2944 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2947 v850_function_value_regno_p (const unsigned int regno
)
2949 return (regno
== 10);
2952 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2955 v850_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2957 /* Return values > 8 bytes in length in memory. */
2958 return int_size_in_bytes (type
) > 8 || TYPE_MODE (type
) == BLKmode
;
2961 /* Worker function for TARGET_FUNCTION_VALUE. */
2964 v850_function_value (const_tree valtype
,
2965 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
2966 bool outgoing ATTRIBUTE_UNUSED
)
2968 return gen_rtx_REG (TYPE_MODE (valtype
), 10);
2972 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2975 v850_setup_incoming_varargs (cumulative_args_t ca
,
2976 enum machine_mode mode ATTRIBUTE_UNUSED
,
2977 tree type ATTRIBUTE_UNUSED
,
2978 int *pretend_arg_size ATTRIBUTE_UNUSED
,
2979 int second_time ATTRIBUTE_UNUSED
)
2981 get_cumulative_args (ca
)->anonymous_args
= (!TARGET_GHS
? 1 : 0);
2984 /* Worker function for TARGET_CAN_ELIMINATE. */
2987 v850_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
2989 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
2992 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
2994 If TARGET_APP_REGS is not defined then add r2 and r5 to
2995 the pool of fixed registers. See PR 14505. */
2998 v850_conditional_register_usage (void)
3000 if (TARGET_APP_REGS
)
3002 fixed_regs
[2] = 0; call_used_regs
[2] = 0;
3003 fixed_regs
[5] = 0; call_used_regs
[5] = 1;
3007 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE. */
3010 v850_asm_trampoline_template (FILE *f
)
3012 fprintf (f
, "\tjarl .+4,r12\n");
3013 fprintf (f
, "\tld.w 12[r12],r20\n");
3014 fprintf (f
, "\tld.w 16[r12],r12\n");
3015 fprintf (f
, "\tjmp [r12]\n");
3016 fprintf (f
, "\tnop\n");
3017 fprintf (f
, "\t.long 0\n");
3018 fprintf (f
, "\t.long 0\n");
3021 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3024 v850_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
3026 rtx mem
, fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
3028 emit_block_move (m_tramp
, assemble_trampoline_template (),
3029 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
3031 mem
= adjust_address (m_tramp
, SImode
, 16);
3032 emit_move_insn (mem
, chain_value
);
3033 mem
= adjust_address (m_tramp
, SImode
, 20);
3034 emit_move_insn (mem
, fnaddr
);
3038 v850_issue_rate (void)
3040 return (TARGET_V850E2_ALL
? 2 : 1);
3043 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3046 v850_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
3048 return (GET_CODE (x
) == CONST_DOUBLE
3049 || !(GET_CODE (x
) == CONST
3050 && GET_CODE (XEXP (x
, 0)) == PLUS
3051 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
3052 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3053 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x
, 0), 1)))));
3057 v850_memory_move_cost (enum machine_mode mode
,
3058 reg_class_t reg_class ATTRIBUTE_UNUSED
,
3061 switch (GET_MODE_SIZE (mode
))
3071 return (GET_MODE_SIZE (mode
) / 2) * (in
? 3 : 1);
3075 /* V850 specific attributes. */
3077 static const struct attribute_spec v850_attribute_table
[] =
3079 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3080 affects_type_identity } */
3081 { "interrupt_handler", 0, 0, true, false, false,
3082 v850_handle_interrupt_attribute
, false },
3083 { "interrupt", 0, 0, true, false, false,
3084 v850_handle_interrupt_attribute
, false },
3085 { "sda", 0, 0, true, false, false,
3086 v850_handle_data_area_attribute
, false },
3087 { "tda", 0, 0, true, false, false,
3088 v850_handle_data_area_attribute
, false },
3089 { "zda", 0, 0, true, false, false,
3090 v850_handle_data_area_attribute
, false },
3091 { NULL
, 0, 0, false, false, false, NULL
, false }
3094 /* Initialize the GCC target structure. */
3096 #undef TARGET_MEMORY_MOVE_COST
3097 #define TARGET_MEMORY_MOVE_COST v850_memory_move_cost
3099 #undef TARGET_ASM_ALIGNED_HI_OP
3100 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
3102 #undef TARGET_PRINT_OPERAND
3103 #define TARGET_PRINT_OPERAND v850_print_operand
3104 #undef TARGET_PRINT_OPERAND_ADDRESS
3105 #define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
3106 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
3107 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p
3109 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3110 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra
3112 #undef TARGET_ATTRIBUTE_TABLE
3113 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
3115 #undef TARGET_INSERT_ATTRIBUTES
3116 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
3118 #undef TARGET_ASM_SELECT_SECTION
3119 #define TARGET_ASM_SELECT_SECTION v850_select_section
3121 /* The assembler supports switchable .bss sections, but
3122 v850_select_section doesn't yet make use of them. */
3123 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
3124 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
3126 #undef TARGET_ENCODE_SECTION_INFO
3127 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
3129 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
3130 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
3132 #undef TARGET_RTX_COSTS
3133 #define TARGET_RTX_COSTS v850_rtx_costs
3135 #undef TARGET_ADDRESS_COST
3136 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
3138 #undef TARGET_MACHINE_DEPENDENT_REORG
3139 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
3141 #undef TARGET_SCHED_ISSUE_RATE
3142 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
3144 #undef TARGET_FUNCTION_VALUE_REGNO_P
3145 #define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
3146 #undef TARGET_FUNCTION_VALUE
3147 #define TARGET_FUNCTION_VALUE v850_function_value
3149 #undef TARGET_PROMOTE_PROTOTYPES
3150 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3152 #undef TARGET_RETURN_IN_MEMORY
3153 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
3155 #undef TARGET_PASS_BY_REFERENCE
3156 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
3158 #undef TARGET_CALLEE_COPIES
3159 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
3161 #undef TARGET_SETUP_INCOMING_VARARGS
3162 #define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs
3164 #undef TARGET_ARG_PARTIAL_BYTES
3165 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
3167 #undef TARGET_FUNCTION_ARG
3168 #define TARGET_FUNCTION_ARG v850_function_arg
3170 #undef TARGET_FUNCTION_ARG_ADVANCE
3171 #define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance
3173 #undef TARGET_CAN_ELIMINATE
3174 #define TARGET_CAN_ELIMINATE v850_can_eliminate
3176 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3177 #define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage
3179 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3180 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
3181 #undef TARGET_TRAMPOLINE_INIT
3182 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
3184 #undef TARGET_STRICT_ARGUMENT_NAMING
3185 #define TARGET_STRICT_ARGUMENT_NAMING v850_strict_argument_naming
3187 #undef TARGET_LEGITIMATE_CONSTANT_P
3188 #define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p
3190 struct gcc_target targetm
= TARGET_INITIALIZER
;
3192 #include "gt-v850.h"