1 /* Subroutines for insn-output.cc for NEC V850 series
2 Copyright (C) 1996-2024 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #define IN_TARGET_CODE 1
25 #include "coretypes.h"
33 #include "stringpool.h"
35 #include "insn-config.h"
39 #include "diagnostic-core.h"
40 #include "stor-layout.h"
43 #include "conditions.h"
45 #include "insn-attr.h"
50 /* This file should be included last. */
51 #include "target-def.h"
54 #define streq(a,b) (strcmp (a, b) == 0)
57 static void v850_print_operand_address (FILE *, machine_mode
, rtx
);
59 /* Names of the various data areas used on the v850. */
60 const char * GHS_default_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
61 const char * GHS_current_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
63 /* Track the current data area set by the data area pragma (which
64 can be nested). Tested by check_default_data_area. */
65 data_area_stack_element
* data_area_stack
= NULL
;
67 /* True if we don't need to check any more if the current
68 function is an interrupt handler. */
69 static int v850_interrupt_cache_p
= FALSE
;
71 /* Whether current function is an interrupt handler. */
72 static int v850_interrupt_p
= FALSE
;
74 static GTY(()) section
* rosdata_section
;
75 static GTY(()) section
* rozdata_section
;
76 static GTY(()) section
* tdata_section
;
77 static GTY(()) section
* zdata_section
;
78 static GTY(()) section
* zbss_section
;
80 /* We use this to wrap all emitted insns in the prologue. */
84 if (GET_CODE (x
) != CLOBBER
)
85 RTX_FRAME_RELATED_P (x
) = 1;
89 /* Mark all the subexpressions of the PARALLEL rtx PAR as
90 frame-related. Return PAR.
92 dwarf2out.cc:dwarf2out_frame_debug_expr ignores sub-expressions of a
93 PARALLEL rtx other than the first if they do not have the
94 FRAME_RELATED flag set on them. */
97 v850_all_frame_related (rtx par
)
99 int len
= XVECLEN (par
, 0);
102 gcc_assert (GET_CODE (par
) == PARALLEL
);
103 for (i
= 0; i
< len
; i
++)
104 F (XVECEXP (par
, 0, i
));
109 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
110 Specify whether to pass the argument by reference. */
113 v850_pass_by_reference (cumulative_args_t
, const function_arg_info
&arg
)
118 unsigned HOST_WIDE_INT size
= arg
.type_size_in_bytes ();
122 /* Return an RTX to represent where argument ARG will be passed to a function.
123 If the result is NULL_RTX, the argument will be pushed. */
126 v850_function_arg (cumulative_args_t cum_v
, const function_arg_info
&arg
)
128 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
129 rtx result
= NULL_RTX
;
135 size
= arg
.promoted_size_in_bytes ();
136 size
= (size
+ UNITS_PER_WORD
-1) & ~(UNITS_PER_WORD
-1);
140 /* Once we have stopped using argument registers, do not start up again. */
141 cum
->nbytes
= 4 * UNITS_PER_WORD
;
146 align
= UNITS_PER_WORD
;
147 else if (size
<= UNITS_PER_WORD
&& arg
.type
)
148 align
= TYPE_ALIGN (arg
.type
) / BITS_PER_UNIT
;
152 cum
->nbytes
= (cum
->nbytes
+ align
- 1) &~(align
- 1);
154 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
157 if (arg
.type
== NULL_TREE
158 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
161 switch (cum
->nbytes
/ UNITS_PER_WORD
)
164 result
= gen_rtx_REG (arg
.mode
, 6);
167 result
= gen_rtx_REG (arg
.mode
, 7);
170 result
= gen_rtx_REG (arg
.mode
, 8);
173 result
= gen_rtx_REG (arg
.mode
, 9);
182 /* Return the number of bytes which must be put into registers
183 for values which are part in registers and part in memory. */
185 v850_arg_partial_bytes (cumulative_args_t cum_v
, const function_arg_info
&arg
)
187 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
193 size
= arg
.promoted_size_in_bytes ();
198 align
= UNITS_PER_WORD
;
200 align
= TYPE_ALIGN (arg
.type
) / BITS_PER_UNIT
;
204 cum
->nbytes
= (cum
->nbytes
+ align
- 1) & ~ (align
- 1);
206 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
209 if (cum
->nbytes
+ size
<= 4 * UNITS_PER_WORD
)
212 if (arg
.type
== NULL_TREE
213 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
216 return 4 * UNITS_PER_WORD
- cum
->nbytes
;
219 /* Update the data in CUM to advance over argument ARG. */
222 v850_function_arg_advance (cumulative_args_t cum_v
,
223 const function_arg_info
&arg
)
225 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
228 cum
->nbytes
+= ((arg
.promoted_size_in_bytes () + UNITS_PER_WORD
- 1)
231 cum
->nbytes
+= (((arg
.type
&& int_size_in_bytes (arg
.type
) > 8
232 ? GET_MODE_SIZE (Pmode
)
233 : (HOST_WIDE_INT
) arg
.promoted_size_in_bytes ())
234 + UNITS_PER_WORD
- 1)
238 /* Return the high and low words of a CONST_DOUBLE */
241 const_double_split (rtx x
, HOST_WIDE_INT
* p_high
, HOST_WIDE_INT
* p_low
)
243 if (GET_CODE (x
) == CONST_DOUBLE
)
247 switch (GET_MODE (x
))
250 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (x
), t
);
251 *p_high
= t
[1]; /* since v850 is little endian */
252 *p_low
= t
[0]; /* high is second word */
256 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x
), *p_high
);
262 *p_high
= CONST_DOUBLE_HIGH (x
);
263 *p_low
= CONST_DOUBLE_LOW (x
);
271 fatal_insn ("const_double_split got a bad insn:", x
);
275 /* Return the cost of the rtx R with code CODE. */
278 const_costs_int (HOST_WIDE_INT value
, int zero_cost
)
280 if (CONST_OK_FOR_I (value
))
282 else if (CONST_OK_FOR_J (value
))
284 else if (CONST_OK_FOR_K (value
))
291 const_costs (rtx r
, enum rtx_code c
)
293 HOST_WIDE_INT high
, low
;
298 return const_costs_int (INTVAL (r
), 0);
301 const_double_split (r
, &high
, &low
);
302 if (GET_MODE (r
) == SFmode
)
303 return const_costs_int (high
, 1);
305 return const_costs_int (high
, 1) + const_costs_int (low
, 1);
321 v850_rtx_costs (rtx x
, machine_mode mode
, int outer_code
,
322 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
324 enum rtx_code code
= GET_CODE (x
);
333 *total
= COSTS_N_INSNS (const_costs (x
, code
));
340 if (TARGET_V850E
&& !speed
)
348 && (mode
== SImode
|| mode
== HImode
|| mode
== QImode
))
350 if (GET_CODE (XEXP (x
, 1)) == REG
)
352 else if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
354 if (CONST_OK_FOR_O (INTVAL (XEXP (x
, 1))))
356 else if (CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))))
365 if (outer_code
== COMPARE
)
374 /* Print operand X using operand code CODE to assembly language output file
378 v850_print_operand (FILE * file
, rtx x
, int code
)
380 HOST_WIDE_INT high
, low
;
385 /* We use 'c' operands with symbols for .vtinherit. */
386 if (GET_CODE (x
) == SYMBOL_REF
)
388 output_addr_const(file
, x
);
397 switch ((code
== 'B' || code
== 'C' || code
== 'D')
398 ? reverse_condition (GET_CODE (x
)) : GET_CODE (x
))
401 if (code
== 'c' || code
== 'C')
402 fprintf (file
, "nz");
404 fprintf (file
, "ne");
407 if (code
== 'c' || code
== 'C')
413 if (code
== 'D' || code
== 'd')
416 fprintf (file
, "ge");
419 fprintf (file
, "gt");
422 fprintf (file
, "le");
425 if (code
== 'D' || code
== 'd')
428 fprintf (file
, "lt");
431 fprintf (file
, "nl");
437 fprintf (file
, "nh");
446 case 'F': /* High word of CONST_DOUBLE. */
447 switch (GET_CODE (x
))
450 fprintf (file
, "%d", (INTVAL (x
) >= 0) ? 0 : -1);
454 const_double_split (x
, &high
, &low
);
455 fprintf (file
, "%ld", (long) high
);
462 case 'G': /* Low word of CONST_DOUBLE. */
463 switch (GET_CODE (x
))
466 fprintf (file
, "%ld", (long) INTVAL (x
));
470 const_double_split (x
, &high
, &low
);
471 fprintf (file
, "%ld", (long) low
);
479 fprintf (file
, "%d\n", (int)(INTVAL (x
) & 0xffff));
482 fprintf (file
, "%d", exact_log2 (INTVAL (x
)));
485 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
487 if (GET_CODE (x
) == CONST
)
488 x
= XEXP (XEXP (x
, 0), 0);
490 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
492 if (SYMBOL_REF_ZDA_P (x
))
493 fprintf (file
, "zdaoff");
494 else if (SYMBOL_REF_SDA_P (x
))
495 fprintf (file
, "sdaoff");
496 else if (SYMBOL_REF_TDA_P (x
))
497 fprintf (file
, "tdaoff");
502 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
503 output_addr_const (file
, x
);
506 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
508 if (GET_CODE (x
) == CONST
)
509 x
= XEXP (XEXP (x
, 0), 0);
511 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
513 if (SYMBOL_REF_ZDA_P (x
))
514 fprintf (file
, "r0");
515 else if (SYMBOL_REF_SDA_P (x
))
516 fprintf (file
, "gp");
517 else if (SYMBOL_REF_TDA_P (x
))
518 fprintf (file
, "ep");
522 case 'R': /* 2nd word of a double. */
523 switch (GET_CODE (x
))
526 fprintf (file
, reg_names
[REGNO (x
) + 1]);
530 machine_mode mode
= GET_MODE (x
);
531 x
= XEXP (adjust_address (x
, SImode
, 4), 0);
532 v850_print_operand_address (file
, mode
, x
);
533 if (GET_CODE (x
) == CONST_INT
)
534 fprintf (file
, "[r0]");
540 unsigned HOST_WIDE_INT v
= INTVAL (x
);
542 /* Trickery to avoid problems with shifting
543 32-bits at a time on a 32-bit host. */
546 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, v
);
551 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_HIGH (x
));
561 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
562 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), FALSE
))
569 /* Like an 'S' operand above, but for unsigned loads only. */
570 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), TRUE
))
575 case 'W': /* Print the instruction suffix. */
576 switch (GET_MODE (x
))
581 case E_QImode
: fputs (".b", file
); break;
582 case E_HImode
: fputs (".h", file
); break;
583 case E_SImode
: fputs (".w", file
); break;
584 case E_SFmode
: fputs (".w", file
); break;
587 case '.': /* Register r0. */
588 fputs (reg_names
[0], file
);
590 case 'z': /* Reg or zero. */
592 fputs (reg_names
[REGNO (x
)], file
);
593 else if ((GET_MODE(x
) == SImode
594 || GET_MODE(x
) == DFmode
595 || GET_MODE(x
) == SFmode
)
596 && x
== CONST0_RTX(GET_MODE(x
)))
597 fputs (reg_names
[0], file
);
600 gcc_assert (x
== const0_rtx
);
601 fputs (reg_names
[0], file
);
605 switch (GET_CODE (x
))
608 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
609 output_address (GET_MODE (x
),
610 gen_rtx_PLUS (SImode
, gen_rtx_REG (SImode
, 0),
613 output_address (GET_MODE (x
), XEXP (x
, 0));
617 fputs (reg_names
[REGNO (x
)], file
);
620 fputs (reg_names
[subreg_regno (x
)], file
);
623 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_LOW (x
));
631 v850_print_operand_address (file
, VOIDmode
, x
);
642 /* Output assembly language output for the address ADDR to FILE. */
645 v850_print_operand_address (FILE * file
, machine_mode
/*mode*/, rtx addr
)
647 switch (GET_CODE (addr
))
650 fprintf (file
, "0[");
651 v850_print_operand (file
, addr
, 0);
655 if (GET_CODE (XEXP (addr
, 0)) == REG
)
658 fprintf (file
, "lo(");
659 v850_print_operand (file
, XEXP (addr
, 1), 0);
660 fprintf (file
, ")[");
661 v850_print_operand (file
, XEXP (addr
, 0), 0);
666 if (GET_CODE (XEXP (addr
, 0)) == REG
667 || GET_CODE (XEXP (addr
, 0)) == SUBREG
)
670 v850_print_operand (file
, XEXP (addr
, 1), 0);
672 v850_print_operand (file
, XEXP (addr
, 0), 0);
677 v850_print_operand (file
, XEXP (addr
, 0), 0);
679 v850_print_operand (file
, XEXP (addr
, 1), 0);
684 const char *off_name
= NULL
;
685 const char *reg_name
= NULL
;
687 if (SYMBOL_REF_ZDA_P (addr
))
692 else if (SYMBOL_REF_SDA_P (addr
))
697 else if (SYMBOL_REF_TDA_P (addr
))
704 fprintf (file
, "%s(", off_name
);
705 output_addr_const (file
, addr
);
707 fprintf (file
, ")[%s]", reg_name
);
711 if (special_symbolref_operand (addr
, VOIDmode
))
713 rtx x
= XEXP (XEXP (addr
, 0), 0);
714 const char *off_name
;
715 const char *reg_name
;
717 if (SYMBOL_REF_ZDA_P (x
))
722 else if (SYMBOL_REF_SDA_P (x
))
727 else if (SYMBOL_REF_TDA_P (x
))
735 fprintf (file
, "%s(", off_name
);
736 output_addr_const (file
, addr
);
737 fprintf (file
, ")[%s]", reg_name
);
740 output_addr_const (file
, addr
);
743 output_addr_const (file
, addr
);
/* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P.  '.' is used by
   v850_print_operand to print register r0.
   NOTE(review): the body was missing from this copy; reconstructed
   from the upstream GCC v850 backend -- confirm against
   gcc/config/v850/v850.cc.  */

static bool
v850_print_operand_punct_valid_p (unsigned char code)
{
  return code == '.';
}
754 /* When assemble_integer is used to emit the offsets for a switch
755 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
756 output_addr_const will normally barf at this, but it is OK to omit
757 the truncate and just emit the difference of the two labels. The
758 .hword directive will automatically handle the truncation for us.
760 Returns true if rtx was handled, false otherwise. */
763 v850_output_addr_const_extra (FILE * file
, rtx x
)
765 if (GET_CODE (x
) != TRUNCATE
)
770 /* We must also handle the case where the switch table was passed a
771 constant value and so has been collapsed. In this case the first
772 label will have been deleted. In such a case it is OK to emit
773 nothing, since the table will not be used.
774 (cf gcc.c-torture/compile/990801-1.c). */
775 if (GET_CODE (x
) == MINUS
776 && GET_CODE (XEXP (x
, 0)) == LABEL_REF
)
778 rtx_code_label
*label
779 = dyn_cast
<rtx_code_label
*> (XEXP (XEXP (x
, 0), 0));
780 if (label
&& label
->deleted ())
784 output_addr_const (file
, x
);
788 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
792 output_move_single (rtx
* operands
)
794 rtx dst
= operands
[0];
795 rtx src
= operands
[1];
802 else if (GET_CODE (src
) == CONST_INT
)
804 HOST_WIDE_INT value
= INTVAL (src
);
806 if (CONST_OK_FOR_J (value
)) /* Signed 5-bit immediate. */
809 else if (CONST_OK_FOR_K (value
)) /* Signed 16-bit immediate. */
810 return "movea %1,%.,%0";
812 else if (CONST_OK_FOR_L (value
)) /* Upper 16 bits were set. */
813 return "movhi hi0(%1),%.,%0";
815 /* A random constant. */
816 else if (TARGET_V850E_UP
)
819 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
822 else if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
824 HOST_WIDE_INT high
, low
;
826 const_double_split (src
, &high
, &low
);
828 if (CONST_OK_FOR_J (high
)) /* Signed 5-bit immediate. */
831 else if (CONST_OK_FOR_K (high
)) /* Signed 16-bit immediate. */
832 return "movea %F1,%.,%0";
834 else if (CONST_OK_FOR_L (high
)) /* Upper 16 bits were set. */
835 return "movhi hi0(%F1),%.,%0";
837 /* A random constant. */
838 else if (TARGET_V850E_UP
)
842 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
845 else if (GET_CODE (src
) == MEM
)
846 return "%S1ld%W1 %1,%0";
848 else if (special_symbolref_operand (src
, VOIDmode
))
849 return "movea %O1(%P1),%Q1,%0";
851 else if (GET_CODE (src
) == LABEL_REF
852 || GET_CODE (src
) == SYMBOL_REF
853 || GET_CODE (src
) == CONST
)
856 return "mov hilo(%1),%0";
858 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
861 else if (GET_CODE (src
) == HIGH
)
862 return "movhi hi(%1),%.,%0";
864 else if (GET_CODE (src
) == LO_SUM
)
866 operands
[2] = XEXP (src
, 0);
867 operands
[3] = XEXP (src
, 1);
868 return "movea lo(%3),%2,%0";
872 else if (GET_CODE (dst
) == MEM
)
875 return "%S0st%W0 %1,%0";
877 else if (GET_CODE (src
) == CONST_INT
&& INTVAL (src
) == 0)
878 return "%S0st%W0 %.,%0";
880 else if (GET_CODE (src
) == CONST_DOUBLE
881 && CONST0_RTX (GET_MODE (dst
)) == src
)
882 return "%S0st%W0 %.,%0";
885 fatal_insn ("output_move_single:", gen_rtx_SET (dst
, src
));
890 v850_select_cc_mode (enum rtx_code cond
, rtx op0
, rtx op1
)
892 if (GET_MODE_CLASS (GET_MODE (op0
)) == MODE_FLOAT
)
897 return CC_FPU_LEmode
;
899 return CC_FPU_GEmode
;
901 return CC_FPU_LTmode
;
903 return CC_FPU_GTmode
;
905 return CC_FPU_EQmode
;
907 return CC_FPU_NEmode
;
913 if (op1
== const0_rtx
914 && (cond
== EQ
|| cond
== NE
|| cond
== LT
|| cond
== GE
)
915 && (GET_CODE (op0
) == PLUS
|| GET_CODE (op0
) == MINUS
916 || GET_CODE (op0
) == NEG
|| GET_CODE (op0
) == AND
917 || GET_CODE (op0
) == IOR
|| GET_CODE (op0
) == XOR
918 || GET_CODE (op0
) == NOT
|| GET_CODE (op0
) == ASHIFT
))
925 v850_gen_float_compare (enum rtx_code cond
, machine_mode mode
, rtx op0
, rtx op1
)
927 if (GET_MODE (op0
) == DFmode
)
932 emit_insn (gen_cmpdf_le_insn (op0
, op1
));
935 emit_insn (gen_cmpdf_ge_insn (op0
, op1
));
938 emit_insn (gen_cmpdf_lt_insn (op0
, op1
));
941 emit_insn (gen_cmpdf_gt_insn (op0
, op1
));
944 /* Note: There is no NE comparison operator. So we
945 perform an EQ comparison and invert the branch.
946 See v850_float_nz_comparison for how this is done. */
948 emit_insn (gen_cmpdf_eq_insn (op0
, op1
));
954 else if (mode
== SFmode
)
959 emit_insn (gen_cmpsf_le_insn(op0
, op1
));
962 emit_insn (gen_cmpsf_ge_insn(op0
, op1
));
965 emit_insn (gen_cmpsf_lt_insn(op0
, op1
));
968 emit_insn (gen_cmpsf_gt_insn(op0
, op1
));
971 /* Note: There is no NE comparison operator. So we
972 perform an EQ comparison and invert the branch.
973 See v850_float_nz_comparison for how this is done. */
975 emit_insn (gen_cmpsf_eq_insn(op0
, op1
));
984 return v850_select_cc_mode (cond
, op0
, op1
);
987 /* Return maximum offset supported for a short EP memory reference of mode
988 MODE and signedness UNSIGNEDP. */
991 ep_memory_offset (machine_mode mode
, int unsignedp ATTRIBUTE_UNUSED
)
998 if (TARGET_SMALL_SLD
)
999 max_offset
= (1 << 4);
1000 else if ((TARGET_V850E_UP
)
1002 max_offset
= (1 << 4);
1004 max_offset
= (1 << 7);
1008 if (TARGET_SMALL_SLD
)
1009 max_offset
= (1 << 5);
1010 else if ((TARGET_V850E_UP
)
1012 max_offset
= (1 << 5);
1014 max_offset
= (1 << 8);
1019 max_offset
= (1 << 8);
1029 /* Return true if OP is a valid short EP memory reference */
1032 ep_memory_operand (rtx op
, machine_mode mode
, int unsigned_load
)
1038 /* If we are not using the EP register on a per-function basis
1039 then do not allow this optimization at all. This is to
1040 prevent the use of the SLD/SST instructions which cannot be
1041 guaranteed to work properly due to a hardware bug. */
1045 if (GET_CODE (op
) != MEM
)
1048 max_offset
= ep_memory_offset (mode
, unsigned_load
);
1050 mask
= GET_MODE_SIZE (mode
) - 1;
1052 addr
= XEXP (op
, 0);
1053 if (GET_CODE (addr
) == CONST
)
1054 addr
= XEXP (addr
, 0);
1056 switch (GET_CODE (addr
))
1062 return SYMBOL_REF_TDA_P (addr
);
1065 return REGNO (addr
) == EP_REGNUM
;
1068 op0
= XEXP (addr
, 0);
1069 op1
= XEXP (addr
, 1);
1070 if (GET_CODE (op1
) == CONST_INT
1071 && INTVAL (op1
) < max_offset
1072 && INTVAL (op1
) >= 0
1073 && (INTVAL (op1
) & mask
) == 0)
1075 if (GET_CODE (op0
) == REG
&& REGNO (op0
) == EP_REGNUM
)
1078 if (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_TDA_P (op0
))
1087 /* Substitute memory references involving a pointer, to use the ep pointer,
1088 taking care to save and preserve the ep. */
1091 substitute_ep_register (rtx_insn
*first_insn
,
1092 rtx_insn
*last_insn
,
1098 rtx reg
= gen_rtx_REG (Pmode
, regno
);
1103 df_set_regs_ever_live (1, true);
1104 *p_r1
= gen_rtx_REG (Pmode
, 1);
1105 *p_ep
= gen_rtx_REG (Pmode
, 30);
1110 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1111 2 * (uses
- 3), uses
, reg_names
[regno
],
1112 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)),
1113 INSN_UID (first_insn
), INSN_UID (last_insn
));
1115 if (NOTE_P (first_insn
))
1116 first_insn
= next_nonnote_insn (first_insn
);
1118 last_insn
= next_nonnote_insn (last_insn
);
1119 for (insn
= first_insn
; insn
&& insn
!= last_insn
; insn
= NEXT_INSN (insn
))
1121 if (NONJUMP_INSN_P (insn
))
1123 rtx pattern
= single_set (insn
);
1125 /* Replace the memory references. */
1129 /* Memory operands are signed by default. */
1130 int unsignedp
= FALSE
;
1132 if (GET_CODE (SET_DEST (pattern
)) == MEM
1133 && GET_CODE (SET_SRC (pattern
)) == MEM
)
1136 else if (GET_CODE (SET_DEST (pattern
)) == MEM
)
1137 p_mem
= &SET_DEST (pattern
);
1139 else if (GET_CODE (SET_SRC (pattern
)) == MEM
)
1140 p_mem
= &SET_SRC (pattern
);
1142 else if (GET_CODE (SET_SRC (pattern
)) == SIGN_EXTEND
1143 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1144 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1146 else if (GET_CODE (SET_SRC (pattern
)) == ZERO_EXTEND
1147 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1149 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1157 rtx addr
= XEXP (*p_mem
, 0);
1159 if (GET_CODE (addr
) == REG
&& REGNO (addr
) == (unsigned) regno
)
1160 *p_mem
= change_address (*p_mem
, VOIDmode
, *p_ep
);
1162 else if (GET_CODE (addr
) == PLUS
1163 && GET_CODE (XEXP (addr
, 0)) == REG
1164 && REGNO (XEXP (addr
, 0)) == (unsigned) regno
1165 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1166 && ((INTVAL (XEXP (addr
, 1)))
1167 < ep_memory_offset (GET_MODE (*p_mem
),
1169 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1170 *p_mem
= change_address (*p_mem
, VOIDmode
,
1171 gen_rtx_PLUS (Pmode
,
1179 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1180 insn
= prev_nonnote_insn (first_insn
);
1181 if (insn
&& NONJUMP_INSN_P (insn
)
1182 && GET_CODE (PATTERN (insn
)) == SET
1183 && SET_DEST (PATTERN (insn
)) == *p_ep
1184 && SET_SRC (PATTERN (insn
)) == *p_r1
)
1187 emit_insn_before (gen_rtx_SET (*p_r1
, *p_ep
), first_insn
);
1189 emit_insn_before (gen_rtx_SET (*p_ep
, reg
), first_insn
);
1190 emit_insn_before (gen_rtx_SET (*p_ep
, *p_r1
), last_insn
);
1194 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1195 the -mep mode to copy heavily used pointers to ep to use the implicit
1204 rtx_insn
*first_insn
;
1205 rtx_insn
*last_insn
;
1207 regs
[FIRST_PSEUDO_REGISTER
];
1216 /* If not ep mode, just return now. */
1220 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1223 regs
[i
].first_insn
= NULL
;
1224 regs
[i
].last_insn
= NULL
;
1227 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1229 switch (GET_CODE (insn
))
1231 /* End of basic block */
1238 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1240 if (max_uses
< regs
[i
].uses
)
1242 max_uses
= regs
[i
].uses
;
1248 substitute_ep_register (regs
[max_regno
].first_insn
,
1249 regs
[max_regno
].last_insn
,
1250 max_uses
, max_regno
, &r1
, &ep
);
1254 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1257 regs
[i
].first_insn
= NULL
;
1258 regs
[i
].last_insn
= NULL
;
1266 pattern
= single_set (insn
);
1268 /* See if there are any memory references we can shorten. */
1271 rtx src
= SET_SRC (pattern
);
1272 rtx dest
= SET_DEST (pattern
);
1274 /* Memory operands are signed by default. */
1275 int unsignedp
= FALSE
;
1277 /* We might have (SUBREG (MEM)) here, so just get rid of the
1278 subregs to make this code simpler. */
1279 if (GET_CODE (dest
) == SUBREG
1280 && (GET_CODE (SUBREG_REG (dest
)) == MEM
1281 || GET_CODE (SUBREG_REG (dest
)) == REG
))
1282 alter_subreg (&dest
, false);
1283 if (GET_CODE (src
) == SUBREG
1284 && (GET_CODE (SUBREG_REG (src
)) == MEM
1285 || GET_CODE (SUBREG_REG (src
)) == REG
))
1286 alter_subreg (&src
, false);
1288 if (GET_CODE (dest
) == MEM
&& GET_CODE (src
) == MEM
)
1291 else if (GET_CODE (dest
) == MEM
)
1294 else if (GET_CODE (src
) == MEM
)
1297 else if (GET_CODE (src
) == SIGN_EXTEND
1298 && GET_CODE (XEXP (src
, 0)) == MEM
)
1299 mem
= XEXP (src
, 0);
1301 else if (GET_CODE (src
) == ZERO_EXTEND
1302 && GET_CODE (XEXP (src
, 0)) == MEM
)
1304 mem
= XEXP (src
, 0);
1310 if (mem
&& ep_memory_operand (mem
, GET_MODE (mem
), unsignedp
))
1313 else if (!use_ep
&& mem
1314 && GET_MODE_SIZE (GET_MODE (mem
)) <= UNITS_PER_WORD
)
1316 rtx addr
= XEXP (mem
, 0);
1320 if (GET_CODE (addr
) == REG
)
1323 regno
= REGNO (addr
);
1326 else if (GET_CODE (addr
) == PLUS
1327 && GET_CODE (XEXP (addr
, 0)) == REG
1328 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1329 && ((INTVAL (XEXP (addr
, 1)))
1330 < ep_memory_offset (GET_MODE (mem
), unsignedp
))
1331 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1334 regno
= REGNO (XEXP (addr
, 0));
1343 regs
[regno
].last_insn
= insn
;
1344 if (!regs
[regno
].first_insn
)
1345 regs
[regno
].first_insn
= insn
;
1349 /* Loading up a register in the basic block zaps any savings
1351 if (GET_CODE (dest
) == REG
)
1356 regno
= REGNO (dest
);
1357 endregno
= END_REGNO (dest
);
1361 /* See if we can use the pointer before this
1366 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1368 if (max_uses
< regs
[i
].uses
)
1370 max_uses
= regs
[i
].uses
;
1376 && max_regno
>= regno
1377 && max_regno
< endregno
)
1379 substitute_ep_register (regs
[max_regno
].first_insn
,
1380 regs
[max_regno
].last_insn
,
1381 max_uses
, max_regno
, &r1
,
1384 /* Since we made a substitution, zap all remembered
1386 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1389 regs
[i
].first_insn
= NULL
;
1390 regs
[i
].last_insn
= NULL
;
1395 for (i
= regno
; i
< endregno
; i
++)
1398 regs
[i
].first_insn
= NULL
;
1399 regs
[i
].last_insn
= NULL
;
/* # of registers saved by the interrupt handler.  */
#define INTERRUPT_FIXED_NUM 5

/* # of bytes for registers saved by the interrupt handler.  */
#define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)

/* # of words saved for other registers.  */
#define INTERRUPT_ALL_SAVE_NUM \
  (30 - INTERRUPT_FIXED_NUM)

#define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1420 compute_register_save_size (long * p_reg_saved
)
1424 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1425 int call_p
= df_regs_ever_live_p (LINK_POINTER_REGNUM
);
1428 /* Count space for the register saves. */
1429 if (interrupt_handler
)
1431 for (i
= 0; i
<= 31; i
++)
1435 if (df_regs_ever_live_p (i
) || call_p
)
1438 reg_saved
|= 1L << i
;
1442 /* We don't save/restore r0 or the stack pointer */
1444 case STACK_POINTER_REGNUM
:
1447 /* For registers with fixed use, we save them, set them to the
1448 appropriate value, and then restore them.
1449 These registers are handled specially, so don't list them
1450 on the list of registers to save in the prologue. */
1451 case 1: /* temp used to hold ep */
1453 case 10: /* temp used to call interrupt save/restore */
1454 case 11: /* temp used to call interrupt save/restore (long call) */
1455 case EP_REGNUM
: /* ep */
1462 /* Find the first register that needs to be saved. */
1463 for (i
= 0; i
<= 31; i
++)
1464 if (df_regs_ever_live_p (i
) && ((! call_used_or_fixed_reg_p (i
))
1465 || i
== LINK_POINTER_REGNUM
))
1468 /* If it is possible that an out-of-line helper function might be
1469 used to generate the prologue for the current function, then we
1470 need to cover the possibility that such a helper function will
1471 be used, despite the fact that there might be gaps in the list of
1472 registers that need to be saved. To detect this we note that the
1473 helper functions always push at least register r29 (provided
1474 that the function is not an interrupt handler). */
1476 if (TARGET_PROLOG_FUNCTION
1477 && (i
== 2 || ((i
>= 20) && (i
< 30))))
1482 reg_saved
|= 1L << i
;
1487 /* Helper functions save all registers between the starting
1488 register and the last register, regardless of whether they
1489 are actually used by the function or not. */
1490 for (; i
<= 29; i
++)
1493 reg_saved
|= 1L << i
;
1496 if (df_regs_ever_live_p (LINK_POINTER_REGNUM
))
1499 reg_saved
|= 1L << LINK_POINTER_REGNUM
;
1504 for (; i
<= 31; i
++)
1505 if (df_regs_ever_live_p (i
) && ((! call_used_or_fixed_reg_p (i
))
1506 || i
== LINK_POINTER_REGNUM
))
1509 reg_saved
|= 1L << i
;
1515 *p_reg_saved
= reg_saved
;
1520 /* Typical stack layout should looks like this after the function's prologue:
1525 | | arguments saved | Increasing
1526 | | on the stack | addresses
1527 PARENT arg pointer -> | | /
1528 -------------------------- ---- -------------------
1529 | | - space for argument split between regs & stack
1531 CHILD | | \ <-- (return address here)
1536 frame pointer -> | | \ ___
1543 | | arguments | | Decreasing
1544 (hard) frame pointer | | / | | addresses
1545 and stack pointer -> | | / _|_ |
1546 -------------------------- ---- ------------------ V */
1549 compute_frame_size (poly_int64 size
, long * p_reg_saved
)
1552 + compute_register_save_size (p_reg_saved
)
1553 + crtl
->outgoing_args_size
);
1557 use_prolog_function (int num_save
, int frame_size
)
1559 int alloc_stack
= (4 * num_save
);
1560 int unalloc_stack
= frame_size
- alloc_stack
;
1561 int save_func_len
, restore_func_len
;
1562 int save_normal_len
, restore_normal_len
;
1564 if (! TARGET_DISABLE_CALLT
)
1565 save_func_len
= restore_func_len
= 2;
1567 save_func_len
= restore_func_len
= TARGET_LONG_CALLS
? (4+4+4+2+2) : 4;
1571 save_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1572 restore_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1575 /* See if we would have used ep to save the stack. */
1576 if (TARGET_EP
&& num_save
> 3 && (unsigned)frame_size
< 255)
1577 save_normal_len
= restore_normal_len
= (3 * 2) + (2 * num_save
);
1579 save_normal_len
= restore_normal_len
= 4 * num_save
;
1581 save_normal_len
+= CONST_OK_FOR_J (-frame_size
) ? 2 : 4;
1582 restore_normal_len
+= (CONST_OK_FOR_J (frame_size
) ? 2 : 4) + 2;
1584 /* Don't bother checking if we don't actually save any space.
1585 This happens for instance if one register is saved and additional
1586 stack space is allocated. */
1587 return ((save_func_len
+ restore_func_len
) < (save_normal_len
+ restore_normal_len
));
1591 increment_stack (signed int amount
, bool in_prologue
)
1598 inc
= GEN_INT (amount
);
1600 if (! CONST_OK_FOR_K (amount
))
1602 rtx reg
= gen_rtx_REG (Pmode
, 12);
1604 inc
= emit_move_insn (reg
, inc
);
1610 inc
= emit_insn (gen_addsi3_clobber_flags (stack_pointer_rtx
, stack_pointer_rtx
, inc
));
1616 expand_prologue (void)
1619 unsigned int size
= get_frame_size ();
1620 unsigned int actual_fsize
;
1621 unsigned int init_stack_alloc
= 0;
1624 unsigned int num_save
;
1626 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1629 actual_fsize
= compute_frame_size (size
, ®_saved
);
1631 if (flag_stack_usage_info
)
1632 current_function_static_stack_size
= actual_fsize
;
1634 /* Save/setup global registers for interrupt functions right now. */
1635 if (interrupt_handler
)
1637 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1638 emit_insn (gen_callt_save_interrupt ());
1640 emit_insn (gen_save_interrupt ());
1642 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1644 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1645 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1647 /* Interrupt functions are not passed arguments, so no need to
1648 allocate space for split structure arguments. */
1649 gcc_assert (crtl
->args
.pretend_args_size
== 0);
1652 /* Identify all of the saved registers. */
1654 for (i
= 1; i
< 32; i
++)
1656 if (((1L << i
) & reg_saved
) != 0)
1657 save_regs
[num_save
++] = gen_rtx_REG (Pmode
, i
);
1660 if (crtl
->args
.pretend_args_size
)
1664 increment_stack (- (actual_fsize
+ crtl
->args
.pretend_args_size
), true);
1668 increment_stack (- crtl
->args
.pretend_args_size
, true);
1671 /* See if we have an insn that allocates stack space and saves the particular
1672 registers we want to. Note that the helpers won't
1673 allocate additional space for registers GCC saves to complete a
1674 "split" structure argument. */
1675 save_all
= NULL_RTX
;
1676 if (TARGET_PROLOG_FUNCTION
1677 && !crtl
->args
.pretend_args_size
1680 if (use_prolog_function (num_save
, actual_fsize
))
1682 int alloc_stack
= 4 * num_save
;
1685 save_all
= gen_rtx_PARALLEL
1687 rtvec_alloc (num_save
+ 2
1688 + (TARGET_DISABLE_CALLT
? (TARGET_LONG_CALLS
? 2 : 1) : 0)));
1690 XVECEXP (save_all
, 0, 0)
1691 = gen_rtx_SET (stack_pointer_rtx
,
1692 gen_rtx_PLUS (Pmode
,
1694 GEN_INT(-alloc_stack
)));
1695 for (i
= 0; i
< num_save
; i
++)
1698 XVECEXP (save_all
, 0, i
+1)
1699 = gen_rtx_SET (gen_rtx_MEM (Pmode
,
1700 gen_rtx_PLUS (Pmode
,
1706 XVECEXP (save_all
, 0, num_save
+ 1)
1707 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, CC_REGNUM
));
1709 if (TARGET_DISABLE_CALLT
)
1711 XVECEXP (save_all
, 0, num_save
+ 2)
1712 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 10));
1714 if (TARGET_LONG_CALLS
)
1715 XVECEXP (save_all
, 0, num_save
+ 3)
1716 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
1719 v850_all_frame_related (save_all
);
1721 code
= recog (save_all
, NULL
, NULL
);
1724 rtx insn
= emit_insn (save_all
);
1725 INSN_CODE (insn
) = code
;
1726 actual_fsize
-= alloc_stack
;
1730 save_all
= NULL_RTX
;
1734 /* If no prolog save function is available, store the registers the old
1735 fashioned way (one by one). */
1738 /* Special case interrupt functions that save all registers for a call. */
1739 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1741 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1742 emit_insn (gen_callt_save_all_interrupt ());
1744 emit_insn (gen_save_all_interrupt ());
1749 /* If the stack is too big, allocate it in chunks so we can do the
1750 register saves. We use the register save size so we use the ep
1752 if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1753 init_stack_alloc
= compute_register_save_size (NULL
);
1755 init_stack_alloc
= actual_fsize
;
1757 /* Save registers at the beginning of the stack frame. */
1758 offset
= init_stack_alloc
- 4;
1760 if (init_stack_alloc
)
1761 increment_stack (- (signed) init_stack_alloc
, true);
1763 /* Save the return pointer first. */
1764 if (num_save
> 0 && REGNO (save_regs
[num_save
-1]) == LINK_POINTER_REGNUM
)
1766 F (emit_move_insn (gen_rtx_MEM (SImode
,
1767 plus_constant (Pmode
,
1770 save_regs
[--num_save
]));
1774 for (i
= 0; i
< num_save
; i
++)
1776 F (emit_move_insn (gen_rtx_MEM (SImode
,
1777 plus_constant (Pmode
,
1786 /* Allocate the rest of the stack that was not allocated above (either it is
1787 > 32K or we just called a function to save the registers and needed more
1789 if (actual_fsize
> init_stack_alloc
)
1790 increment_stack (init_stack_alloc
- actual_fsize
, true);
1792 /* If we need a frame pointer, set it up now. */
1793 if (frame_pointer_needed
)
1794 F (emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
));
1799 expand_epilogue (void)
1802 unsigned int size
= get_frame_size ();
1804 int actual_fsize
= compute_frame_size (size
, ®_saved
);
1805 rtx restore_regs
[32];
1807 unsigned int num_restore
;
1809 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1811 /* Eliminate the initial stack stored by interrupt functions. */
1812 if (interrupt_handler
)
1814 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1815 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1816 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1819 /* Cut off any dynamic stack created. */
1820 if (frame_pointer_needed
)
1821 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1823 /* Identify all of the saved registers. */
1825 for (i
= 1; i
< 32; i
++)
1827 if (((1L << i
) & reg_saved
) != 0)
1828 restore_regs
[num_restore
++] = gen_rtx_REG (Pmode
, i
);
1831 /* See if we have an insn that restores the particular registers we
1833 restore_all
= NULL_RTX
;
1835 if (TARGET_PROLOG_FUNCTION
1837 && !crtl
->args
.pretend_args_size
1838 && !interrupt_handler
)
1840 int alloc_stack
= (4 * num_restore
);
1842 /* Don't bother checking if we don't actually save any space. */
1843 if (use_prolog_function (num_restore
, actual_fsize
))
1846 restore_all
= gen_rtx_PARALLEL (VOIDmode
,
1847 rtvec_alloc (num_restore
+ 2));
1848 XVECEXP (restore_all
, 0, 0) = ret_rtx
;
1849 XVECEXP (restore_all
, 0, 1)
1850 = gen_rtx_SET (stack_pointer_rtx
,
1851 gen_rtx_PLUS (Pmode
,
1853 GEN_INT (alloc_stack
)));
1855 offset
= alloc_stack
- 4;
1856 for (i
= 0; i
< num_restore
; i
++)
1858 XVECEXP (restore_all
, 0, i
+2)
1859 = gen_rtx_SET (restore_regs
[i
],
1861 gen_rtx_PLUS (Pmode
,
1867 code
= recog (restore_all
, NULL
, NULL
);
1873 actual_fsize
-= alloc_stack
;
1874 increment_stack (actual_fsize
, false);
1876 insn
= emit_jump_insn (restore_all
);
1877 INSN_CODE (insn
) = code
;
1880 restore_all
= NULL_RTX
;
1884 /* If no epilogue save function is available, restore the registers the
1885 old fashioned way (one by one). */
1888 unsigned int init_stack_free
;
1890 /* If the stack is large, we need to cut it down in 2 pieces. */
1891 if (interrupt_handler
)
1892 init_stack_free
= 0;
1893 else if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1894 init_stack_free
= 4 * num_restore
;
1896 init_stack_free
= (signed) actual_fsize
;
1898 /* Deallocate the rest of the stack if it is > 32K. */
1899 if ((unsigned int) actual_fsize
> init_stack_free
)
1900 increment_stack (actual_fsize
- init_stack_free
, false);
1902 /* Special case interrupt functions that save all registers
1904 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1906 if (! TARGET_DISABLE_CALLT
)
1907 emit_insn (gen_callt_restore_all_interrupt ());
1909 emit_insn (gen_restore_all_interrupt ());
1913 /* Restore registers from the beginning of the stack frame. */
1914 int offset
= init_stack_free
- 4;
1916 /* Restore the return pointer first. */
1918 && REGNO (restore_regs
[num_restore
- 1]) == LINK_POINTER_REGNUM
)
1920 emit_move_insn (restore_regs
[--num_restore
],
1921 gen_rtx_MEM (SImode
,
1922 plus_constant (Pmode
,
1928 for (i
= 0; i
< num_restore
; i
++)
1930 emit_move_insn (restore_regs
[i
],
1931 gen_rtx_MEM (SImode
,
1932 plus_constant (Pmode
,
1936 emit_use (restore_regs
[i
]);
1940 /* Cut back the remainder of the stack. */
1941 increment_stack (init_stack_free
+ crtl
->args
.pretend_args_size
,
1945 /* And return or use reti for interrupt handlers. */
1946 if (interrupt_handler
)
1948 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1949 emit_insn (gen_callt_return_interrupt ());
1951 emit_jump_insn (gen_return_interrupt ());
1953 else if (actual_fsize
)
1954 emit_jump_insn (gen_return_internal ());
1956 emit_jump_insn (gen_return_simple ());
1959 v850_interrupt_cache_p
= FALSE
;
1960 v850_interrupt_p
= FALSE
;
1963 /* Retrieve the data area that has been chosen for the given decl. */
1966 v850_get_data_area (tree decl
)
1968 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
1969 return DATA_AREA_SDA
;
1971 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
1972 return DATA_AREA_TDA
;
1974 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
1975 return DATA_AREA_ZDA
;
1977 return DATA_AREA_NORMAL
;
1980 /* Store the indicated data area in the decl's attributes. */
1983 v850_set_data_area (tree decl
, v850_data_area data_area
)
1989 case DATA_AREA_SDA
: name
= get_identifier ("sda"); break;
1990 case DATA_AREA_TDA
: name
= get_identifier ("tda"); break;
1991 case DATA_AREA_ZDA
: name
= get_identifier ("zda"); break;
1996 DECL_ATTRIBUTES (decl
) = tree_cons
1997 (name
, NULL
, DECL_ATTRIBUTES (decl
));
2000 /* Handle an "interrupt" attribute; arguments as in
2001 struct attribute_spec.handler. */
2003 v850_handle_interrupt_attribute (tree
*node
, tree name
,
2004 tree args ATTRIBUTE_UNUSED
,
2005 int flags ATTRIBUTE_UNUSED
,
2006 bool * no_add_attrs
)
2008 if (TREE_CODE (*node
) != FUNCTION_DECL
)
2010 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2012 *no_add_attrs
= true;
2018 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2019 struct attribute_spec.handler. */
2021 v850_handle_data_area_attribute (tree
*node
, tree name
,
2022 tree args ATTRIBUTE_UNUSED
,
2023 int flags ATTRIBUTE_UNUSED
,
2024 bool * no_add_attrs
)
2026 v850_data_area data_area
;
2027 v850_data_area area
;
2030 /* Implement data area attribute. */
2031 if (is_attribute_p ("sda", name
))
2032 data_area
= DATA_AREA_SDA
;
2033 else if (is_attribute_p ("tda", name
))
2034 data_area
= DATA_AREA_TDA
;
2035 else if (is_attribute_p ("zda", name
))
2036 data_area
= DATA_AREA_ZDA
;
2040 switch (TREE_CODE (decl
))
2043 if (current_function_decl
!= NULL_TREE
)
2045 error_at (DECL_SOURCE_LOCATION (decl
),
2046 "data area attributes cannot be specified for "
2048 *no_add_attrs
= true;
2054 area
= v850_get_data_area (decl
);
2055 if (area
!= DATA_AREA_NORMAL
&& data_area
!= area
)
2057 error ("data area of %q+D conflicts with previous declaration",
2059 *no_add_attrs
= true;
2071 /* Return nonzero if FUNC is an interrupt function as specified
2072 by the "interrupt" attribute. */
2075 v850_interrupt_function_p (tree func
)
2080 if (v850_interrupt_cache_p
)
2081 return v850_interrupt_p
;
2083 if (TREE_CODE (func
) != FUNCTION_DECL
)
2086 a
= lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func
));
2092 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
2093 ret
= a
!= NULL_TREE
;
2096 /* Its not safe to trust global variables until after function inlining has
2098 if (reload_completed
| reload_in_progress
)
2099 v850_interrupt_p
= ret
;
2106 v850_encode_data_area (tree decl
, rtx symbol
)
2110 /* Map explicit sections into the appropriate attribute */
2111 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2113 if (DECL_SECTION_NAME (decl
))
2115 const char *name
= DECL_SECTION_NAME (decl
);
2117 if (streq (name
, ".zdata") || streq (name
, ".zbss"))
2118 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2120 else if (streq (name
, ".sdata") || streq (name
, ".sbss"))
2121 v850_set_data_area (decl
, DATA_AREA_SDA
);
2123 else if (streq (name
, ".tdata"))
2124 v850_set_data_area (decl
, DATA_AREA_TDA
);
2127 /* If no attribute, support -m{zda,sda,tda}=n */
2130 int size
= int_size_in_bytes (TREE_TYPE (decl
));
2134 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_TDA
])
2135 v850_set_data_area (decl
, DATA_AREA_TDA
);
2137 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_SDA
])
2138 v850_set_data_area (decl
, DATA_AREA_SDA
);
2140 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_ZDA
])
2141 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2144 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2148 flags
= SYMBOL_REF_FLAGS (symbol
);
2149 switch (v850_get_data_area (decl
))
2151 case DATA_AREA_ZDA
: flags
|= SYMBOL_FLAG_ZDA
; break;
2152 case DATA_AREA_TDA
: flags
|= SYMBOL_FLAG_TDA
; break;
2153 case DATA_AREA_SDA
: flags
|= SYMBOL_FLAG_SDA
; break;
2154 default: gcc_unreachable ();
2156 SYMBOL_REF_FLAGS (symbol
) = flags
;
2160 v850_encode_section_info (tree decl
, rtx rtl
, int first
)
2162 default_encode_section_info (decl
, rtl
, first
);
2165 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2166 v850_encode_data_area (decl
, XEXP (rtl
, 0));
2169 /* Construct a JR instruction to a routine that will perform the equivalent of
2170 the RTL passed in as an argument. This RTL is a function epilogue that
2171 pops registers off the stack and possibly releases some extra stack space
2172 as well. The code has already verified that the RTL matches these
2176 construct_restore_jr (rtx op
)
2178 int count
= XVECLEN (op
, 0);
2180 unsigned long int mask
;
2181 unsigned long int first
;
2182 unsigned long int last
;
2184 static char buff
[256]; /* XXX */
2188 error ("bogus JR construction: %d", count
);
2192 /* Work out how many bytes to pop off the stack before retrieving
2194 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2195 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2196 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2198 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2200 /* Each pop will remove 4 bytes from the stack.... */
2201 stack_bytes
-= (count
- 2) * 4;
2203 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2204 if (stack_bytes
!= 0)
2206 error ("bad amount of stack space removal: %d", stack_bytes
);
2210 /* Now compute the bit mask of registers to push. */
2212 for (i
= 2; i
< count
; i
++)
2214 rtx vector_element
= XVECEXP (op
, 0, i
);
2216 gcc_assert (GET_CODE (vector_element
) == SET
);
2217 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2218 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2221 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2224 /* Scan for the first register to pop. */
2225 for (first
= 0; first
< 32; first
++)
2227 if (mask
& (1 << first
))
2231 gcc_assert (first
< 32);
2233 /* Discover the last register to pop. */
2234 if (mask
& (1 << LINK_POINTER_REGNUM
))
2236 last
= LINK_POINTER_REGNUM
;
2240 gcc_assert (!stack_bytes
);
2241 gcc_assert (mask
& (1 << 29));
2246 /* Note, it is possible to have gaps in the register mask.
2247 We ignore this here, and generate a JR anyway. We will
2248 be popping more registers than is strictly necessary, but
2249 it does save code space. */
2251 if (TARGET_LONG_CALLS
)
2256 sprintf (name
, "__return_%s", reg_names
[first
]);
2258 sprintf (name
, "__return_%s_%s", reg_names
[first
], reg_names
[last
]);
2260 sprintf (buff
, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2266 sprintf (buff
, "jr __return_%s", reg_names
[first
]);
2268 sprintf (buff
, "jr __return_%s_%s", reg_names
[first
], reg_names
[last
]);
2275 /* Construct a JARL instruction to a routine that will perform the equivalent
2276 of the RTL passed as a parameter. This RTL is a function prologue that
2277 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2278 some stack space as well. The code has already verified that the RTL
2279 matches these requirements. */
2281 construct_save_jarl (rtx op
)
2283 int count
= XVECLEN (op
, 0);
2285 unsigned long int mask
;
2286 unsigned long int first
;
2287 unsigned long int last
;
2289 static char buff
[255]; /* XXX */
2291 if (count
<= (TARGET_LONG_CALLS
? 3 : 2))
2293 error ("bogus JARL construction: %d", count
);
2298 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2299 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2300 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0)) == REG
);
2301 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2303 /* Work out how many bytes to push onto the stack after storing the
2305 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2307 /* Each push will put 4 bytes from the stack.... */
2308 stack_bytes
+= (count
- (TARGET_LONG_CALLS
? 4 : 3)) * 4;
2310 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2311 if (stack_bytes
!= 0)
2313 error ("bad amount of stack space removal: %d", stack_bytes
);
2317 /* Now compute the bit mask of registers to push. */
2319 for (i
= 1; i
< count
- (TARGET_LONG_CALLS
? 3 : 2); i
++)
2321 rtx vector_element
= XVECEXP (op
, 0, i
);
2323 gcc_assert (GET_CODE (vector_element
) == SET
);
2324 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2325 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2328 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2331 /* Scan for the first register to push. */
2332 for (first
= 0; first
< 32; first
++)
2334 if (mask
& (1 << first
))
2338 gcc_assert (first
< 32);
2340 /* Discover the last register to push. */
2341 if (mask
& (1 << LINK_POINTER_REGNUM
))
2343 last
= LINK_POINTER_REGNUM
;
2347 gcc_assert (!stack_bytes
);
2348 gcc_assert (mask
& (1 << 29));
2353 /* Note, it is possible to have gaps in the register mask.
2354 We ignore this here, and generate a JARL anyway. We will
2355 be pushing more registers than is strictly necessary, but
2356 it does save code space. */
2358 if (TARGET_LONG_CALLS
)
2363 sprintf (name
, "__save_%s", reg_names
[first
]);
2365 sprintf (name
, "__save_%s_%s", reg_names
[first
], reg_names
[last
]);
2367 if (TARGET_V850E3V5_UP
)
2368 sprintf (buff
, "mov hilo(%s), r11\n\tjarl [r11], r10", name
);
2370 sprintf (buff
, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2376 sprintf (buff
, "jarl __save_%s, r10", reg_names
[first
]);
2378 sprintf (buff
, "jarl __save_%s_%s, r10", reg_names
[first
],
2385 /* A version of asm_output_aligned_bss() that copes with the special
2386 data areas of the v850. */
2388 v850_output_aligned_bss (FILE * file
,
2391 unsigned HOST_WIDE_INT size
,
2394 switch (v850_get_data_area (decl
))
2397 switch_to_section (zbss_section
);
2401 switch_to_section (sbss_section
);
2405 switch_to_section (tdata_section
);
2409 switch_to_section (bss_section
);
2413 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
2414 #ifdef ASM_DECLARE_OBJECT_NAME
2415 last_assemble_variable_decl
= decl
;
2416 ASM_DECLARE_OBJECT_NAME (file
, name
, decl
);
2418 /* Standard thing is just output label for the object. */
2419 ASM_OUTPUT_LABEL (file
, name
);
2420 #endif /* ASM_DECLARE_OBJECT_NAME */
2421 ASM_OUTPUT_SKIP (file
, size
? size
: 1);
2424 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2426 v850_output_common (FILE * file
,
2432 if (decl
== NULL_TREE
)
2434 fprintf (file
, "%s", COMMON_ASM_OP
);
2438 switch (v850_get_data_area (decl
))
2441 fprintf (file
, "%s", ZCOMMON_ASM_OP
);
2445 fprintf (file
, "%s", SCOMMON_ASM_OP
);
2449 fprintf (file
, "%s", TCOMMON_ASM_OP
);
2453 fprintf (file
, "%s", COMMON_ASM_OP
);
2458 assemble_name (file
, name
);
2459 fprintf (file
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
2462 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2464 v850_output_local (FILE * file
,
2470 fprintf (file
, "%s", LOCAL_ASM_OP
);
2471 assemble_name (file
, name
);
2472 fprintf (file
, "\n");
2474 ASM_OUTPUT_ALIGNED_DECL_COMMON (file
, decl
, name
, size
, align
);
2477 /* Add data area to the given declaration if a ghs data area pragma is
2478 currently in effect (#pragma ghs startXXX/endXXX). */
2480 v850_insert_attributes (tree decl
, tree
* attr_ptr ATTRIBUTE_UNUSED
)
2483 && data_area_stack
->data_area
2484 && current_function_decl
== NULL_TREE
2485 && (VAR_P (decl
) || TREE_CODE (decl
) == CONST_DECL
)
2486 && v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2487 v850_set_data_area (decl
, data_area_stack
->data_area
);
2489 /* Initialize the default names of the v850 specific sections,
2490 if this has not been done before. */
2492 if (GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
] == NULL
)
2494 GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
]
2497 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROSDATA
]
2500 GHS_default_section_names
[(int) GHS_SECTION_KIND_TDATA
]
2503 GHS_default_section_names
[(int) GHS_SECTION_KIND_ZDATA
]
2506 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROZDATA
]
2510 if (current_function_decl
== NULL_TREE
2512 || TREE_CODE (decl
) == CONST_DECL
2513 || TREE_CODE (decl
) == FUNCTION_DECL
)
2514 && (!DECL_EXTERNAL (decl
) || DECL_INITIAL (decl
))
2515 && !DECL_SECTION_NAME (decl
))
2517 enum GHS_section_kind kind
= GHS_SECTION_KIND_DEFAULT
;
2518 const char * chosen_section
;
2520 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2521 kind
= GHS_SECTION_KIND_TEXT
;
2524 /* First choose a section kind based on the data area of the decl. */
2525 switch (v850_get_data_area (decl
))
2531 kind
= ((TREE_READONLY (decl
))
2532 ? GHS_SECTION_KIND_ROSDATA
2533 : GHS_SECTION_KIND_SDATA
);
2537 kind
= GHS_SECTION_KIND_TDATA
;
2541 kind
= ((TREE_READONLY (decl
))
2542 ? GHS_SECTION_KIND_ROZDATA
2543 : GHS_SECTION_KIND_ZDATA
);
2546 case DATA_AREA_NORMAL
: /* default data area */
2547 if (TREE_READONLY (decl
))
2548 kind
= GHS_SECTION_KIND_RODATA
;
2549 else if (DECL_INITIAL (decl
))
2550 kind
= GHS_SECTION_KIND_DATA
;
2552 kind
= GHS_SECTION_KIND_BSS
;
2556 /* Now, if the section kind has been explicitly renamed,
2557 then attach a section attribute. */
2558 chosen_section
= GHS_current_section_names
[(int) kind
];
2560 /* Otherwise, if this kind of section needs an explicit section
2561 attribute, then also attach one. */
2562 if (chosen_section
== NULL
)
2563 chosen_section
= GHS_default_section_names
[(int) kind
];
2567 /* Only set the section name if specified by a pragma, because
2568 otherwise it will force those variables to get allocated storage
2569 in this module, rather than by the linker. */
2570 set_decl_section_name (decl
, chosen_section
);
2575 /* Construct a DISPOSE instruction that is the equivalent of
2576 the given RTX. We have already verified that this should
2580 construct_dispose_instruction (rtx op
)
2582 int count
= XVECLEN (op
, 0);
2584 unsigned long int mask
;
2586 static char buff
[ 120 ]; /* XXX */
2591 error ("bogus DISPOSE construction: %d", count
);
2595 /* Work out how many bytes to pop off the
2596 stack before retrieving registers. */
2597 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2598 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2599 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2601 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2603 /* Each pop will remove 4 bytes from the stack.... */
2604 stack_bytes
-= (count
- 2) * 4;
2606 /* Make sure that the amount we are popping
2607 will fit into the DISPOSE instruction. */
2608 if (stack_bytes
> 128)
2610 error ("too much stack space to dispose of: %d", stack_bytes
);
2614 /* Now compute the bit mask of registers to push. */
2617 for (i
= 2; i
< count
; i
++)
2619 rtx vector_element
= XVECEXP (op
, 0, i
);
2621 gcc_assert (GET_CODE (vector_element
) == SET
);
2622 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2623 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2626 if (REGNO (SET_DEST (vector_element
)) == 2)
2629 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2632 if (! TARGET_DISABLE_CALLT
2633 && (use_callt
|| stack_bytes
== 0))
2637 sprintf (buff
, "callt ctoff(__callt_return_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29);
2642 for (i
= 20; i
< 32; i
++)
2643 if (mask
& (1 << i
))
2647 sprintf (buff
, "callt ctoff(__callt_return_r31c)");
2649 sprintf (buff
, "callt ctoff(__callt_return_r%d_r%s)",
2650 i
, (mask
& (1 << 31)) ? "31c" : "29");
2655 static char regs
[100]; /* XXX */
2658 /* Generate the DISPOSE instruction. Note we could just issue the
2659 bit mask as a number as the assembler can cope with this, but for
2660 the sake of our readers we turn it into a textual description. */
2664 for (i
= 20; i
< 32; i
++)
2666 if (mask
& (1 << i
))
2671 strcat (regs
, ", ");
2676 strcat (regs
, reg_names
[ first
]);
2678 for (i
++; i
< 32; i
++)
2679 if ((mask
& (1 << i
)) == 0)
2684 strcat (regs
, " - ");
2685 strcat (regs
, reg_names
[ i
- 1 ] );
2690 sprintf (buff
, "dispose %d {%s}, r31", stack_bytes
/ 4, regs
);
2696 /* Construct a PREPARE instruction that is the equivalent of
2697 the given RTL. We have already verified that this should
2701 construct_prepare_instruction (rtx op
)
2705 unsigned long int mask
;
2707 static char buff
[ 120 ]; /* XXX */
2710 if (XVECLEN (op
, 0) <= 1)
2712 error ("bogus PREPEARE construction: %d", XVECLEN (op
, 0));
2716 /* Work out how many bytes to push onto
2717 the stack after storing the registers. */
2718 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2719 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2720 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2722 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2725 /* Make sure that the amount we are popping
2726 will fit into the DISPOSE instruction. */
2727 if (stack_bytes
< -128)
2729 error ("too much stack space to prepare: %d", stack_bytes
);
2733 /* Now compute the bit mask of registers to push. */
2736 for (i
= 1; i
< XVECLEN (op
, 0); i
++)
2738 rtx vector_element
= XVECEXP (op
, 0, i
);
2740 if (GET_CODE (vector_element
) == CLOBBER
)
2743 gcc_assert (GET_CODE (vector_element
) == SET
);
2744 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2745 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2748 if (REGNO (SET_SRC (vector_element
)) == 2)
2751 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2755 stack_bytes
+= count
* 4;
2757 if ((! TARGET_DISABLE_CALLT
)
2758 && (use_callt
|| stack_bytes
== 0))
2762 sprintf (buff
, "callt ctoff(__callt_save_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29 );
2766 for (i
= 20; i
< 32; i
++)
2767 if (mask
& (1 << i
))
2771 sprintf (buff
, "callt ctoff(__callt_save_r31c)");
2773 sprintf (buff
, "callt ctoff(__callt_save_r%d_r%s)",
2774 i
, (mask
& (1 << 31)) ? "31c" : "29");
2778 static char regs
[100]; /* XXX */
2782 /* Generate the PREPARE instruction. Note we could just issue the
2783 bit mask as a number as the assembler can cope with this, but for
2784 the sake of our readers we turn it into a textual description. */
2788 for (i
= 20; i
< 32; i
++)
2790 if (mask
& (1 << i
))
2795 strcat (regs
, ", ");
2800 strcat (regs
, reg_names
[ first
]);
2802 for (i
++; i
< 32; i
++)
2803 if ((mask
& (1 << i
)) == 0)
2808 strcat (regs
, " - ");
2809 strcat (regs
, reg_names
[ i
- 1 ] );
2814 sprintf (buff
, "prepare {%s}, %d", regs
, (- stack_bytes
) / 4);
2820 /* Return an RTX indicating where the return address to the
2821 calling function can be found. */
2824 v850_return_addr (int count
)
2829 return get_hard_reg_initial_val (Pmode
, LINK_POINTER_REGNUM
);
2832 /* Implement TARGET_ASM_INIT_SECTIONS. */
2835 v850_asm_init_sections (void)
2838 = get_unnamed_section (0, output_section_asm_op
,
2839 "\t.section .rosdata,\"a\"");
2842 = get_unnamed_section (0, output_section_asm_op
,
2843 "\t.section .rozdata,\"a\"");
2846 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2847 "\t.section .tdata,\"aw\"");
2850 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2851 "\t.section .zdata,\"aw\"");
2854 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
2855 output_section_asm_op
,
2856 "\t.section .zbss,\"aw\"");
2860 v850_select_section (tree exp
,
2861 int reloc ATTRIBUTE_UNUSED
,
2862 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
2864 if (TREE_CODE (exp
) == VAR_DECL
)
2867 if (!TREE_READONLY (exp
)
2868 || !DECL_INITIAL (exp
)
2869 || (DECL_INITIAL (exp
) != error_mark_node
2870 && !TREE_CONSTANT (DECL_INITIAL (exp
))))
2875 switch (v850_get_data_area (exp
))
2878 return is_const
? rozdata_section
: zdata_section
;
2881 return tdata_section
;
2884 return is_const
? rosdata_section
: sdata_section
;
2887 return is_const
? readonly_data_section
: data_section
;
2890 return readonly_data_section
;
2893 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2896 v850_function_value_regno_p (const unsigned int regno
)
2898 return (regno
== RV_REGNUM
);
2901 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2904 v850_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2906 /* Return values > 8 bytes in length in memory. */
2907 return int_size_in_bytes (type
) > 8
2908 || TYPE_MODE (type
) == BLKmode
2909 /* With the rh850 ABI return all aggregates in memory. */
2910 || ((! TARGET_GCC_ABI
) && AGGREGATE_TYPE_P (type
))
2914 /* Worker function for TARGET_FUNCTION_VALUE. */
2917 v850_function_value (const_tree valtype
,
2918 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
2919 bool outgoing ATTRIBUTE_UNUSED
)
2921 return gen_rtx_REG (TYPE_MODE (valtype
), RV_REGNUM
);
2924 /* Implement TARGET_LIBCALL_VALUE. */
2927 v850_libcall_value (machine_mode mode
,
2928 const_rtx func ATTRIBUTE_UNUSED
)
2930 return gen_rtx_REG (mode
, RV_REGNUM
);
2934 /* Worker function for TARGET_CAN_ELIMINATE. */
2937 v850_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
2939 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
2942 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
2944 If TARGET_APP_REGS is not defined then add r2 and r5 to
2945 the pool of fixed registers. See PR 14505. */
2948 v850_conditional_register_usage (void)
2950 if (TARGET_APP_REGS
)
2952 fixed_regs
[2] = 0; call_used_regs
[2] = 0;
2953 fixed_regs
[5] = 0; call_used_regs
[5] = 1;
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.  */

static void
v850_asm_trampoline_template (FILE *f)
{
  fprintf (f, "\tjarl .+4,r12\n");
  fprintf (f, "\tld.w 12[r12],r19\n");
  fprintf (f, "\tld.w 16[r12],r12\n");
  fprintf (f, "\tjmp [r12]\n");
  fprintf (f, "\tnop\n");
  fprintf (f, "\t.long 0\n");
  fprintf (f, "\t.long 0\n");
}
2971 /* Worker function for TARGET_TRAMPOLINE_INIT. */
2974 v850_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
2976 rtx mem
, fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
2978 emit_block_move (m_tramp
, assemble_trampoline_template (),
2979 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
2981 mem
= adjust_address (m_tramp
, SImode
, 16);
2982 emit_move_insn (mem
, chain_value
);
2983 mem
= adjust_address (m_tramp
, SImode
, 20);
2984 emit_move_insn (mem
, fnaddr
);
2988 v850_issue_rate (void)
2990 return (TARGET_V850E2_UP
? 2 : 1);
2993 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
2996 v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
2998 return (GET_CODE (x
) == CONST_DOUBLE
2999 || !(GET_CODE (x
) == CONST
3000 && GET_CODE (XEXP (x
, 0)) == PLUS
3001 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
3002 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3003 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x
, 0), 1)))));
3006 /* Helper function for `v850_legitimate_address_p'. */
3009 v850_reg_ok_for_base_p (const_rtx reg
, bool strict_p
)
3013 return REGNO_OK_FOR_BASE_P (REGNO (reg
));
3019 /* Accept either REG or SUBREG where a register is valid. */
3022 v850_rtx_ok_for_base_p (const_rtx x
, bool strict_p
)
3024 return ((REG_P (x
) && v850_reg_ok_for_base_p (x
, strict_p
))
3025 || (SUBREG_P (x
) && REG_P (SUBREG_REG (x
))
3026 && v850_reg_ok_for_base_p (SUBREG_REG (x
), strict_p
)));
3029 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
3032 v850_legitimate_address_p (machine_mode mode
, rtx x
, bool strict_p
,
3033 addr_space_t as ATTRIBUTE_UNUSED
,
3034 code_helper
= ERROR_MARK
)
3036 gcc_assert (ADDR_SPACE_GENERIC_P (as
));
3038 if (v850_rtx_ok_for_base_p (x
, strict_p
))
3040 if (CONSTANT_ADDRESS_P (x
)
3041 && (mode
== QImode
|| INTVAL (x
) % 2 == 0)
3042 && (GET_MODE_SIZE (mode
) <= 4 || INTVAL (x
) % 4 == 0))
3044 if (GET_CODE (x
) == LO_SUM
3045 && REG_P (XEXP (x
, 0))
3046 && v850_reg_ok_for_base_p (XEXP (x
, 0), strict_p
)
3047 && CONSTANT_P (XEXP (x
, 1))
3048 && (!CONST_INT_P (XEXP (x
, 1))
3049 || ((mode
== QImode
|| INTVAL (XEXP (x
, 1)) % 2 == 0)
3050 && constraint_satisfied_p (XEXP (x
, 1), CONSTRAINT_K
)))
3051 && GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (word_mode
))
3053 if (special_symbolref_operand (x
, mode
)
3054 && (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (word_mode
)))
3056 if (GET_CODE (x
) == PLUS
3057 && v850_rtx_ok_for_base_p (XEXP (x
, 0), strict_p
)
3058 && (constraint_satisfied_p (XEXP (x
, 1), CONSTRAINT_K
)
3059 || (TARGET_V850E2V3_UP
3060 && (mode
== SImode
|| mode
== HImode
|| mode
== QImode
)
3061 && constraint_satisfied_p (XEXP (x
, 1), CONSTRAINT_W
)))
3062 && ((mode
== QImode
|| INTVAL (XEXP (x
, 1)) % 2 == 0)
3063 && CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))
3064 + (GET_MODE_NUNITS (mode
) * UNITS_PER_WORD
))))
3071 v850_memory_move_cost (machine_mode mode
,
3072 reg_class_t reg_class ATTRIBUTE_UNUSED
,
3075 switch (GET_MODE_SIZE (mode
))
3085 return (GET_MODE_SIZE (mode
) / 2) * (in
? 3 : 1);
3090 v850_adjust_insn_length (rtx_insn
*insn
, int length
)
3092 if (TARGET_V850E3V5_UP
)
3096 if (TARGET_LONG_CALLS
)
3098 /* call_internal_long, call_value_internal_long. */
3106 /* call_internal_short, call_value_internal_short. */
3115 /* V850 specific attributes. */
3117 TARGET_GNU_ATTRIBUTES (v850_attribute_table
,
3119 /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
3120 affects_type_identity, handler, exclude } */
3121 { "interrupt_handler", 0, 0, true, false, false, false,
3122 v850_handle_interrupt_attribute
, NULL
},
3123 { "interrupt", 0, 0, true, false, false, false,
3124 v850_handle_interrupt_attribute
, NULL
},
3125 { "sda", 0, 0, true, false, false, false,
3126 v850_handle_data_area_attribute
, NULL
},
3127 { "tda", 0, 0, true, false, false, false,
3128 v850_handle_data_area_attribute
, NULL
},
3129 { "zda", 0, 0, true, false, false, false,
3130 v850_handle_data_area_attribute
, NULL
}
/* Implement TARGET_OPTION_OVERRIDE.  Validate and adjust the
   target-dependent options after all command-line processing.  */

static void
v850_option_override (void)
{
  /* Exception unwinding needs a frame pointer to walk the stack.  */
  if (flag_exceptions || flag_non_call_exceptions)
    flag_omit_frame_pointer = 0;

  /* The RH850 ABI does not (currently) support the use of the CALLT
     instruction.  */
  if (! TARGET_GCC_ABI)
    target_flags |= MASK_DISABLE_CALLT;

  /* Save the initial options in case the user does function specific
     options.  */
  target_option_default_node = target_option_current_node
    = build_target_option_node (&global_options, &global_options_set);
}
/* Return the assembler template for a DImode move described by
   OPERANDS (operands[0] = destination, operands[1] = source).
   Register pairs can be moved with a pair of "mov" instructions,
   ordered so that the destination's first half is never clobbered
   before the source's second half is read.  Memory transfers use the
   double-word ld.dw/st.dw instructions when the register pair is
   even-aligned, and fall back to two single-word loads/stores
   otherwise.  */

const char *
v850_gen_movdi (rtx * operands)
{
  if (REG_P (operands[0]))
    {
      if (REG_P (operands[1]))
	{
	  /* Overlapping pairs: copy low half first only when the
	     destination starts one register below the source.  */
	  if (REGNO (operands[0]) == (REGNO (operands[1]) - 1))
	    return "mov %1, %0; mov %R1, %R0";

	  return "mov %R1, %R0; mov %1, %0";
	}

      if (MEM_P (operands[1]))
	{
	  if (REGNO (operands[0]) & 1)
	    /* Use two load word instructions to synthesise a load double.  */
	    return "ld.w %1, %0 ; ld.w %R1, %R0" ;

	  return "ld.dw %1, %0";
	}
    }

  /* Store to memory: the source must be a register pair.  */
  gcc_assert (REG_P (operands[1]));

  if (REGNO (operands[1]) & 1)
    /* Use two store word instructions to synthesise a store double.  */
    return "st.w %1, %0 ; st.w %R1, %R0 ";

  return "st.dw %1, %0";
}
3183 /* Implement TARGET_HARD_REGNO_MODE_OK. */
3186 v850_hard_regno_mode_ok (unsigned int regno
, machine_mode mode
)
3188 return GET_MODE_SIZE (mode
) <= 4 || ((regno
& 1) == 0 && regno
!= 0);
3191 /* Implement TARGET_MODES_TIEABLE_P. */
3194 v850_modes_tieable_p (machine_mode mode1
, machine_mode mode2
)
3196 return (mode1
== mode2
3197 || (GET_MODE_SIZE (mode1
) <= 4 && GET_MODE_SIZE (mode2
) <= 4));
/* Implement TARGET_CAN_INLINE_P.  Allow CALLEE to be inlined into
   CALLER when their target options are compatible: either identical,
   or differing only in flags (currently just -mprolog-function) that
   do not change the generated code's ABI.

   NOTE(review): the null-check guards before the default-node
   fallbacks and the early "return true" were unreadable in this copy
   and have been reconstructed — verify against the upstream
   sources.  */

static bool
v850_can_inline_p (tree caller, tree callee)
{
  tree caller_tree = DECL_FUNCTION_SPECIFIC_TARGET (caller);
  tree callee_tree = DECL_FUNCTION_SPECIFIC_TARGET (callee);

  /* Target flags that are safe to ignore when deciding
     compatibility.  */
  const unsigned HOST_WIDE_INT safe_flags = MASK_PROLOG_FUNCTION;

  /* Functions without function-specific options use the defaults.  */
  if (!callee_tree)
    callee_tree = target_option_default_node;
  if (!caller_tree)
    caller_tree = target_option_default_node;
  if (callee_tree == caller_tree)
    return true;

  cl_target_option *caller_opts = TREE_TARGET_OPTION (caller_tree);
  cl_target_option *callee_opts = TREE_TARGET_OPTION (callee_tree);

  /* Compatible iff the target flags match, ignoring the safe ones.  */
  return ((caller_opts->x_target_flags & ~safe_flags)
	  == (callee_opts->x_target_flags & ~safe_flags));
}
/* Initialize the GCC target structure.  Each TARGET_xxx hook below is
   redefined to the V850-specific implementation (or a generic hook /
   constant) before TARGET_INITIALIZER is expanded into targetm.  */

/* Options and costs.  */
#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE v850_option_override

#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST v850_memory_move_cost

/* Assembler output.  */
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef  TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

/* Attributes and sections.  */
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION  v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them.  */
#undef  TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef  TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

/* Costs, reorg and scheduling.  */
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

/* Calling conventions.  */
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value
#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE v850_libcall_value

#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef  TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_arg_info_true

#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

/* Frame layout, registers and trampolines.  */
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

/* Addressing and constants.  */
#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p

#undef  TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P v850_legitimate_address_p

#undef  TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

#undef  TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK v850_hard_regno_mode_ok

#undef  TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P v850_modes_tieable_p

#undef  TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM 32

#undef  TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed

#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P v850_can_inline_p
/* The global target hook vector, filled in from the TARGET_xxx macro
   definitions above via TARGET_INITIALIZER.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector root tables generated by gengtype for the GTY
   variables in this file.  */
#include "gt-v850.h"