1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996-2017 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
31 #include "stringpool.h"
33 #include "insn-config.h"
37 #include "diagnostic-core.h"
38 #include "stor-layout.h"
41 #include "conditions.h"
43 #include "insn-attr.h"
48 /* This file should be included last. */
49 #include "target-def.h"
52 #define streq(a,b) (strcmp (a, b) == 0)
55 static void v850_print_operand_address (FILE *, machine_mode
, rtx
);
57 /* Names of the various data areas used on the v850. */
58 const char * GHS_default_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
59 const char * GHS_current_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
61 /* Track the current data area set by the data area pragma (which
62 can be nested). Tested by check_default_data_area. */
63 data_area_stack_element
* data_area_stack
= NULL
;
65 /* True if we don't need to check any more if the current
66 function is an interrupt handler. */
67 static int v850_interrupt_cache_p
= FALSE
;
69 rtx v850_compare_op0
, v850_compare_op1
;
71 /* Whether current function is an interrupt handler. */
72 static int v850_interrupt_p
= FALSE
;
74 static GTY(()) section
* rosdata_section
;
75 static GTY(()) section
* rozdata_section
;
76 static GTY(()) section
* tdata_section
;
77 static GTY(()) section
* zdata_section
;
78 static GTY(()) section
* zbss_section
;
80 /* We use this to wrap all emitted insns in the prologue. */
84 if (GET_CODE (x
) != CLOBBER
)
85 RTX_FRAME_RELATED_P (x
) = 1;
89 /* Mark all the subexpressions of the PARALLEL rtx PAR as
90 frame-related. Return PAR.
92 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
93 PARALLEL rtx other than the first if they do not have the
94 FRAME_RELATED flag set on them. */
97 v850_all_frame_related (rtx par
)
99 int len
= XVECLEN (par
, 0);
102 gcc_assert (GET_CODE (par
) == PARALLEL
);
103 for (i
= 0; i
< len
; i
++)
104 F (XVECEXP (par
, 0, i
));
109 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
110 Specify whether to pass the argument by reference. */
113 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
114 machine_mode mode
, const_tree type
,
115 bool named ATTRIBUTE_UNUSED
)
117 unsigned HOST_WIDE_INT size
;
123 size
= int_size_in_bytes (type
);
125 size
= GET_MODE_SIZE (mode
);
130 /* Return an RTX to represent where an argument with mode MODE
131 and type TYPE will be passed to a function. If the result
132 is NULL_RTX, the argument will be pushed. */
135 v850_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
136 const_tree type
, bool named
)
138 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
139 rtx result
= NULL_RTX
;
146 size
= int_size_in_bytes (type
);
148 size
= GET_MODE_SIZE (mode
);
150 size
= (size
+ UNITS_PER_WORD
-1) & ~(UNITS_PER_WORD
-1);
154 /* Once we have stopped using argument registers, do not start up again. */
155 cum
->nbytes
= 4 * UNITS_PER_WORD
;
160 align
= UNITS_PER_WORD
;
161 else if (size
<= UNITS_PER_WORD
&& type
)
162 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
166 cum
->nbytes
= (cum
->nbytes
+ align
- 1) &~(align
- 1);
168 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
171 if (type
== NULL_TREE
172 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
175 switch (cum
->nbytes
/ UNITS_PER_WORD
)
178 result
= gen_rtx_REG (mode
, 6);
181 result
= gen_rtx_REG (mode
, 7);
184 result
= gen_rtx_REG (mode
, 8);
187 result
= gen_rtx_REG (mode
, 9);
196 /* Return the number of bytes which must be put into registers
197 for values which are part in registers and part in memory. */
199 v850_arg_partial_bytes (cumulative_args_t cum_v
, machine_mode mode
,
200 tree type
, bool named
)
202 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
209 size
= int_size_in_bytes (type
);
211 size
= GET_MODE_SIZE (mode
);
217 align
= UNITS_PER_WORD
;
219 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
223 cum
->nbytes
= (cum
->nbytes
+ align
- 1) & ~ (align
- 1);
225 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
228 if (cum
->nbytes
+ size
<= 4 * UNITS_PER_WORD
)
231 if (type
== NULL_TREE
232 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
235 return 4 * UNITS_PER_WORD
- cum
->nbytes
;
238 /* Update the data in CUM to advance over an argument
239 of mode MODE and data type TYPE.
240 (TYPE is null for libcalls where that information may not be available.) */
243 v850_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
244 const_tree type
, bool named ATTRIBUTE_UNUSED
)
246 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
249 cum
->nbytes
+= (((mode
!= BLKmode
250 ? GET_MODE_SIZE (mode
)
251 : int_size_in_bytes (type
)) + UNITS_PER_WORD
- 1)
254 cum
->nbytes
+= (((type
&& int_size_in_bytes (type
) > 8
255 ? GET_MODE_SIZE (Pmode
)
257 ? GET_MODE_SIZE (mode
)
258 : int_size_in_bytes (type
))) + UNITS_PER_WORD
- 1)
262 /* Return the high and low words of a CONST_DOUBLE */
265 const_double_split (rtx x
, HOST_WIDE_INT
* p_high
, HOST_WIDE_INT
* p_low
)
267 if (GET_CODE (x
) == CONST_DOUBLE
)
271 switch (GET_MODE (x
))
274 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (x
), t
);
275 *p_high
= t
[1]; /* since v850 is little endian */
276 *p_low
= t
[0]; /* high is second word */
280 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x
), *p_high
);
286 *p_high
= CONST_DOUBLE_HIGH (x
);
287 *p_low
= CONST_DOUBLE_LOW (x
);
295 fatal_insn ("const_double_split got a bad insn:", x
);
299 /* Return the cost of the rtx R with code CODE. */
302 const_costs_int (HOST_WIDE_INT value
, int zero_cost
)
304 if (CONST_OK_FOR_I (value
))
306 else if (CONST_OK_FOR_J (value
))
308 else if (CONST_OK_FOR_K (value
))
315 const_costs (rtx r
, enum rtx_code c
)
317 HOST_WIDE_INT high
, low
;
322 return const_costs_int (INTVAL (r
), 0);
325 const_double_split (r
, &high
, &low
);
326 if (GET_MODE (r
) == SFmode
)
327 return const_costs_int (high
, 1);
329 return const_costs_int (high
, 1) + const_costs_int (low
, 1);
345 v850_rtx_costs (rtx x
, machine_mode mode
, int outer_code
,
346 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
348 enum rtx_code code
= GET_CODE (x
);
357 *total
= COSTS_N_INSNS (const_costs (x
, code
));
364 if (TARGET_V850E
&& !speed
)
372 && (mode
== SImode
|| mode
== HImode
|| mode
== QImode
))
374 if (GET_CODE (XEXP (x
, 1)) == REG
)
376 else if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
378 if (CONST_OK_FOR_O (INTVAL (XEXP (x
, 1))))
380 else if (CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))))
389 if (outer_code
== COMPARE
)
398 /* Print operand X using operand code CODE to assembly language output file
402 v850_print_operand (FILE * file
, rtx x
, int code
)
404 HOST_WIDE_INT high
, low
;
409 /* We use 'c' operands with symbols for .vtinherit. */
410 if (GET_CODE (x
) == SYMBOL_REF
)
412 output_addr_const(file
, x
);
419 switch ((code
== 'B' || code
== 'C')
420 ? reverse_condition (GET_CODE (x
)) : GET_CODE (x
))
423 if (code
== 'c' || code
== 'C')
424 fprintf (file
, "nz");
426 fprintf (file
, "ne");
429 if (code
== 'c' || code
== 'C')
435 fprintf (file
, "ge");
438 fprintf (file
, "gt");
441 fprintf (file
, "le");
444 fprintf (file
, "lt");
447 fprintf (file
, "nl");
453 fprintf (file
, "nh");
462 case 'F': /* High word of CONST_DOUBLE. */
463 switch (GET_CODE (x
))
466 fprintf (file
, "%d", (INTVAL (x
) >= 0) ? 0 : -1);
470 const_double_split (x
, &high
, &low
);
471 fprintf (file
, "%ld", (long) high
);
478 case 'G': /* Low word of CONST_DOUBLE. */
479 switch (GET_CODE (x
))
482 fprintf (file
, "%ld", (long) INTVAL (x
));
486 const_double_split (x
, &high
, &low
);
487 fprintf (file
, "%ld", (long) low
);
495 fprintf (file
, "%d\n", (int)(INTVAL (x
) & 0xffff));
498 fprintf (file
, "%d", exact_log2 (INTVAL (x
)));
501 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
503 if (GET_CODE (x
) == CONST
)
504 x
= XEXP (XEXP (x
, 0), 0);
506 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
508 if (SYMBOL_REF_ZDA_P (x
))
509 fprintf (file
, "zdaoff");
510 else if (SYMBOL_REF_SDA_P (x
))
511 fprintf (file
, "sdaoff");
512 else if (SYMBOL_REF_TDA_P (x
))
513 fprintf (file
, "tdaoff");
518 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
519 output_addr_const (file
, x
);
522 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
524 if (GET_CODE (x
) == CONST
)
525 x
= XEXP (XEXP (x
, 0), 0);
527 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
529 if (SYMBOL_REF_ZDA_P (x
))
530 fprintf (file
, "r0");
531 else if (SYMBOL_REF_SDA_P (x
))
532 fprintf (file
, "gp");
533 else if (SYMBOL_REF_TDA_P (x
))
534 fprintf (file
, "ep");
538 case 'R': /* 2nd word of a double. */
539 switch (GET_CODE (x
))
542 fprintf (file
, reg_names
[REGNO (x
) + 1]);
546 machine_mode mode
= GET_MODE (x
);
547 x
= XEXP (adjust_address (x
, SImode
, 4), 0);
548 v850_print_operand_address (file
, mode
, x
);
549 if (GET_CODE (x
) == CONST_INT
)
550 fprintf (file
, "[r0]");
556 unsigned HOST_WIDE_INT v
= INTVAL (x
);
558 /* Trickery to avoid problems with shifting
559 32-bits at a time on a 32-bit host. */
562 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, v
);
567 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_HIGH (x
));
577 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
578 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), FALSE
))
585 /* Like an 'S' operand above, but for unsigned loads only. */
586 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), TRUE
))
591 case 'W': /* Print the instruction suffix. */
592 switch (GET_MODE (x
))
597 case E_QImode
: fputs (".b", file
); break;
598 case E_HImode
: fputs (".h", file
); break;
599 case E_SImode
: fputs (".w", file
); break;
600 case E_SFmode
: fputs (".w", file
); break;
603 case '.': /* Register r0. */
604 fputs (reg_names
[0], file
);
606 case 'z': /* Reg or zero. */
608 fputs (reg_names
[REGNO (x
)], file
);
609 else if ((GET_MODE(x
) == SImode
610 || GET_MODE(x
) == DFmode
611 || GET_MODE(x
) == SFmode
)
612 && x
== CONST0_RTX(GET_MODE(x
)))
613 fputs (reg_names
[0], file
);
616 gcc_assert (x
== const0_rtx
);
617 fputs (reg_names
[0], file
);
621 switch (GET_CODE (x
))
624 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
625 output_address (GET_MODE (x
),
626 gen_rtx_PLUS (SImode
, gen_rtx_REG (SImode
, 0),
629 output_address (GET_MODE (x
), XEXP (x
, 0));
633 fputs (reg_names
[REGNO (x
)], file
);
636 fputs (reg_names
[subreg_regno (x
)], file
);
639 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_LOW (x
));
647 v850_print_operand_address (file
, VOIDmode
, x
);
658 /* Output assembly language output for the address ADDR to FILE. */
661 v850_print_operand_address (FILE * file
, machine_mode
/*mode*/, rtx addr
)
663 switch (GET_CODE (addr
))
666 fprintf (file
, "0[");
667 v850_print_operand (file
, addr
, 0);
671 if (GET_CODE (XEXP (addr
, 0)) == REG
)
674 fprintf (file
, "lo(");
675 v850_print_operand (file
, XEXP (addr
, 1), 0);
676 fprintf (file
, ")[");
677 v850_print_operand (file
, XEXP (addr
, 0), 0);
682 if (GET_CODE (XEXP (addr
, 0)) == REG
683 || GET_CODE (XEXP (addr
, 0)) == SUBREG
)
686 v850_print_operand (file
, XEXP (addr
, 1), 0);
688 v850_print_operand (file
, XEXP (addr
, 0), 0);
693 v850_print_operand (file
, XEXP (addr
, 0), 0);
695 v850_print_operand (file
, XEXP (addr
, 1), 0);
700 const char *off_name
= NULL
;
701 const char *reg_name
= NULL
;
703 if (SYMBOL_REF_ZDA_P (addr
))
708 else if (SYMBOL_REF_SDA_P (addr
))
713 else if (SYMBOL_REF_TDA_P (addr
))
720 fprintf (file
, "%s(", off_name
);
721 output_addr_const (file
, addr
);
723 fprintf (file
, ")[%s]", reg_name
);
727 if (special_symbolref_operand (addr
, VOIDmode
))
729 rtx x
= XEXP (XEXP (addr
, 0), 0);
730 const char *off_name
;
731 const char *reg_name
;
733 if (SYMBOL_REF_ZDA_P (x
))
738 else if (SYMBOL_REF_SDA_P (x
))
743 else if (SYMBOL_REF_TDA_P (x
))
751 fprintf (file
, "%s(", off_name
);
752 output_addr_const (file
, addr
);
753 fprintf (file
, ")[%s]", reg_name
);
756 output_addr_const (file
, addr
);
759 output_addr_const (file
, addr
);
765 v850_print_operand_punct_valid_p (unsigned char code
)
770 /* When assemble_integer is used to emit the offsets for a switch
771 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
772 output_addr_const will normally barf at this, but it is OK to omit
773 the truncate and just emit the difference of the two labels. The
774 .hword directive will automatically handle the truncation for us.
776 Returns true if rtx was handled, false otherwise. */
779 v850_output_addr_const_extra (FILE * file
, rtx x
)
781 if (GET_CODE (x
) != TRUNCATE
)
786 /* We must also handle the case where the switch table was passed a
787 constant value and so has been collapsed. In this case the first
788 label will have been deleted. In such a case it is OK to emit
789 nothing, since the table will not be used.
790 (cf gcc.c-torture/compile/990801-1.c). */
791 if (GET_CODE (x
) == MINUS
792 && GET_CODE (XEXP (x
, 0)) == LABEL_REF
)
794 rtx_code_label
*label
795 = dyn_cast
<rtx_code_label
*> (XEXP (XEXP (x
, 0), 0));
796 if (label
&& label
->deleted ())
800 output_addr_const (file
, x
);
804 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
808 output_move_single (rtx
* operands
)
810 rtx dst
= operands
[0];
811 rtx src
= operands
[1];
818 else if (GET_CODE (src
) == CONST_INT
)
820 HOST_WIDE_INT value
= INTVAL (src
);
822 if (CONST_OK_FOR_J (value
)) /* Signed 5-bit immediate. */
825 else if (CONST_OK_FOR_K (value
)) /* Signed 16-bit immediate. */
826 return "movea %1,%.,%0";
828 else if (CONST_OK_FOR_L (value
)) /* Upper 16 bits were set. */
829 return "movhi hi0(%1),%.,%0";
831 /* A random constant. */
832 else if (TARGET_V850E_UP
)
835 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
838 else if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
840 HOST_WIDE_INT high
, low
;
842 const_double_split (src
, &high
, &low
);
844 if (CONST_OK_FOR_J (high
)) /* Signed 5-bit immediate. */
847 else if (CONST_OK_FOR_K (high
)) /* Signed 16-bit immediate. */
848 return "movea %F1,%.,%0";
850 else if (CONST_OK_FOR_L (high
)) /* Upper 16 bits were set. */
851 return "movhi hi0(%F1),%.,%0";
853 /* A random constant. */
854 else if (TARGET_V850E_UP
)
858 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
861 else if (GET_CODE (src
) == MEM
)
862 return "%S1ld%W1 %1,%0";
864 else if (special_symbolref_operand (src
, VOIDmode
))
865 return "movea %O1(%P1),%Q1,%0";
867 else if (GET_CODE (src
) == LABEL_REF
868 || GET_CODE (src
) == SYMBOL_REF
869 || GET_CODE (src
) == CONST
)
872 return "mov hilo(%1),%0";
874 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
877 else if (GET_CODE (src
) == HIGH
)
878 return "movhi hi(%1),%.,%0";
880 else if (GET_CODE (src
) == LO_SUM
)
882 operands
[2] = XEXP (src
, 0);
883 operands
[3] = XEXP (src
, 1);
884 return "movea lo(%3),%2,%0";
888 else if (GET_CODE (dst
) == MEM
)
891 return "%S0st%W0 %1,%0";
893 else if (GET_CODE (src
) == CONST_INT
&& INTVAL (src
) == 0)
894 return "%S0st%W0 %.,%0";
896 else if (GET_CODE (src
) == CONST_DOUBLE
897 && CONST0_RTX (GET_MODE (dst
)) == src
)
898 return "%S0st%W0 %.,%0";
901 fatal_insn ("output_move_single:", gen_rtx_SET (dst
, src
));
906 v850_select_cc_mode (enum rtx_code cond
, rtx op0
, rtx op1 ATTRIBUTE_UNUSED
)
908 if (GET_MODE_CLASS (GET_MODE (op0
)) == MODE_FLOAT
)
913 return CC_FPU_LEmode
;
915 return CC_FPU_GEmode
;
917 return CC_FPU_LTmode
;
919 return CC_FPU_GTmode
;
921 return CC_FPU_EQmode
;
923 return CC_FPU_NEmode
;
932 v850_gen_float_compare (enum rtx_code cond
, machine_mode mode ATTRIBUTE_UNUSED
, rtx op0
, rtx op1
)
934 if (GET_MODE (op0
) == DFmode
)
939 emit_insn (gen_cmpdf_le_insn (op0
, op1
));
942 emit_insn (gen_cmpdf_ge_insn (op0
, op1
));
945 emit_insn (gen_cmpdf_lt_insn (op0
, op1
));
948 emit_insn (gen_cmpdf_gt_insn (op0
, op1
));
951 /* Note: There is no NE comparison operator. So we
952 perform an EQ comparison and invert the branch.
953 See v850_float_nz_comparison for how this is done. */
955 emit_insn (gen_cmpdf_eq_insn (op0
, op1
));
961 else if (GET_MODE (v850_compare_op0
) == SFmode
)
966 emit_insn (gen_cmpsf_le_insn(op0
, op1
));
969 emit_insn (gen_cmpsf_ge_insn(op0
, op1
));
972 emit_insn (gen_cmpsf_lt_insn(op0
, op1
));
975 emit_insn (gen_cmpsf_gt_insn(op0
, op1
));
978 /* Note: There is no NE comparison operator. So we
979 perform an EQ comparison and invert the branch.
980 See v850_float_nz_comparison for how this is done. */
982 emit_insn (gen_cmpsf_eq_insn(op0
, op1
));
991 return v850_select_cc_mode (cond
, op0
, op1
);
995 v850_gen_compare (enum rtx_code cond
, machine_mode mode
, rtx op0
, rtx op1
)
997 if (GET_MODE_CLASS(GET_MODE (op0
)) != MODE_FLOAT
)
999 emit_insn (gen_cmpsi_insn (op0
, op1
));
1000 return gen_rtx_fmt_ee (cond
, mode
, gen_rtx_REG(CCmode
, CC_REGNUM
), const0_rtx
);
1005 mode
= v850_gen_float_compare (cond
, mode
, op0
, op1
);
1006 cc_reg
= gen_rtx_REG (mode
, CC_REGNUM
);
1007 emit_insn (gen_rtx_SET (cc_reg
, gen_rtx_REG (mode
, FCC_REGNUM
)));
1009 return gen_rtx_fmt_ee (cond
, mode
, cc_reg
, const0_rtx
);
1013 /* Return maximum offset supported for a short EP memory reference of mode
1014 MODE and signedness UNSIGNEDP. */
1017 ep_memory_offset (machine_mode mode
, int unsignedp ATTRIBUTE_UNUSED
)
1024 if (TARGET_SMALL_SLD
)
1025 max_offset
= (1 << 4);
1026 else if ((TARGET_V850E_UP
)
1028 max_offset
= (1 << 4);
1030 max_offset
= (1 << 7);
1034 if (TARGET_SMALL_SLD
)
1035 max_offset
= (1 << 5);
1036 else if ((TARGET_V850E_UP
)
1038 max_offset
= (1 << 5);
1040 max_offset
= (1 << 8);
1045 max_offset
= (1 << 8);
1055 /* Return true if OP is a valid short EP memory reference */
1058 ep_memory_operand (rtx op
, machine_mode mode
, int unsigned_load
)
1064 /* If we are not using the EP register on a per-function basis
1065 then do not allow this optimization at all. This is to
1066 prevent the use of the SLD/SST instructions which cannot be
1067 guaranteed to work properly due to a hardware bug. */
1071 if (GET_CODE (op
) != MEM
)
1074 max_offset
= ep_memory_offset (mode
, unsigned_load
);
1076 mask
= GET_MODE_SIZE (mode
) - 1;
1078 addr
= XEXP (op
, 0);
1079 if (GET_CODE (addr
) == CONST
)
1080 addr
= XEXP (addr
, 0);
1082 switch (GET_CODE (addr
))
1088 return SYMBOL_REF_TDA_P (addr
);
1091 return REGNO (addr
) == EP_REGNUM
;
1094 op0
= XEXP (addr
, 0);
1095 op1
= XEXP (addr
, 1);
1096 if (GET_CODE (op1
) == CONST_INT
1097 && INTVAL (op1
) < max_offset
1098 && INTVAL (op1
) >= 0
1099 && (INTVAL (op1
) & mask
) == 0)
1101 if (GET_CODE (op0
) == REG
&& REGNO (op0
) == EP_REGNUM
)
1104 if (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_TDA_P (op0
))
1113 /* Substitute memory references involving a pointer, to use the ep pointer,
1114 taking care to save and preserve the ep. */
1117 substitute_ep_register (rtx_insn
*first_insn
,
1118 rtx_insn
*last_insn
,
1124 rtx reg
= gen_rtx_REG (Pmode
, regno
);
1129 df_set_regs_ever_live (1, true);
1130 *p_r1
= gen_rtx_REG (Pmode
, 1);
1131 *p_ep
= gen_rtx_REG (Pmode
, 30);
1136 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1137 2 * (uses
- 3), uses
, reg_names
[regno
],
1138 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)),
1139 INSN_UID (first_insn
), INSN_UID (last_insn
));
1141 if (NOTE_P (first_insn
))
1142 first_insn
= next_nonnote_insn (first_insn
);
1144 last_insn
= next_nonnote_insn (last_insn
);
1145 for (insn
= first_insn
; insn
&& insn
!= last_insn
; insn
= NEXT_INSN (insn
))
1147 if (NONJUMP_INSN_P (insn
))
1149 rtx pattern
= single_set (insn
);
1151 /* Replace the memory references. */
1155 /* Memory operands are signed by default. */
1156 int unsignedp
= FALSE
;
1158 if (GET_CODE (SET_DEST (pattern
)) == MEM
1159 && GET_CODE (SET_SRC (pattern
)) == MEM
)
1162 else if (GET_CODE (SET_DEST (pattern
)) == MEM
)
1163 p_mem
= &SET_DEST (pattern
);
1165 else if (GET_CODE (SET_SRC (pattern
)) == MEM
)
1166 p_mem
= &SET_SRC (pattern
);
1168 else if (GET_CODE (SET_SRC (pattern
)) == SIGN_EXTEND
1169 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1170 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1172 else if (GET_CODE (SET_SRC (pattern
)) == ZERO_EXTEND
1173 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1175 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1183 rtx addr
= XEXP (*p_mem
, 0);
1185 if (GET_CODE (addr
) == REG
&& REGNO (addr
) == (unsigned) regno
)
1186 *p_mem
= change_address (*p_mem
, VOIDmode
, *p_ep
);
1188 else if (GET_CODE (addr
) == PLUS
1189 && GET_CODE (XEXP (addr
, 0)) == REG
1190 && REGNO (XEXP (addr
, 0)) == (unsigned) regno
1191 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1192 && ((INTVAL (XEXP (addr
, 1)))
1193 < ep_memory_offset (GET_MODE (*p_mem
),
1195 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1196 *p_mem
= change_address (*p_mem
, VOIDmode
,
1197 gen_rtx_PLUS (Pmode
,
1205 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1206 insn
= prev_nonnote_insn (first_insn
);
1207 if (insn
&& NONJUMP_INSN_P (insn
)
1208 && GET_CODE (PATTERN (insn
)) == SET
1209 && SET_DEST (PATTERN (insn
)) == *p_ep
1210 && SET_SRC (PATTERN (insn
)) == *p_r1
)
1213 emit_insn_before (gen_rtx_SET (*p_r1
, *p_ep
), first_insn
);
1215 emit_insn_before (gen_rtx_SET (*p_ep
, reg
), first_insn
);
1216 emit_insn_before (gen_rtx_SET (*p_ep
, *p_r1
), last_insn
);
1220 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1221 the -mep mode to copy heavily used pointers to ep to use the implicit
1230 rtx_insn
*first_insn
;
1231 rtx_insn
*last_insn
;
1233 regs
[FIRST_PSEUDO_REGISTER
];
1242 /* If not ep mode, just return now. */
1246 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1249 regs
[i
].first_insn
= NULL
;
1250 regs
[i
].last_insn
= NULL
;
1253 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1255 switch (GET_CODE (insn
))
1257 /* End of basic block */
1264 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1266 if (max_uses
< regs
[i
].uses
)
1268 max_uses
= regs
[i
].uses
;
1274 substitute_ep_register (regs
[max_regno
].first_insn
,
1275 regs
[max_regno
].last_insn
,
1276 max_uses
, max_regno
, &r1
, &ep
);
1280 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1283 regs
[i
].first_insn
= NULL
;
1284 regs
[i
].last_insn
= NULL
;
1292 pattern
= single_set (insn
);
1294 /* See if there are any memory references we can shorten. */
1297 rtx src
= SET_SRC (pattern
);
1298 rtx dest
= SET_DEST (pattern
);
1300 /* Memory operands are signed by default. */
1301 int unsignedp
= FALSE
;
1303 /* We might have (SUBREG (MEM)) here, so just get rid of the
1304 subregs to make this code simpler. */
1305 if (GET_CODE (dest
) == SUBREG
1306 && (GET_CODE (SUBREG_REG (dest
)) == MEM
1307 || GET_CODE (SUBREG_REG (dest
)) == REG
))
1308 alter_subreg (&dest
, false);
1309 if (GET_CODE (src
) == SUBREG
1310 && (GET_CODE (SUBREG_REG (src
)) == MEM
1311 || GET_CODE (SUBREG_REG (src
)) == REG
))
1312 alter_subreg (&src
, false);
1314 if (GET_CODE (dest
) == MEM
&& GET_CODE (src
) == MEM
)
1317 else if (GET_CODE (dest
) == MEM
)
1320 else if (GET_CODE (src
) == MEM
)
1323 else if (GET_CODE (src
) == SIGN_EXTEND
1324 && GET_CODE (XEXP (src
, 0)) == MEM
)
1325 mem
= XEXP (src
, 0);
1327 else if (GET_CODE (src
) == ZERO_EXTEND
1328 && GET_CODE (XEXP (src
, 0)) == MEM
)
1330 mem
= XEXP (src
, 0);
1336 if (mem
&& ep_memory_operand (mem
, GET_MODE (mem
), unsignedp
))
1339 else if (!use_ep
&& mem
1340 && GET_MODE_SIZE (GET_MODE (mem
)) <= UNITS_PER_WORD
)
1342 rtx addr
= XEXP (mem
, 0);
1346 if (GET_CODE (addr
) == REG
)
1349 regno
= REGNO (addr
);
1352 else if (GET_CODE (addr
) == PLUS
1353 && GET_CODE (XEXP (addr
, 0)) == REG
1354 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1355 && ((INTVAL (XEXP (addr
, 1)))
1356 < ep_memory_offset (GET_MODE (mem
), unsignedp
))
1357 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1360 regno
= REGNO (XEXP (addr
, 0));
1369 regs
[regno
].last_insn
= insn
;
1370 if (!regs
[regno
].first_insn
)
1371 regs
[regno
].first_insn
= insn
;
1375 /* Loading up a register in the basic block zaps any savings
1377 if (GET_CODE (dest
) == REG
)
1382 regno
= REGNO (dest
);
1383 endregno
= END_REGNO (dest
);
1387 /* See if we can use the pointer before this
1392 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1394 if (max_uses
< regs
[i
].uses
)
1396 max_uses
= regs
[i
].uses
;
1402 && max_regno
>= regno
1403 && max_regno
< endregno
)
1405 substitute_ep_register (regs
[max_regno
].first_insn
,
1406 regs
[max_regno
].last_insn
,
1407 max_uses
, max_regno
, &r1
,
1410 /* Since we made a substitution, zap all remembered
1412 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1415 regs
[i
].first_insn
= NULL
;
1416 regs
[i
].last_insn
= NULL
;
1421 for (i
= regno
; i
< endregno
; i
++)
1424 regs
[i
].first_insn
= NULL
;
1425 regs
[i
].last_insn
= NULL
;
1433 /* # of registers saved by the interrupt handler. */
1434 #define INTERRUPT_FIXED_NUM 5
1436 /* # of bytes for registers saved by the interrupt handler. */
1437 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1439 /* # of words saved for other registers. */
1440 #define INTERRUPT_ALL_SAVE_NUM \
1441 (30 - INTERRUPT_FIXED_NUM)
1443 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1446 compute_register_save_size (long * p_reg_saved
)
1450 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1451 int call_p
= df_regs_ever_live_p (LINK_POINTER_REGNUM
);
1454 /* Count space for the register saves. */
1455 if (interrupt_handler
)
1457 for (i
= 0; i
<= 31; i
++)
1461 if (df_regs_ever_live_p (i
) || call_p
)
1464 reg_saved
|= 1L << i
;
1468 /* We don't save/restore r0 or the stack pointer */
1470 case STACK_POINTER_REGNUM
:
1473 /* For registers with fixed use, we save them, set them to the
1474 appropriate value, and then restore them.
1475 These registers are handled specially, so don't list them
1476 on the list of registers to save in the prologue. */
1477 case 1: /* temp used to hold ep */
1479 case 10: /* temp used to call interrupt save/restore */
1480 case 11: /* temp used to call interrupt save/restore (long call) */
1481 case EP_REGNUM
: /* ep */
1488 /* Find the first register that needs to be saved. */
1489 for (i
= 0; i
<= 31; i
++)
1490 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1491 || i
== LINK_POINTER_REGNUM
))
1494 /* If it is possible that an out-of-line helper function might be
1495 used to generate the prologue for the current function, then we
1496 need to cover the possibility that such a helper function will
1497 be used, despite the fact that there might be gaps in the list of
1498 registers that need to be saved. To detect this we note that the
1499 helper functions always push at least register r29 (provided
1500 that the function is not an interrupt handler). */
1502 if (TARGET_PROLOG_FUNCTION
1503 && (i
== 2 || ((i
>= 20) && (i
< 30))))
1508 reg_saved
|= 1L << i
;
1513 /* Helper functions save all registers between the starting
1514 register and the last register, regardless of whether they
1515 are actually used by the function or not. */
1516 for (; i
<= 29; i
++)
1519 reg_saved
|= 1L << i
;
1522 if (df_regs_ever_live_p (LINK_POINTER_REGNUM
))
1525 reg_saved
|= 1L << LINK_POINTER_REGNUM
;
1530 for (; i
<= 31; i
++)
1531 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1532 || i
== LINK_POINTER_REGNUM
))
1535 reg_saved
|= 1L << i
;
1541 *p_reg_saved
= reg_saved
;
1546 /* Typical stack layout should looks like this after the function's prologue:
1551 | | arguments saved | Increasing
1552 | | on the stack | addresses
1553 PARENT arg pointer -> | | /
1554 -------------------------- ---- -------------------
1555 | | - space for argument split between regs & stack
1557 CHILD | | \ <-- (return address here)
1562 frame pointer -> | | \ ___
1569 | | arguments | | Decreasing
1570 (hard) frame pointer | | / | | addresses
1571 and stack pointer -> | | / _|_ |
1572 -------------------------- ---- ------------------ V */
1575 compute_frame_size (int size
, long * p_reg_saved
)
1578 + compute_register_save_size (p_reg_saved
)
1579 + crtl
->outgoing_args_size
);
1583 use_prolog_function (int num_save
, int frame_size
)
1585 int alloc_stack
= (4 * num_save
);
1586 int unalloc_stack
= frame_size
- alloc_stack
;
1587 int save_func_len
, restore_func_len
;
1588 int save_normal_len
, restore_normal_len
;
1590 if (! TARGET_DISABLE_CALLT
)
1591 save_func_len
= restore_func_len
= 2;
1593 save_func_len
= restore_func_len
= TARGET_LONG_CALLS
? (4+4+4+2+2) : 4;
1597 save_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1598 restore_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1601 /* See if we would have used ep to save the stack. */
1602 if (TARGET_EP
&& num_save
> 3 && (unsigned)frame_size
< 255)
1603 save_normal_len
= restore_normal_len
= (3 * 2) + (2 * num_save
);
1605 save_normal_len
= restore_normal_len
= 4 * num_save
;
1607 save_normal_len
+= CONST_OK_FOR_J (-frame_size
) ? 2 : 4;
1608 restore_normal_len
+= (CONST_OK_FOR_J (frame_size
) ? 2 : 4) + 2;
1610 /* Don't bother checking if we don't actually save any space.
1611 This happens for instance if one register is saved and additional
1612 stack space is allocated. */
1613 return ((save_func_len
+ restore_func_len
) < (save_normal_len
+ restore_normal_len
));
1617 increment_stack (signed int amount
, bool in_prologue
)
1624 inc
= GEN_INT (amount
);
1626 if (! CONST_OK_FOR_K (amount
))
1628 rtx reg
= gen_rtx_REG (Pmode
, 12);
1630 inc
= emit_move_insn (reg
, inc
);
1636 inc
= emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
, inc
));
1642 expand_prologue (void)
1645 unsigned int size
= get_frame_size ();
1646 unsigned int actual_fsize
;
1647 unsigned int init_stack_alloc
= 0;
1650 unsigned int num_save
;
1652 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1655 actual_fsize
= compute_frame_size (size
, ®_saved
);
1657 if (flag_stack_usage_info
)
1658 current_function_static_stack_size
= actual_fsize
;
1660 /* Save/setup global registers for interrupt functions right now. */
1661 if (interrupt_handler
)
1663 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1664 emit_insn (gen_callt_save_interrupt ());
1666 emit_insn (gen_save_interrupt ());
1668 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1670 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1671 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1673 /* Interrupt functions are not passed arguments, so no need to
1674 allocate space for split structure arguments. */
1675 gcc_assert (crtl
->args
.pretend_args_size
== 0);
1678 /* Identify all of the saved registers. */
1680 for (i
= 1; i
< 32; i
++)
1682 if (((1L << i
) & reg_saved
) != 0)
1683 save_regs
[num_save
++] = gen_rtx_REG (Pmode
, i
);
1686 if (crtl
->args
.pretend_args_size
)
1690 increment_stack (- (actual_fsize
+ crtl
->args
.pretend_args_size
), true);
1694 increment_stack (- crtl
->args
.pretend_args_size
, true);
1697 /* See if we have an insn that allocates stack space and saves the particular
1698 registers we want to. Note that the helpers won't
1699 allocate additional space for registers GCC saves to complete a
1700 "split" structure argument. */
1701 save_all
= NULL_RTX
;
1702 if (TARGET_PROLOG_FUNCTION
1703 && !crtl
->args
.pretend_args_size
1706 if (use_prolog_function (num_save
, actual_fsize
))
1708 int alloc_stack
= 4 * num_save
;
1711 save_all
= gen_rtx_PARALLEL
1713 rtvec_alloc (num_save
+ 1
1714 + (TARGET_DISABLE_CALLT
? (TARGET_LONG_CALLS
? 2 : 1) : 0)));
1716 XVECEXP (save_all
, 0, 0)
1717 = gen_rtx_SET (stack_pointer_rtx
,
1718 gen_rtx_PLUS (Pmode
,
1720 GEN_INT(-alloc_stack
)));
1721 for (i
= 0; i
< num_save
; i
++)
1724 XVECEXP (save_all
, 0, i
+1)
1725 = gen_rtx_SET (gen_rtx_MEM (Pmode
,
1726 gen_rtx_PLUS (Pmode
,
1732 if (TARGET_DISABLE_CALLT
)
1734 XVECEXP (save_all
, 0, num_save
+ 1)
1735 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 10));
1737 if (TARGET_LONG_CALLS
)
1738 XVECEXP (save_all
, 0, num_save
+ 2)
1739 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
1742 v850_all_frame_related (save_all
);
1744 code
= recog (save_all
, NULL
, NULL
);
1747 rtx insn
= emit_insn (save_all
);
1748 INSN_CODE (insn
) = code
;
1749 actual_fsize
-= alloc_stack
;
1753 save_all
= NULL_RTX
;
1757 /* If no prolog save function is available, store the registers the old
1758 fashioned way (one by one). */
1761 /* Special case interrupt functions that save all registers for a call. */
1762 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1764 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1765 emit_insn (gen_callt_save_all_interrupt ());
1767 emit_insn (gen_save_all_interrupt ());
1772 /* If the stack is too big, allocate it in chunks so we can do the
1773 register saves. We use the register save size so we use the ep
1775 if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1776 init_stack_alloc
= compute_register_save_size (NULL
);
1778 init_stack_alloc
= actual_fsize
;
1780 /* Save registers at the beginning of the stack frame. */
1781 offset
= init_stack_alloc
- 4;
1783 if (init_stack_alloc
)
1784 increment_stack (- (signed) init_stack_alloc
, true);
1786 /* Save the return pointer first. */
1787 if (num_save
> 0 && REGNO (save_regs
[num_save
-1]) == LINK_POINTER_REGNUM
)
1789 F (emit_move_insn (gen_rtx_MEM (SImode
,
1790 plus_constant (Pmode
,
1793 save_regs
[--num_save
]));
1797 for (i
= 0; i
< num_save
; i
++)
1799 F (emit_move_insn (gen_rtx_MEM (SImode
,
1800 plus_constant (Pmode
,
1809 /* Allocate the rest of the stack that was not allocated above (either it is
1810 > 32K or we just called a function to save the registers and needed more
1812 if (actual_fsize
> init_stack_alloc
)
1813 increment_stack (init_stack_alloc
- actual_fsize
, true);
1815 /* If we need a frame pointer, set it up now. */
1816 if (frame_pointer_needed
)
1817 F (emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
));
1822 expand_epilogue (void)
1825 unsigned int size
= get_frame_size ();
1827 int actual_fsize
= compute_frame_size (size
, ®_saved
);
1828 rtx restore_regs
[32];
1830 unsigned int num_restore
;
1832 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1834 /* Eliminate the initial stack stored by interrupt functions. */
1835 if (interrupt_handler
)
1837 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1838 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1839 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1842 /* Cut off any dynamic stack created. */
1843 if (frame_pointer_needed
)
1844 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1846 /* Identify all of the saved registers. */
1848 for (i
= 1; i
< 32; i
++)
1850 if (((1L << i
) & reg_saved
) != 0)
1851 restore_regs
[num_restore
++] = gen_rtx_REG (Pmode
, i
);
1854 /* See if we have an insn that restores the particular registers we
1856 restore_all
= NULL_RTX
;
1858 if (TARGET_PROLOG_FUNCTION
1860 && !crtl
->args
.pretend_args_size
1861 && !interrupt_handler
)
1863 int alloc_stack
= (4 * num_restore
);
1865 /* Don't bother checking if we don't actually save any space. */
1866 if (use_prolog_function (num_restore
, actual_fsize
))
1869 restore_all
= gen_rtx_PARALLEL (VOIDmode
,
1870 rtvec_alloc (num_restore
+ 2));
1871 XVECEXP (restore_all
, 0, 0) = ret_rtx
;
1872 XVECEXP (restore_all
, 0, 1)
1873 = gen_rtx_SET (stack_pointer_rtx
,
1874 gen_rtx_PLUS (Pmode
,
1876 GEN_INT (alloc_stack
)));
1878 offset
= alloc_stack
- 4;
1879 for (i
= 0; i
< num_restore
; i
++)
1881 XVECEXP (restore_all
, 0, i
+2)
1882 = gen_rtx_SET (restore_regs
[i
],
1884 gen_rtx_PLUS (Pmode
,
1890 code
= recog (restore_all
, NULL
, NULL
);
1896 actual_fsize
-= alloc_stack
;
1897 increment_stack (actual_fsize
, false);
1899 insn
= emit_jump_insn (restore_all
);
1900 INSN_CODE (insn
) = code
;
1903 restore_all
= NULL_RTX
;
1907 /* If no epilogue save function is available, restore the registers the
1908 old fashioned way (one by one). */
1911 unsigned int init_stack_free
;
1913 /* If the stack is large, we need to cut it down in 2 pieces. */
1914 if (interrupt_handler
)
1915 init_stack_free
= 0;
1916 else if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1917 init_stack_free
= 4 * num_restore
;
1919 init_stack_free
= (signed) actual_fsize
;
1921 /* Deallocate the rest of the stack if it is > 32K. */
1922 if ((unsigned int) actual_fsize
> init_stack_free
)
1923 increment_stack (actual_fsize
- init_stack_free
, false);
1925 /* Special case interrupt functions that save all registers
1927 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1929 if (! TARGET_DISABLE_CALLT
)
1930 emit_insn (gen_callt_restore_all_interrupt ());
1932 emit_insn (gen_restore_all_interrupt ());
1936 /* Restore registers from the beginning of the stack frame. */
1937 int offset
= init_stack_free
- 4;
1939 /* Restore the return pointer first. */
1941 && REGNO (restore_regs
[num_restore
- 1]) == LINK_POINTER_REGNUM
)
1943 emit_move_insn (restore_regs
[--num_restore
],
1944 gen_rtx_MEM (SImode
,
1945 plus_constant (Pmode
,
1951 for (i
= 0; i
< num_restore
; i
++)
1953 emit_move_insn (restore_regs
[i
],
1954 gen_rtx_MEM (SImode
,
1955 plus_constant (Pmode
,
1959 emit_use (restore_regs
[i
]);
1963 /* Cut back the remainder of the stack. */
1964 increment_stack (init_stack_free
+ crtl
->args
.pretend_args_size
,
1968 /* And return or use reti for interrupt handlers. */
1969 if (interrupt_handler
)
1971 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1972 emit_insn (gen_callt_return_interrupt ());
1974 emit_jump_insn (gen_return_interrupt ());
1976 else if (actual_fsize
)
1977 emit_jump_insn (gen_return_internal ());
1979 emit_jump_insn (gen_return_simple ());
1982 v850_interrupt_cache_p
= FALSE
;
1983 v850_interrupt_p
= FALSE
;
1986 /* Update the condition code from the insn. */
1988 notice_update_cc (rtx body
, rtx_insn
*insn
)
1990 switch (get_attr_cc (insn
))
1993 /* Insn does not affect CC at all. */
1997 /* Insn does not change CC, but the 0'th operand has been changed. */
1998 if (cc_status
.value1
!= 0
1999 && reg_overlap_mentioned_p (recog_data
.operand
[0], cc_status
.value1
))
2000 cc_status
.value1
= 0;
2004 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2005 V,C is in an unusable state. */
2007 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
| CC_NO_CARRY
;
2008 cc_status
.value1
= recog_data
.operand
[0];
2012 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2013 C is in an unusable state. */
2015 cc_status
.flags
|= CC_NO_CARRY
;
2016 cc_status
.value1
= recog_data
.operand
[0];
2020 /* The insn is a compare instruction. */
2022 cc_status
.value1
= SET_SRC (body
);
2026 /* Insn doesn't leave CC in a usable state. */
2035 /* Retrieve the data area that has been chosen for the given decl. */
2038 v850_get_data_area (tree decl
)
2040 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2041 return DATA_AREA_SDA
;
2043 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2044 return DATA_AREA_TDA
;
2046 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2047 return DATA_AREA_ZDA
;
2049 return DATA_AREA_NORMAL
;
2052 /* Store the indicated data area in the decl's attributes. */
2055 v850_set_data_area (tree decl
, v850_data_area data_area
)
2061 case DATA_AREA_SDA
: name
= get_identifier ("sda"); break;
2062 case DATA_AREA_TDA
: name
= get_identifier ("tda"); break;
2063 case DATA_AREA_ZDA
: name
= get_identifier ("zda"); break;
2068 DECL_ATTRIBUTES (decl
) = tree_cons
2069 (name
, NULL
, DECL_ATTRIBUTES (decl
));
2072 /* Handle an "interrupt" attribute; arguments as in
2073 struct attribute_spec.handler. */
2075 v850_handle_interrupt_attribute (tree
* node
,
2077 tree args ATTRIBUTE_UNUSED
,
2078 int flags ATTRIBUTE_UNUSED
,
2079 bool * no_add_attrs
)
2081 if (TREE_CODE (*node
) != FUNCTION_DECL
)
2083 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2085 *no_add_attrs
= true;
2091 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2092 struct attribute_spec.handler. */
2094 v850_handle_data_area_attribute (tree
* node
,
2096 tree args ATTRIBUTE_UNUSED
,
2097 int flags ATTRIBUTE_UNUSED
,
2098 bool * no_add_attrs
)
2100 v850_data_area data_area
;
2101 v850_data_area area
;
2104 /* Implement data area attribute. */
2105 if (is_attribute_p ("sda", name
))
2106 data_area
= DATA_AREA_SDA
;
2107 else if (is_attribute_p ("tda", name
))
2108 data_area
= DATA_AREA_TDA
;
2109 else if (is_attribute_p ("zda", name
))
2110 data_area
= DATA_AREA_ZDA
;
2114 switch (TREE_CODE (decl
))
2117 if (current_function_decl
!= NULL_TREE
)
2119 error_at (DECL_SOURCE_LOCATION (decl
),
2120 "data area attributes cannot be specified for "
2122 *no_add_attrs
= true;
2128 area
= v850_get_data_area (decl
);
2129 if (area
!= DATA_AREA_NORMAL
&& data_area
!= area
)
2131 error ("data area of %q+D conflicts with previous declaration",
2133 *no_add_attrs
= true;
2145 /* Return nonzero if FUNC is an interrupt function as specified
2146 by the "interrupt" attribute. */
2149 v850_interrupt_function_p (tree func
)
2154 if (v850_interrupt_cache_p
)
2155 return v850_interrupt_p
;
2157 if (TREE_CODE (func
) != FUNCTION_DECL
)
2160 a
= lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func
));
2166 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
2167 ret
= a
!= NULL_TREE
;
2170 /* Its not safe to trust global variables until after function inlining has
2172 if (reload_completed
| reload_in_progress
)
2173 v850_interrupt_p
= ret
;
2180 v850_encode_data_area (tree decl
, rtx symbol
)
2184 /* Map explicit sections into the appropriate attribute */
2185 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2187 if (DECL_SECTION_NAME (decl
))
2189 const char *name
= DECL_SECTION_NAME (decl
);
2191 if (streq (name
, ".zdata") || streq (name
, ".zbss"))
2192 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2194 else if (streq (name
, ".sdata") || streq (name
, ".sbss"))
2195 v850_set_data_area (decl
, DATA_AREA_SDA
);
2197 else if (streq (name
, ".tdata"))
2198 v850_set_data_area (decl
, DATA_AREA_TDA
);
2201 /* If no attribute, support -m{zda,sda,tda}=n */
2204 int size
= int_size_in_bytes (TREE_TYPE (decl
));
2208 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_TDA
])
2209 v850_set_data_area (decl
, DATA_AREA_TDA
);
2211 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_SDA
])
2212 v850_set_data_area (decl
, DATA_AREA_SDA
);
2214 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_ZDA
])
2215 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2218 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2222 flags
= SYMBOL_REF_FLAGS (symbol
);
2223 switch (v850_get_data_area (decl
))
2225 case DATA_AREA_ZDA
: flags
|= SYMBOL_FLAG_ZDA
; break;
2226 case DATA_AREA_TDA
: flags
|= SYMBOL_FLAG_TDA
; break;
2227 case DATA_AREA_SDA
: flags
|= SYMBOL_FLAG_SDA
; break;
2228 default: gcc_unreachable ();
2230 SYMBOL_REF_FLAGS (symbol
) = flags
;
2234 v850_encode_section_info (tree decl
, rtx rtl
, int first
)
2236 default_encode_section_info (decl
, rtl
, first
);
2238 if (TREE_CODE (decl
) == VAR_DECL
2239 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2240 v850_encode_data_area (decl
, XEXP (rtl
, 0));
2243 /* Construct a JR instruction to a routine that will perform the equivalent of
2244 the RTL passed in as an argument. This RTL is a function epilogue that
2245 pops registers off the stack and possibly releases some extra stack space
2246 as well. The code has already verified that the RTL matches these
2250 construct_restore_jr (rtx op
)
2252 int count
= XVECLEN (op
, 0);
2254 unsigned long int mask
;
2255 unsigned long int first
;
2256 unsigned long int last
;
2258 static char buff
[100]; /* XXX */
2262 error ("bogus JR construction: %d", count
);
2266 /* Work out how many bytes to pop off the stack before retrieving
2268 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2269 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2270 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2272 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2274 /* Each pop will remove 4 bytes from the stack.... */
2275 stack_bytes
-= (count
- 2) * 4;
2277 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2278 if (stack_bytes
!= 0)
2280 error ("bad amount of stack space removal: %d", stack_bytes
);
2284 /* Now compute the bit mask of registers to push. */
2286 for (i
= 2; i
< count
; i
++)
2288 rtx vector_element
= XVECEXP (op
, 0, i
);
2290 gcc_assert (GET_CODE (vector_element
) == SET
);
2291 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2292 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2295 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2298 /* Scan for the first register to pop. */
2299 for (first
= 0; first
< 32; first
++)
2301 if (mask
& (1 << first
))
2305 gcc_assert (first
< 32);
2307 /* Discover the last register to pop. */
2308 if (mask
& (1 << LINK_POINTER_REGNUM
))
2310 last
= LINK_POINTER_REGNUM
;
2314 gcc_assert (!stack_bytes
);
2315 gcc_assert (mask
& (1 << 29));
2320 /* Note, it is possible to have gaps in the register mask.
2321 We ignore this here, and generate a JR anyway. We will
2322 be popping more registers than is strictly necessary, but
2323 it does save code space. */
2325 if (TARGET_LONG_CALLS
)
2330 sprintf (name
, "__return_%s", reg_names
[first
]);
2332 sprintf (name
, "__return_%s_%s", reg_names
[first
], reg_names
[last
]);
2334 sprintf (buff
, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2340 sprintf (buff
, "jr __return_%s", reg_names
[first
]);
2342 sprintf (buff
, "jr __return_%s_%s", reg_names
[first
], reg_names
[last
]);
2349 /* Construct a JARL instruction to a routine that will perform the equivalent
2350 of the RTL passed as a parameter. This RTL is a function prologue that
2351 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2352 some stack space as well. The code has already verified that the RTL
2353 matches these requirements. */
2355 construct_save_jarl (rtx op
)
2357 int count
= XVECLEN (op
, 0);
2359 unsigned long int mask
;
2360 unsigned long int first
;
2361 unsigned long int last
;
2363 static char buff
[100]; /* XXX */
2365 if (count
<= (TARGET_LONG_CALLS
? 3 : 2))
2367 error ("bogus JARL construction: %d", count
);
2372 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2373 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2374 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0)) == REG
);
2375 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2377 /* Work out how many bytes to push onto the stack after storing the
2379 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2381 /* Each push will put 4 bytes from the stack.... */
2382 stack_bytes
+= (count
- (TARGET_LONG_CALLS
? 3 : 2)) * 4;
2384 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2385 if (stack_bytes
!= 0)
2387 error ("bad amount of stack space removal: %d", stack_bytes
);
2391 /* Now compute the bit mask of registers to push. */
2393 for (i
= 1; i
< count
- (TARGET_LONG_CALLS
? 2 : 1); i
++)
2395 rtx vector_element
= XVECEXP (op
, 0, i
);
2397 gcc_assert (GET_CODE (vector_element
) == SET
);
2398 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2399 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2402 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2405 /* Scan for the first register to push. */
2406 for (first
= 0; first
< 32; first
++)
2408 if (mask
& (1 << first
))
2412 gcc_assert (first
< 32);
2414 /* Discover the last register to push. */
2415 if (mask
& (1 << LINK_POINTER_REGNUM
))
2417 last
= LINK_POINTER_REGNUM
;
2421 gcc_assert (!stack_bytes
);
2422 gcc_assert (mask
& (1 << 29));
2427 /* Note, it is possible to have gaps in the register mask.
2428 We ignore this here, and generate a JARL anyway. We will
2429 be pushing more registers than is strictly necessary, but
2430 it does save code space. */
2432 if (TARGET_LONG_CALLS
)
2437 sprintf (name
, "__save_%s", reg_names
[first
]);
2439 sprintf (name
, "__save_%s_%s", reg_names
[first
], reg_names
[last
]);
2441 if (TARGET_V850E3V5_UP
)
2442 sprintf (buff
, "mov hilo(%s), r11\n\tjarl [r11], r10", name
);
2444 sprintf (buff
, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2450 sprintf (buff
, "jarl __save_%s, r10", reg_names
[first
]);
2452 sprintf (buff
, "jarl __save_%s_%s, r10", reg_names
[first
],
2459 /* A version of asm_output_aligned_bss() that copes with the special
2460 data areas of the v850. */
2462 v850_output_aligned_bss (FILE * file
,
2465 unsigned HOST_WIDE_INT size
,
2468 switch (v850_get_data_area (decl
))
2471 switch_to_section (zbss_section
);
2475 switch_to_section (sbss_section
);
2479 switch_to_section (tdata_section
);
2483 switch_to_section (bss_section
);
2487 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
2488 #ifdef ASM_DECLARE_OBJECT_NAME
2489 last_assemble_variable_decl
= decl
;
2490 ASM_DECLARE_OBJECT_NAME (file
, name
, decl
);
2492 /* Standard thing is just output label for the object. */
2493 ASM_OUTPUT_LABEL (file
, name
);
2494 #endif /* ASM_DECLARE_OBJECT_NAME */
2495 ASM_OUTPUT_SKIP (file
, size
? size
: 1);
2498 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2500 v850_output_common (FILE * file
,
2506 if (decl
== NULL_TREE
)
2508 fprintf (file
, "%s", COMMON_ASM_OP
);
2512 switch (v850_get_data_area (decl
))
2515 fprintf (file
, "%s", ZCOMMON_ASM_OP
);
2519 fprintf (file
, "%s", SCOMMON_ASM_OP
);
2523 fprintf (file
, "%s", TCOMMON_ASM_OP
);
2527 fprintf (file
, "%s", COMMON_ASM_OP
);
2532 assemble_name (file
, name
);
2533 fprintf (file
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
2536 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2538 v850_output_local (FILE * file
,
2544 fprintf (file
, "%s", LOCAL_ASM_OP
);
2545 assemble_name (file
, name
);
2546 fprintf (file
, "\n");
2548 ASM_OUTPUT_ALIGNED_DECL_COMMON (file
, decl
, name
, size
, align
);
2551 /* Add data area to the given declaration if a ghs data area pragma is
2552 currently in effect (#pragma ghs startXXX/endXXX). */
2554 v850_insert_attributes (tree decl
, tree
* attr_ptr ATTRIBUTE_UNUSED
)
2557 && data_area_stack
->data_area
2558 && current_function_decl
== NULL_TREE
2559 && (TREE_CODE (decl
) == VAR_DECL
|| TREE_CODE (decl
) == CONST_DECL
)
2560 && v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2561 v850_set_data_area (decl
, data_area_stack
->data_area
);
2563 /* Initialize the default names of the v850 specific sections,
2564 if this has not been done before. */
2566 if (GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
] == NULL
)
2568 GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
]
2571 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROSDATA
]
2574 GHS_default_section_names
[(int) GHS_SECTION_KIND_TDATA
]
2577 GHS_default_section_names
[(int) GHS_SECTION_KIND_ZDATA
]
2580 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROZDATA
]
2584 if (current_function_decl
== NULL_TREE
2585 && (TREE_CODE (decl
) == VAR_DECL
2586 || TREE_CODE (decl
) == CONST_DECL
2587 || TREE_CODE (decl
) == FUNCTION_DECL
)
2588 && (!DECL_EXTERNAL (decl
) || DECL_INITIAL (decl
))
2589 && !DECL_SECTION_NAME (decl
))
2591 enum GHS_section_kind kind
= GHS_SECTION_KIND_DEFAULT
;
2592 const char * chosen_section
;
2594 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2595 kind
= GHS_SECTION_KIND_TEXT
;
2598 /* First choose a section kind based on the data area of the decl. */
2599 switch (v850_get_data_area (decl
))
2605 kind
= ((TREE_READONLY (decl
))
2606 ? GHS_SECTION_KIND_ROSDATA
2607 : GHS_SECTION_KIND_SDATA
);
2611 kind
= GHS_SECTION_KIND_TDATA
;
2615 kind
= ((TREE_READONLY (decl
))
2616 ? GHS_SECTION_KIND_ROZDATA
2617 : GHS_SECTION_KIND_ZDATA
);
2620 case DATA_AREA_NORMAL
: /* default data area */
2621 if (TREE_READONLY (decl
))
2622 kind
= GHS_SECTION_KIND_RODATA
;
2623 else if (DECL_INITIAL (decl
))
2624 kind
= GHS_SECTION_KIND_DATA
;
2626 kind
= GHS_SECTION_KIND_BSS
;
2630 /* Now, if the section kind has been explicitly renamed,
2631 then attach a section attribute. */
2632 chosen_section
= GHS_current_section_names
[(int) kind
];
2634 /* Otherwise, if this kind of section needs an explicit section
2635 attribute, then also attach one. */
2636 if (chosen_section
== NULL
)
2637 chosen_section
= GHS_default_section_names
[(int) kind
];
2641 /* Only set the section name if specified by a pragma, because
2642 otherwise it will force those variables to get allocated storage
2643 in this module, rather than by the linker. */
2644 set_decl_section_name (decl
, chosen_section
);
2649 /* Construct a DISPOSE instruction that is the equivalent of
2650 the given RTX. We have already verified that this should
2654 construct_dispose_instruction (rtx op
)
2656 int count
= XVECLEN (op
, 0);
2658 unsigned long int mask
;
2660 static char buff
[ 100 ]; /* XXX */
2665 error ("bogus DISPOSE construction: %d", count
);
2669 /* Work out how many bytes to pop off the
2670 stack before retrieving registers. */
2671 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2672 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2673 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2675 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2677 /* Each pop will remove 4 bytes from the stack.... */
2678 stack_bytes
-= (count
- 2) * 4;
2680 /* Make sure that the amount we are popping
2681 will fit into the DISPOSE instruction. */
2682 if (stack_bytes
> 128)
2684 error ("too much stack space to dispose of: %d", stack_bytes
);
2688 /* Now compute the bit mask of registers to push. */
2691 for (i
= 2; i
< count
; i
++)
2693 rtx vector_element
= XVECEXP (op
, 0, i
);
2695 gcc_assert (GET_CODE (vector_element
) == SET
);
2696 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2697 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2700 if (REGNO (SET_DEST (vector_element
)) == 2)
2703 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2706 if (! TARGET_DISABLE_CALLT
2707 && (use_callt
|| stack_bytes
== 0))
2711 sprintf (buff
, "callt ctoff(__callt_return_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29);
2716 for (i
= 20; i
< 32; i
++)
2717 if (mask
& (1 << i
))
2721 sprintf (buff
, "callt ctoff(__callt_return_r31c)");
2723 sprintf (buff
, "callt ctoff(__callt_return_r%d_r%s)",
2724 i
, (mask
& (1 << 31)) ? "31c" : "29");
2729 static char regs
[100]; /* XXX */
2732 /* Generate the DISPOSE instruction. Note we could just issue the
2733 bit mask as a number as the assembler can cope with this, but for
2734 the sake of our readers we turn it into a textual description. */
2738 for (i
= 20; i
< 32; i
++)
2740 if (mask
& (1 << i
))
2745 strcat (regs
, ", ");
2750 strcat (regs
, reg_names
[ first
]);
2752 for (i
++; i
< 32; i
++)
2753 if ((mask
& (1 << i
)) == 0)
2758 strcat (regs
, " - ");
2759 strcat (regs
, reg_names
[ i
- 1 ] );
2764 sprintf (buff
, "dispose %d {%s}, r31", stack_bytes
/ 4, regs
);
2770 /* Construct a PREPARE instruction that is the equivalent of
2771 the given RTL. We have already verified that this should
2775 construct_prepare_instruction (rtx op
)
2779 unsigned long int mask
;
2781 static char buff
[ 100 ]; /* XXX */
2784 if (XVECLEN (op
, 0) <= 1)
2786 error ("bogus PREPEARE construction: %d", XVECLEN (op
, 0));
2790 /* Work out how many bytes to push onto
2791 the stack after storing the registers. */
2792 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2793 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2794 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2796 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2799 /* Make sure that the amount we are popping
2800 will fit into the DISPOSE instruction. */
2801 if (stack_bytes
< -128)
2803 error ("too much stack space to prepare: %d", stack_bytes
);
2807 /* Now compute the bit mask of registers to push. */
2810 for (i
= 1; i
< XVECLEN (op
, 0); i
++)
2812 rtx vector_element
= XVECEXP (op
, 0, i
);
2814 if (GET_CODE (vector_element
) == CLOBBER
)
2817 gcc_assert (GET_CODE (vector_element
) == SET
);
2818 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2819 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2822 if (REGNO (SET_SRC (vector_element
)) == 2)
2825 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2829 stack_bytes
+= count
* 4;
2831 if ((! TARGET_DISABLE_CALLT
)
2832 && (use_callt
|| stack_bytes
== 0))
2836 sprintf (buff
, "callt ctoff(__callt_save_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29 );
2840 for (i
= 20; i
< 32; i
++)
2841 if (mask
& (1 << i
))
2845 sprintf (buff
, "callt ctoff(__callt_save_r31c)");
2847 sprintf (buff
, "callt ctoff(__callt_save_r%d_r%s)",
2848 i
, (mask
& (1 << 31)) ? "31c" : "29");
2852 static char regs
[100]; /* XXX */
2856 /* Generate the PREPARE instruction. Note we could just issue the
2857 bit mask as a number as the assembler can cope with this, but for
2858 the sake of our readers we turn it into a textual description. */
2862 for (i
= 20; i
< 32; i
++)
2864 if (mask
& (1 << i
))
2869 strcat (regs
, ", ");
2874 strcat (regs
, reg_names
[ first
]);
2876 for (i
++; i
< 32; i
++)
2877 if ((mask
& (1 << i
)) == 0)
2882 strcat (regs
, " - ");
2883 strcat (regs
, reg_names
[ i
- 1 ] );
2888 sprintf (buff
, "prepare {%s}, %d", regs
, (- stack_bytes
) / 4);
2894 /* Return an RTX indicating where the return address to the
2895 calling function can be found. */
2898 v850_return_addr (int count
)
2903 return get_hard_reg_initial_val (Pmode
, LINK_POINTER_REGNUM
);
2906 /* Implement TARGET_ASM_INIT_SECTIONS. */
2909 v850_asm_init_sections (void)
2912 = get_unnamed_section (0, output_section_asm_op
,
2913 "\t.section .rosdata,\"a\"");
2916 = get_unnamed_section (0, output_section_asm_op
,
2917 "\t.section .rozdata,\"a\"");
2920 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2921 "\t.section .tdata,\"aw\"");
2924 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2925 "\t.section .zdata,\"aw\"");
2928 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
2929 output_section_asm_op
,
2930 "\t.section .zbss,\"aw\"");
2934 v850_select_section (tree exp
,
2935 int reloc ATTRIBUTE_UNUSED
,
2936 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
2938 if (TREE_CODE (exp
) == VAR_DECL
)
2941 if (!TREE_READONLY (exp
)
2942 || TREE_SIDE_EFFECTS (exp
)
2943 || !DECL_INITIAL (exp
)
2944 || (DECL_INITIAL (exp
) != error_mark_node
2945 && !TREE_CONSTANT (DECL_INITIAL (exp
))))
2950 switch (v850_get_data_area (exp
))
2953 return is_const
? rozdata_section
: zdata_section
;
2956 return tdata_section
;
2959 return is_const
? rosdata_section
: sdata_section
;
2962 return is_const
? readonly_data_section
: data_section
;
2965 return readonly_data_section
;
2968 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2971 v850_function_value_regno_p (const unsigned int regno
)
2973 return (regno
== RV_REGNUM
);
2976 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2979 v850_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2981 /* Return values > 8 bytes in length in memory. */
2982 return int_size_in_bytes (type
) > 8
2983 || TYPE_MODE (type
) == BLKmode
2984 /* With the rh850 ABI return all aggregates in memory. */
2985 || ((! TARGET_GCC_ABI
) && AGGREGATE_TYPE_P (type
))
2989 /* Worker function for TARGET_FUNCTION_VALUE. */
2992 v850_function_value (const_tree valtype
,
2993 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
2994 bool outgoing ATTRIBUTE_UNUSED
)
2996 return gen_rtx_REG (TYPE_MODE (valtype
), RV_REGNUM
);
2999 /* Implement TARGET_LIBCALL_VALUE. */
3002 v850_libcall_value (machine_mode mode
,
3003 const_rtx func ATTRIBUTE_UNUSED
)
3005 return gen_rtx_REG (mode
, RV_REGNUM
);
3009 /* Worker function for TARGET_CAN_ELIMINATE. */
3012 v850_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
3014 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
3017 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
3019 If TARGET_APP_REGS is not defined then add r2 and r5 to
3020 the pool of fixed registers. See PR 14505. */
3023 v850_conditional_register_usage (void)
3025 if (TARGET_APP_REGS
)
3027 fixed_regs
[2] = 0; call_used_regs
[2] = 0;
3028 fixed_regs
[5] = 0; call_used_regs
[5] = 1;
3032 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE. */
3035 v850_asm_trampoline_template (FILE *f
)
3037 fprintf (f
, "\tjarl .+4,r12\n");
3038 fprintf (f
, "\tld.w 12[r12],r20\n");
3039 fprintf (f
, "\tld.w 16[r12],r12\n");
3040 fprintf (f
, "\tjmp [r12]\n");
3041 fprintf (f
, "\tnop\n");
3042 fprintf (f
, "\t.long 0\n");
3043 fprintf (f
, "\t.long 0\n");
3046 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3049 v850_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
3051 rtx mem
, fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
3053 emit_block_move (m_tramp
, assemble_trampoline_template (),
3054 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
3056 mem
= adjust_address (m_tramp
, SImode
, 16);
3057 emit_move_insn (mem
, chain_value
);
3058 mem
= adjust_address (m_tramp
, SImode
, 20);
3059 emit_move_insn (mem
, fnaddr
);
3063 v850_issue_rate (void)
3065 return (TARGET_V850E2_UP
? 2 : 1);
3068 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3071 v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
3073 return (GET_CODE (x
) == CONST_DOUBLE
3074 || !(GET_CODE (x
) == CONST
3075 && GET_CODE (XEXP (x
, 0)) == PLUS
3076 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
3077 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3078 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x
, 0), 1)))));
3081 /* Helper function for `v850_legitimate_address_p'. */
3084 v850_reg_ok_for_base_p (const_rtx reg
, bool strict_p
)
3088 return REGNO_OK_FOR_BASE_P (REGNO (reg
));
3094 /* Accept either REG or SUBREG where a register is valid. */
3097 v850_rtx_ok_for_base_p (const_rtx x
, bool strict_p
)
3099 return ((REG_P (x
) && v850_reg_ok_for_base_p (x
, strict_p
))
3100 || (SUBREG_P (x
) && REG_P (SUBREG_REG (x
))
3101 && v850_reg_ok_for_base_p (SUBREG_REG (x
), strict_p
)));
/* Implement TARGET_LEGITIMATE_ADDRESS_P.

   An address X is legitimate for an access of mode MODE when it is one
   of the five forms below.  STRICT_P selects strict register checking
   (after reload).  Only the generic address space is supported.

   NOTE(review): the bare "return true;" / brace lines were dropped by
   the extract this review worked from and have been reconstructed from
   the clause structure — verify against upstream v850.c.  */

static bool
v850_legitimate_address_p (machine_mode mode, rtx x, bool strict_p,
			   addr_space_t as ATTRIBUTE_UNUSED)
{
  gcc_assert (ADDR_SPACE_GENERIC_P (as));

  /* 1. A bare base register (or SUBREG of one).  */
  if (v850_rtx_ok_for_base_p (x, strict_p))
    return true;

  /* 2. An absolute constant address, suitably aligned for MODE.
     NOTE(review): INTVAL is only meaningful for CONST_INT, but
     CONSTANT_ADDRESS_P also accepts symbolic constants — confirm the
     intended behavior for symbol_refs reaching this clause.  */
  if (CONSTANT_ADDRESS_P (x)
      && (mode == QImode || INTVAL (x) % 2 == 0)
      && (GET_MODE_SIZE (mode) <= 4 || INTVAL (x) % 4 == 0))
    return true;

  /* 3. (lo_sum base constant): the low part of a HIGH/LO_SUM pair.
     A CONST_INT offset must keep the access aligned and satisfy the
     'K' constraint; the access must be word-sized or smaller.  */
  if (GET_CODE (x) == LO_SUM
      && REG_P (XEXP (x, 0))
      && v850_reg_ok_for_base_p (XEXP (x, 0), strict_p)
      && CONSTANT_P (XEXP (x, 1))
      && (!CONST_INT_P (XEXP (x, 1))
	  || ((mode == QImode || INTVAL (XEXP (x, 1)) % 2 == 0)
	      && constraint_satisfied_p (XEXP (x, 1), CONSTRAINT_K)))
      && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (word_mode))
    return true;

  /* 4. A small/tiny/zero data area symbol, word-sized or smaller.  */
  if (special_symbolref_operand (x, mode)
      && (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (word_mode)))
    return true;

  /* 5. (plus base const_int): the offset must be aligned and, together
     with the highest word accessed, remain in 'K' range.  */
  if (GET_CODE (x) == PLUS
      && v850_rtx_ok_for_base_p (XEXP (x, 0), strict_p)
      && constraint_satisfied_p (XEXP (x,1), CONSTRAINT_K)
      && ((mode == QImode || INTVAL (XEXP (x, 1)) % 2 == 0)
	  && CONST_OK_FOR_K (INTVAL (XEXP (x, 1))
			     + (GET_MODE_NUNITS (mode) * UNITS_PER_WORD))))
    return true;

  return false;
}
/* Implement TARGET_MEMORY_MOVE_COST.

   Return the relative cost of moving a value of mode MODE between a
   register and memory; IN is true for a load, false for a store.
   REG_CLASS is ignored.

   NOTE(review): the interior `case' labels of this switch (orig lines
   3147-3155) were not captured by the extract this review worked from;
   the values below are reconstructed — verify against upstream v850.c
   before committing.  */

static int
v850_memory_move_cost (machine_mode mode,
		       reg_class_t reg_class ATTRIBUTE_UNUSED,
		       bool in)
{
  switch (GET_MODE_SIZE (mode))
    {
    case 0:
      return 0;
    case 1:
    case 2:
    case 3:
    case 4:
      return in ? 6 : 2;
    default:
      /* Wider values cost proportionally per half-word moved.  */
      return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
    }
}
/* Adjust LENGTH, the generic length-attribute estimate for INSN, and
   return the corrected value.  On V850E3V5 and newer cores the call
   patterns can use a shorter encoding than the generic computation
   assumes.

   NOTE(review): most of this function's interior was not captured by
   the extract this review worked from; the length substitutions below
   are reconstructed — verify against upstream v850.c before
   committing.  */

int
v850_adjust_insn_length (rtx_insn *insn, int length)
{
  if (TARGET_V850E3V5_UP)
    {
      if (CALL_P (insn))
	{
	  if (TARGET_LONG_CALLS)
	    {
	      /* call_internal_long, call_value_internal_long.  */
	      if (length == 8)
		length = 4;
	      if (length == 16)
		length = 10;
	    }
	  else
	    {
	      /* call_internal_short, call_value_internal_short.  */
	      if (length == 8)
		length = 4;
	    }
	}
    }

  return length;
}
/* V850 specific attributes.  */

static const struct attribute_spec v850_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity, exclusions } */
  /* Mark a function as an interrupt handler; "interrupt" is accepted
     as a synonym.  Validation is done by
     v850_handle_interrupt_attribute (defined elsewhere in this file).  */
  { "interrupt_handler", 0, 0, true, false, false,
    v850_handle_interrupt_attribute, false, NULL },
  { "interrupt", 0, 0, true, false, false,
    v850_handle_interrupt_attribute, false, NULL },
  /* Place a declaration in the small (sda), tiny (tda) or zero (zda)
     data area; handled by v850_handle_data_area_attribute.  */
  { "sda", 0, 0, true, false, false,
    v850_handle_data_area_attribute, false, NULL },
  { "tda", 0, 0, true, false, false,
    v850_handle_data_area_attribute, false, NULL },
  { "zda", 0, 0, true, false, false,
    v850_handle_data_area_attribute, false, NULL },
  /* Table terminator.  */
  { NULL, 0, 0, false, false, false, NULL, false, NULL }
};
/* Implement TARGET_OPTION_OVERRIDE: adjust option flags after all
   command-line options have been parsed.  */

static void
v850_option_override (void)
{
  /* Keep the frame pointer whenever exception handling is enabled.  */
  if (flag_exceptions || flag_non_call_exceptions)
    flag_omit_frame_pointer = 0;

  /* The RH850 ABI does not (currently) support the use of the CALLT
     instruction.  */
  if (! TARGET_GCC_ABI)
    target_flags |= MASK_DISABLE_CALLT;
}
/* Return the assembler template for a double-word (DImode/DFmode) move.
   OPERANDS[0] is the destination, OPERANDS[1] the source; %R selects
   the second (high) word of a double-word operand.  */

const char *
v850_gen_movdi (rtx * operands)
{
  if (REG_P (operands[0]))
    {
      if (REG_P (operands[1]))
	{
	  /* Reg -> reg: order the two word moves so that the second
	     move never clobbers a source word not yet copied.  */
	  if (REGNO (operands[0]) == (REGNO (operands[1]) - 1))
	    return "mov %1, %0; mov %R1, %R0";

	  return "mov %R1, %R0; mov %1, %0";
	}

      if (MEM_P (operands[1]))
	{
	  /* Odd destination register: ld.dw cannot be used (presumably
	     it requires an even register pair — confirm with the ISA
	     manual).  */
	  if (REGNO (operands[0]) & 1)
	    /* Use two load word instructions to synthesise a load double.  */
	    return "ld.w %1, %0 ; ld.w %R1, %R0" ;

	  return "ld.dw %1, %0";
	}

      /* Remaining source forms (NOTE(review): presumably constants —
	 confirm against the movdi pattern's predicates): move both
	 halves as immediates.  */
      return "mov %1, %0; mov %R1, %R0";
    }

  /* Destination is memory, so the source must be a register.  */
  gcc_assert (REG_P (operands[1]));

  /* Odd source register: synthesise the store as two word stores.  */
  if (REGNO (operands[1]) & 1)
    /* Use two store word instructions to synthesise a store double.  */
    return "st.w %1, %0 ; st.w %R1, %R0 ";

  return "st.dw %1, %0";
}
3250 /* Implement TARGET_HARD_REGNO_MODE_OK. */
3253 v850_hard_regno_mode_ok (unsigned int regno
, machine_mode mode
)
3255 return GET_MODE_SIZE (mode
) <= 4 || ((regno
& 1) == 0 && regno
!= 0);
3258 /* Implement TARGET_MODES_TIEABLE_P. */
3261 v850_modes_tieable_p (machine_mode mode1
, machine_mode mode2
)
3263 return (mode1
== mode2
3264 || (GET_MODE_SIZE (mode1
) <= 4 && GET_MODE_SIZE (mode2
) <= 4));
3267 /* Initialize the GCC target structure. */
3269 #undef TARGET_OPTION_OVERRIDE
3270 #define TARGET_OPTION_OVERRIDE v850_option_override
3272 #undef TARGET_MEMORY_MOVE_COST
3273 #define TARGET_MEMORY_MOVE_COST v850_memory_move_cost
3275 #undef TARGET_ASM_ALIGNED_HI_OP
3276 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
3278 #undef TARGET_PRINT_OPERAND
3279 #define TARGET_PRINT_OPERAND v850_print_operand
3280 #undef TARGET_PRINT_OPERAND_ADDRESS
3281 #define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
3282 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
3283 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p
3285 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3286 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra
3288 #undef TARGET_ATTRIBUTE_TABLE
3289 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
3291 #undef TARGET_INSERT_ATTRIBUTES
3292 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
3294 #undef TARGET_ASM_SELECT_SECTION
3295 #define TARGET_ASM_SELECT_SECTION v850_select_section
3297 /* The assembler supports switchable .bss sections, but
3298 v850_select_section doesn't yet make use of them. */
3299 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
3300 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
3302 #undef TARGET_ENCODE_SECTION_INFO
3303 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
3305 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
3306 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
3308 #undef TARGET_RTX_COSTS
3309 #define TARGET_RTX_COSTS v850_rtx_costs
3311 #undef TARGET_ADDRESS_COST
3312 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
3314 #undef TARGET_MACHINE_DEPENDENT_REORG
3315 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
3317 #undef TARGET_SCHED_ISSUE_RATE
3318 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
3320 #undef TARGET_FUNCTION_VALUE_REGNO_P
3321 #define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
3322 #undef TARGET_FUNCTION_VALUE
3323 #define TARGET_FUNCTION_VALUE v850_function_value
3324 #undef TARGET_LIBCALL_VALUE
3325 #define TARGET_LIBCALL_VALUE v850_libcall_value
3327 #undef TARGET_PROMOTE_PROTOTYPES
3328 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3330 #undef TARGET_RETURN_IN_MEMORY
3331 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
3333 #undef TARGET_PASS_BY_REFERENCE
3334 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
3336 #undef TARGET_CALLEE_COPIES
3337 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
3339 #undef TARGET_ARG_PARTIAL_BYTES
3340 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
3342 #undef TARGET_FUNCTION_ARG
3343 #define TARGET_FUNCTION_ARG v850_function_arg
3345 #undef TARGET_FUNCTION_ARG_ADVANCE
3346 #define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance
3348 #undef TARGET_CAN_ELIMINATE
3349 #define TARGET_CAN_ELIMINATE v850_can_eliminate
3351 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3352 #define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage
3354 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3355 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
3356 #undef TARGET_TRAMPOLINE_INIT
3357 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
3359 #undef TARGET_LEGITIMATE_CONSTANT_P
3360 #define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p
3363 #define TARGET_LRA_P hook_bool_void_false
3365 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
3366 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P v850_legitimate_address_p
3368 #undef TARGET_CAN_USE_DOLOOP_P
3369 #define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
3371 #undef TARGET_HARD_REGNO_MODE_OK
3372 #define TARGET_HARD_REGNO_MODE_OK v850_hard_regno_mode_ok
3374 #undef TARGET_MODES_TIEABLE_P
3375 #define TARGET_MODES_TIEABLE_P v850_modes_tieable_p
/* The v850 target vector: collects the hook definitions above (via the
   TARGET_* macros) into the structure the middle end consults for all
   target-specific behavior.  */
struct gcc_target targetm = TARGET_INITIALIZER;
3379 #include "gt-v850.h"