1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996-2017 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
31 #include "stringpool.h"
33 #include "insn-config.h"
37 #include "diagnostic-core.h"
38 #include "stor-layout.h"
41 #include "conditions.h"
43 #include "insn-attr.h"
48 /* This file should be included last. */
49 #include "target-def.h"
/* Nonzero iff the NUL-terminated strings A and B are identical.  */
52 #define streq(a,b) (strcmp (a, b) == 0)
/* Forward declaration -- the definition appears later in this file.  */
55 static void v850_print_operand_address (FILE *, machine_mode
, rtx
);
57 /* Names of the various data areas used on the v850. */
58 const char * GHS_default_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
59 const char * GHS_current_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
61 /* Track the current data area set by the data area pragma (which
62 can be nested). Tested by check_default_data_area. */
63 data_area_stack_element
* data_area_stack
= NULL
;
65 /* True if we don't need to check any more if the current
66 function is an interrupt handler. */
67 static int v850_interrupt_cache_p
= FALSE
;
/* Operands of the comparison currently being expanded; consulted by
   v850_gen_float_compare below to pick the right compare pattern.  */
69 rtx v850_compare_op0
, v850_compare_op1
;
71 /* Whether current function is an interrupt handler. */
72 static int v850_interrupt_p
= FALSE
;
/* Output sections for the v850 data areas (see the SDA/TDA/ZDA symbol
   handling elsewhere in this file).  "ro" variants are presumably the
   read-only counterparts -- TODO confirm against the section setup code.  */
/* Read-only small data area.  */
74 static GTY(()) section
* rosdata_section
;
/* Read-only zero data area.  */
75 static GTY(()) section
* rozdata_section
;
/* Tiny data area (accessed via the ep register).  */
76 static GTY(()) section
* tdata_section
;
/* Zero data area.  */
77 static GTY(()) section
* zdata_section
;
/* Uninitialized (BSS) portion of the zero data area.  */
78 static GTY(()) section
* zbss_section
;
80 /* We use this to wrap all emitted insns in the prologue. */
84 if (GET_CODE (x
) != CLOBBER
)
85 RTX_FRAME_RELATED_P (x
) = 1;
89 /* Mark all the subexpressions of the PARALLEL rtx PAR as
90 frame-related. Return PAR.
92 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
93 PARALLEL rtx other than the first if they do not have the
94 FRAME_RELATED flag set on them. */
97 v850_all_frame_related (rtx par
)
99 int len
= XVECLEN (par
, 0);
102 gcc_assert (GET_CODE (par
) == PARALLEL
);
103 for (i
= 0; i
< len
; i
++)
104 F (XVECEXP (par
, 0, i
));
109 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
110 Specify whether to pass the argument by reference. */
113 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
114 machine_mode mode
, const_tree type
,
115 bool named ATTRIBUTE_UNUSED
)
117 unsigned HOST_WIDE_INT size
;
123 size
= int_size_in_bytes (type
);
125 size
= GET_MODE_SIZE (mode
);
130 /* Return an RTX to represent where an argument with mode MODE
131 and type TYPE will be passed to a function. If the result
132 is NULL_RTX, the argument will be pushed. */
135 v850_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
136 const_tree type
, bool named
)
138 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
139 rtx result
= NULL_RTX
;
146 size
= int_size_in_bytes (type
);
148 size
= GET_MODE_SIZE (mode
);
150 size
= (size
+ UNITS_PER_WORD
-1) & ~(UNITS_PER_WORD
-1);
154 /* Once we have stopped using argument registers, do not start up again. */
155 cum
->nbytes
= 4 * UNITS_PER_WORD
;
160 align
= UNITS_PER_WORD
;
161 else if (size
<= UNITS_PER_WORD
&& type
)
162 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
166 cum
->nbytes
= (cum
->nbytes
+ align
- 1) &~(align
- 1);
168 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
171 if (type
== NULL_TREE
172 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
175 switch (cum
->nbytes
/ UNITS_PER_WORD
)
178 result
= gen_rtx_REG (mode
, 6);
181 result
= gen_rtx_REG (mode
, 7);
184 result
= gen_rtx_REG (mode
, 8);
187 result
= gen_rtx_REG (mode
, 9);
196 /* Return the number of bytes which must be put into registers
197 for values which are part in registers and part in memory. */
199 v850_arg_partial_bytes (cumulative_args_t cum_v
, machine_mode mode
,
200 tree type
, bool named
)
202 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
209 size
= int_size_in_bytes (type
);
211 size
= GET_MODE_SIZE (mode
);
217 align
= UNITS_PER_WORD
;
219 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
223 cum
->nbytes
= (cum
->nbytes
+ align
- 1) & ~ (align
- 1);
225 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
228 if (cum
->nbytes
+ size
<= 4 * UNITS_PER_WORD
)
231 if (type
== NULL_TREE
232 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
235 return 4 * UNITS_PER_WORD
- cum
->nbytes
;
238 /* Update the data in CUM to advance over an argument
239 of mode MODE and data type TYPE.
240 (TYPE is null for libcalls where that information may not be available.) */
243 v850_function_arg_advance (cumulative_args_t cum_v
, machine_mode mode
,
244 const_tree type
, bool named ATTRIBUTE_UNUSED
)
246 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
249 cum
->nbytes
+= (((mode
!= BLKmode
250 ? GET_MODE_SIZE (mode
)
251 : int_size_in_bytes (type
)) + UNITS_PER_WORD
- 1)
254 cum
->nbytes
+= (((type
&& int_size_in_bytes (type
) > 8
255 ? GET_MODE_SIZE (Pmode
)
257 ? GET_MODE_SIZE (mode
)
258 : int_size_in_bytes (type
))) + UNITS_PER_WORD
- 1)
262 /* Return the high and low words of a CONST_DOUBLE */
265 const_double_split (rtx x
, HOST_WIDE_INT
* p_high
, HOST_WIDE_INT
* p_low
)
267 if (GET_CODE (x
) == CONST_DOUBLE
)
271 switch (GET_MODE (x
))
274 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (x
), t
);
275 *p_high
= t
[1]; /* since v850 is little endian */
276 *p_low
= t
[0]; /* high is second word */
280 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x
), *p_high
);
286 *p_high
= CONST_DOUBLE_HIGH (x
);
287 *p_low
= CONST_DOUBLE_LOW (x
);
295 fatal_insn ("const_double_split got a bad insn:", x
);
299 /* Return the cost of the rtx R with code CODE. */
302 const_costs_int (HOST_WIDE_INT value
, int zero_cost
)
304 if (CONST_OK_FOR_I (value
))
306 else if (CONST_OK_FOR_J (value
))
308 else if (CONST_OK_FOR_K (value
))
315 const_costs (rtx r
, enum rtx_code c
)
317 HOST_WIDE_INT high
, low
;
322 return const_costs_int (INTVAL (r
), 0);
325 const_double_split (r
, &high
, &low
);
326 if (GET_MODE (r
) == SFmode
)
327 return const_costs_int (high
, 1);
329 return const_costs_int (high
, 1) + const_costs_int (low
, 1);
345 v850_rtx_costs (rtx x
, machine_mode mode
, int outer_code
,
346 int opno ATTRIBUTE_UNUSED
, int *total
, bool speed
)
348 enum rtx_code code
= GET_CODE (x
);
357 *total
= COSTS_N_INSNS (const_costs (x
, code
));
364 if (TARGET_V850E
&& !speed
)
372 && (mode
== SImode
|| mode
== HImode
|| mode
== QImode
))
374 if (GET_CODE (XEXP (x
, 1)) == REG
)
376 else if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
378 if (CONST_OK_FOR_O (INTVAL (XEXP (x
, 1))))
380 else if (CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))))
389 if (outer_code
== COMPARE
)
398 /* Print operand X using operand code CODE to assembly language output file
402 v850_print_operand (FILE * file
, rtx x
, int code
)
404 HOST_WIDE_INT high
, low
;
409 /* We use 'c' operands with symbols for .vtinherit. */
410 if (GET_CODE (x
) == SYMBOL_REF
)
412 output_addr_const(file
, x
);
419 switch ((code
== 'B' || code
== 'C')
420 ? reverse_condition (GET_CODE (x
)) : GET_CODE (x
))
423 if (code
== 'c' || code
== 'C')
424 fprintf (file
, "nz");
426 fprintf (file
, "ne");
429 if (code
== 'c' || code
== 'C')
435 fprintf (file
, "ge");
438 fprintf (file
, "gt");
441 fprintf (file
, "le");
444 fprintf (file
, "lt");
447 fprintf (file
, "nl");
453 fprintf (file
, "nh");
462 case 'F': /* High word of CONST_DOUBLE. */
463 switch (GET_CODE (x
))
466 fprintf (file
, "%d", (INTVAL (x
) >= 0) ? 0 : -1);
470 const_double_split (x
, &high
, &low
);
471 fprintf (file
, "%ld", (long) high
);
478 case 'G': /* Low word of CONST_DOUBLE. */
479 switch (GET_CODE (x
))
482 fprintf (file
, "%ld", (long) INTVAL (x
));
486 const_double_split (x
, &high
, &low
);
487 fprintf (file
, "%ld", (long) low
);
495 fprintf (file
, "%d\n", (int)(INTVAL (x
) & 0xffff));
498 fprintf (file
, "%d", exact_log2 (INTVAL (x
)));
501 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
503 if (GET_CODE (x
) == CONST
)
504 x
= XEXP (XEXP (x
, 0), 0);
506 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
508 if (SYMBOL_REF_ZDA_P (x
))
509 fprintf (file
, "zdaoff");
510 else if (SYMBOL_REF_SDA_P (x
))
511 fprintf (file
, "sdaoff");
512 else if (SYMBOL_REF_TDA_P (x
))
513 fprintf (file
, "tdaoff");
518 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
519 output_addr_const (file
, x
);
522 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
524 if (GET_CODE (x
) == CONST
)
525 x
= XEXP (XEXP (x
, 0), 0);
527 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
529 if (SYMBOL_REF_ZDA_P (x
))
530 fprintf (file
, "r0");
531 else if (SYMBOL_REF_SDA_P (x
))
532 fprintf (file
, "gp");
533 else if (SYMBOL_REF_TDA_P (x
))
534 fprintf (file
, "ep");
538 case 'R': /* 2nd word of a double. */
539 switch (GET_CODE (x
))
542 fprintf (file
, reg_names
[REGNO (x
) + 1]);
546 machine_mode mode
= GET_MODE (x
);
547 x
= XEXP (adjust_address (x
, SImode
, 4), 0);
548 v850_print_operand_address (file
, mode
, x
);
549 if (GET_CODE (x
) == CONST_INT
)
550 fprintf (file
, "[r0]");
556 unsigned HOST_WIDE_INT v
= INTVAL (x
);
558 /* Trickery to avoid problems with shifting
559 32-bits at a time on a 32-bit host. */
562 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, v
);
567 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_HIGH (x
));
577 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
578 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), FALSE
))
585 /* Like an 'S' operand above, but for unsigned loads only. */
586 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), TRUE
))
591 case 'W': /* Print the instruction suffix. */
592 switch (GET_MODE (x
))
597 case QImode
: fputs (".b", file
); break;
598 case HImode
: fputs (".h", file
); break;
599 case SImode
: fputs (".w", file
); break;
600 case SFmode
: fputs (".w", file
); break;
603 case '.': /* Register r0. */
604 fputs (reg_names
[0], file
);
606 case 'z': /* Reg or zero. */
608 fputs (reg_names
[REGNO (x
)], file
);
609 else if ((GET_MODE(x
) == SImode
610 || GET_MODE(x
) == DFmode
611 || GET_MODE(x
) == SFmode
)
612 && x
== CONST0_RTX(GET_MODE(x
)))
613 fputs (reg_names
[0], file
);
616 gcc_assert (x
== const0_rtx
);
617 fputs (reg_names
[0], file
);
621 switch (GET_CODE (x
))
624 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
625 output_address (GET_MODE (x
),
626 gen_rtx_PLUS (SImode
, gen_rtx_REG (SImode
, 0),
629 output_address (GET_MODE (x
), XEXP (x
, 0));
633 fputs (reg_names
[REGNO (x
)], file
);
636 fputs (reg_names
[subreg_regno (x
)], file
);
639 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, CONST_DOUBLE_LOW (x
));
647 v850_print_operand_address (file
, VOIDmode
, x
);
658 /* Output assembly language output for the address ADDR to FILE. */
661 v850_print_operand_address (FILE * file
, machine_mode
/*mode*/, rtx addr
)
663 switch (GET_CODE (addr
))
666 fprintf (file
, "0[");
667 v850_print_operand (file
, addr
, 0);
671 if (GET_CODE (XEXP (addr
, 0)) == REG
)
674 fprintf (file
, "lo(");
675 v850_print_operand (file
, XEXP (addr
, 1), 0);
676 fprintf (file
, ")[");
677 v850_print_operand (file
, XEXP (addr
, 0), 0);
682 if (GET_CODE (XEXP (addr
, 0)) == REG
683 || GET_CODE (XEXP (addr
, 0)) == SUBREG
)
686 v850_print_operand (file
, XEXP (addr
, 1), 0);
688 v850_print_operand (file
, XEXP (addr
, 0), 0);
693 v850_print_operand (file
, XEXP (addr
, 0), 0);
695 v850_print_operand (file
, XEXP (addr
, 1), 0);
700 const char *off_name
= NULL
;
701 const char *reg_name
= NULL
;
703 if (SYMBOL_REF_ZDA_P (addr
))
708 else if (SYMBOL_REF_SDA_P (addr
))
713 else if (SYMBOL_REF_TDA_P (addr
))
720 fprintf (file
, "%s(", off_name
);
721 output_addr_const (file
, addr
);
723 fprintf (file
, ")[%s]", reg_name
);
727 if (special_symbolref_operand (addr
, VOIDmode
))
729 rtx x
= XEXP (XEXP (addr
, 0), 0);
730 const char *off_name
;
731 const char *reg_name
;
733 if (SYMBOL_REF_ZDA_P (x
))
738 else if (SYMBOL_REF_SDA_P (x
))
743 else if (SYMBOL_REF_TDA_P (x
))
751 fprintf (file
, "%s(", off_name
);
752 output_addr_const (file
, addr
);
753 fprintf (file
, ")[%s]", reg_name
);
756 output_addr_const (file
, addr
);
759 output_addr_const (file
, addr
);
765 v850_print_operand_punct_valid_p (unsigned char code
)
770 /* When assemble_integer is used to emit the offsets for a switch
771 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
772 output_addr_const will normally barf at this, but it is OK to omit
773 the truncate and just emit the difference of the two labels. The
774 .hword directive will automatically handle the truncation for us.
776 Returns true if rtx was handled, false otherwise. */
779 v850_output_addr_const_extra (FILE * file
, rtx x
)
781 if (GET_CODE (x
) != TRUNCATE
)
786 /* We must also handle the case where the switch table was passed a
787 constant value and so has been collapsed. In this case the first
788 label will have been deleted. In such a case it is OK to emit
789 nothing, since the table will not be used.
790 (cf gcc.c-torture/compile/990801-1.c). */
791 if (GET_CODE (x
) == MINUS
792 && GET_CODE (XEXP (x
, 0)) == LABEL_REF
)
794 rtx_code_label
*label
795 = dyn_cast
<rtx_code_label
*> (XEXP (XEXP (x
, 0), 0));
796 if (label
&& label
->deleted ())
800 output_addr_const (file
, x
);
804 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
808 output_move_single (rtx
* operands
)
810 rtx dst
= operands
[0];
811 rtx src
= operands
[1];
818 else if (GET_CODE (src
) == CONST_INT
)
820 HOST_WIDE_INT value
= INTVAL (src
);
822 if (CONST_OK_FOR_J (value
)) /* Signed 5-bit immediate. */
825 else if (CONST_OK_FOR_K (value
)) /* Signed 16-bit immediate. */
826 return "movea %1,%.,%0";
828 else if (CONST_OK_FOR_L (value
)) /* Upper 16 bits were set. */
829 return "movhi hi0(%1),%.,%0";
831 /* A random constant. */
832 else if (TARGET_V850E_UP
)
835 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
838 else if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
840 HOST_WIDE_INT high
, low
;
842 const_double_split (src
, &high
, &low
);
844 if (CONST_OK_FOR_J (high
)) /* Signed 5-bit immediate. */
847 else if (CONST_OK_FOR_K (high
)) /* Signed 16-bit immediate. */
848 return "movea %F1,%.,%0";
850 else if (CONST_OK_FOR_L (high
)) /* Upper 16 bits were set. */
851 return "movhi hi0(%F1),%.,%0";
853 /* A random constant. */
854 else if (TARGET_V850E_UP
)
858 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
861 else if (GET_CODE (src
) == MEM
)
862 return "%S1ld%W1 %1,%0";
864 else if (special_symbolref_operand (src
, VOIDmode
))
865 return "movea %O1(%P1),%Q1,%0";
867 else if (GET_CODE (src
) == LABEL_REF
868 || GET_CODE (src
) == SYMBOL_REF
869 || GET_CODE (src
) == CONST
)
872 return "mov hilo(%1),%0";
874 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
877 else if (GET_CODE (src
) == HIGH
)
878 return "movhi hi(%1),%.,%0";
880 else if (GET_CODE (src
) == LO_SUM
)
882 operands
[2] = XEXP (src
, 0);
883 operands
[3] = XEXP (src
, 1);
884 return "movea lo(%3),%2,%0";
888 else if (GET_CODE (dst
) == MEM
)
891 return "%S0st%W0 %1,%0";
893 else if (GET_CODE (src
) == CONST_INT
&& INTVAL (src
) == 0)
894 return "%S0st%W0 %.,%0";
896 else if (GET_CODE (src
) == CONST_DOUBLE
897 && CONST0_RTX (GET_MODE (dst
)) == src
)
898 return "%S0st%W0 %.,%0";
901 fatal_insn ("output_move_single:", gen_rtx_SET (dst
, src
));
906 v850_select_cc_mode (enum rtx_code cond
, rtx op0
, rtx op1 ATTRIBUTE_UNUSED
)
908 if (GET_MODE_CLASS (GET_MODE (op0
)) == MODE_FLOAT
)
913 return CC_FPU_LEmode
;
915 return CC_FPU_GEmode
;
917 return CC_FPU_LTmode
;
919 return CC_FPU_GTmode
;
921 return CC_FPU_EQmode
;
923 return CC_FPU_NEmode
;
932 v850_gen_float_compare (enum rtx_code cond
, machine_mode mode ATTRIBUTE_UNUSED
, rtx op0
, rtx op1
)
934 if (GET_MODE (op0
) == DFmode
)
939 emit_insn (gen_cmpdf_le_insn (op0
, op1
));
942 emit_insn (gen_cmpdf_ge_insn (op0
, op1
));
945 emit_insn (gen_cmpdf_lt_insn (op0
, op1
));
948 emit_insn (gen_cmpdf_gt_insn (op0
, op1
));
951 /* Note: There is no NE comparison operator. So we
952 perform an EQ comparison and invert the branch.
953 See v850_float_nz_comparison for how this is done. */
955 emit_insn (gen_cmpdf_eq_insn (op0
, op1
));
961 else if (GET_MODE (v850_compare_op0
) == SFmode
)
966 emit_insn (gen_cmpsf_le_insn(op0
, op1
));
969 emit_insn (gen_cmpsf_ge_insn(op0
, op1
));
972 emit_insn (gen_cmpsf_lt_insn(op0
, op1
));
975 emit_insn (gen_cmpsf_gt_insn(op0
, op1
));
978 /* Note: There is no NE comparison operator. So we
979 perform an EQ comparison and invert the branch.
980 See v850_float_nz_comparison for how this is done. */
982 emit_insn (gen_cmpsf_eq_insn(op0
, op1
));
991 return v850_select_cc_mode (cond
, op0
, op1
);
995 v850_gen_compare (enum rtx_code cond
, machine_mode mode
, rtx op0
, rtx op1
)
997 if (GET_MODE_CLASS(GET_MODE (op0
)) != MODE_FLOAT
)
999 emit_insn (gen_cmpsi_insn (op0
, op1
));
1000 return gen_rtx_fmt_ee (cond
, mode
, gen_rtx_REG(CCmode
, CC_REGNUM
), const0_rtx
);
1005 mode
= v850_gen_float_compare (cond
, mode
, op0
, op1
);
1006 cc_reg
= gen_rtx_REG (mode
, CC_REGNUM
);
1007 emit_insn (gen_rtx_SET (cc_reg
, gen_rtx_REG (mode
, FCC_REGNUM
)));
1009 return gen_rtx_fmt_ee (cond
, mode
, cc_reg
, const0_rtx
);
1013 /* Return maximum offset supported for a short EP memory reference of mode
1014 MODE and signedness UNSIGNEDP. */
1017 ep_memory_offset (machine_mode mode
, int unsignedp ATTRIBUTE_UNUSED
)
1024 if (TARGET_SMALL_SLD
)
1025 max_offset
= (1 << 4);
1026 else if ((TARGET_V850E_UP
)
1028 max_offset
= (1 << 4);
1030 max_offset
= (1 << 7);
1034 if (TARGET_SMALL_SLD
)
1035 max_offset
= (1 << 5);
1036 else if ((TARGET_V850E_UP
)
1038 max_offset
= (1 << 5);
1040 max_offset
= (1 << 8);
1045 max_offset
= (1 << 8);
1055 /* Return true if OP is a valid short EP memory reference */
1058 ep_memory_operand (rtx op
, machine_mode mode
, int unsigned_load
)
1064 /* If we are not using the EP register on a per-function basis
1065 then do not allow this optimization at all. This is to
1066 prevent the use of the SLD/SST instructions which cannot be
1067 guaranteed to work properly due to a hardware bug. */
1071 if (GET_CODE (op
) != MEM
)
1074 max_offset
= ep_memory_offset (mode
, unsigned_load
);
1076 mask
= GET_MODE_SIZE (mode
) - 1;
1078 addr
= XEXP (op
, 0);
1079 if (GET_CODE (addr
) == CONST
)
1080 addr
= XEXP (addr
, 0);
1082 switch (GET_CODE (addr
))
1088 return SYMBOL_REF_TDA_P (addr
);
1091 return REGNO (addr
) == EP_REGNUM
;
1094 op0
= XEXP (addr
, 0);
1095 op1
= XEXP (addr
, 1);
1096 if (GET_CODE (op1
) == CONST_INT
1097 && INTVAL (op1
) < max_offset
1098 && INTVAL (op1
) >= 0
1099 && (INTVAL (op1
) & mask
) == 0)
1101 if (GET_CODE (op0
) == REG
&& REGNO (op0
) == EP_REGNUM
)
1104 if (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_TDA_P (op0
))
1113 /* Substitute memory references involving a pointer, to use the ep pointer,
1114 taking care to save and preserve the ep. */
1117 substitute_ep_register (rtx_insn
*first_insn
,
1118 rtx_insn
*last_insn
,
1124 rtx reg
= gen_rtx_REG (Pmode
, regno
);
1129 df_set_regs_ever_live (1, true);
1130 *p_r1
= gen_rtx_REG (Pmode
, 1);
1131 *p_ep
= gen_rtx_REG (Pmode
, 30);
1136 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1137 2 * (uses
- 3), uses
, reg_names
[regno
],
1138 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)),
1139 INSN_UID (first_insn
), INSN_UID (last_insn
));
1141 if (NOTE_P (first_insn
))
1142 first_insn
= next_nonnote_insn (first_insn
);
1144 last_insn
= next_nonnote_insn (last_insn
);
1145 for (insn
= first_insn
; insn
&& insn
!= last_insn
; insn
= NEXT_INSN (insn
))
1147 if (NONJUMP_INSN_P (insn
))
1149 rtx pattern
= single_set (insn
);
1151 /* Replace the memory references. */
1155 /* Memory operands are signed by default. */
1156 int unsignedp
= FALSE
;
1158 if (GET_CODE (SET_DEST (pattern
)) == MEM
1159 && GET_CODE (SET_SRC (pattern
)) == MEM
)
1162 else if (GET_CODE (SET_DEST (pattern
)) == MEM
)
1163 p_mem
= &SET_DEST (pattern
);
1165 else if (GET_CODE (SET_SRC (pattern
)) == MEM
)
1166 p_mem
= &SET_SRC (pattern
);
1168 else if (GET_CODE (SET_SRC (pattern
)) == SIGN_EXTEND
1169 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1170 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1172 else if (GET_CODE (SET_SRC (pattern
)) == ZERO_EXTEND
1173 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1175 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1183 rtx addr
= XEXP (*p_mem
, 0);
1185 if (GET_CODE (addr
) == REG
&& REGNO (addr
) == (unsigned) regno
)
1186 *p_mem
= change_address (*p_mem
, VOIDmode
, *p_ep
);
1188 else if (GET_CODE (addr
) == PLUS
1189 && GET_CODE (XEXP (addr
, 0)) == REG
1190 && REGNO (XEXP (addr
, 0)) == (unsigned) regno
1191 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1192 && ((INTVAL (XEXP (addr
, 1)))
1193 < ep_memory_offset (GET_MODE (*p_mem
),
1195 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1196 *p_mem
= change_address (*p_mem
, VOIDmode
,
1197 gen_rtx_PLUS (Pmode
,
1205 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1206 insn
= prev_nonnote_insn (first_insn
);
1207 if (insn
&& NONJUMP_INSN_P (insn
)
1208 && GET_CODE (PATTERN (insn
)) == SET
1209 && SET_DEST (PATTERN (insn
)) == *p_ep
1210 && SET_SRC (PATTERN (insn
)) == *p_r1
)
1213 emit_insn_before (gen_rtx_SET (*p_r1
, *p_ep
), first_insn
);
1215 emit_insn_before (gen_rtx_SET (*p_ep
, reg
), first_insn
);
1216 emit_insn_before (gen_rtx_SET (*p_ep
, *p_r1
), last_insn
);
1220 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1221 the -mep mode to copy heavily used pointers to ep to use the implicit
1230 rtx_insn
*first_insn
;
1231 rtx_insn
*last_insn
;
1233 regs
[FIRST_PSEUDO_REGISTER
];
1242 /* If not ep mode, just return now. */
1246 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1249 regs
[i
].first_insn
= NULL
;
1250 regs
[i
].last_insn
= NULL
;
1253 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1255 switch (GET_CODE (insn
))
1257 /* End of basic block */
1264 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1266 if (max_uses
< regs
[i
].uses
)
1268 max_uses
= regs
[i
].uses
;
1274 substitute_ep_register (regs
[max_regno
].first_insn
,
1275 regs
[max_regno
].last_insn
,
1276 max_uses
, max_regno
, &r1
, &ep
);
1280 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1283 regs
[i
].first_insn
= NULL
;
1284 regs
[i
].last_insn
= NULL
;
1292 pattern
= single_set (insn
);
1294 /* See if there are any memory references we can shorten. */
1297 rtx src
= SET_SRC (pattern
);
1298 rtx dest
= SET_DEST (pattern
);
1300 /* Memory operands are signed by default. */
1301 int unsignedp
= FALSE
;
1303 /* We might have (SUBREG (MEM)) here, so just get rid of the
1304 subregs to make this code simpler. */
1305 if (GET_CODE (dest
) == SUBREG
1306 && (GET_CODE (SUBREG_REG (dest
)) == MEM
1307 || GET_CODE (SUBREG_REG (dest
)) == REG
))
1308 alter_subreg (&dest
, false);
1309 if (GET_CODE (src
) == SUBREG
1310 && (GET_CODE (SUBREG_REG (src
)) == MEM
1311 || GET_CODE (SUBREG_REG (src
)) == REG
))
1312 alter_subreg (&src
, false);
1314 if (GET_CODE (dest
) == MEM
&& GET_CODE (src
) == MEM
)
1317 else if (GET_CODE (dest
) == MEM
)
1320 else if (GET_CODE (src
) == MEM
)
1323 else if (GET_CODE (src
) == SIGN_EXTEND
1324 && GET_CODE (XEXP (src
, 0)) == MEM
)
1325 mem
= XEXP (src
, 0);
1327 else if (GET_CODE (src
) == ZERO_EXTEND
1328 && GET_CODE (XEXP (src
, 0)) == MEM
)
1330 mem
= XEXP (src
, 0);
1336 if (mem
&& ep_memory_operand (mem
, GET_MODE (mem
), unsignedp
))
1339 else if (!use_ep
&& mem
1340 && GET_MODE_SIZE (GET_MODE (mem
)) <= UNITS_PER_WORD
)
1342 rtx addr
= XEXP (mem
, 0);
1346 if (GET_CODE (addr
) == REG
)
1349 regno
= REGNO (addr
);
1352 else if (GET_CODE (addr
) == PLUS
1353 && GET_CODE (XEXP (addr
, 0)) == REG
1354 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1355 && ((INTVAL (XEXP (addr
, 1)))
1356 < ep_memory_offset (GET_MODE (mem
), unsignedp
))
1357 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1360 regno
= REGNO (XEXP (addr
, 0));
1369 regs
[regno
].last_insn
= insn
;
1370 if (!regs
[regno
].first_insn
)
1371 regs
[regno
].first_insn
= insn
;
1375 /* Loading up a register in the basic block zaps any savings
1377 if (GET_CODE (dest
) == REG
)
1379 machine_mode mode
= GET_MODE (dest
);
1383 regno
= REGNO (dest
);
1384 endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
1388 /* See if we can use the pointer before this
1393 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1395 if (max_uses
< regs
[i
].uses
)
1397 max_uses
= regs
[i
].uses
;
1403 && max_regno
>= regno
1404 && max_regno
< endregno
)
1406 substitute_ep_register (regs
[max_regno
].first_insn
,
1407 regs
[max_regno
].last_insn
,
1408 max_uses
, max_regno
, &r1
,
1411 /* Since we made a substitution, zap all remembered
1413 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1416 regs
[i
].first_insn
= NULL
;
1417 regs
[i
].last_insn
= NULL
;
1422 for (i
= regno
; i
< endregno
; i
++)
1425 regs
[i
].first_insn
= NULL
;
1426 regs
[i
].last_insn
= NULL
;
1434 /* # of registers saved by the interrupt handler. */
1435 #define INTERRUPT_FIXED_NUM 5
1437 /* # of bytes for registers saved by the interrupt handler. */
1438 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1440 /* # of words saved for other registers. */
1441 #define INTERRUPT_ALL_SAVE_NUM \
1442 (30 - INTERRUPT_FIXED_NUM)
1444 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1447 compute_register_save_size (long * p_reg_saved
)
1451 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1452 int call_p
= df_regs_ever_live_p (LINK_POINTER_REGNUM
);
1455 /* Count space for the register saves. */
1456 if (interrupt_handler
)
1458 for (i
= 0; i
<= 31; i
++)
1462 if (df_regs_ever_live_p (i
) || call_p
)
1465 reg_saved
|= 1L << i
;
1469 /* We don't save/restore r0 or the stack pointer */
1471 case STACK_POINTER_REGNUM
:
1474 /* For registers with fixed use, we save them, set them to the
1475 appropriate value, and then restore them.
1476 These registers are handled specially, so don't list them
1477 on the list of registers to save in the prologue. */
1478 case 1: /* temp used to hold ep */
1480 case 10: /* temp used to call interrupt save/restore */
1481 case 11: /* temp used to call interrupt save/restore (long call) */
1482 case EP_REGNUM
: /* ep */
1489 /* Find the first register that needs to be saved. */
1490 for (i
= 0; i
<= 31; i
++)
1491 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1492 || i
== LINK_POINTER_REGNUM
))
1495 /* If it is possible that an out-of-line helper function might be
1496 used to generate the prologue for the current function, then we
1497 need to cover the possibility that such a helper function will
1498 be used, despite the fact that there might be gaps in the list of
1499 registers that need to be saved. To detect this we note that the
1500 helper functions always push at least register r29 (provided
1501 that the function is not an interrupt handler). */
1503 if (TARGET_PROLOG_FUNCTION
1504 && (i
== 2 || ((i
>= 20) && (i
< 30))))
1509 reg_saved
|= 1L << i
;
1514 /* Helper functions save all registers between the starting
1515 register and the last register, regardless of whether they
1516 are actually used by the function or not. */
1517 for (; i
<= 29; i
++)
1520 reg_saved
|= 1L << i
;
1523 if (df_regs_ever_live_p (LINK_POINTER_REGNUM
))
1526 reg_saved
|= 1L << LINK_POINTER_REGNUM
;
1531 for (; i
<= 31; i
++)
1532 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1533 || i
== LINK_POINTER_REGNUM
))
1536 reg_saved
|= 1L << i
;
1542 *p_reg_saved
= reg_saved
;
1547 /* Typical stack layout should looks like this after the function's prologue:
1552 | | arguments saved | Increasing
1553 | | on the stack | addresses
1554 PARENT arg pointer -> | | /
1555 -------------------------- ---- -------------------
1556 | | - space for argument split between regs & stack
1558 CHILD | | \ <-- (return address here)
1563 frame pointer -> | | \ ___
1570 | | arguments | | Decreasing
1571 (hard) frame pointer | | / | | addresses
1572 and stack pointer -> | | / _|_ |
1573 -------------------------- ---- ------------------ V */
1576 compute_frame_size (int size
, long * p_reg_saved
)
1579 + compute_register_save_size (p_reg_saved
)
1580 + crtl
->outgoing_args_size
);
1584 use_prolog_function (int num_save
, int frame_size
)
1586 int alloc_stack
= (4 * num_save
);
1587 int unalloc_stack
= frame_size
- alloc_stack
;
1588 int save_func_len
, restore_func_len
;
1589 int save_normal_len
, restore_normal_len
;
1591 if (! TARGET_DISABLE_CALLT
)
1592 save_func_len
= restore_func_len
= 2;
1594 save_func_len
= restore_func_len
= TARGET_LONG_CALLS
? (4+4+4+2+2) : 4;
1598 save_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1599 restore_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1602 /* See if we would have used ep to save the stack. */
1603 if (TARGET_EP
&& num_save
> 3 && (unsigned)frame_size
< 255)
1604 save_normal_len
= restore_normal_len
= (3 * 2) + (2 * num_save
);
1606 save_normal_len
= restore_normal_len
= 4 * num_save
;
1608 save_normal_len
+= CONST_OK_FOR_J (-frame_size
) ? 2 : 4;
1609 restore_normal_len
+= (CONST_OK_FOR_J (frame_size
) ? 2 : 4) + 2;
1611 /* Don't bother checking if we don't actually save any space.
1612 This happens for instance if one register is saved and additional
1613 stack space is allocated. */
1614 return ((save_func_len
+ restore_func_len
) < (save_normal_len
+ restore_normal_len
));
1618 increment_stack (signed int amount
, bool in_prologue
)
1625 inc
= GEN_INT (amount
);
1627 if (! CONST_OK_FOR_K (amount
))
1629 rtx reg
= gen_rtx_REG (Pmode
, 12);
1631 inc
= emit_move_insn (reg
, inc
);
1637 inc
= emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
, inc
));
/* Expand the function prologue: set aside interrupt-handler context,
   push the call-saved registers (via the out-of-line prologue helper
   functions when use_prolog_function says that is profitable), allocate
   the stack frame and finally set up the frame pointer if one is needed.
   NOTE(review): this listing has lines elided and wrapped by extraction
   (e.g. the mojibake "®_saved" below was clearly "&reg_saved"); the
   comments here describe only the code that is visible.  */
1643 expand_prologue (void)
1646 unsigned int size
= get_frame_size ();
1647 unsigned int actual_fsize
;
1648 unsigned int init_stack_alloc
= 0;
1651 unsigned int num_save
;
1653 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1656 actual_fsize
= compute_frame_size (size
, ®_saved
);
1658 if (flag_stack_usage_info
)
1659 current_function_static_stack_size
= actual_fsize
;
1661 /* Save/setup global registers for interrupt functions right now. */
1662 if (interrupt_handler
)
1664 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1665 emit_insn (gen_callt_save_interrupt ());
1667 emit_insn (gen_save_interrupt ());
1669 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1671 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1672 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1674 /* Interrupt functions are not passed arguments, so no need to
1675 allocate space for split structure arguments. */
1676 gcc_assert (crtl
->args
.pretend_args_size
== 0);
1679 /* Identify all of the saved registers. */
1681 for (i
= 1; i
< 32; i
++)
1683 if (((1L << i
) & reg_saved
) != 0)
1684 save_regs
[num_save
++] = gen_rtx_REG (Pmode
, i
);
1687 if (crtl
->args
.pretend_args_size
)
1691 increment_stack (- (actual_fsize
+ crtl
->args
.pretend_args_size
), true);
1695 increment_stack (- crtl
->args
.pretend_args_size
, true);
1698 /* See if we have an insn that allocates stack space and saves the particular
1699 registers we want to. Note that the helpers won't
1700 allocate additional space for registers GCC saves to complete a
1701 "split" structure argument. */
1702 save_all
= NULL_RTX
;
1703 if (TARGET_PROLOG_FUNCTION
1704 && !crtl
->args
.pretend_args_size
1707 if (use_prolog_function (num_save
, actual_fsize
))
1709 int alloc_stack
= 4 * num_save
;
/* Build a PARALLEL describing the stack adjustment plus one SET per
   saved register, so the whole prologue can match a single helper-call
   pattern.  Extra CLOBBERs cover the scratch regs (r10/r11) the helper
   call itself uses.  */
1712 save_all
= gen_rtx_PARALLEL
1714 rtvec_alloc (num_save
+ 1
1715 + (TARGET_DISABLE_CALLT
? (TARGET_LONG_CALLS
? 2 : 1) : 0)));
1717 XVECEXP (save_all
, 0, 0)
1718 = gen_rtx_SET (stack_pointer_rtx
,
1719 gen_rtx_PLUS (Pmode
,
1721 GEN_INT(-alloc_stack
)));
1722 for (i
= 0; i
< num_save
; i
++)
1725 XVECEXP (save_all
, 0, i
+1)
1726 = gen_rtx_SET (gen_rtx_MEM (Pmode
,
1727 gen_rtx_PLUS (Pmode
,
1733 if (TARGET_DISABLE_CALLT
)
1735 XVECEXP (save_all
, 0, num_save
+ 1)
1736 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 10));
1738 if (TARGET_LONG_CALLS
)
1739 XVECEXP (save_all
, 0, num_save
+ 2)
1740 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
1743 v850_all_frame_related (save_all
);
/* Only use the PARALLEL if it actually matches an insn pattern;
   otherwise fall back to saving registers one by one below.  */
1745 code
= recog (save_all
, NULL
, NULL
);
1748 rtx insn
= emit_insn (save_all
);
1749 INSN_CODE (insn
) = code
;
1750 actual_fsize
-= alloc_stack
;
1754 save_all
= NULL_RTX
;
1758 /* If no prolog save function is available, store the registers the old
1759 fashioned way (one by one). */
1762 /* Special case interrupt functions that save all registers for a call. */
1763 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1765 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1766 emit_insn (gen_callt_save_all_interrupt ());
1768 emit_insn (gen_save_all_interrupt ());
1773 /* If the stack is too big, allocate it in chunks so we can do the
1774 register saves. We use the register save size so we use the ep
1776 if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1777 init_stack_alloc
= compute_register_save_size (NULL
);
1779 init_stack_alloc
= actual_fsize
;
1781 /* Save registers at the beginning of the stack frame. */
1782 offset
= init_stack_alloc
- 4;
1784 if (init_stack_alloc
)
1785 increment_stack (- (signed) init_stack_alloc
, true);
1787 /* Save the return pointer first. */
1788 if (num_save
> 0 && REGNO (save_regs
[num_save
-1]) == LINK_POINTER_REGNUM
)
1790 F (emit_move_insn (gen_rtx_MEM (SImode
,
1791 plus_constant (Pmode
,
1794 save_regs
[--num_save
]));
1798 for (i
= 0; i
< num_save
; i
++)
1800 F (emit_move_insn (gen_rtx_MEM (SImode
,
1801 plus_constant (Pmode
,
1810 /* Allocate the rest of the stack that was not allocated above (either it is
1811 > 32K or we just called a function to save the registers and needed more
1813 if (actual_fsize
> init_stack_alloc
)
1814 increment_stack (init_stack_alloc
- actual_fsize
, true);
1816 /* If we need a frame pointer, set it up now. */
1817 if (frame_pointer_needed
)
1818 F (emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
));
/* Expand the function epilogue: undo the interrupt-handler frame
   accounting, try to emit a combined restore-and-return PARALLEL (via
   the out-of-line epilogue helper functions), otherwise pop registers
   one by one, deallocate the frame and emit the appropriate return
   insn.  Mirrors expand_prologue above.
   NOTE(review): lines are elided/wrapped by extraction ("®_saved" was
   "&reg_saved"); comments describe only the visible code.  */
1823 expand_epilogue (void)
1826 unsigned int size
= get_frame_size ();
1828 int actual_fsize
= compute_frame_size (size
, ®_saved
);
1829 rtx restore_regs
[32];
1831 unsigned int num_restore
;
1833 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1835 /* Eliminate the initial stack stored by interrupt functions. */
1836 if (interrupt_handler
)
1838 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1839 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1840 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1843 /* Cut off any dynamic stack created. */
1844 if (frame_pointer_needed
)
1845 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1847 /* Identify all of the saved registers. */
1849 for (i
= 1; i
< 32; i
++)
1851 if (((1L << i
) & reg_saved
) != 0)
1852 restore_regs
[num_restore
++] = gen_rtx_REG (Pmode
, i
);
1855 /* See if we have an insn that restores the particular registers we
1857 restore_all
= NULL_RTX
;
1859 if (TARGET_PROLOG_FUNCTION
1861 && !crtl
->args
.pretend_args_size
1862 && !interrupt_handler
)
1864 int alloc_stack
= (4 * num_restore
);
1866 /* Don't bother checking if we don't actually save any space. */
1867 if (use_prolog_function (num_restore
, actual_fsize
))
/* Build a PARALLEL: return + stack pop + one SET per restored
   register, matching the out-of-line __return_* helper pattern.  */
1870 restore_all
= gen_rtx_PARALLEL (VOIDmode
,
1871 rtvec_alloc (num_restore
+ 2));
1872 XVECEXP (restore_all
, 0, 0) = ret_rtx
;
1873 XVECEXP (restore_all
, 0, 1)
1874 = gen_rtx_SET (stack_pointer_rtx
,
1875 gen_rtx_PLUS (Pmode
,
1877 GEN_INT (alloc_stack
)));
1879 offset
= alloc_stack
- 4;
1880 for (i
= 0; i
< num_restore
; i
++)
1882 XVECEXP (restore_all
, 0, i
+2)
1883 = gen_rtx_SET (restore_regs
[i
],
1885 gen_rtx_PLUS (Pmode
,
/* Only use the PARALLEL if it matches an insn pattern.  */
1891 code
= recog (restore_all
, NULL
, NULL
);
1897 actual_fsize
-= alloc_stack
;
1898 increment_stack (actual_fsize
, false);
1900 insn
= emit_jump_insn (restore_all
);
1901 INSN_CODE (insn
) = code
;
1904 restore_all
= NULL_RTX
;
1908 /* If no epilogue save function is available, restore the registers the
1909 old fashioned way (one by one). */
1912 unsigned int init_stack_free
;
1914 /* If the stack is large, we need to cut it down in 2 pieces. */
1915 if (interrupt_handler
)
1916 init_stack_free
= 0;
1917 else if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1918 init_stack_free
= 4 * num_restore
;
1920 init_stack_free
= (signed) actual_fsize
;
1922 /* Deallocate the rest of the stack if it is > 32K. */
1923 if ((unsigned int) actual_fsize
> init_stack_free
)
1924 increment_stack (actual_fsize
- init_stack_free
, false);
1926 /* Special case interrupt functions that save all registers
1928 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1930 if (! TARGET_DISABLE_CALLT
)
1931 emit_insn (gen_callt_restore_all_interrupt ());
1933 emit_insn (gen_restore_all_interrupt ());
1937 /* Restore registers from the beginning of the stack frame. */
1938 int offset
= init_stack_free
- 4;
1940 /* Restore the return pointer first. */
1942 && REGNO (restore_regs
[num_restore
- 1]) == LINK_POINTER_REGNUM
)
1944 emit_move_insn (restore_regs
[--num_restore
],
1945 gen_rtx_MEM (SImode
,
1946 plus_constant (Pmode
,
1952 for (i
= 0; i
< num_restore
; i
++)
1954 emit_move_insn (restore_regs
[i
],
1955 gen_rtx_MEM (SImode
,
1956 plus_constant (Pmode
,
1960 emit_use (restore_regs
[i
]);
1964 /* Cut back the remainder of the stack. */
1965 increment_stack (init_stack_free
+ crtl
->args
.pretend_args_size
,
1969 /* And return or use reti for interrupt handlers. */
1970 if (interrupt_handler
)
1972 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E_UP
))
1973 emit_insn (gen_callt_return_interrupt ());
1975 emit_jump_insn (gen_return_interrupt ());
1977 else if (actual_fsize
)
1978 emit_jump_insn (gen_return_internal ());
1980 emit_jump_insn (gen_return_simple ());
/* Reset the per-function interrupt cache for the next function.  */
1983 v850_interrupt_cache_p
= FALSE
;
1984 v850_interrupt_p
= FALSE
;
/* Update the cc_status condition-code tracking state after BODY/INSN,
   dispatching on the insn's "cc" attribute.  Part of the old CC0
   machinery.  NOTE(review): the case labels between branches were
   dropped by the extraction; each comment below documents the case it
   belonged to.  */
1987 /* Update the condition code from the insn. */
1989 notice_update_cc (rtx body
, rtx_insn
*insn
)
1991 switch (get_attr_cc (insn
))
1994 /* Insn does not affect CC at all. */
1998 /* Insn does not change CC, but the 0'th operand has been changed. */
1999 if (cc_status
.value1
!= 0
2000 && reg_overlap_mentioned_p (recog_data
.operand
[0], cc_status
.value1
))
2001 cc_status
.value1
= 0;
2005 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2006 V,C is in an unusable state. */
2008 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
| CC_NO_CARRY
;
2009 cc_status
.value1
= recog_data
.operand
[0];
2013 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2014 C is in an unusable state. */
2016 cc_status
.flags
|= CC_NO_CARRY
;
2017 cc_status
.value1
= recog_data
.operand
[0];
2021 /* The insn is a compare instruction. */
2023 cc_status
.value1
= SET_SRC (body
);
2027 /* Insn doesn't leave CC in a usable state. */
2036 /* Retrieve the data area that has been chosen for the given decl.
     Checks the decl's attribute list for "sda"/"tda"/"zda" (in that
     priority order) and falls back to DATA_AREA_NORMAL.  */
2039 v850_get_data_area (tree decl
)
2041 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2042 return DATA_AREA_SDA
;
2044 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2045 return DATA_AREA_TDA
;
2047 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2048 return DATA_AREA_ZDA
;
2050 return DATA_AREA_NORMAL
;
2053 /* Store the indicated data area in the decl's attributes, by
     prepending the matching "sda"/"tda"/"zda" identifier to
     DECL_ATTRIBUTES.  Inverse of v850_get_data_area above.  */
2056 v850_set_data_area (tree decl
, v850_data_area data_area
)
2062 case DATA_AREA_SDA
: name
= get_identifier ("sda"); break;
2063 case DATA_AREA_TDA
: name
= get_identifier ("tda"); break;
2064 case DATA_AREA_ZDA
: name
= get_identifier ("zda"); break;
2069 DECL_ATTRIBUTES (decl
) = tree_cons
2070 (name
, NULL
, DECL_ATTRIBUTES (decl
));
2073 /* Handle an "interrupt" attribute; arguments as in
2074 struct attribute_spec.handler.
     Rejects the attribute (with a warning, not an error) when applied
     to anything other than a FUNCTION_DECL.  */
2076 v850_handle_interrupt_attribute (tree
* node
,
2078 tree args ATTRIBUTE_UNUSED
,
2079 int flags ATTRIBUTE_UNUSED
,
2080 bool * no_add_attrs
)
2082 if (TREE_CODE (*node
) != FUNCTION_DECL
)
2084 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2086 *no_add_attrs
= true;
2092 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2093 struct attribute_spec.handler.
     Disallows the attribute on local variables, and rejects it when it
     conflicts with a data area already recorded on the decl.  */
2095 v850_handle_data_area_attribute (tree
* node
,
2097 tree args ATTRIBUTE_UNUSED
,
2098 int flags ATTRIBUTE_UNUSED
,
2099 bool * no_add_attrs
)
2101 v850_data_area data_area
;
2102 v850_data_area area
;
2105 /* Implement data area attribute. */
2106 if (is_attribute_p ("sda", name
))
2107 data_area
= DATA_AREA_SDA
;
2108 else if (is_attribute_p ("tda", name
))
2109 data_area
= DATA_AREA_TDA
;
2110 else if (is_attribute_p ("zda", name
))
2111 data_area
= DATA_AREA_ZDA
;
2115 switch (TREE_CODE (decl
))
/* VAR_DECL case: data area attributes are only valid at file scope.  */
2118 if (current_function_decl
!= NULL_TREE
)
2120 error_at (DECL_SOURCE_LOCATION (decl
),
2121 "data area attributes cannot be specified for "
2123 *no_add_attrs
= true;
/* Reject a data area that conflicts with an earlier declaration.  */
2129 area
= v850_get_data_area (decl
);
2130 if (area
!= DATA_AREA_NORMAL
&& data_area
!= area
)
2132 error ("data area of %q+D conflicts with previous declaration",
2134 *no_add_attrs
= true;
2146 /* Return nonzero if FUNC is an interrupt function as specified
2147 by the "interrupt" attribute.
     The result is cached in v850_interrupt_p once reload has started,
     since attributes are stable by then (see comment below).  */
2150 v850_interrupt_function_p (tree func
)
2155 if (v850_interrupt_cache_p
)
2156 return v850_interrupt_p
;
2158 if (TREE_CODE (func
) != FUNCTION_DECL
)
/* Either the "interrupt_handler" or the "interrupt" spelling marks an
   interrupt function.  */
2161 a
= lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func
));
2167 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
2168 ret
= a
!= NULL_TREE
;
2171 /* Its not safe to trust global variables until after function inlining has
2173 if (reload_completed
| reload_in_progress
)
2174 v850_interrupt_p
= ret
;
/* Decide which small data area DECL lives in and record the choice as
   SYMBOL_FLAG_{ZDA,TDA,SDA} on its SYMBOL_REF.  An explicit section
   name or the -m{zda,sda,tda}=N size limits can force the area when no
   attribute was given.  */
2181 v850_encode_data_area (tree decl
, rtx symbol
)
2185 /* Map explicit sections into the appropriate attribute */
2186 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2188 if (DECL_SECTION_NAME (decl
))
2190 const char *name
= DECL_SECTION_NAME (decl
);
2192 if (streq (name
, ".zdata") || streq (name
, ".zbss"))
2193 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2195 else if (streq (name
, ".sdata") || streq (name
, ".sbss"))
2196 v850_set_data_area (decl
, DATA_AREA_SDA
);
2198 else if (streq (name
, ".tdata"))
2199 v850_set_data_area (decl
, DATA_AREA_TDA
);
2202 /* If no attribute, support -m{zda,sda,tda}=n */
2205 int size
= int_size_in_bytes (TREE_TYPE (decl
));
/* Pick the smallest area whose size threshold admits the object;
   TDA is checked first, then SDA, then ZDA.  */
2209 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_TDA
])
2210 v850_set_data_area (decl
, DATA_AREA_TDA
);
2212 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_SDA
])
2213 v850_set_data_area (decl
, DATA_AREA_SDA
);
2215 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_ZDA
])
2216 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2219 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
/* Transfer the chosen data area onto the symbol's flags so later RTL
   passes can see it without the decl.  */
2223 flags
= SYMBOL_REF_FLAGS (symbol
);
2224 switch (v850_get_data_area (decl
))
2226 case DATA_AREA_ZDA
: flags
|= SYMBOL_FLAG_ZDA
; break;
2227 case DATA_AREA_TDA
: flags
|= SYMBOL_FLAG_TDA
; break;
2228 case DATA_AREA_SDA
: flags
|= SYMBOL_FLAG_SDA
; break;
2229 default: gcc_unreachable ();
2231 SYMBOL_REF_FLAGS (symbol
) = flags
;
/* Implement TARGET_ENCODE_SECTION_INFO: do the default encoding, then
   additionally tag static/external variables with their v850 data
   area (see v850_encode_data_area above).  */
2235 v850_encode_section_info (tree decl
, rtx rtl
, int first
)
2237 default_encode_section_info (decl
, rtl
, first
);
2239 if (TREE_CODE (decl
) == VAR_DECL
2240 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2241 v850_encode_data_area (decl
, XEXP (rtl
, 0));
2244 /* Construct a JR instruction to a routine that will perform the equivalent of
2245 the RTL passed in as an argument. This RTL is a function epilogue that
2246 pops registers off the stack and possibly releases some extra stack space
2247 as well. The code has already verified that the RTL matches these
     requirements.  Returns assembly text in a static buffer (not
     reentrant — see the XXX below).  */
2251 construct_restore_jr (rtx op
)
2253 int count
= XVECLEN (op
, 0);
2255 unsigned long int mask
;
2256 unsigned long int first
;
2257 unsigned long int last
;
2259 static char buff
[100]; /* XXX */
2263 error ("bogus JR construction: %d", count
);
2267 /* Work out how many bytes to pop off the stack before retrieving
2269 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2270 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2271 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2273 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2275 /* Each pop will remove 4 bytes from the stack.... */
2276 stack_bytes
-= (count
- 2) * 4;
2278 /* Make sure that the amount we are popping is either 0 or 16 bytes. */
2279 if (stack_bytes
!= 0)
2281 error ("bad amount of stack space removal: %d", stack_bytes
);
2285 /* Now compute the bit mask of registers to push. */
2287 for (i
= 2; i
< count
; i
++)
2289 rtx vector_element
= XVECEXP (op
, 0, i
);
2291 gcc_assert (GET_CODE (vector_element
) == SET
);
2292 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2293 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2296 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2299 /* Scan for the first register to pop. */
2300 for (first
= 0; first
< 32; first
++)
2302 if (mask
& (1 << first
))
2306 gcc_assert (first
< 32);
2308 /* Discover the last register to pop. */
2309 if (mask
& (1 << LINK_POINTER_REGNUM
))
2311 last
= LINK_POINTER_REGNUM
;
2315 gcc_assert (!stack_bytes
);
2316 gcc_assert (mask
& (1 << 29));
2321 /* Note, it is possible to have gaps in the register mask.
2322 We ignore this here, and generate a JR anyway. We will
2323 be popping more registers than is strictly necessary, but
2324 it does save code space. */
2326 if (TARGET_LONG_CALLS
)
/* Long-call variant: load the helper address into r6 and jmp.  */
2331 sprintf (name
, "__return_%s", reg_names
[first
]);
2333 sprintf (name
, "__return_%s_%s", reg_names
[first
], reg_names
[last
]);
2335 sprintf (buff
, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2341 sprintf (buff
, "jr __return_%s", reg_names
[first
]);
2343 sprintf (buff
, "jr __return_%s_%s", reg_names
[first
], reg_names
[last
]);
2350 /* Construct a JARL instruction to a routine that will perform the equivalent
2351 of the RTL passed as a parameter. This RTL is a function prologue that
2352 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2353 some stack space as well. The code has already verified that the RTL
2354 matches these requirements.
     Mirror image of construct_restore_jr above; returns assembly text in
     a static buffer (not reentrant — see the XXX below).  */
2356 construct_save_jarl (rtx op
)
2358 int count
= XVECLEN (op
, 0);
2360 unsigned long int mask
;
2361 unsigned long int first
;
2362 unsigned long int last
;
2364 static char buff
[100]; /* XXX */
2366 if (count
<= (TARGET_LONG_CALLS
? 3 : 2))
2368 error ("bogus JARL construction: %d", count
);
2373 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2374 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2375 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0)) == REG
);
2376 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2378 /* Work out how many bytes to push onto the stack after storing the
2380 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2382 /* Each push will put 4 bytes from the stack.... */
2383 stack_bytes
+= (count
- (TARGET_LONG_CALLS
? 3 : 2)) * 4;
2385 /* Make sure that the amount we are pushing is either 0 or 16 bytes. */
2386 if (stack_bytes
!= 0)
2388 error ("bad amount of stack space removal: %d", stack_bytes
);
2392 /* Now compute the bit mask of registers to push. */
2394 for (i
= 1; i
< count
- (TARGET_LONG_CALLS
? 2 : 1); i
++)
2396 rtx vector_element
= XVECEXP (op
, 0, i
);
2398 gcc_assert (GET_CODE (vector_element
) == SET
);
2399 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2400 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2403 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2406 /* Scan for the first register to push. */
2407 for (first
= 0; first
< 32; first
++)
2409 if (mask
& (1 << first
))
2413 gcc_assert (first
< 32);
2415 /* Discover the last register to push. */
2416 if (mask
& (1 << LINK_POINTER_REGNUM
))
2418 last
= LINK_POINTER_REGNUM
;
2422 gcc_assert (!stack_bytes
);
2423 gcc_assert (mask
& (1 << 29));
2428 /* Note, it is possible to have gaps in the register mask.
2429 We ignore this here, and generate a JARL anyway. We will
2430 be pushing more registers than is strictly necessary, but
2431 it does save code space. */
2433 if (TARGET_LONG_CALLS
)
/* Long-call variants: load the helper address into r11 first.  */
2438 sprintf (name
, "__save_%s", reg_names
[first
]);
2440 sprintf (name
, "__save_%s_%s", reg_names
[first
], reg_names
[last
]);
2442 if (TARGET_V850E3V5_UP
)
2443 sprintf (buff
, "mov hilo(%s), r11\n\tjarl [r11], r10", name
);
2445 sprintf (buff
, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2451 sprintf (buff
, "jarl __save_%s, r10", reg_names
[first
]);
2453 sprintf (buff
, "jarl __save_%s_%s, r10", reg_names
[first
],
2460 /* A version of asm_output_aligned_bss() that copes with the special
2461 data areas of the v850: picks the zbss/sbss/tdata section matching
     the decl's data area before emitting the alignment, label and skip
     directives.  */
2463 v850_output_aligned_bss (FILE * file
,
2466 unsigned HOST_WIDE_INT size
,
2469 switch (v850_get_data_area (decl
))
2472 switch_to_section (zbss_section
);
2476 switch_to_section (sbss_section
);
2480 switch_to_section (tdata_section
);
2484 switch_to_section (bss_section
);
2488 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
2489 #ifdef ASM_DECLARE_OBJECT_NAME
2490 last_assemble_variable_decl
= decl
;
2491 ASM_DECLARE_OBJECT_NAME (file
, name
, decl
);
2493 /* Standard thing is just output label for the object. */
2494 ASM_OUTPUT_LABEL (file
, name
);
2495 #endif /* ASM_DECLARE_OBJECT_NAME */
2496 ASM_OUTPUT_SKIP (file
, size
? size
: 1);
2499 /* Called via the macro ASM_OUTPUT_DECL_COMMON.
     Emits the .comm-style directive appropriate for the decl's data
     area (zcommon/scommon/tcommon or plain common).  */
2501 v850_output_common (FILE * file
,
2507 if (decl
== NULL_TREE
)
2509 fprintf (file
, "%s", COMMON_ASM_OP
);
2513 switch (v850_get_data_area (decl
))
2516 fprintf (file
, "%s", ZCOMMON_ASM_OP
);
2520 fprintf (file
, "%s", SCOMMON_ASM_OP
);
2524 fprintf (file
, "%s", TCOMMON_ASM_OP
);
2528 fprintf (file
, "%s", COMMON_ASM_OP
);
2533 assemble_name (file
, name
);
2534 fprintf (file
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
2537 /* Called via the macro ASM_OUTPUT_DECL_LOCAL.
     Emits a .local directive for NAME, then defers to the common
     handler above for the actual storage directive.  */
2539 v850_output_local (FILE * file
,
2545 fprintf (file
, "%s", LOCAL_ASM_OP
);
2546 assemble_name (file
, name
);
2547 fprintf (file
, "\n");
2549 ASM_OUTPUT_ALIGNED_DECL_COMMON (file
, decl
, name
, size
, align
);
2552 /* Add data area to the given declaration if a ghs data area pragma is
2553 currently in effect (#pragma ghs startXXX/endXXX).
     Also lazily initializes the GHS default section-name table and, when
     a section kind has been renamed by pragma, attaches the chosen
     section name to the decl.  Implements TARGET_INSERT_ATTRIBUTES.  */
2555 v850_insert_attributes (tree decl
, tree
* attr_ptr ATTRIBUTE_UNUSED
)
2558 && data_area_stack
->data_area
2559 && current_function_decl
== NULL_TREE
2560 && (TREE_CODE (decl
) == VAR_DECL
|| TREE_CODE (decl
) == CONST_DECL
)
2561 && v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2562 v850_set_data_area (decl
, data_area_stack
->data_area
);
2564 /* Initialize the default names of the v850 specific sections,
2565 if this has not been done before. */
2567 if (GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
] == NULL
)
2569 GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
]
2572 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROSDATA
]
2575 GHS_default_section_names
[(int) GHS_SECTION_KIND_TDATA
]
2578 GHS_default_section_names
[(int) GHS_SECTION_KIND_ZDATA
]
2581 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROZDATA
]
2585 if (current_function_decl
== NULL_TREE
2586 && (TREE_CODE (decl
) == VAR_DECL
2587 || TREE_CODE (decl
) == CONST_DECL
2588 || TREE_CODE (decl
) == FUNCTION_DECL
)
2589 && (!DECL_EXTERNAL (decl
) || DECL_INITIAL (decl
))
2590 && !DECL_SECTION_NAME (decl
))
2592 enum GHS_section_kind kind
= GHS_SECTION_KIND_DEFAULT
;
2593 const char * chosen_section
;
2595 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2596 kind
= GHS_SECTION_KIND_TEXT
;
2599 /* First choose a section kind based on the data area of the decl. */
2600 switch (v850_get_data_area (decl
))
2606 kind
= ((TREE_READONLY (decl
))
2607 ? GHS_SECTION_KIND_ROSDATA
2608 : GHS_SECTION_KIND_SDATA
);
2612 kind
= GHS_SECTION_KIND_TDATA
;
2616 kind
= ((TREE_READONLY (decl
))
2617 ? GHS_SECTION_KIND_ROZDATA
2618 : GHS_SECTION_KIND_ZDATA
);
2621 case DATA_AREA_NORMAL
: /* default data area */
2622 if (TREE_READONLY (decl
))
2623 kind
= GHS_SECTION_KIND_RODATA
;
2624 else if (DECL_INITIAL (decl
))
2625 kind
= GHS_SECTION_KIND_DATA
;
2627 kind
= GHS_SECTION_KIND_BSS
;
2631 /* Now, if the section kind has been explicitly renamed,
2632 then attach a section attribute. */
2633 chosen_section
= GHS_current_section_names
[(int) kind
];
2635 /* Otherwise, if this kind of section needs an explicit section
2636 attribute, then also attach one. */
2637 if (chosen_section
== NULL
)
2638 chosen_section
= GHS_default_section_names
[(int) kind
];
2642 /* Only set the section name if specified by a pragma, because
2643 otherwise it will force those variables to get allocated storage
2644 in this module, rather than by the linker. */
2645 set_decl_section_name (decl
, chosen_section
);
2650 /* Construct a DISPOSE instruction that is the equivalent of
2651 the given RTX. We have already verified that this should
     match.  Returns assembly text in a static buffer; may emit a
     callt to an out-of-line __callt_return_* helper instead when
     callt is enabled and profitable.  */
2655 construct_dispose_instruction (rtx op
)
2657 int count
= XVECLEN (op
, 0);
2659 unsigned long int mask
;
2661 static char buff
[ 100 ]; /* XXX */
2666 error ("bogus DISPOSE construction: %d", count
);
2670 /* Work out how many bytes to pop off the
2671 stack before retrieving registers. */
2672 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2673 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2674 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2676 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2678 /* Each pop will remove 4 bytes from the stack.... */
2679 stack_bytes
-= (count
- 2) * 4;
2681 /* Make sure that the amount we are popping
2682 will fit into the DISPOSE instruction. */
2683 if (stack_bytes
> 128)
2685 error ("too much stack space to dispose of: %d", stack_bytes
);
2689 /* Now compute the bit mask of registers to push. */
2692 for (i
= 2; i
< count
; i
++)
2694 rtx vector_element
= XVECEXP (op
, 0, i
);
2696 gcc_assert (GET_CODE (vector_element
) == SET
);
2697 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2698 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
/* r2 is handled by the callt helpers, not the register mask.  */
2701 if (REGNO (SET_DEST (vector_element
)) == 2)
2704 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2707 if (! TARGET_DISABLE_CALLT
2708 && (use_callt
|| stack_bytes
== 0))
2712 sprintf (buff
, "callt ctoff(__callt_return_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29);
2717 for (i
= 20; i
< 32; i
++)
2718 if (mask
& (1 << i
))
2722 sprintf (buff
, "callt ctoff(__callt_return_r31c)");
2724 sprintf (buff
, "callt ctoff(__callt_return_r%d_r%s)",
2725 i
, (mask
& (1 << 31)) ? "31c" : "29");
2730 static char regs
[100]; /* XXX */
2733 /* Generate the DISPOSE instruction. Note we could just issue the
2734 bit mask as a number as the assembler can cope with this, but for
2735 the sake of our readers we turn it into a textual description. */
2739 for (i
= 20; i
< 32; i
++)
2741 if (mask
& (1 << i
))
2746 strcat (regs
, ", ");
2751 strcat (regs
, reg_names
[ first
]);
/* Collapse a run of consecutive registers into "first - last".  */
2753 for (i
++; i
< 32; i
++)
2754 if ((mask
& (1 << i
)) == 0)
2759 strcat (regs
, " - ");
2760 strcat (regs
, reg_names
[ i
- 1 ] );
2765 sprintf (buff
, "dispose %d {%s}, r31", stack_bytes
/ 4, regs
);
2771 /* Construct a PREPARE instruction that is the equivalent of
2772 the given RTL. We have already verified that this should
     match.  Mirror image of construct_dispose_instruction above;
     returns assembly text in a static buffer and may use a callt to an
     out-of-line __callt_save_* helper instead.
     Fixes vs. previous revision: the diagnostic said "PREPEARE"
     (typo for PREPARE), and the range-check comment referred to the
     DISPOSE instruction instead of PREPARE.  */
2776 construct_prepare_instruction (rtx op
)
2780 unsigned long int mask
;
2782 static char buff
[ 100 ]; /* XXX */
2785 if (XVECLEN (op
, 0) <= 1)
2787 error ("bogus PREPARE construction: %d", XVECLEN (op
, 0));
2791 /* Work out how many bytes to push onto
2792 the stack after storing the registers. */
2793 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2794 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2795 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2797 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2800 /* Make sure that the amount we are allocating
2801 will fit into the PREPARE instruction. */
2802 if (stack_bytes
< -128)
2804 error ("too much stack space to prepare: %d", stack_bytes
);
2808 /* Now compute the bit mask of registers to push. */
2811 for (i
= 1; i
< XVECLEN (op
, 0); i
++)
2813 rtx vector_element
= XVECEXP (op
, 0, i
);
2815 if (GET_CODE (vector_element
) == CLOBBER
)
2818 gcc_assert (GET_CODE (vector_element
) == SET
);
2819 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2820 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
/* r2 is handled by the callt helpers, not the register mask.  */
2823 if (REGNO (SET_SRC (vector_element
)) == 2)
2826 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2830 stack_bytes
+= count
* 4;
2832 if ((! TARGET_DISABLE_CALLT
)
2833 && (use_callt
|| stack_bytes
== 0))
2837 sprintf (buff
, "callt ctoff(__callt_save_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29 );
2841 for (i
= 20; i
< 32; i
++)
2842 if (mask
& (1 << i
))
2846 sprintf (buff
, "callt ctoff(__callt_save_r31c)");
2848 sprintf (buff
, "callt ctoff(__callt_save_r%d_r%s)",
2849 i
, (mask
& (1 << 31)) ? "31c" : "29");
2853 static char regs
[100]; /* XXX */
2857 /* Generate the PREPARE instruction. Note we could just issue the
2858 bit mask as a number as the assembler can cope with this, but for
2859 the sake of our readers we turn it into a textual description. */
2863 for (i
= 20; i
< 32; i
++)
2865 if (mask
& (1 << i
))
2870 strcat (regs
, ", ");
2875 strcat (regs
, reg_names
[ first
]);
/* Collapse a run of consecutive registers into "first - last".  */
2877 for (i
++; i
< 32; i
++)
2878 if ((mask
& (1 << i
)) == 0)
2883 strcat (regs
, " - ");
2884 strcat (regs
, reg_names
[ i
- 1 ] );
2889 sprintf (buff
, "prepare {%s}, %d", regs
, (- stack_bytes
) / 4);
2895 /* Return an RTX indicating where the return address to the
2896 calling function can be found (the link pointer register, r31).  */
2899 v850_return_addr (int count
)
2904 return get_hard_reg_initial_val (Pmode
, LINK_POINTER_REGNUM
);
2907 /* Implement TARGET_ASM_INIT_SECTIONS.
     Creates the v850-specific output sections (.rosdata, .rozdata,
     .tdata, .zdata, .zbss) used by the data-area machinery.  */
2910 v850_asm_init_sections (void)
2913 = get_unnamed_section (0, output_section_asm_op
,
2914 "\t.section .rosdata,\"a\"");
2917 = get_unnamed_section (0, output_section_asm_op
,
2918 "\t.section .rozdata,\"a\"");
2921 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2922 "\t.section .tdata,\"aw\"");
2925 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2926 "\t.section .zdata,\"aw\"");
2929 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
2930 output_section_asm_op
,
2931 "\t.section .zbss,\"aw\"");
/* Implement TARGET_ASM_SELECT_SECTION: pick an output section for EXP
   according to its data area and whether it is a constant initialized
   read-only object.  */
2935 v850_select_section (tree exp
,
2936 int reloc ATTRIBUTE_UNUSED
,
2937 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
2939 if (TREE_CODE (exp
) == VAR_DECL
)
/* is_const: writable / side-effecting / uninitialized / non-constant
   initializers all disqualify the read-only sections.  */
2942 if (!TREE_READONLY (exp
)
2943 || TREE_SIDE_EFFECTS (exp
)
2944 || !DECL_INITIAL (exp
)
2945 || (DECL_INITIAL (exp
) != error_mark_node
2946 && !TREE_CONSTANT (DECL_INITIAL (exp
))))
2951 switch (v850_get_data_area (exp
))
2954 return is_const
? rozdata_section
: zdata_section
;
2957 return tdata_section
;
2960 return is_const
? rosdata_section
: sdata_section
;
2963 return is_const
? readonly_data_section
: data_section
;
2966 return readonly_data_section
;
2969 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
     Only the single return-value register (r10) qualifies.  */
2972 v850_function_value_regno_p (const unsigned int regno
)
2974 return (regno
== RV_REGNUM
);
2977 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2980 v850_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
2982 /* Return values > 8 bytes in length in memory. */
2983 return int_size_in_bytes (type
) > 8
2984 || TYPE_MODE (type
) == BLKmode
2985 /* With the rh850 ABI return all aggregates in memory. */
2986 || ((! TARGET_GCC_ABI
) && AGGREGATE_TYPE_P (type
))
2990 /* Worker function for TARGET_FUNCTION_VALUE.
     Function values are always returned in r10.  */
2993 v850_function_value (const_tree valtype
,
2994 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
2995 bool outgoing ATTRIBUTE_UNUSED
)
2997 return gen_rtx_REG (TYPE_MODE (valtype
), RV_REGNUM
);
3000 /* Implement TARGET_LIBCALL_VALUE.
     Library-call values are returned in r10 too.  */
3003 v850_libcall_value (machine_mode mode
,
3004 const_rtx func ATTRIBUTE_UNUSED
)
3006 return gen_rtx_REG (mode
, RV_REGNUM
);
3010 /* Worker function for TARGET_CAN_ELIMINATE.
     Eliminating to the stack pointer is only valid without a frame
     pointer; any other elimination is always allowed.  */
3013 v850_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
3015 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
3018 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
3020 If TARGET_APP_REGS is not defined then add r2 and r5 to
3021 the pool of fixed registers. See PR 14505.
     NOTE(review): the visible body frees r2/r5 when TARGET_APP_REGS is
     set; the branch for the non-app-regs case appears to have been
     dropped by extraction — confirm against the full source.  */
3024 v850_conditional_register_usage (void)
3026 if (TARGET_APP_REGS
)
3028 fixed_regs
[2] = 0; call_used_regs
[2] = 0;
3029 fixed_regs
[5] = 0; call_used_regs
[5] = 1;
3033 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.
     The jarl loads the trampoline's own address into r12; the two
     trailing .long slots are patched by v850_trampoline_init with the
     static chain and the target function address.  */
3036 v850_asm_trampoline_template (FILE *f
)
3038 fprintf (f
, "\tjarl .+4,r12\n");
3039 fprintf (f
, "\tld.w 12[r12],r20\n");
3040 fprintf (f
, "\tld.w 16[r12],r12\n");
3041 fprintf (f
, "\tjmp [r12]\n");
3042 fprintf (f
, "\tnop\n");
3043 fprintf (f
, "\t.long 0\n");
3044 fprintf (f
, "\t.long 0\n");
3047 /* Worker function for TARGET_TRAMPOLINE_INIT.
     Copies the template above into M_TRAMP, then stores the static
     chain at offset 16 and the target function address at offset 20
     (the two .long slots of the template).  */
3050 v850_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
3052 rtx mem
, fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
3054 emit_block_move (m_tramp
, assemble_trampoline_template (),
3055 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
3057 mem
= adjust_address (m_tramp
, SImode
, 16);
3058 emit_move_insn (mem
, chain_value
);
3059 mem
= adjust_address (m_tramp
, SImode
, 20);
3060 emit_move_insn (mem
, fnaddr
);
/* Scheduler issue rate: the V850E2 and later cores are dual-issue.  */
3064 v850_issue_rate (void)
3066 return (TARGET_V850E2_UP
? 2 : 1);
3069 /* Implement TARGET_LEGITIMATE_CONSTANT_P.
     Everything is legitimate except a (const (plus symbol large-int))
     whose offset does not satisfy the K constraint.  */
3072 v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
3074 return (GET_CODE (x
) == CONST_DOUBLE
3075 || !(GET_CODE (x
) == CONST
3076 && GET_CODE (XEXP (x
, 0)) == PLUS
3077 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
3078 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3079 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x
, 0), 1)))));
3082 /* Helper function for `v850_legitimate_address_p'.
     In strict mode a base register must satisfy REGNO_OK_FOR_BASE_P;
     the non-strict path is not visible in this extract.  */
3085 v850_reg_ok_for_base_p (const_rtx reg
, bool strict_p
)
3089 return REGNO_OK_FOR_BASE_P (REGNO (reg
));
3095 /* Accept either REG or SUBREG where a register is valid. */
3098 v850_rtx_ok_for_base_p (const_rtx x
, bool strict_p
)
3100 return ((REG_P (x
) && v850_reg_ok_for_base_p (x
, strict_p
))
3101 || (SUBREG_P (x
) && REG_P (SUBREG_REG (x
))
3102 && v850_reg_ok_for_base_p (SUBREG_REG (x
), strict_p
)));
3105 /* Implement TARGET_LEGITIMATE_ADDRESS_P.
     Accepted forms: a base register; an aligned constant address; a
     LO_SUM of base register + suitably-aligned constant; a special
     symbol reference (small data areas); or base register + short
     (K-constraint) displacement with the right alignment for MODE.  */
3108 v850_legitimate_address_p (machine_mode mode
, rtx x
, bool strict_p
,
3109 addr_space_t as ATTRIBUTE_UNUSED
)
3111 gcc_assert (ADDR_SPACE_GENERIC_P (as
));
3113 if (v850_rtx_ok_for_base_p (x
, strict_p
))
3115 if (CONSTANT_ADDRESS_P (x
)
3116 && (mode
== QImode
|| INTVAL (x
) % 2 == 0)
3117 && (GET_MODE_SIZE (mode
) <= 4 || INTVAL (x
) % 4 == 0))
3119 if (GET_CODE (x
) == LO_SUM
3120 && REG_P (XEXP (x
, 0))
3121 && v850_reg_ok_for_base_p (XEXP (x
, 0), strict_p
)
3122 && CONSTANT_P (XEXP (x
, 1))
3123 && (!CONST_INT_P (XEXP (x
, 1))
3124 || ((mode
== QImode
|| INTVAL (XEXP (x
, 1)) % 2 == 0)
3125 && constraint_satisfied_p (XEXP (x
, 1), CONSTRAINT_K
)))
3126 && GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (word_mode
))
3128 if (special_symbolref_operand (x
, mode
)
3129 && (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (word_mode
)))
3131 if (GET_CODE (x
) == PLUS
3132 && v850_rtx_ok_for_base_p (XEXP (x
, 0), strict_p
)
3133 && constraint_satisfied_p (XEXP (x
,1), CONSTRAINT_K
)
3134 && ((mode
== QImode
|| INTVAL (XEXP (x
, 1)) % 2 == 0)
/* The whole accessed object, not just its first word, must stay
   within the K-constraint displacement range.  */
3135 && CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))
3136 + (GET_MODE_NUNITS (mode
) * UNITS_PER_WORD
))))
3143 v850_memory_move_cost (machine_mode mode
,
3144 reg_class_t reg_class ATTRIBUTE_UNUSED
,
3147 switch (GET_MODE_SIZE (mode
))
3157 return (GET_MODE_SIZE (mode
) / 2) * (in
? 3 : 1);
3162 v850_adjust_insn_length (rtx_insn
*insn
, int length
)
3164 if (TARGET_V850E3V5_UP
)
3168 if (TARGET_LONG_CALLS
)
3170 /* call_internal_long, call_value_internal_long. */
3178 /* call_internal_short, call_value_internal_short. */
3187 /* V850 specific attributes. */
3189 static const struct attribute_spec v850_attribute_table
[] =
3191 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3192 affects_type_identity } */
3193 { "interrupt_handler", 0, 0, true, false, false,
3194 v850_handle_interrupt_attribute
, false },
3195 { "interrupt", 0, 0, true, false, false,
3196 v850_handle_interrupt_attribute
, false },
3197 { "sda", 0, 0, true, false, false,
3198 v850_handle_data_area_attribute
, false },
3199 { "tda", 0, 0, true, false, false,
3200 v850_handle_data_area_attribute
, false },
3201 { "zda", 0, 0, true, false, false,
3202 v850_handle_data_area_attribute
, false },
3203 { NULL
, 0, 0, false, false, false, NULL
, false }
3207 v850_option_override (void)
3209 if (flag_exceptions
|| flag_non_call_exceptions
)
3210 flag_omit_frame_pointer
= 0;
3212 /* The RH850 ABI does not (currently) support the use of the CALLT instruction. */
3213 if (! TARGET_GCC_ABI
)
3214 target_flags
|= MASK_DISABLE_CALLT
;
3218 v850_gen_movdi (rtx
* operands
)
3220 if (REG_P (operands
[0]))
3222 if (REG_P (operands
[1]))
3224 if (REGNO (operands
[0]) == (REGNO (operands
[1]) - 1))
3225 return "mov %1, %0; mov %R1, %R0";
3227 return "mov %R1, %R0; mov %1, %0";
3230 if (MEM_P (operands
[1]))
3232 if (REGNO (operands
[0]) & 1)
3233 /* Use two load word instructions to synthesise a load double. */
3234 return "ld.w %1, %0 ; ld.w %R1, %R0" ;
3236 return "ld.dw %1, %0";
3239 return "mov %1, %0; mov %R1, %R0";
3242 gcc_assert (REG_P (operands
[1]));
3244 if (REGNO (operands
[1]) & 1)
3245 /* Use two store word instructions to synthesise a store double. */
3246 return "st.w %1, %0 ; st.w %R1, %R0 ";
3248 return "st.dw %1, %0";
/* Initialize the GCC target structure.  Each hook below is first
   #undef'd (to override any default from the generic headers) and then
   bound to the v850 implementation.  */

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE v850_option_override

#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST v850_memory_move_cost

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef  TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them. */
#undef  TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef  TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value
#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE v850_libcall_value

#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef  TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p

/* NOTE(review): the #undef for TARGET_LRA_P was missing in the pasted
   text; restored here to match the surrounding convention — verify.  */
#undef  TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

#undef  TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P v850_legitimate_address_p

#undef  TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
3355 struct gcc_target targetm
= TARGET_INITIALIZER
;
3357 #include "gt-v850.h"