/* Subroutines used for code generation on Renesas RX processors.
   Copyright (C) 2008-2018 Free Software Foundation, Inc.
   Contributed by Red Hat.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
/* To Do: Re-enable memory-to-memory copies and fix up reload.  */
#define IN_TARGET_CODE 1

#include "coretypes.h"
#include "stringpool.h"
#include "diagnostic-core.h"
#include "stor-layout.h"
#include "langhooks.h"

/* This file should be included last.  */
#include "target-def.h"
static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_num_interrupt_regs;
static unsigned int
rx_gp_base_regnum (void)
{
  if (rx_gp_base_regnum_val == INVALID_REGNUM)
    gcc_unreachable ();
  return rx_gp_base_regnum_val;
}
static unsigned int
rx_pid_base_regnum (void)
{
  if (rx_pid_base_regnum_val == INVALID_REGNUM)
    gcc_unreachable ();
  return rx_pid_base_regnum_val;
}
/* Find a SYMBOL_REF in a "standard" MEM address and return its decl.  */

static tree
rx_decl_for_addr (rtx op)
{
  if (GET_CODE (op) == MEM)
    op = XEXP (op, 0);
  if (GET_CODE (op) == CONST)
    op = XEXP (op, 0);
  while (GET_CODE (op) == PLUS)
    op = XEXP (op, 0);
  if (GET_CODE (op) == SYMBOL_REF)
    return SYMBOL_REF_DECL (op);
  return NULL_TREE;
}
static void rx_print_operand (FILE *, rtx, int);

#define CC_FLAG_S   (1 << 0)
#define CC_FLAG_Z   (1 << 1)
#define CC_FLAG_O   (1 << 2)
#define CC_FLAG_C   (1 << 3)
#define CC_FLAG_FP  (1 << 4)  /* Fake, to differentiate CC_Fmode.  */

static unsigned int flags_from_mode (machine_mode mode);
static unsigned int flags_from_code (enum rtx_code code);
/* Return true if OP is a reference to an object in a PID data area.  */

enum pid_type
{
  PID_NOT_PID = 0,  /* The object is not in the PID data area.  */
  PID_ENCODED,      /* The object is in the PID data area.  */
  PID_UNENCODED     /* The object will be placed in the PID data area,
                       but it has not been placed there yet.  */
};
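
/* For illustration (this note is not from the original sources): a
   PID_UNENCODED reference to a read-only object SYM is later rewritten
   to be addressed relative to the PID base register, producing an
   operand of the general shape "(SYM - __pid_base)[<pid-reg>]", as can
   be seen in the move templates further down in this file.  */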
static enum pid_type
rx_pid_data_operand (rtx op)
{
  tree op_decl;

  if (GET_CODE (op) == PLUS
      && GET_CODE (XEXP (op, 0)) == REG
      && GET_CODE (XEXP (op, 1)) == CONST
      && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
    return PID_ENCODED;

  op_decl = rx_decl_for_addr (op);

  if (op_decl)
    {
      if (TREE_READONLY (op_decl))
        return PID_UNENCODED;
    }
  else
    {
      /* Sigh, some special cases.  */
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF)
        return PID_UNENCODED;
    }

  return PID_NOT_PID;
}
static rtx
rx_legitimize_address (rtx x,
                       rtx oldx ATTRIBUTE_UNUSED,
                       machine_mode mode ATTRIBUTE_UNUSED)
{
  if (rx_pid_data_operand (x) == PID_UNENCODED)
    {
      rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
      return rv;
    }

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && REG_P (XEXP (XEXP (x, 0), 0))
      && REG_P (XEXP (x, 1)))
    return force_reg (SImode, x);

  return x;
}
/* Return true if OP is a reference to an object in a small data area.  */

static bool
rx_small_data_operand (rtx op)
{
  if (rx_small_data_limit == 0)
    return false;

  if (GET_CODE (op) == SYMBOL_REF)
    return SYMBOL_REF_SMALL_P (op);

  return false;
}
static bool
rx_is_legitimate_address (machine_mode mode, rtx x,
                          bool strict ATTRIBUTE_UNUSED)
{
  if (RTX_OK_FOR_BASE (x, strict))
    /* Register Indirect.  */
    return true;

  if ((GET_MODE_SIZE (mode) == 4
       || GET_MODE_SIZE (mode) == 2
       || GET_MODE_SIZE (mode) == 1)
      && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
    /* Pre-decrement Register Indirect or
       Post-increment Register Indirect.  */
    return RTX_OK_FOR_BASE (XEXP (x, 0), strict);

  switch (rx_pid_data_operand (x))
    {
    case PID_UNENCODED:
      return false;
    case PID_ENCODED:
      return true;
    default:
      break;
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx arg1 = XEXP (x, 0);
      rtx arg2 = XEXP (x, 1);
      rtx index = NULL_RTX;

      if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
        index = arg2;
      else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
        index = arg1;
      else
        return false;

      switch (GET_CODE (index))
        {
        case CONST_INT:
          {
            /* Register Relative: REG + INT.
               Only positive, mode-aligned, mode-sized
               displacements are allowed.  */
            HOST_WIDE_INT val = INTVAL (index);
            int factor;

            if (val < 0)
              return false;

            switch (GET_MODE_SIZE (mode))
              {
              default:
              case 4: factor = 4; break;
              case 2: factor = 2; break;
              case 1: factor = 1; break;
              }

            if (val > (65535 * factor))
              return false;
            return (val % factor) == 0;
          }

        case REG:
          /* Unscaled Indexed Register Indirect: REG + REG
             Size has to be "QI", REG has to be valid.  */
          return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);

        case MULT:
          {
            /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
               Factor has to equal the mode size, REG has to be valid.  */
            rtx factor;

            factor = XEXP (index, 1);
            index = XEXP (index, 0);

            return REG_P (index)
              && RTX_OK_FOR_BASE (index, strict)
              && CONST_INT_P (factor)
              && GET_MODE_SIZE (mode) == INTVAL (factor);
          }

        default:
          return false;
        }
    }

  /* Small data area accesses turn into register relative offsets.  */
  return rx_small_data_operand (x);
}
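
/* Worked example (added for illustration, not from the original
   sources): for an SImode access the scale factor above is 4, so a
   REG + INT address with displacement 8 is accepted (positive, a
   multiple of 4, and no more than 65535 * 4), whereas a displacement
   of 6 fails the (val % factor) == 0 test.  */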
/* Returns TRUE for simple memory addresses, i.e. ones
   that do not involve register indirect addressing
   or pre/post increment/decrement.  */

bool
rx_is_restricted_memory_address (rtx mem, machine_mode mode)
{
  if (! rx_is_legitimate_address
      (mode, mem, reload_in_progress || reload_completed))
    return false;

  switch (GET_CODE (mem))
    {
    case REG:
      /* Simple memory addresses are OK.  */
      return true;

    case SUBREG:
      return RX_REG_P (SUBREG_REG (mem));

    case PLUS:
      {
        rtx base, index;

        /* Only allow REG+INT addressing.  */
        base  = XEXP (mem, 0);
        index = XEXP (mem, 1);

        if (! RX_REG_P (base) || ! CONST_INT_P (index))
          return false;

        return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
      }

    case SYMBOL_REF:
      /* Can happen when small data is being supported.
         Assume that it will be resolved into GP+INT.  */
      return true;

    default:
      return false;
    }
}
/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P.  */

static bool
rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
{
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
      /* --REG and REG++ only work in SImode.  */
    case PRE_DEC:
    case POST_INC:
      return true;

    case PLUS:
      if (! REG_P (XEXP (addr, 0)))
        return true;

      addr = XEXP (addr, 1);

      switch (GET_CODE (addr))
        {
        case REG:
          /* REG+REG only works in SImode.  */
          return true;

        case CONST_INT:
          /* REG+INT is only mode independent if INT is a
             multiple of 4, positive and will fit into 16-bits.  */
          if (((INTVAL (addr) & 3) == 0)
              && IN_RANGE (INTVAL (addr), 4, 0xfffc))
            return false;
          return true;

        case MULT:
          /* REG+REG*SCALE is always mode dependent.  */
          return true;

        default:
          /* Not recognized, so treat as mode dependent.  */
          return true;
        }

    case CONST_INT:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
      /* These are all mode independent.  */
      return false;

    default:
      /* Everything else is unrecognized,
         so treat as mode dependent.  */
      return true;
    }
}
/* A C compound statement to output to stdio stream FILE the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  */

static void
rx_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "[");
      rx_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;

    case PRE_DEC:
      fprintf (file, "[-");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "]");
      break;

    case POST_INC:
      fprintf (file, "[");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "+]");
      break;

    case PLUS:
      {
        rtx arg1 = XEXP (addr, 0);
        rtx arg2 = XEXP (addr, 1);
        rtx base, index;

        if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
          base = arg1, index = arg2;
        else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
          base = arg2, index = arg1;
        else
          {
            rx_print_operand (file, arg1, 0);
            fprintf (file, " + ");
            rx_print_operand (file, arg2, 0);
            break;
          }

        if (REG_P (index) || GET_CODE (index) == MULT)
          {
            fprintf (file, "[");
            rx_print_operand (file, index, 'A');
            fprintf (file, ",");
          }
        else /* GET_CODE (index) == CONST_INT  */
          {
            rx_print_operand (file, index, 'A');
            fprintf (file, "[");
          }
        rx_print_operand (file, base, 0);
        fprintf (file, "]");
        break;
      }

    case CONST:
      if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
        {
          addr = XEXP (addr, 0);
          gcc_assert (XINT (addr, 1) == UNSPEC_CONST);

          addr = XVECEXP (addr, 0, 0);
          gcc_assert (CONST_INT_P (addr));
          fprintf (file, "#");
          output_addr_const (file, addr);
          break;
        }
      fprintf (file, "#");
      output_addr_const (file, XEXP (addr, 0));
      break;

    case UNSPEC:
      addr = XVECEXP (addr, 0, 0);
      /* Fall through.  */
    case LABEL_REF:
    case SYMBOL_REF:
      fprintf (file, "#");
      /* Fall through.  */
    default:
      output_addr_const (file, addr);
      break;
    }
}
static void
rx_print_integer (FILE * file, HOST_WIDE_INT val)
{
  if (val < 64)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
  else
    fprintf (file,
             TARGET_AS100_SYNTAX
             ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
             val);
}
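
/* For example (illustrative only): the value 10 prints as "10", while
   4660 prints as "0x1234" in GAS syntax, or as "01234H" when
   TARGET_AS100_SYNTAX selects the Renesas AS100 assembler.  */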
static bool
rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
{
  const char * op = integer_asm_op (size, is_aligned);

  if (! CONST_INT_P (x))
    return default_assemble_integer (x, size, is_aligned);

  if (op == NULL)
    return false;
  fputs (op, asm_out_file);

  rx_print_integer (asm_out_file, INTVAL (x));
  fputc ('\n', asm_out_file);
  return true;
}
/* Handles the insertion of a single operand into the assembler output.
   The %<letter> directives supported are:

     %A  Print an operand without a leading # character.
     %B  Print an integer comparison name.
     %C  Print a control register name.
     %F  Print a condition code flag name.
     %G  Register used for small-data-area addressing
     %H  Print high part of a DImode register, integer or address.
     %L  Print low part of a DImode register, integer or address.
     %N  Print the negation of the immediate value.
     %P  Register used for PID addressing
     %Q  If the operand is a MEM, then correctly generate
         register indirect or register relative addressing.
     %R  Like %Q but for zero-extending loads.  */
static void
rx_print_operand (FILE * file, rtx op, int letter)
{
  bool unsigned_load = false;
  bool print_hash = true;

  if (letter == 'A'
      && ((GET_CODE (op) == CONST
           && GET_CODE (XEXP (op, 0)) == UNSPEC)
          || GET_CODE (op) == UNSPEC))
    {
      print_hash = false;
      letter = 0;
    }

  switch (letter)
    {
    case 'A':
      /* Print an operand without a leading #.  */
      if (MEM_P (op))
        op = XEXP (op, 0);

      switch (GET_CODE (op))
        {
        case LABEL_REF:
        case SYMBOL_REF:
          output_addr_const (file, op);
          break;
        case CONST_INT:
          fprintf (file, "%ld", (long) INTVAL (op));
          break;
        default:
          rx_print_operand (file, op, 0);
          break;
        }
      break;

    case 'B':
      {
        enum rtx_code code = GET_CODE (op);
        machine_mode mode = GET_MODE (XEXP (op, 0));
        const char *ret;

        if (mode == CC_Fmode)
          {
            /* C flag is undefined, and O flag carries unordered.  None of the
               branch combinations that include O use it helpfully.  */
            /* ... */
          }
        else
          {
            unsigned int flags = flags_from_mode (mode);

            switch (code)
              {
              case LT:
                ret = (flags & CC_FLAG_O ? "lt" : "n");
                break;
              case GE:
                ret = (flags & CC_FLAG_O ? "ge" : "pz");
                break;
              /* ... the remaining comparison codes ... */
              default:
                gcc_unreachable ();
              }
            gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
          }
        fputs (ret, file);
        break;
      }

    case 'C':
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
        {
        case CTRLREG_PSW:   fprintf (file, "psw"); break;
        case CTRLREG_USP:   fprintf (file, "usp"); break;
        case CTRLREG_FPSW:  fprintf (file, "fpsw"); break;
        case CTRLREG_CPEN:  fprintf (file, "cpen"); break;
        case CTRLREG_BPSW:  fprintf (file, "bpsw"); break;
        case CTRLREG_BPC:   fprintf (file, "bpc"); break;
        case CTRLREG_ISP:   fprintf (file, "isp"); break;
        case CTRLREG_FINTV: fprintf (file, "fintv"); break;
        case CTRLREG_INTB:  fprintf (file, "intb"); break;
        default:
          warning (0, "unrecognized control register number: %d - using 'psw'",
                   (int) INTVAL (op));
          fprintf (file, "psw");
          break;
        }
      break;

    case 'F':
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
        {
        case 0: case 'c': case 'C': fprintf (file, "C"); break;
        case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
        case 2: case 's': case 'S': fprintf (file, "S"); break;
        case 3: case 'o': case 'O': fprintf (file, "O"); break;
        case 8: case 'i': case 'I': fprintf (file, "I"); break;
        case 9: case 'u': case 'U': fprintf (file, "U"); break;
        default:
          gcc_unreachable ();
        }
      break;

    case 'G':
      fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
      break;

    case 'H':
      switch (GET_CODE (op))
        {
        case REG:
          fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
          break;
        case CONST_INT:
          {
            HOST_WIDE_INT v = INTVAL (op);

            fprintf (file, "#");
            /* Trickery to avoid problems with shifting 32 bits at a time.  */
            v = v >> 16;
            v = v >> 16;
            rx_print_integer (file, v);
            break;
          }
        case CONST_DOUBLE:
          fprintf (file, "#");
          rx_print_integer (file, CONST_DOUBLE_HIGH (op));
          break;
        case MEM:
          if (! WORDS_BIG_ENDIAN)
            op = adjust_address (op, SImode, 4);
          output_address (GET_MODE (op), XEXP (op, 0));
          break;
        default:
          gcc_unreachable ();
        }
      break;

    case 'L':
      switch (GET_CODE (op))
        {
        case REG:
          fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
          break;
        case CONST_INT:
          rx_print_integer (file, INTVAL (op) & 0xffffffff);
          break;
        case CONST_DOUBLE:
          rx_print_integer (file, CONST_DOUBLE_LOW (op));
          break;
        case MEM:
          if (WORDS_BIG_ENDIAN)
            op = adjust_address (op, SImode, 4);
          output_address (GET_MODE (op), XEXP (op, 0));
          break;
        default:
          gcc_unreachable ();
        }
      break;

    case 'N':
      gcc_assert (CONST_INT_P (op));
      fprintf (file, "#");
      rx_print_integer (file, - INTVAL (op));
      break;

    case 'P':
      fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
      break;

    case 'R':
      gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
      unsigned_load = true;
      /* Fall through.  */
    case 'Q':
      if (MEM_P (op))
        {
          HOST_WIDE_INT offset;
          rtx mem = op;

          op = XEXP (op, 0);

          if (REG_P (op))
            offset = 0;
          else if (GET_CODE (op) == PLUS)
            {
              rtx displacement;

              if (REG_P (XEXP (op, 0)))
                {
                  displacement = XEXP (op, 1);
                  op = XEXP (op, 0);
                }
              else
                {
                  displacement = XEXP (op, 0);
                  op = XEXP (op, 1);
                  gcc_assert (REG_P (op));
                }

              gcc_assert (CONST_INT_P (displacement));
              offset = INTVAL (displacement);
              gcc_assert (offset >= 0);

              fprintf (file, "%ld", offset);
            }
          else
            gcc_unreachable ();

          fprintf (file, "[");
          rx_print_operand (file, op, 0);
          fprintf (file, "].");

          switch (GET_MODE_SIZE (GET_MODE (mem)))
            {
            case 1:
              gcc_assert (offset <= 65535 * 1);
              fprintf (file, unsigned_load ? "UB" : "B");
              break;
            case 2:
              gcc_assert (offset % 2 == 0);
              gcc_assert (offset <= 65535 * 2);
              fprintf (file, unsigned_load ? "UW" : "W");
              break;
            case 4:
              gcc_assert (offset % 4 == 0);
              gcc_assert (offset <= 65535 * 4);
              fprintf (file, "L");
              break;
            default:
              gcc_unreachable ();
            }
          break;
        }

      /* Fall through.  */

    default:
      if (GET_CODE (op) == CONST
          && GET_CODE (XEXP (op, 0)) == UNSPEC)
        op = XEXP (op, 0);
      else if (GET_CODE (op) == CONST
               && GET_CODE (XEXP (op, 0)) == PLUS
               && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
               && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
        {
          if (print_hash)
            fprintf (file, "#");
          fprintf (file, "(");
          rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
          fprintf (file, " + ");
          output_addr_const (file, XEXP (XEXP (op, 0), 1));
          fprintf (file, ")");
          return;
        }

      switch (GET_CODE (op))
        {
        case MULT:
          /* Should be the scaled part of an
             indexed register indirect address.  */
          {
            rtx base = XEXP (op, 0);
            rtx index = XEXP (op, 1);

            /* Check for a swapped index register and scaling factor.
               Not sure if this can happen, but be prepared to handle it.  */
            if (CONST_INT_P (base) && REG_P (index))
              {
                rtx tmp = base;
                base = index;
                index = tmp;
              }

            gcc_assert (REG_P (base));
            gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
            gcc_assert (CONST_INT_P (index));
            /* Do not try to verify the value of the scalar as it is based
               on the mode of the MEM not the mode of the MULT.  (Which
               will always be SImode).  */
            fprintf (file, "%s", reg_names [REGNO (base)]);
            break;
          }

        case MEM:
          output_address (GET_MODE (op), XEXP (op, 0));
          break;

        case PLUS:
          output_address (VOIDmode, op);
          break;

        case REG:
          gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
          fprintf (file, "%s", reg_names [REGNO (op)]);
          break;

        case SUBREG:
          gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
          fprintf (file, "%s", reg_names [subreg_regno (op)]);
          break;

          /* This will only be single precision....  */
        case CONST_DOUBLE:
          {
            long val;

            REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (op), val);
            if (print_hash)
              fprintf (file, "#");
            fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
            break;
          }

        case CONST_INT:
          if (print_hash)
            fprintf (file, "#");
          rx_print_integer (file, INTVAL (op));
          break;

        case UNSPEC:
          switch (XINT (op, 1))
            {
            case UNSPEC_PID_ADDR:
              {
                rtx sym, add;

                if (print_hash)
                  fprintf (file, "#");
                sym = XVECEXP (op, 0, 0);
                add = NULL_RTX;
                fprintf (file, "(");
                if (GET_CODE (sym) == PLUS)
                  {
                    add = XEXP (sym, 1);
                    sym = XEXP (sym, 0);
                  }
                output_addr_const (file, sym);
                if (add != NULL_RTX)
                  {
                    fputs ("+", file);
                    output_addr_const (file, add);
                  }
                fprintf (file, "-__pid_base");
                fprintf (file, ")");
                return;
              }
            }
          /* Fall through.  */

        case CONST:
        case SYMBOL_REF:
        case LABEL_REF:
          rx_print_operand_address (file, VOIDmode, op);
          break;

        default:
          gcc_unreachable ();
        }
      break;
    }
}
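
/* Example of the %Q directive above (illustrative, not from the
   original sources): an SImode MEM whose address is
   (plus (reg r1) (const_int 8)) prints as "8[r1].L"; with %R and a
   QImode MEM the suffix becomes ".UB", making the load
   zero-extending.  */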
/* Maybe convert an operand into its PID format.  */

rtx
rx_maybe_pidify_operand (rtx op, int copy_to_reg)
{
  if (rx_pid_data_operand (op) == PID_UNENCODED)
    {
      if (GET_CODE (op) == MEM)
        {
          rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
          op = replace_equiv_address (op, a);
        }
      else
        op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);

      if (copy_to_reg)
        op = copy_to_mode_reg (GET_MODE (op), op);
    }
  return op;
}
/* Returns an assembler template for a move instruction.  */

char *
rx_gen_move_template (rtx * operands, bool is_movu)
{
  static char  out_template [64];
  const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
  const char * src_template;
  const char * dst_template;
  rtx          dest = operands[0];
  rtx          src  = operands[1];

  /* Decide which extension, if any, should be given to the move instruction.  */
  switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
    {
    case E_QImode:
      /* The .B extension is not valid when
         loading an immediate into a register.  */
      if (! REG_P (dest) || ! CONST_INT_P (src))
        extension = ".B";
      break;
    case E_HImode:
      if (! REG_P (dest) || ! CONST_INT_P (src))
        /* The .W extension is not valid when
           loading an immediate into a register.  */
        extension = ".W";
      break;
    case E_DFmode:
    case E_DImode:
    case E_SFmode:
    case E_SImode:
      extension = ".L";
      break;
    case E_VOIDmode:
      /* This mode is used by constants.  */
      break;
    default:
      gcc_unreachable ();
    }

  if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "(%A1 - __pid_base)[%P1]";
    }
  else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "%%gp(%A1)[%G1]";
    }
  else
    src_template = "%1";

  if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
    {
      gcc_assert (GET_MODE (dest) != DImode);
      gcc_assert (GET_MODE (dest) != DFmode);

      dst_template = "%%gp(%A0)[%G0]";
    }
  else
    dst_template = "%0";

  if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
    {
      gcc_assert (! is_movu);

      if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
        sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
      else
        sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
    }
  else
    sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
             extension, src_template, dst_template);
  return out_template;
}
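
/* Illustrative examples of the templates built above (not from the
   original sources): a QImode register-to-register copy yields
   "mov.B\t%1, %0", a zero-extending HImode load yields
   "movu.W\t%1, %0", and a PID load of an unencoded object becomes
   "mov.L\t(%A1 - __pid_base)[%P1], %0".  */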
/* Return VALUE rounded up to the next ALIGNMENT boundary.  */

static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  alignment -= 1;
  return (value + alignment) & (~ alignment);
}
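
/* Example (illustrative): rx_round_up (5, 4) computes (5 + 3) & ~3,
   i.e. 8.  ALIGNMENT must be a power of two for the mask trick to
   work.  */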
/* Return the number of bytes in the argument registers
   occupied by an argument of type TYPE and mode MODE.  */

static unsigned int
rx_function_arg_size (machine_mode mode, const_tree type)
{
  unsigned int num_bytes;

  num_bytes = (mode == BLKmode)
    ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  return rx_round_up (num_bytes, UNITS_PER_WORD);
}

#define NUM_ARG_REGS        4
#define MAX_NUM_ARG_BYTES   (NUM_ARG_REGS * UNITS_PER_WORD)
/* Return an RTL expression describing the register holding a function
   parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
   be passed on the stack.  CUM describes the previous parameters to the
   function and NAMED is false if the parameter is part of a variable
   parameter list, or the last named parameter before the start of a
   variable parameter list.  */

static rtx
rx_function_arg (cumulative_args_t cum, machine_mode mode,
                 const_tree type, bool named)
{
  unsigned int next_reg;
  unsigned int bytes_so_far = *get_cumulative_args (cum);
  unsigned int size;
  unsigned int rounded_size;

  /* An exploded version of rx_function_arg_size.  */
  size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  /* If the size is not known it cannot be passed in registers.  */
  if (size < 1)
    return NULL_RTX;

  rounded_size = rx_round_up (size, UNITS_PER_WORD);

  /* Don't pass this arg via registers if there
     are insufficient registers to hold all of it.  */
  if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
    return NULL_RTX;

  /* Unnamed arguments and the last named argument in a
     variadic function are always passed on the stack.  */
  if (!named)
    return NULL_RTX;

  /* Structures must occupy an exact number of registers,
     otherwise they are passed on the stack.  */
  if ((type == NULL || AGGREGATE_TYPE_P (type))
      && (size % UNITS_PER_WORD) != 0)
    return NULL_RTX;

  next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;

  return gen_rtx_REG (mode, next_reg);
}
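
/* Worked example (added for illustration): with no argument bytes
   consumed so far, a 4-byte int is assigned register 1, the first of
   the NUM_ARG_REGS argument registers; an 8-byte struct starting at
   BYTES_SO_FAR == 4 starts in register 2 and occupies two registers.  */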
static void
rx_function_arg_advance (cumulative_args_t cum, machine_mode mode,
                         const_tree type, bool named ATTRIBUTE_UNUSED)
{
  *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
}
static unsigned int
rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
                          const_tree type ATTRIBUTE_UNUSED)
{
  /* Older versions of the RX backend aligned all on-stack arguments
     to 32-bits.  The RX C ABI however says that they should be
     aligned to their natural alignment.  (See section 5.2.2 of the ABI).  */
  if (TARGET_GCC_ABI)
    return STACK_BOUNDARY;

  if (type)
    {
      if (DECL_P (type))
        return DECL_ALIGN (type);
      return TYPE_ALIGN (type);
    }

  return PARM_BOUNDARY;
}
/* Return an RTL describing where a function return value of type RET_TYPE
   is held.  */

static rtx
rx_function_value (const_tree ret_type,
                   const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
                   bool       outgoing ATTRIBUTE_UNUSED)
{
  machine_mode mode = TYPE_MODE (ret_type);

  /* RX ABI specifies that small integer types are
     promoted to int when returned by a function.  */
  if (GET_MODE_SIZE (mode) > 0
      && GET_MODE_SIZE (mode) < 4
      && ! COMPLEX_MODE_P (mode)
      && ! VECTOR_TYPE_P (ret_type)
      && ! VECTOR_MODE_P (mode))
    return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);

  return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
}
/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
   regard to function returns as does TARGET_FUNCTION_VALUE.  */

static machine_mode
rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
                          machine_mode mode,
                          int * punsignedp ATTRIBUTE_UNUSED,
                          const_tree funtype ATTRIBUTE_UNUSED,
                          int for_return)
{
  if (for_return != 1
      || GET_MODE_SIZE (mode) >= 4
      || COMPLEX_MODE_P (mode)
      || VECTOR_MODE_P (mode)
      || VECTOR_TYPE_P (type)
      || GET_MODE_SIZE (mode) < 1)
    return mode;

  return SImode;
}
static bool
rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size;

  if (TYPE_MODE (type) != BLKmode
      && ! AGGREGATE_TYPE_P (type))
    return false;

  size = int_size_in_bytes (type);
  /* Large structs and those whose size is not an
     exact multiple of 4 are returned in memory.  */
  return (size < 1
          || size > 16
          || (size % UNITS_PER_WORD) != 0);
}
static rtx
rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
                     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
}
static bool
rx_return_in_msb (const_tree valtype)
{
  return TARGET_BIG_ENDIAN_DATA
    && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
}
/* Returns true if the provided function has the specified attribute.  */

static inline bool
has_func_attr (const_tree decl, const char * func_attr)
{
  if (decl == NULL_TREE)
    decl = current_function_decl;

  return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
}
1225 /* Returns true if the provided function has the "fast_interrupt" attribute. */
1228 is_fast_interrupt_func (const_tree decl
)
1230 return has_func_attr (decl
, "fast_interrupt");
1233 /* Returns true if the provided function has the "interrupt" attribute. */
1236 is_interrupt_func (const_tree decl
)
1238 return has_func_attr (decl
, "interrupt");
1241 /* Returns true if the provided function has the "naked" attribute. */
1244 is_naked_func (const_tree decl
)
1246 return has_func_attr (decl
, "naked");
static bool use_fixed_regs = false;

static void
rx_conditional_register_usage (void)
{
  static bool using_fixed_regs = false;

  if (TARGET_PID)
    {
      rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
      fixed_regs[rx_pid_base_regnum_val] = call_used_regs[rx_pid_base_regnum_val] = 1;
    }

  if (rx_small_data_limit > 0)
    {
      if (TARGET_PID)
        rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
      else
        rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;

      fixed_regs[rx_gp_base_regnum_val] = call_used_regs[rx_gp_base_regnum_val] = 1;
    }

  if (use_fixed_regs != using_fixed_regs)
    {
      static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
      static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];

      if (use_fixed_regs)
        {
          unsigned int r;

          memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
          memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);

          /* This is for fast interrupt handlers.  Any register in
             the range r10 to r13 (inclusive) that is currently
             marked as fixed is now a viable, call-used register.  */
          for (r = 10; r <= 13; r++)
            if (fixed_regs[r])
              {
                fixed_regs[r] = 0;
                call_used_regs[r] = 1;
              }

          /* Mark r7 as fixed.  This is just a hack to avoid
             altering the reg_alloc_order array so that the newly
             freed r10-r13 registers are the preferred registers.  */
          fixed_regs[7] = call_used_regs[7] = 1;
        }
      else
        {
          /* Restore the normal register masks.  */
          memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
          memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
        }

      using_fixed_regs = use_fixed_regs;
    }
}
struct decl_chain
{
  tree fndecl;
  struct decl_chain * next;
};

/* Stack of decls for which we have issued warnings.  */
static struct decl_chain * warned_decls = NULL;

static void
add_warned_decl (tree fndecl)
{
  struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);

  warned->fndecl = fndecl;
  warned->next = warned_decls;
  warned_decls = warned;
}

/* Returns TRUE if FNDECL is on our list of warned about decls.  */

static bool
already_warned (tree fndecl)
{
  struct decl_chain * warned;

  for (warned = warned_decls;
       warned != NULL;
       warned = warned->next)
    if (warned->fndecl == fndecl)
      return true;

  return false;
}
/* Perform any actions necessary before starting to compile FNDECL.
   For the RX we use this to make sure that we have the correct
   set of register masks selected.  If FNDECL is NULL then we are
   compiling top level things.  */

static void
rx_set_current_function (tree fndecl)
{
  /* Remember the last target of rx_set_current_function.  */
  static tree rx_previous_fndecl;
  bool prev_was_fast_interrupt;
  bool current_is_fast_interrupt;

  /* Only change the context if the function changes.  This hook is called
     several times in the course of compiling a function, and we don't want
     to slow things down too much or call target_reinit when it isn't safe.  */
  if (fndecl == rx_previous_fndecl)
    return;

  prev_was_fast_interrupt
    = rx_previous_fndecl
    ? is_fast_interrupt_func (rx_previous_fndecl) : false;

  current_is_fast_interrupt
    = fndecl ? is_fast_interrupt_func (fndecl) : false;

  if (prev_was_fast_interrupt != current_is_fast_interrupt)
    {
      use_fixed_regs = current_is_fast_interrupt;
      target_reinit ();
    }

  if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
    {
      /* We do not warn about the first fast interrupt routine that
         we see.  Instead we just push it onto the stack.  */
      if (warned_decls == NULL)
        add_warned_decl (fndecl);

      /* Otherwise if this fast interrupt is one for which we have
         not already issued a warning, generate one and then push
         it onto the stack as well.  */
      else if (! already_warned (fndecl))
        {
          warning (0, "multiple fast interrupt routines seen: %qE and %qE",
                   fndecl, warned_decls->fndecl);
          add_warned_decl (fndecl);
        }
    }

  rx_previous_fndecl = fndecl;
}
/* Typical stack layout should look like this after the function's prologue:

                             |    | arguments saved    | Increasing
                             |    | on the stack       |  addresses
     PARENT   arg pointer -> |    | /
   -------------------------- ---- -------------------
     CHILD                   |ret |   return address
                                ...
          frame pointer ->   |    | /
                             |    | outgoing           | Decreasing
                             |    | arguments          |  addresses
    current stack pointer -> |    | /                  |
   -------------------------- ---- ------------------    V  */
static unsigned int
bit_count (unsigned int x)
{
  const unsigned int m1 = 0x55555555;
  const unsigned int m2 = 0x33333333;
  const unsigned int m4 = 0x0f0f0f0f;

  x -= (x >> 1) & m1;
  x = (x & m2) + ((x >> 2) & m2);
  x = (x + (x >> 4)) & m4;
  x += x >> 8;

  return (x + (x >> 16)) & 0x3f;
}
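
/* Example (illustrative): bit_count (0xF0F0) == 8.  This is the
   classic parallel (SWAR) population count.  */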
#define MUST_SAVE_ACC_REGISTER                  \
  (TARGET_SAVE_ACC_REGISTER                     \
   && (is_interrupt_func (NULL_TREE)            \
       || is_fast_interrupt_func (NULL_TREE)))

/* Returns either the lowest numbered and highest numbered registers that
   occupy the call-saved area of the stack frame, if the registers are
   stored as a contiguous block, or else a bitmask of the individual
   registers if they are stored piecemeal.

   Also computes the size of the frame and the size of the outgoing
   arguments block (in bytes).  */
static void
rx_get_stack_layout (unsigned int * lowest,
                     unsigned int * highest,
                     unsigned int * register_mask,
                     unsigned int * frame_size,
                     unsigned int * stack_size)
{
  unsigned int reg;
  unsigned int low;
  unsigned int high;
  unsigned int fixed_reg = 0;
  unsigned int save_mask;
  unsigned int pushed_mask;
  unsigned int unneeded_pushes;

  if (is_naked_func (NULL_TREE))
    {
      /* Naked functions do not create their own stack frame.
         Instead the programmer must do that for us.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = 0;
      * frame_size = 0;
      * stack_size = 0;
      return;
    }

  for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
    {
      if ((df_regs_ever_live_p (reg)
           /* Always save all call clobbered registers inside non-leaf
              interrupt handlers, even if they are not live - they may
              be used in (non-interrupt aware) routines called from this one.  */
           || (call_used_regs[reg]
               && is_interrupt_func (NULL_TREE)
               && ! crtl->is_leaf))
          && (! call_used_regs[reg]
              /* Even call clobbered registers must
                 be pushed inside interrupt handlers.  */
              || is_interrupt_func (NULL_TREE)
              /* Likewise for fast interrupt handlers, except registers r10 -
                 r13.  These are normally call-saved, but may have been set
                 to call-used by rx_conditional_register_usage.  If so then
                 they can be used in the fast interrupt handler without
                 saving them on the stack.  */
              || (is_fast_interrupt_func (NULL_TREE)
                  && ! IN_RANGE (reg, 10, 13))))
        {
          if (low == 0)
            low = reg;
          high = reg;

          save_mask |= 1 << reg;
        }

      /* Remember if we see a fixed register
         after having found the low register.  */
      if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
        fixed_reg = reg;
    }

  /* If we have to save the accumulator register, make sure
     that at least two registers are pushed into the frame.  */
  if (MUST_SAVE_ACC_REGISTER
      && bit_count (save_mask) < 2)
    {
      save_mask |= (1 << 13) | (1 << 14);
      if (low == 0)
        low = 13;
      if (high == 0 || low == high)
        high = low + 1;
    }

  /* Decide if it would be faster to fill in the call-saved area of the stack
     frame using multiple PUSH instructions instead of a single PUSHM
     instruction.

     SAVE_MASK is a bitmask of the registers that must be stored in the
     call-save area.  PUSHED_MASK is a bitmask of the registers that would
     be pushed into the area if we used a PUSHM instruction.  UNNEEDED_PUSHES
     is a bitmask of those registers in pushed_mask that are not in
     SAVE_MASK.

     We use a simple heuristic that says that it is better to use
     multiple PUSH instructions if the number of unnecessary pushes is
     greater than the number of necessary pushes.

     We also use multiple PUSH instructions if there are any fixed registers
     between LOW and HIGH.  The only way that this can happen is if the user
     has specified --fixed-<reg-name> on the command line and in such
     circumstances we do not want to touch the fixed registers at all.

     Note also that the code in the prologue/epilogue handlers will
     automatically merge multiple PUSHes of adjacent registers into a single
     PUSHM.

     FIXME: Is it worth improving this heuristic ?  */
  pushed_mask = (HOST_WIDE_INT_M1U << low) & ~(HOST_WIDE_INT_M1U << (high + 1));
  unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;

  if ((fixed_reg && fixed_reg <= high)
      || (optimize_function_for_speed_p (cfun)
          && bit_count (save_mask) < bit_count (unneeded_pushes)))
    {
      /* Use multiple pushes.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = save_mask;
    }
  else
    {
      /* Use one push multiple instruction.  */
      * lowest = low;
      * highest = high;
      * register_mask = 0;
    }

  * frame_size = rx_round_up
    (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);

  if (crtl->args.size > 0)
    * frame_size += rx_round_up
      (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);

  * stack_size = rx_round_up
    (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
}
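
/* Worked example of the heuristic above (added for illustration): if
   only r6 and r13 are live, SAVE_MASK has two bits set while
   PUSHED_MASK covers r6-r13, giving six unneeded pushes; when
   optimizing for speed this selects individual PUSH instructions and
   returns the bitmask in *REGISTER_MASK instead of a LOW/HIGH span.  */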
/* Generate a PUSHM instruction that matches the given operands.  */

void
rx_emit_stack_pushm (rtx * operands)
{
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;

  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_SRC (first_push);
  gcc_assert (REG_P (first_push));

  asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
               reg_names [REGNO (first_push) - last_reg],
               reg_names [REGNO (first_push)]);
}
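
/* For example (illustrative): pushing the five registers r6..r10 gives
   operands[0] == GEN_INT (20), so LAST_REG is 4; the first SET in the
   PARALLEL names r10 and the emitted instruction is "pushm r6-r10".  */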
/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate.  */

static rtx
gen_rx_store_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (stack_pointer_rtx,
                 gen_rtx_MINUS (SImode, stack_pointer_rtx,
                                GEN_INT ((count - 1) * UNITS_PER_WORD)));

  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (gen_rtx_MEM (SImode,
                                gen_rtx_MINUS (SImode, stack_pointer_rtx,
                                               GEN_INT ((i + 1) * UNITS_PER_WORD))),
                   gen_rtx_REG (SImode, high - i));
  return vector;
}
/* Mark INSN as being frame related.  If it is a PARALLEL
   then mark each element as being frame related as well.  */

static void
mark_frame_related (rtx insn)
{
  RTX_FRAME_RELATED_P (insn) = 1;
  insn = PATTERN (insn);

  if (GET_CODE (insn) == PARALLEL)
    {
      unsigned int i;

      for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
        RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
    }
}
/* Create CFI notes for register pops.  */

static void
add_pop_cfi_notes (rtx_insn *insn, unsigned int high, unsigned int low)
{
  rtx t = plus_constant (Pmode, stack_pointer_rtx,
                         (high - low + 1) * UNITS_PER_WORD);
  t = gen_rtx_SET (stack_pointer_rtx, t);
  add_reg_note (insn, REG_CFA_ADJUST_CFA, t);
  RTX_FRAME_RELATED_P (insn) = 1;
  for (unsigned int i = low; i <= high; i++)
    add_reg_note (insn, REG_CFA_RESTORE, gen_rtx_REG (word_mode, i));
}
static bool
ok_for_max_constant (HOST_WIDE_INT val)
{
  if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
    /* If there is no constraint on the size of constants
       used as operands, then any value is legitimate.  */
    return true;

  /* rx_max_constant_size specifies the maximum number
     of bytes that can be used to hold a signed value.  */
  return IN_RANGE (val, (HOST_WIDE_INT_M1U << (rx_max_constant_size * 8)),
                   (1 << (rx_max_constant_size * 8)));
}
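
/* Example (illustrative): with rx_max_constant_size == 1 the bounds
   above evaluate to -256 and 256, so any VAL in [-256, 256] is
   accepted.  */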
/* Generate an ADD of SRC plus VAL into DEST.
   Handles the case where VAL is too big for max_constant_value.
   Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true.  */

static void
gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
{
  rtx insn;

  if (val == NULL_RTX || INTVAL (val) == 0)
    {
      gcc_assert (dest != src);

      insn = emit_move_insn (dest, src);
    }
  else if (ok_for_max_constant (INTVAL (val)))
    insn = emit_insn (gen_addsi3 (dest, src, val));
  else
    {
      /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
         will not reject it.  */
      val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
      insn = emit_insn (gen_addsi3 (dest, src, val));

      if (is_frame_related)
        /* We have to provide our own frame related note here
           as the dwarf2out code cannot be expected to grok
           our unspec.  */
        add_reg_note (insn, REG_FRAME_RELATED_EXPR,
                      gen_rtx_SET (dest, gen_rtx_PLUS (SImode, src, val)));
      return;
    }

  if (is_frame_related)
    RTX_FRAME_RELATED_P (insn) = 1;
}
static void
push_regs (unsigned int high, unsigned int low)
{
  rtx insn;

  if (low == high)
    insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
  else
    insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
                                       gen_rx_store_vector (low, high)));
  mark_frame_related (insn);
}
void
rx_expand_prologue (void)
{
  unsigned int stack_size;
  unsigned int frame_size;
  unsigned int mask;
  unsigned int low;
  unsigned int high;
  unsigned int reg;

  /* Naked functions use their own, programmer provided prologues.  */
  if (is_naked_func (NULL_TREE))
    return;

  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);

  if (flag_stack_usage_info)
    current_function_static_stack_size = frame_size + stack_size;

  /* If we use any of the callee-saved registers, save them now.  */
  if (mask)
    {
      /* Push registers in reverse order.  */
      for (reg = CC_REGNUM; reg --;)
        if (mask & (1 << reg))
          {
            low = high = reg;

            /* Look for a span of registers.
               Note - we do not have to worry about -Os and whether
               it is better to use a single, longer PUSHM as
               rx_get_stack_layout has already done that for us.  */
            while (reg-- > 0)
              if ((mask & (1 << reg)) == 0)
                break;
              else
                --low;

            push_regs (high, low);
            if (reg == (unsigned) -1)
              break;
          }
    }
  else if (low)
    push_regs (high, low);

  if (MUST_SAVE_ACC_REGISTER)
    {
      unsigned int acc_high, acc_low;

      /* Interrupt handlers have to preserve the accumulator
         register if so requested by the user.  Use the first
         two pushed registers as intermediaries.  */
      if (mask)
        {
          acc_low = acc_high = 0;

          for (reg = 1; reg < CC_REGNUM; reg++)
            if (mask & (1 << reg))
              {
                if (acc_low == 0)
                  acc_low = reg;
                else
                  {
                    acc_high = reg;
                    break;
                  }
              }

          /* We have assumed that there are at least two registers pushed...  */
          gcc_assert (acc_high != 0);

          /* Note - the bottom 16 bits of the accumulator are inaccessible.
             We just assume that they are zero.  */
          emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
          emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
          emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
          emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
        }
      else
        {
          acc_low = low;
          acc_high = low + 1;

          /* We have assumed that there are at least two registers pushed...  */
          gcc_assert (acc_high <= high);

          emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
          emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
          emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
                                      gen_rx_store_vector (acc_low, acc_high)));
        }
    }

  /* If needed, set up the frame pointer.  */
  if (frame_pointer_needed)
    gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
                  GEN_INT (- (HOST_WIDE_INT) frame_size), true);

  /* Allocate space for the outgoing args.
     If the stack frame has not already been set up then handle this as well.  */
  if (stack_size)
    {
      if (frame_pointer_needed)
        gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
                      GEN_INT (- (HOST_WIDE_INT) stack_size), true);
      else
        gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
                      GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
                      true);
    }
  else if (frame_size)
    {
      if (! frame_pointer_needed)
        gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
                      GEN_INT (- (HOST_WIDE_INT) frame_size), true);
      else
        gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
                      false /* False because the epilogue will use the FP not the SP.  */);
    }
}
static void
add_vector_labels (FILE *file, const char *aname)
{
  tree vec_attr;
  tree val_attr;
  const char *vname = "vect";
  const char *s;
  int vnum;

  /* This node is for the vector/interrupt tag itself.  */
  vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
  if (!vec_attr)
    return;

  /* Now point it at the first argument.  */
  vec_attr = TREE_VALUE (vec_attr);

  /* Iterate through the arguments.  */
  while (vec_attr)
    {
      val_attr = TREE_VALUE (vec_attr);
      switch (TREE_CODE (val_attr))
        {
        case STRING_CST:
          s = TREE_STRING_POINTER (val_attr);
          goto string_id_common;

        case IDENTIFIER_NODE:
          s = IDENTIFIER_POINTER (val_attr);

        string_id_common:
          if (strcmp (s, "$default") == 0)
            {
              fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
              fprintf (file, "$tableentry$default$%s:\n", vname);
            }
          else
            vname = s;
          break;

        case INTEGER_CST:
          vnum = TREE_INT_CST_LOW (val_attr);

          fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
          fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
          break;

        default:
          ;
        }

      vec_attr = TREE_CHAIN (vec_attr);
    }
}
static void
rx_output_function_prologue (FILE * file)
{
  add_vector_labels (file, "interrupt");
  add_vector_labels (file, "vector");

  if (is_fast_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");

  if (is_interrupt_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Interrupt Handler\n");

  if (is_naked_func (NULL_TREE))
    asm_fprintf (file, "\t; Note: Naked Function\n");

  if (cfun->static_chain_decl != NULL)
    asm_fprintf (file, "\t; Note: Nested function declared "
                 "inside another function.\n");

  if (crtl->calls_eh_return)
    asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
}
/* Generate a POPM or RTSD instruction that matches the given operands.  */

void
rx_emit_stack_popm (rtx * operands, bool is_popm)
{
  HOST_WIDE_INT stack_adjust;
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  stack_adjust = INTVAL (operands[0]);

  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);

  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_DEST (first_push);
  gcc_assert (REG_P (first_push));

  if (is_popm)
    asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
                 reg_names [REGNO (first_push)],
                 reg_names [REGNO (first_push) + last_reg]);
  else
    asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
                 (int) stack_adjust,
                 reg_names [REGNO (first_push)],
                 reg_names [REGNO (first_push) + last_reg]);
}
/* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate.  */

static rtx
gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
{
  unsigned int i;
  unsigned int bias = 3;
  unsigned int count = (high - low) + bias;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (stack_pointer_rtx,
                 plus_constant (Pmode, stack_pointer_rtx, adjust));

  for (i = 0; i < count - 2; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (gen_rtx_REG (SImode, low + i),
                   gen_rtx_MEM (SImode,
                                i == 0 ? stack_pointer_rtx
                                : plus_constant (Pmode, stack_pointer_rtx,
                                                 i * UNITS_PER_WORD)));

  XVECEXP (vector, 0, count - 1) = ret_rtx;

  return vector;
}
/* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate.  */

static rtx
gen_rx_popm_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (stack_pointer_rtx,
                 plus_constant (Pmode, stack_pointer_rtx,
                                (count - 1) * UNITS_PER_WORD));

  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (gen_rtx_REG (SImode, low + i),
                   gen_rtx_MEM (SImode,
                                i == 0 ? stack_pointer_rtx
                                : plus_constant (Pmode, stack_pointer_rtx,
                                                 i * UNITS_PER_WORD)));

  return vector;
}
/* Returns true if a simple return insn can be used.  */

bool
rx_can_use_simple_return (void)
{
  unsigned int low;
  unsigned int high;
  unsigned int frame_size;
  unsigned int stack_size;
  unsigned int register_mask;

  if (is_naked_func (NULL_TREE)
      || is_fast_interrupt_func (NULL_TREE)
      || is_interrupt_func (NULL_TREE))
    return false;

  rx_get_stack_layout (& low, & high, & register_mask,
                       & frame_size, & stack_size);

  return (register_mask == 0
          && (frame_size + stack_size) == 0
          && low == 0);
}
static void
pop_regs (unsigned int high, unsigned int low)
{
  rtx_insn *insn;

  if (high == low)
    insn = emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
  else
    insn = emit_insn (gen_stack_popm (GEN_INT (((high - low) + 1)
                                               * UNITS_PER_WORD),
                                      gen_rx_popm_vector (low, high)));
  add_pop_cfi_notes (insn, high, low);
}
void
rx_expand_epilogue (bool is_sibcall)
{
  unsigned int low;
  unsigned int high;
  unsigned int frame_size;
  unsigned int stack_size;
  unsigned int register_mask;
  unsigned int regs_size;
  unsigned int reg;
  unsigned HOST_WIDE_INT total_size;

  /* FIXME: We do not support indirect sibcalls at the moment because we
     cannot guarantee that the register holding the function address is a
     call-used register.  If it is a call-saved register then the stack
     pop instructions generated in the epilogue will corrupt the address
     before it is used.

     Creating a new call-used-only register class works but then the
     reload pass gets stuck because it cannot always find a call-used
     register for spilling sibcalls.

     The other possible solution is for this pass to scan forward for the
     sibcall instruction (if it has been generated) and work out if it
     is an indirect sibcall using a call-saved register.  If it is then
     the address can be copied into a call-used register in this epilogue
     code and the sibcall instruction modified to use that register.  */

  if (is_naked_func (NULL_TREE))
    {
      gcc_assert (! is_sibcall);

      /* Naked functions use their own, programmer provided epilogues.
         But, in order to keep gcc happy we have to generate some kind of
         epilogue RTL.  */
      emit_jump_insn (gen_naked_return ());
      return;
    }

  rx_get_stack_layout (& low, & high, & register_mask,
                       & frame_size, & stack_size);

  total_size = frame_size + stack_size;
  regs_size = ((high - low) + 1) * UNITS_PER_WORD;

  /* See if we are unable to use the special stack frame deconstruct and
     return instructions.  In most cases we can use them, but the exceptions
     are:

     - Sibling calling functions deconstruct the frame but do not return to
       their caller.  Instead they branch to their sibling and allow their
       return instruction to return to this function's parent.

     - Fast and normal interrupt handling functions have to use special
       return instructions.

     - Functions where we have pushed a fragmented set of registers into the
       call-save area must have the same set of registers popped.  */
  if (is_sibcall
      || is_fast_interrupt_func (NULL_TREE)
      || is_interrupt_func (NULL_TREE)
      || register_mask)
    {
      /* Cannot use the special instructions - deconstruct by hand.  */
      if (total_size)
        gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
                      GEN_INT (total_size), false);

      if (MUST_SAVE_ACC_REGISTER)
        {
          unsigned int acc_low, acc_high;

          /* Reverse the saving of the accumulator register onto the stack.
             Note we must adjust the saved "low" accumulator value as it
             is really the middle 32-bits of the accumulator.  */
          if (register_mask)
            {
              acc_low = acc_high = 0;

              for (reg = 1; reg < CC_REGNUM; reg++)
                if (register_mask & (1 << reg))
                  {
                    if (acc_low == 0)
                      acc_low = reg;
                    else
                      {
                        acc_high = reg;
                        break;
                      }
                  }

              emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
              emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
            }
          else
            {
              acc_low = low;
              acc_high = low + 1;
              emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
                                         gen_rx_popm_vector (acc_low, acc_high)));
            }

          emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
                                  gen_rtx_REG (SImode, acc_low),
                                  GEN_INT (16)));
          emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
          emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
        }

      if (register_mask)
        {
          for (reg = 0; reg < CC_REGNUM; reg++)
            if (register_mask & (1 << reg))
              {
                low = high = reg;
                while (register_mask & (1 << high))
                  high++;

                pop_regs (high - 1, low);

                reg = high;
              }
        }
      else if (low)
        pop_regs (high, low);

      if (is_fast_interrupt_func (NULL_TREE))
        {
          gcc_assert (! is_sibcall);
          emit_jump_insn (gen_fast_interrupt_return ());
        }
      else if (is_interrupt_func (NULL_TREE))
        {
          gcc_assert (! is_sibcall);
          emit_jump_insn (gen_exception_return ());
        }
      else if (! is_sibcall)
        emit_jump_insn (gen_simple_return ());

      return;
    }

  /* If we allocated space on the stack, free it now.  */
  if (total_size)
    {
      unsigned HOST_WIDE_INT rtsd_size;

      /* See if we can use the RTSD instruction.  */
      rtsd_size = total_size + regs_size;
      if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
        {
          if (low)
            emit_jump_insn (gen_pop_and_return
                            (GEN_INT (rtsd_size),
                             gen_rx_rtsd_vector (rtsd_size, low, high)));
          else
            emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));

          return;
        }

      gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
                    GEN_INT (total_size), false);
    }

  if (low)
    emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
                                        gen_rx_rtsd_vector (regs_size,
                                                            low, high)));
  else
    emit_jump_insn (gen_simple_return ());
}
/* Compute the offset (in words) between FROM (arg pointer
   or frame pointer) and TO (frame pointer or stack pointer).
   See ASCII art comment at the start of rx_expand_prologue
   for more information.  */

int
rx_initial_elimination_offset (int from, int to)
{
  unsigned int low;
  unsigned int high;
  unsigned int mask;
  unsigned int frame_size;
  unsigned int stack_size;

  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);

  if (from == ARG_POINTER_REGNUM)
    {
      /* Extend the computed size of the stack frame to
         include the registers pushed in the prologue.  */
      if (low)
        frame_size += ((high - low) + 1) * UNITS_PER_WORD;
      else
        frame_size += bit_count (mask) * UNITS_PER_WORD;

      /* Remember to include the return address.  */
      frame_size += 1 * UNITS_PER_WORD;

      if (to == FRAME_POINTER_REGNUM)
        return frame_size;

      gcc_assert (to == STACK_POINTER_REGNUM);
      return frame_size + stack_size;
    }

  gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
  return stack_size;
}
/* Decide if a variable should go into one of the small data sections.  */

static bool
rx_in_small_data (const_tree decl)
{
  int size;
  const char * section;

  if (rx_small_data_limit == 0)
    return false;

  if (TREE_CODE (decl) != VAR_DECL)
    return false;

  /* We do not put read-only variables into a small data area because
     they would be placed with the other read-only sections, far away
     from the read-write data sections, and we only have one small
     data area pointer.
     Similarly commons are placed in the .bss section which might be
     far away (and out of alignment with respect to) the .data section.  */
  if (TREE_READONLY (decl) || DECL_COMMON (decl))
    return false;

  section = DECL_SECTION_NAME (decl);
  if (section)
    return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);

  size = int_size_in_bytes (TREE_TYPE (decl));

  return (size > 0) && (size <= rx_small_data_limit);
}
/* Return a section for X.
   The only special thing we do here is to honor small data.  */

static section *
rx_select_rtx_section (machine_mode mode,
                       rtx x,
                       unsigned HOST_WIDE_INT align)
{
  if (rx_small_data_limit > 0
      && GET_MODE_SIZE (mode) <= rx_small_data_limit
      && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
    return sdata_section;

  return default_elf_select_rtx_section (mode, x, align);
}
static section *
rx_select_section (tree decl,
                   int reloc,
                   unsigned HOST_WIDE_INT align)
{
  if (rx_small_data_limit > 0)
    {
      switch (categorize_decl_for_section (decl, reloc))
        {
        case SECCAT_SDATA:   return sdata_section;
        case SECCAT_SBSS:    return sbss_section;
        case SECCAT_SRODATA:
          /* Fall through.  We do not put small, read only
             data into the C_2 section because we are not
             using the C_2 section.  We do not use the C_2
             section because it is located with the other
             read-only data sections, far away from the read-write
             data sections and we only have one small data
             area pointer.  */
        default:
          break;
        }
    }

  /* If we are supporting the Renesas assembler
     we cannot use mergeable sections.  */
  if (TARGET_AS100_SYNTAX)
    switch (categorize_decl_for_section (decl, reloc))
      {
      case SECCAT_RODATA_MERGE_CONST:
      case SECCAT_RODATA_MERGE_STR_INIT:
      case SECCAT_RODATA_MERGE_STR:
        return readonly_data_section;

      default:
        break;
      }

  return default_elf_select_section (decl, reloc, align);
}
static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
static void
rx_init_builtins (void)
{
#define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE)                     \
  rx_builtins[RX_BUILTIN_##UC_NAME] =                                   \
  add_builtin_function ("__builtin_rx_" LC_NAME,                        \
                        build_function_type_list (RET_TYPE##_type_node, \
                                                  NULL_TREE),           \
                        RX_BUILTIN_##UC_NAME,                           \
                        BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE)           \
  rx_builtins[RX_BUILTIN_##UC_NAME] =                                   \
  add_builtin_function ("__builtin_rx_" LC_NAME,                        \
                        build_function_type_list (RET_TYPE##_type_node, \
                                                  ARG_TYPE##_type_node, \
                                                  NULL_TREE),           \
                        RX_BUILTIN_##UC_NAME,                           \
                        BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
  rx_builtins[RX_BUILTIN_##UC_NAME] =                                   \
  add_builtin_function ("__builtin_rx_" LC_NAME,                        \
                        build_function_type_list (RET_TYPE##_type_node, \
                                                  ARG_TYPE1##_type_node,\
                                                  ARG_TYPE2##_type_node,\
                                                  NULL_TREE),           \
                        RX_BUILTIN_##UC_NAME,                           \
                        BUILT_IN_MD, NULL, NULL_TREE)

#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
  rx_builtins[RX_BUILTIN_##UC_NAME] =                                   \
  add_builtin_function ("__builtin_rx_" LC_NAME,                        \
                        build_function_type_list (RET_TYPE##_type_node, \
                                                  ARG_TYPE1##_type_node,\
                                                  ARG_TYPE2##_type_node,\
                                                  ARG_TYPE3##_type_node,\
                                                  NULL_TREE),           \
                        RX_BUILTIN_##UC_NAME,                           \
                        BUILT_IN_MD, NULL, NULL_TREE)

  ADD_RX_BUILTIN0 (BRK,     "brk",     void);
  ADD_RX_BUILTIN1 (CLRPSW,  "clrpsw",  void,  integer);
  ADD_RX_BUILTIN1 (SETPSW,  "setpsw",  void,  integer);
  ADD_RX_BUILTIN1 (INT,     "int",     void,  integer);
  ADD_RX_BUILTIN2 (MACHI,   "machi",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MACLO,   "maclo",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MULHI,   "mulhi",   void,  intSI, intSI);
  ADD_RX_BUILTIN2 (MULLO,   "mullo",   void,  intSI, intSI);
  ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
  ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
  ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void,  intSI);
  ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void,  intSI);
  ADD_RX_BUILTIN0 (RMPA,    "rmpa",    void);
  ADD_RX_BUILTIN1 (MVFC,    "mvfc",    intSI, integer);
  ADD_RX_BUILTIN2 (MVTC,    "mvtc",    void,  integer, integer);
  ADD_RX_BUILTIN1 (MVTIPL,  "mvtipl",  void,  integer);
  ADD_RX_BUILTIN1 (RACW,    "racw",    void,  integer);
  ADD_RX_BUILTIN1 (ROUND,   "round",   intSI, float);
  ADD_RX_BUILTIN1 (REVW,    "revw",    intSI, intSI);
  ADD_RX_BUILTIN0 (WAIT,    "wait",    void);
}
/* Return the RX builtin for CODE.  */

static tree
rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (code >= RX_BUILTIN_max)
    return error_mark_node;

  return rx_builtins[code];
}
static rtx
rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
{
  if (reg && ! REG_P (arg))
    arg = force_reg (SImode, arg);

  emit_insn (gen_func (arg));

  return NULL_RTX;
}
static rtx
rx_expand_builtin_mvtc (tree exp)
{
  rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
  rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));

  if (! CONST_INT_P (arg1))
    return NULL_RTX;

  if (! REG_P (arg2))
    arg2 = force_reg (SImode, arg2);

  emit_insn (gen_mvtc (arg1, arg2));

  return NULL_RTX;
}
static rtx
rx_expand_builtin_mvfc (tree t_arg, rtx target)
{
  rtx arg = expand_normal (t_arg);

  if (! CONST_INT_P (arg))
    return NULL_RTX;

  if (target == NULL_RTX)
    return NULL_RTX;

  if (! REG_P (target))
    target = force_reg (SImode, target);

  emit_insn (gen_mvfc (target, arg));

  return target;
}
static rtx
rx_expand_builtin_mvtipl (rtx arg)
{
  /* The RX610 does not support the MVTIPL instruction.  */
  if (rx_cpu_type == RX610)
    return NULL_RTX;

  if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
    return NULL_RTX;

  emit_insn (gen_mvtipl (arg));

  return NULL_RTX;
}
static rtx
rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
{
  rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
  rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));

  if (! REG_P (arg1))
    arg1 = force_reg (SImode, arg1);

  if (! REG_P (arg2))
    arg2 = force_reg (SImode, arg2);

  emit_insn (gen_func (arg1, arg2));

  return NULL_RTX;
}
static rtx
rx_expand_int_builtin_1_arg (rtx arg,
                             rtx target,
                             rtx (* gen_func)(rtx, rtx),
                             bool mem_ok)
{
  if (! mem_ok || ! MEM_P (arg))
    arg = force_reg (SImode, arg);

  if (target == NULL_RTX || ! REG_P (target))
    target = gen_reg_rtx (SImode);

  emit_insn (gen_func (target, arg));

  return target;
}
static rtx
rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
{
  if (target == NULL_RTX || ! REG_P (target))
    target = gen_reg_rtx (SImode);

  emit_insn (gen_func (target));

  return target;
}
static rtx
rx_expand_builtin_round (rtx arg, rtx target)
{
  if ((! REG_P (arg) && ! MEM_P (arg))
      || GET_MODE (arg) != SFmode)
    arg = force_reg (SFmode, arg);

  if (target == NULL_RTX || ! REG_P (target))
    target = gen_reg_rtx (SImode);

  emit_insn (gen_lrintsf2 (target, arg));

  return target;
}
static int
valid_psw_flag (rtx op, const char *which)
{
  static int mvtc_inform_done = 0;

  if (GET_CODE (op) == CONST_INT)
    switch (INTVAL (op))
      {
      case 0: case 'c': case 'C':
      case 1: case 'z': case 'Z':
      case 2: case 's': case 'S':
      case 3: case 'o': case 'O':
      case 8: case 'i': case 'I':
      case 9: case 'u': case 'U':
        return 1;
      }

  error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
  if (!mvtc_inform_done)
    error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
  mvtc_inform_done = 1;

  return 0;
}
static rtx
rx_expand_builtin (tree exp,
                   rtx target,
                   rtx subtarget ATTRIBUTE_UNUSED,
                   machine_mode mode ATTRIBUTE_UNUSED,
                   int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  tree arg    = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
  rtx  op     = arg ? expand_normal (arg) : NULL_RTX;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case RX_BUILTIN_BRK:     emit_insn (gen_brk ()); return NULL_RTX;
    case RX_BUILTIN_CLRPSW:
      if (!valid_psw_flag (op, "clrpsw"))
        return NULL_RTX;
      return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
    case RX_BUILTIN_SETPSW:
      if (!valid_psw_flag (op, "setpsw"))
        return NULL_RTX;
      return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
    case RX_BUILTIN_INT:     return rx_expand_void_builtin_1_arg
        (op, gen_int, false);
    case RX_BUILTIN_MACHI:   return rx_expand_builtin_mac (exp, gen_machi);
    case RX_BUILTIN_MACLO:   return rx_expand_builtin_mac (exp, gen_maclo);
    case RX_BUILTIN_MULHI:   return rx_expand_builtin_mac (exp, gen_mulhi);
    case RX_BUILTIN_MULLO:   return rx_expand_builtin_mac (exp, gen_mullo);
    case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
        (target, gen_mvfachi);
    case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
        (target, gen_mvfacmi);
    case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
        (op, gen_mvtachi, true);
    case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
        (op, gen_mvtaclo, true);
    case RX_BUILTIN_RMPA:
      if (rx_allow_string_insns)
        emit_insn (gen_rmpa ());
      else
        error ("-mno-allow-string-insns forbids the generation of the RMPA instruction");
      return NULL_RTX;
    case RX_BUILTIN_MVFC:    return rx_expand_builtin_mvfc (arg, target);
    case RX_BUILTIN_MVTC:    return rx_expand_builtin_mvtc (exp);
    case RX_BUILTIN_MVTIPL:  return rx_expand_builtin_mvtipl (op);
    case RX_BUILTIN_RACW:    return rx_expand_void_builtin_1_arg
        (op, gen_racw, false);
    case RX_BUILTIN_ROUND:   return rx_expand_builtin_round (op, target);
    case RX_BUILTIN_REVW:    return rx_expand_int_builtin_1_arg
        (op, target, gen_revw, false);
    case RX_BUILTIN_WAIT:    emit_insn (gen_wait ()); return NULL_RTX;

    default:
      internal_error ("bad builtin code");
      break;
    }

  return NULL_RTX;
}
/* Place an element into a constructor or destructor section.
   Like default_ctor_section_asm_out_constructor in varasm.c
   except that it uses .init_array (or .fini_array) and it
   handles constructor priorities.  */

static void
rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
{
  section * s;

  if (priority != DEFAULT_INIT_PRIORITY)
    {
      char buf[18];

      sprintf (buf, "%s.%.5u",
	       is_ctor ? ".init_array" : ".fini_array",
	       priority);
      s = get_section (buf, SECTION_WRITE, NULL_TREE);
    }
  else if (is_ctor)
    s = ctors_section;
  else
    s = dtors_section;

  switch_to_section (s);
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
static void
rx_elf_asm_constructor (rtx symbol, int priority)
{
  rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
}

static void
rx_elf_asm_destructor (rtx symbol, int priority)
{
  rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
}
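
/* Example of the section naming implemented above (illustrative): a
   constructor declared as

     void __attribute__((constructor (101))) setup (void);

   has priority 101, so its address is emitted into the section
   ".init_array.00101" (from the "%s.%.5u" format), which standard linker
   scripts place ahead of the unsuffixed default-priority ".init_array"
   entries.  */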
2706 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */
2709 rx_handle_func_attribute (tree
* node
,
2711 tree args ATTRIBUTE_UNUSED
,
2712 int flags ATTRIBUTE_UNUSED
,
2713 bool * no_add_attrs
)
2715 gcc_assert (DECL_P (* node
));
2717 if (TREE_CODE (* node
) != FUNCTION_DECL
)
2719 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2721 * no_add_attrs
= true;
2724 /* FIXME: We ought to check for conflicting attributes. */
2726 /* FIXME: We ought to check that the interrupt and exception
2727 handler attributes have been applied to void functions. */
2731 /* Check "vector" attribute. */
2734 rx_handle_vector_attribute (tree
* node
,
2737 int flags ATTRIBUTE_UNUSED
,
2738 bool * no_add_attrs
)
2740 gcc_assert (DECL_P (* node
));
2741 gcc_assert (args
!= NULL_TREE
);
2743 if (TREE_CODE (* node
) != FUNCTION_DECL
)
2745 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2747 * no_add_attrs
= true;
/* Table of RX specific attributes.  */
const struct attribute_spec rx_attribute_table[] =
{
  /* Name, min_len, max_len, decl_req, type_req, fn_type_req,
     affects_type_identity, handler, exclude.  */
  { "fast_interrupt", 0, 0, true, false, false, false,
    rx_handle_func_attribute, NULL },
  { "interrupt",      0, -1, true, false, false, false,
    rx_handle_func_attribute, NULL },
  { "naked",          0, 0, true, false, false, false,
    rx_handle_func_attribute, NULL },
  { "vector",         1, -1, true, false, false, false,
    rx_handle_vector_attribute, NULL },
  { NULL,             0, 0, false, false, false, false, NULL, NULL }
};
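
/* Usage example for the attributes registered above (illustrative):

     void __attribute__((interrupt, vector (5))) timer_isr (void);
     void __attribute__((naked)) bootstrap (void);

   "vector" requires at least one argument (min_len == 1), while
   "interrupt" accepts any number (max_len == -1) and "naked" none.  */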
/* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE.  */

static void
rx_override_options_after_change (void)
{
  static bool first_time = TRUE;

  if (first_time)
    {
      /* If this is the first time through and the user has not disabled
	 the use of RX FPU hardware then enable -ffinite-math-only,
	 since the FPU instructions do not support NaNs and infinities.  */
      if (TARGET_USE_FPU)
	flag_finite_math_only = 1;

      first_time = FALSE;
    }
  else
    {
      /* Alert the user if they are changing the optimization options
	 to use IEEE compliant floating point arithmetic with RX FPU insns.  */
      if (TARGET_USE_FPU
	  && !flag_finite_math_only)
	warning (0, "RX FPU instructions do not support NaNs and infinities");
    }
}
static void
rx_option_override (void)
{
  unsigned int i;
  cl_deferred_option *opt;
  vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;

  if (v)
    FOR_EACH_VEC_ELT (*v, i, opt)
      {
	switch (opt->opt_index)
	  {
	  case OPT_mint_register_:
	    switch (opt->value)
	      {
	      case 4:
		fixed_regs[10] = call_used_regs [10] = 1;
		/* Fall through.  */
	      case 3:
		fixed_regs[11] = call_used_regs [11] = 1;
		/* Fall through.  */
	      case 2:
		fixed_regs[12] = call_used_regs [12] = 1;
		/* Fall through.  */
	      case 1:
		fixed_regs[13] = call_used_regs [13] = 1;
		/* Fall through.  */
	      case 0:
		rx_num_interrupt_regs = opt->value;
		break;
	      default:
		rx_num_interrupt_regs = 0;
		/* Error message already given because rx_handle_option
		   returned false.  */
		break;
	      }
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }

  /* This target defaults to strict volatile bitfields.  */
  if (flag_strict_volatile_bitfields < 0 && abi_version_at_least (2))
    flag_strict_volatile_bitfields = 1;

  rx_override_options_after_change ();

  /* These values are bytes, not log.  */
  if (! optimize_size)
    {
      if (flag_align_jumps && !str_align_jumps)
	str_align_jumps = ((rx_cpu_type == RX100
			    || rx_cpu_type == RX200) ? "4" : "8");
      if (flag_align_loops && !str_align_loops)
	str_align_loops = ((rx_cpu_type == RX100
			    || rx_cpu_type == RX200) ? "4" : "8");
      if (flag_align_labels && !str_align_labels)
	str_align_labels = ((rx_cpu_type == RX100
			     || rx_cpu_type == RX200) ? "4" : "8");
    }
}
static bool
rx_allocate_stack_slots_for_args (void)
{
  /* Naked functions should not allocate stack slots for arguments.  */
  return ! is_naked_func (NULL_TREE);
}
static bool
rx_func_attr_inlinable (const_tree decl)
{
  return ! is_fast_interrupt_func (decl)
    &&   ! is_interrupt_func (decl)
    &&   ! is_naked_func (decl);
}
static bool
rx_warn_func_return (tree decl)
{
  /* Naked functions are implemented entirely in assembly, including the
     return sequence, so suppress warnings about this.  */
  return !is_naked_func (decl);
}
/* Return nonzero if it is ok to make a tail-call to DECL,
   a function_decl or NULL if this is an indirect call, using EXP.  */

static bool
rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
{
  if (decl == NULL_TREE)
    /* Do not allow indirect tailcalls.  The
       sibcall patterns do not support them.  */
    return false;

  /* Never tailcall from inside interrupt handlers or naked functions.  */
  if (is_fast_interrupt_func (NULL_TREE)
      || is_interrupt_func (NULL_TREE)
      || is_naked_func (NULL_TREE))
    return false;

  return true;
}
static void
rx_file_start (void)
{
  if (! TARGET_AS100_SYNTAX)
    default_file_start ();
}

static bool
rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
{
  /* The packed attribute overrides the MS behavior.  */
  return ! TYPE_PACKED (record_type);
}
/* Returns true if X a legitimate constant for an immediate
   operand on the RX.  X is already known to satisfy CONSTANT_P.  */

bool
rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST:
      x = XEXP (x, 0);

      if (GET_CODE (x) == PLUS)
	{
	  if (! CONST_INT_P (XEXP (x, 1)))
	    return false;

	  /* GCC would not pass us CONST_INT + CONST_INT so we
	     know that we have {SYMBOL|LABEL} + CONST_INT.  */
	  x = XEXP (x, 0);
	  gcc_assert (! CONST_INT_P (x));
	}

      switch (GET_CODE (x))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  return true;

	case UNSPEC:
	  return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;

	default:
	  /* FIXME: Can this ever happen ?  */
	  gcc_unreachable ();
	}
      break;

    case LABEL_REF:
    case SYMBOL_REF:
      return true;

    case CONST_DOUBLE:
      return (rx_max_constant_size == 0 || rx_max_constant_size == 4);

    default:
      gcc_assert (CONST_INT_P (x));
      break;
    }

  return ok_for_max_constant (INTVAL (x));
}
static int
rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
		 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
{
  rtx a, b;

  if (GET_CODE (addr) != PLUS)
    return COSTS_N_INSNS (1);

  a = XEXP (addr, 0);
  b = XEXP (addr, 1);

  if (REG_P (a) && REG_P (b))
    /* Try to discourage REG+REG addressing as it keeps two registers live.  */
    return COSTS_N_INSNS (4);

  if (speed)
    /* [REG+OFF] is just as fast as [REG].  */
    return COSTS_N_INSNS (1);

  if (CONST_INT_P (b)
      && ((INTVAL (b) > 128) || INTVAL (b) < -127))
    /* Try to discourage REG + <large OFF> when optimizing for size.  */
    return COSTS_N_INSNS (2);

  return COSTS_N_INSNS (1);
}
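
/* Worked example (illustrative): for the address (plus (reg r1) (reg r2))
   the function above returns COSTS_N_INSNS (4) to discourage keeping two
   registers live, while both (reg r1) and (plus (reg r1) (const_int 4))
   cost COSTS_N_INSNS (1) when optimizing for speed.  */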
static bool
rx_rtx_costs (rtx x, machine_mode mode, int outer_code ATTRIBUTE_UNUSED,
	      int opno ATTRIBUTE_UNUSED, int* total, bool speed)
{
  if (x == const0_rtx)
    {
      *total = 0;
      return true;
    }

  switch (GET_CODE (x))
    {
    case MULT:
      if (mode == DImode)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* fall through */

    case PLUS:
    case MINUS:
    case AND:
    case COMPARE:
    case IOR:
    case XOR:
      *total = COSTS_N_INSNS (1);
      return true;

    case DIV:
      if (speed)
	/* This is the worst case for a division.  Pessimize divisions when
	   not optimizing for size and allow reciprocal optimizations which
	   produce bigger code.  */
	*total = COSTS_N_INSNS (20);
      else
	*total = COSTS_N_INSNS (3);
      return true;

    case UDIV:
      if (speed)
	/* This is the worst case for a division.  Pessimize divisions when
	   not optimizing for size and allow reciprocal optimizations which
	   produce bigger code.  */
	*total = COSTS_N_INSNS (18);
      else
	*total = COSTS_N_INSNS (3);
      return true;

    default:
      break;
    }

  return false;
}
static bool
rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
{
  /* We can always eliminate to the frame pointer.
     We can eliminate to the stack pointer unless a frame
     pointer is needed.  */

  return to == FRAME_POINTER_REGNUM
    || (to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
}
static void
rx_trampoline_template (FILE * file)
{
  /* Output assembler code for a block containing the constant
     part of a trampoline, leaving space for the variable parts.

     On the RX, (where r8 is the static chain regnum) the trampoline
     looks like this:

	   mov		#<static chain value>, r8
	   mov		#<function's address>, r9
	   jmp		r9

     In big-endian-data-mode however instructions are read into the CPU
     4 bytes at a time.  These bytes are then swapped around before being
     passed to the decoder.  So...we must partition our trampoline into
     4 byte packets and swap these packets around so that the instruction
     reader will reverse the process.  But, in order to avoid splitting
     the 32-bit constants across these packet boundaries, (making inserting
     them into the constructed trampoline very difficult) we have to pad the
     instruction sequence with NOP insns.  ie:

	   nop
	   nop
	   mov.l	#<...>, r8
	   nop
	   nop
	   mov.l	#<...>, r9
	   jmp		r9
	   nop
	   nop             */

  if (! TARGET_BIG_ENDIAN_DATA)
    {
      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
      asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
      asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
    }
  else
    {
      char r8 = '0' + STATIC_CHAIN_REGNUM;
      char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;

      if (TARGET_AS100_SYNTAX)
	{
	  asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
	  asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
	  asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
	  asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
	  asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
	}
      else
	{
	  asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
	  asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
	  asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
	  asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
	  asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
	}
    }
}
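
/* For reference (illustrative, little-endian case): the code above emits

	mov.L	#0deadbeefH, r8
	mov.L	#0deadbeefH, r9
	jmp	r9

   and rx_trampoline_init below patches the two 0xdeadbeef placeholders
   (the SImode words at byte offsets 2 and 8) with the static chain value
   and the target function's address.  */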
static void
rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);

  emit_block_move (tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  if (TARGET_BIG_ENDIAN_DATA)
    {
      emit_move_insn (adjust_address (tramp, SImode, 4), chain);
      emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
    }
  else
    {
      emit_move_insn (adjust_address (tramp, SImode, 2), chain);
      emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
    }
}
static int
rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
		     reg_class_t regclass ATTRIBUTE_UNUSED,
		     bool in)
{
  return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
}
/* Convert a CC_MODE to the set of flags that it represents.  */

static unsigned int
flags_from_mode (machine_mode mode)
{
  switch (mode)
    {
    case E_CC_ZSmode:
      return CC_FLAG_S | CC_FLAG_Z;
    case E_CC_ZSOmode:
      return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
    case E_CC_ZSCmode:
      return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
    case E_CCmode:
      return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
    case E_CC_Fmode:
      return CC_FLAG_FP;
    default:
      gcc_unreachable ();
    }
}
/* Convert a set of flags to a CC_MODE that can implement it.  */

static machine_mode
mode_from_flags (unsigned int f)
{
  if (f & CC_FLAG_FP)
    return CC_Fmode;
  if (f & CC_FLAG_O)
    {
      if (f & CC_FLAG_C)
	return CCmode;
      else
	return CC_ZSOmode;
    }
  else if (f & CC_FLAG_C)
    return CC_ZSCmode;
  else
    return CC_ZSmode;
}
/* Convert an RTX_CODE to the set of flags needed to implement it.
   This assumes an integer comparison.  */

static unsigned int
flags_from_code (enum rtx_code code)
{
  switch (code)
    {
    case LT:
    case GE:
      return CC_FLAG_S;

    case GT:
    case LE:
      return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;

    case GEU:
    case LTU:
      return CC_FLAG_C;

    case GTU:
    case LEU:
      return CC_FLAG_C | CC_FLAG_Z;

    case EQ:
    case NE:
      return CC_FLAG_Z;

    default:
      gcc_unreachable ();
    }
}
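
/* Worked example (illustrative): a signed (gt x 0) comparison needs
   CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z, for which mode_from_flags above picks
   CC_ZSOmode; an unsigned (geu ...) needs only CC_FLAG_C and gets
   CC_ZSCmode.  */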
/* Return a CC_MODE of which both M1 and M2 are subsets.  */

static machine_mode
rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  unsigned f;

  /* Early out for identical modes.  */
  if (m1 == m2)
    return m1;

  /* There's no valid combination for FP vs non-FP.  */
  f = flags_from_mode (m1) | flags_from_mode (m2);
  if (f & CC_FLAG_FP)
    return VOIDmode;

  /* Otherwise, see what mode can implement all the flags.  */
  return mode_from_flags (f);
}
/* Return the minimal CC mode needed to implement (CMP_CODE X Y).  */

machine_mode
rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
{
  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    return CC_Fmode;

  if (y != const0_rtx)
    return CCmode;

  return mode_from_flags (flags_from_code (cmp_code));
}
/* Split the conditional branch.  Emit (COMPARE C1 C2) into CC_REG with
   CC_MODE, and use that in branches based on that compare.  */

void
rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
		  rtx c1, rtx c2, rtx label)
{
  rtx flags, x;

  flags = gen_rtx_REG (cc_mode, CC_REG);
  x = gen_rtx_COMPARE (cc_mode, c1, c2);
  x = gen_rtx_SET (flags, x);
  emit_insn (x);

  x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
  x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
  x = gen_rtx_SET (pc_rtx, x);
  emit_jump_insn (x);
}
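
/* For reference (illustrative): rx_split_cbranch (CC_ZSmode, EQ, a, b, lab)
   emits the two insns

     (set (reg:CC_ZS CC_REG) (compare:CC_ZS a b))
     (set (pc) (if_then_else (eq (reg:CC_ZS CC_REG) (const_int 0))
			     lab
			     (pc)))
*/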
/* A helper function for matching parallels that set the flags.  */

bool
rx_match_ccmode (rtx insn, machine_mode cc_mode)
{
  rtx op1, flags;
  machine_mode flags_mode;

  gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);

  op1 = XVECEXP (PATTERN (insn), 0, 0);
  gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);

  flags = SET_DEST (op1);
  flags_mode = GET_MODE (flags);

  if (GET_MODE (SET_SRC (op1)) != flags_mode)
    return false;
  if (GET_MODE_CLASS (flags_mode) != MODE_CC)
    return false;

  /* Ensure that the mode of FLAGS is compatible with CC_MODE.  */
  if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
    return false;

  return true;
}
static int
rx_max_skip_for_label (rtx_insn *lab)
{
  int opsize;
  rtx_insn *op;

  if (optimize_size)
    return 0;

  if (lab == NULL)
    return 0;

  op = lab;
  do
    {
      op = next_nonnote_nondebug_insn (op);
    }
  while (op && (LABEL_P (op)
		|| (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
  if (!op)
    return 0;

  opsize = get_attr_length (op);
  if (opsize >= 0 && opsize < 8)
    return MAX (0, opsize - 1);

  return 0;
}
static int
rx_align_log_for_label (rtx_insn *lab, int uses_threshold)
{
  /* This is a simple heuristic to guess when an alignment would not be useful
     because the delay due to the inserted NOPs would be greater than the delay
     due to the misaligned branch.  If uses_threshold is zero then the alignment
     is always useful.  */
  if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
    return 0;

  if (optimize_size)
    return 0;

  /* Return zero if max_skip not a positive number.  */
  int max_skip = rx_max_skip_for_label (lab);
  if (max_skip <= 0)
    return 0;

  /* These values are log, not bytes.  */
  if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
    return 2; /* 4 bytes */
  return 3;   /* 8 bytes */
}
align_flags
rx_align_for_label (rtx_insn *lab, int uses_threshold)
{
  return align_flags (rx_align_log_for_label (lab, uses_threshold),
		      rx_max_skip_for_label (lab));
}
/* Compute the real length of the extending load-and-op instructions.  */

int
rx_adjust_insn_length (rtx_insn *insn, int current_length)
{
  rtx extend, mem, offset;
  bool zero;
  int factor;

  if (!INSN_P (insn))
    return current_length;

  switch (INSN_CODE (insn))
    {
    default:
      return current_length;

    case CODE_FOR_plussi3_zero_extendhi:
    case CODE_FOR_andsi3_zero_extendhi:
    case CODE_FOR_iorsi3_zero_extendhi:
    case CODE_FOR_xorsi3_zero_extendhi:
    case CODE_FOR_divsi3_zero_extendhi:
    case CODE_FOR_udivsi3_zero_extendhi:
    case CODE_FOR_minussi3_zero_extendhi:
    case CODE_FOR_smaxsi3_zero_extendhi:
    case CODE_FOR_sminsi3_zero_extendhi:
    case CODE_FOR_multsi3_zero_extendhi:
    case CODE_FOR_comparesi3_zero_extendhi:
      zero = true;
      factor = 2;
      break;

    case CODE_FOR_plussi3_sign_extendhi:
    case CODE_FOR_andsi3_sign_extendhi:
    case CODE_FOR_iorsi3_sign_extendhi:
    case CODE_FOR_xorsi3_sign_extendhi:
    case CODE_FOR_divsi3_sign_extendhi:
    case CODE_FOR_udivsi3_sign_extendhi:
    case CODE_FOR_minussi3_sign_extendhi:
    case CODE_FOR_smaxsi3_sign_extendhi:
    case CODE_FOR_sminsi3_sign_extendhi:
    case CODE_FOR_multsi3_sign_extendhi:
    case CODE_FOR_comparesi3_sign_extendhi:
      zero = false;
      factor = 2;
      break;

    case CODE_FOR_plussi3_zero_extendqi:
    case CODE_FOR_andsi3_zero_extendqi:
    case CODE_FOR_iorsi3_zero_extendqi:
    case CODE_FOR_xorsi3_zero_extendqi:
    case CODE_FOR_divsi3_zero_extendqi:
    case CODE_FOR_udivsi3_zero_extendqi:
    case CODE_FOR_minussi3_zero_extendqi:
    case CODE_FOR_smaxsi3_zero_extendqi:
    case CODE_FOR_sminsi3_zero_extendqi:
    case CODE_FOR_multsi3_zero_extendqi:
    case CODE_FOR_comparesi3_zero_extendqi:
      zero = true;
      factor = 1;
      break;

    case CODE_FOR_plussi3_sign_extendqi:
    case CODE_FOR_andsi3_sign_extendqi:
    case CODE_FOR_iorsi3_sign_extendqi:
    case CODE_FOR_xorsi3_sign_extendqi:
    case CODE_FOR_divsi3_sign_extendqi:
    case CODE_FOR_udivsi3_sign_extendqi:
    case CODE_FOR_minussi3_sign_extendqi:
    case CODE_FOR_smaxsi3_sign_extendqi:
    case CODE_FOR_sminsi3_sign_extendqi:
    case CODE_FOR_multsi3_sign_extendqi:
    case CODE_FOR_comparesi3_sign_extendqi:
      zero = false;
      factor = 1;
      break;
    }

  /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))).  */
  extend = single_set (insn);
  gcc_assert (extend != NULL_RTX);

  extend = SET_SRC (extend);
  if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
      || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
    extend = XEXP (extend, 0);
  else
    extend = XEXP (extend, 1);

  gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
	      || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));

  mem = XEXP (extend, 0);
  gcc_checking_assert (MEM_P (mem));
  if (REG_P (XEXP (mem, 0)))
    return (zero && factor == 1) ? 2 : 3;

  /* We are expecting: (MEM (PLUS (REG) (CONST_INT))).  */
  gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
  gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));

  offset = XEXP (XEXP (mem, 0), 1);
  gcc_checking_assert (GET_CODE (offset) == CONST_INT);

  if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
    return (zero && factor == 1) ? 3 : 4;

  return (zero && factor == 1) ? 4 : 5;
}
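
/* Worked example (illustrative): for an insn such as
   (set (reg r2) (plus (reg r2) (zero_extend:SI (mem:QI (reg r1)))))
   the code above returns 2 bytes; with (plus (reg r1) (const_int 4)) as
   the address it returns 3, and 4 once the offset falls outside the
   single displacement byte range 0..255 * factor.  */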
static bool
rx_narrow_volatile_bitfield (void)
{
  return true;
}
static bool
rx_ok_to_inline (tree caller, tree callee)
{
  /* Do not inline functions with local variables
     into a naked CALLER - naked function have no stack frame and
     locals need a frame in order to have somewhere to live.

     Unfortunately we have no way to determine the presence of
     local variables in CALLEE, so we have to be cautious and
     assume that there might be some there.

     We do allow inlining when CALLEE has the "inline" type
     modifier or the "always_inline" or "gnu_inline" attributes.  */
  return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
    || DECL_DECLARED_INLINE_P (callee)
    || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
    || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
}
static bool
rx_enable_lra (void)
{
  return TARGET_ENABLE_LRA;
}
rx_atomic_sequence::rx_atomic_sequence (const_tree fun_decl)
{
  if (is_fast_interrupt_func (fun_decl) || is_interrupt_func (fun_decl))
    {
      /* If we are inside an interrupt handler, assume that interrupts are
	 off -- which is the default hardware behavior.  In this case, there
	 is no need to disable the interrupts.  */
      m_prev_psw_reg = NULL;
    }
  else
    {
      m_prev_psw_reg = gen_reg_rtx (SImode);
      emit_insn (gen_mvfc (m_prev_psw_reg, GEN_INT (CTRLREG_PSW)));
      emit_insn (gen_clrpsw (GEN_INT ('I')));
    }
}
rx_atomic_sequence::~rx_atomic_sequence (void)
{
  if (m_prev_psw_reg != NULL)
    emit_insn (gen_mvtc (GEN_INT (CTRLREG_PSW), m_prev_psw_reg));
}
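
/* Usage sketch (illustrative): the class above is an RAII guard for use
   inside an expander, e.g.:

     {
       rx_atomic_sequence seq (current_function_decl);
       ... emit the insns that must not be interrupted ...
     }   // the destructor restores the saved PSW here
*/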
/* Given an insn and a reg number, tell whether the reg dies or is unused
   after the insn.  */
bool
rx_reg_dead_or_unused_after_insn (const rtx_insn* i, int regno)
{
  return find_regno_note (i, REG_DEAD, regno) != NULL
	 || find_regno_note (i, REG_UNUSED, regno) != NULL;
}
/* Copy dead and unused notes from SRC to DST for the specified REGNO.  */
void
rx_copy_reg_dead_or_unused_notes (rtx reg, const rtx_insn* src,
				  rtx_insn* dst)
{
  int regno = REGNO (SUBREG_P (reg) ? SUBREG_REG (reg) : reg);

  if (rtx note = find_regno_note (src, REG_DEAD, regno))
    add_shallow_copy_of_reg_note (dst, note);

  if (rtx note = find_regno_note (src, REG_UNUSED, regno))
    add_shallow_copy_of_reg_note (dst, note);
}
/* Try to fuse the current bit-operation insn with the surrounding memory load
   and store.  */
bool
rx_fuse_in_memory_bitop (rtx* operands, rtx_insn* curr_insn,
			 rtx (*gen_insn)(rtx, rtx))
{
  rtx op2_reg = SUBREG_P (operands[2]) ? SUBREG_REG (operands[2]) : operands[2];

  set_of_reg op2_def = rx_find_set_of_reg (op2_reg, curr_insn,
					   prev_nonnote_nondebug_insn_bb);
  if (op2_def.set_src == NULL_RTX
      || !MEM_P (op2_def.set_src)
      || GET_MODE (op2_def.set_src) != QImode
      || !rx_is_restricted_memory_address (XEXP (op2_def.set_src, 0),
					   GET_MODE (op2_def.set_src))
      || reg_used_between_p (operands[2], op2_def.insn, curr_insn)
      || !rx_reg_dead_or_unused_after_insn (curr_insn, REGNO (op2_reg)))
    return false;

  /* The register operand originates from a memory load and the memory load
     could be fused with the bitop insn.
     Look for the following memory store with the same memory operand.  */
  rtx mem = op2_def.set_src;

  /* If the memory is an auto-mod address, it can't be fused.  */
  if (GET_CODE (XEXP (mem, 0)) == POST_INC
      || GET_CODE (XEXP (mem, 0)) == PRE_INC
      || GET_CODE (XEXP (mem, 0)) == POST_DEC
      || GET_CODE (XEXP (mem, 0)) == PRE_DEC)
    return false;

  rtx_insn* op0_use = rx_find_use_of_reg (operands[0], curr_insn,
					  next_nonnote_nondebug_insn_bb);
  if (op0_use == NULL
      || !(GET_CODE (PATTERN (op0_use)) == SET
	   && RX_REG_P (XEXP (PATTERN (op0_use), 1))
	   && reg_overlap_mentioned_p (operands[0], XEXP (PATTERN (op0_use), 1))
	   && rtx_equal_p (mem, XEXP (PATTERN (op0_use), 0)))
      || !rx_reg_dead_or_unused_after_insn (op0_use, REGNO (operands[0]))
      || reg_set_between_p (operands[2], curr_insn, op0_use))
    return false;

  /* If the load-modify-store operation is fused it could potentially modify
     load/store ordering if there are other memory accesses between the load
     and the store for this insn.  If there are volatile mems between the load
     and store it's better not to change the ordering.  If there is a call
     between the load and store, it's also not safe to fuse it.  */
  for (rtx_insn* i = next_nonnote_nondebug_insn_bb (op2_def.insn);
       i != NULL && i != op0_use;
       i = next_nonnote_nondebug_insn_bb (i))
    if (volatile_insn_p (PATTERN (i)) || CALL_P (i))
      return false;

  emit_insn (gen_insn (mem, gen_lowpart (QImode, operands[1])));
  set_insn_deleted (op2_def.insn);
  set_insn_deleted (op0_use);
  return true;
}
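
/* Worked example (illustrative; the operand layout and mnemonics are
   assumptions for the sake of the sketch): given a load-modify-store
   sequence such as

     mov.b  [r8], r3
     bset   #2, r3        <-- curr_insn, the bit operation being fused
     mov.b  r3, [r8]

   the function above deletes the load and the store and emits the bit
   operation directly on memory via GEN_INSN, e.g. "bset #2, [r8].b".  */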
/* Implement TARGET_HARD_REGNO_NREGS.  */

static unsigned int
rx_hard_regno_nregs (unsigned int, machine_mode mode)
{
  return CLASS_MAX_NREGS (0, mode);
}
/* Implement TARGET_HARD_REGNO_MODE_OK.  */

static bool
rx_hard_regno_mode_ok (unsigned int regno, machine_mode)
{
  return REGNO_REG_CLASS (regno) == GR_REGS;
}
/* Implement TARGET_MODES_TIEABLE_P.  */

static bool
rx_modes_tieable_p (machine_mode mode1, machine_mode mode2)
{
  return ((GET_MODE_CLASS (mode1) == MODE_FLOAT
	   || GET_MODE_CLASS (mode1) == MODE_COMPLEX_FLOAT)
	  == (GET_MODE_CLASS (mode2) == MODE_FLOAT
	      || GET_MODE_CLASS (mode2) == MODE_COMPLEX_FLOAT));
}
#undef  TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD		rx_narrow_volatile_bitfield

#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P			rx_ok_to_inline

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE			rx_function_value

#undef  TARGET_RETURN_IN_MSB
#define TARGET_RETURN_IN_MSB			rx_return_in_msb

#undef  TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P			rx_in_small_data

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY			rx_return_in_memory

#undef  TARGET_HAVE_SRODATA_SECTION
#define TARGET_HAVE_SRODATA_SECTION		true

#undef  TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION		rx_select_rtx_section

#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION		rx_select_section

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS			rx_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL			rx_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN			rx_expand_builtin

#undef  TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR			rx_elf_asm_constructor

#undef  TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR			rx_elf_asm_destructor

#undef  TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX			rx_struct_value_rtx

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE			rx_attribute_table

#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START			rx_file_start

#undef  TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P		rx_is_ms_bitfield_layout

#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P		rx_is_legitimate_address

#undef  TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P		rx_mode_dependent_address_p

#undef  TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS	rx_allocate_stack_slots_for_args

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE		rx_output_function_prologue

#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P	rx_func_attr_inlinable

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL		rx_function_ok_for_sibcall

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG			rx_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE		rx_function_arg_advance

#undef  TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY		rx_function_arg_boundary

#undef  TARGET_SET_CURRENT_FUNCTION
#define TARGET_SET_CURRENT_FUNCTION		rx_set_current_function

#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER			rx_assemble_integer

#undef  TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P	hook_bool_mode_const_rtx_true

#undef  TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET		32

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST			rx_address_cost

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE			rx_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE	rx_conditional_register_usage

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE		rx_trampoline_template

#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT			rx_trampoline_init

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND			rx_print_operand

#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS		rx_print_operand_address

#undef  TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE		rx_cc_modes_compatible

#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST			rx_memory_move_cost

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE			rx_option_override

#undef  TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE		rx_promote_function_mode

#undef  TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
#define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE	rx_override_options_after_change

#undef  TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM			CC_REG

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P		rx_is_legitimate_constant

#undef  TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS		rx_legitimize_address

#undef  TARGET_WARN_FUNC_RETURN
#define TARGET_WARN_FUNC_RETURN			rx_warn_func_return

#undef  TARGET_LRA_P
#define TARGET_LRA_P				rx_enable_lra

#undef  TARGET_HARD_REGNO_NREGS
#define TARGET_HARD_REGNO_NREGS			rx_hard_regno_nregs

#undef  TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK		rx_hard_regno_mode_ok

#undef  TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P			rx_modes_tieable_p

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS			rx_rtx_costs

#undef  TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE	speculation_safe_value_not_needed

struct gcc_target targetm = TARGET_INITIALIZER;