1 /* Subroutines for insn-output.c for Matsushita MN10300 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
33 #include "insn-attr.h"
41 #include "diagnostic-core.h"
44 #include "target-def.h"
47 /* This is used by GOTaddr2picreg to uniquely identify
49 int mn10300_unspec_int_label_counter
;
51 /* This is used in the am33_2.0-linux-gnu port, in which global symbol
52 names are not prefixed by underscores, to tell whether to prefix a
53 label with a plus sign or not, so that the assembler can tell
54 symbol names from register names. */
55 int mn10300_protect_label
;
57 /* The selected processor. */
58 enum processor_type mn10300_processor
= PROCESSOR_DEFAULT
;
60 /* Processor type to select for tuning. */
61 static const char * mn10300_tune_string
= NULL
;
63 /* Selected processor type for tuning. */
64 enum processor_type mn10300_tune_cpu
= PROCESSOR_DEFAULT
;
/* The size of the callee register save area.  Right now we save everything
   on entry since it costs us nothing in code size.  It does cost us from a
   speed standpoint, so we want to optimize this sooner or later.

   Registers 2, 3, 6 and 7 cost 4 bytes each when live; the extended
   registers 14..17 are saved and restored only as a group, so they cost
   a flat 16 bytes as soon as any one of them is live.  */
#define REG_SAVE_BYTES (4 * df_regs_ever_live_p (2) \
			+ 4 * df_regs_ever_live_p (3) \
			+ 4 * df_regs_ever_live_p (6) \
			+ 4 * df_regs_ever_live_p (7) \
			+ 16 * (df_regs_ever_live_p (14) \
				|| df_regs_ever_live_p (15) \
				|| df_regs_ever_live_p (16) \
				|| df_regs_ever_live_p (17)))
/* Forward declaration so earlier code can reference the address-cost
   helper.  NOTE(review): presumably backs the TARGET_ADDRESS_COST hook;
   the definition is not visible in this chunk -- confirm there.  */
static int mn10300_address_cost (rtx, bool);
80 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
81 static const struct default_options mn10300_option_optimization_table
[] =
83 { OPT_LEVELS_1_PLUS
, OPT_fomit_frame_pointer
, NULL
, 1 },
84 { OPT_LEVELS_NONE
, 0, NULL
, 0 }
87 /* Implement TARGET_HANDLE_OPTION. */
90 mn10300_handle_option (size_t code
,
91 const char *arg ATTRIBUTE_UNUSED
,
97 mn10300_processor
= value
? PROCESSOR_AM33
: PROCESSOR_MN10300
;
101 mn10300_processor
= (value
103 : MIN (PROCESSOR_AM33
, PROCESSOR_DEFAULT
));
107 mn10300_processor
= (value
? PROCESSOR_AM34
: PROCESSOR_DEFAULT
);
111 mn10300_tune_string
= arg
;
119 /* Implement TARGET_OPTION_OVERRIDE. */
122 mn10300_option_override (void)
125 target_flags
&= ~MASK_MULT_BUG
;
128 /* Disable scheduling for the MN10300 as we do
129 not have timing information available for it. */
130 flag_schedule_insns
= 0;
131 flag_schedule_insns_after_reload
= 0;
134 if (mn10300_tune_string
)
136 if (strcasecmp (mn10300_tune_string
, "mn10300") == 0)
137 mn10300_tune_cpu
= PROCESSOR_MN10300
;
138 else if (strcasecmp (mn10300_tune_string
, "am33") == 0)
139 mn10300_tune_cpu
= PROCESSOR_AM33
;
140 else if (strcasecmp (mn10300_tune_string
, "am33-2") == 0)
141 mn10300_tune_cpu
= PROCESSOR_AM33_2
;
142 else if (strcasecmp (mn10300_tune_string
, "am34") == 0)
143 mn10300_tune_cpu
= PROCESSOR_AM34
;
145 error ("-mtune= expects mn10300, am33, am33-2, or am34");
150 mn10300_file_start (void)
152 default_file_start ();
155 fprintf (asm_out_file
, "\t.am33_2\n");
156 else if (TARGET_AM33
)
157 fprintf (asm_out_file
, "\t.am33\n");
160 /* Print operand X using operand code CODE to assembly language output file
164 mn10300_print_operand (FILE *file
, rtx x
, int code
)
170 if (GET_MODE (XEXP (x
, 0)) == CC_FLOATmode
)
172 switch (code
== 'b' ? GET_CODE (x
)
173 : reverse_condition_maybe_unordered (GET_CODE (x
)))
176 fprintf (file
, "ne");
179 fprintf (file
, "eq");
182 fprintf (file
, "ge");
185 fprintf (file
, "gt");
188 fprintf (file
, "le");
191 fprintf (file
, "lt");
194 fprintf (file
, "lge");
197 fprintf (file
, "uo");
200 fprintf (file
, "lg");
203 fprintf (file
, "ue");
206 fprintf (file
, "uge");
209 fprintf (file
, "ug");
212 fprintf (file
, "ule");
215 fprintf (file
, "ul");
222 /* These are normal and reversed branches. */
223 switch (code
== 'b' ? GET_CODE (x
) : reverse_condition (GET_CODE (x
)))
226 fprintf (file
, "ne");
229 fprintf (file
, "eq");
232 fprintf (file
, "ge");
235 fprintf (file
, "gt");
238 fprintf (file
, "le");
241 fprintf (file
, "lt");
244 fprintf (file
, "cc");
247 fprintf (file
, "hi");
250 fprintf (file
, "ls");
253 fprintf (file
, "cs");
260 /* This is used for the operand to a call instruction;
261 if it's a REG, enclose it in parens, else output
262 the operand normally. */
266 mn10300_print_operand (file
, x
, 0);
270 mn10300_print_operand (file
, x
, 0);
274 switch (GET_CODE (x
))
278 output_address (XEXP (x
, 0));
283 fprintf (file
, "fd%d", REGNO (x
) - 18);
291 /* These are the least significant word in a 64bit value. */
293 switch (GET_CODE (x
))
297 output_address (XEXP (x
, 0));
302 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
306 fprintf (file
, "%s", reg_names
[subreg_regno (x
)]);
314 switch (GET_MODE (x
))
317 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
318 REAL_VALUE_TO_TARGET_DOUBLE (rv
, val
);
319 fprintf (file
, "0x%lx", val
[0]);
322 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
323 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
[0]);
324 fprintf (file
, "0x%lx", val
[0]);
328 mn10300_print_operand_address (file
,
329 GEN_INT (CONST_DOUBLE_LOW (x
)));
340 split_double (x
, &low
, &high
);
341 fprintf (file
, "%ld", (long)INTVAL (low
));
350 /* Similarly, but for the most significant word. */
352 switch (GET_CODE (x
))
356 x
= adjust_address (x
, SImode
, 4);
357 output_address (XEXP (x
, 0));
362 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
366 fprintf (file
, "%s", reg_names
[subreg_regno (x
) + 1]);
374 switch (GET_MODE (x
))
377 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
378 REAL_VALUE_TO_TARGET_DOUBLE (rv
, val
);
379 fprintf (file
, "0x%lx", val
[1]);
385 mn10300_print_operand_address (file
,
386 GEN_INT (CONST_DOUBLE_HIGH (x
)));
397 split_double (x
, &low
, &high
);
398 fprintf (file
, "%ld", (long)INTVAL (high
));
409 if (REG_P (XEXP (x
, 0)))
410 output_address (gen_rtx_PLUS (SImode
, XEXP (x
, 0), const0_rtx
));
412 output_address (XEXP (x
, 0));
417 gcc_assert (INTVAL (x
) >= -128 && INTVAL (x
) <= 255);
418 fprintf (file
, "%d", (int)((~INTVAL (x
)) & 0xff));
422 gcc_assert (INTVAL (x
) >= -128 && INTVAL (x
) <= 255);
423 fprintf (file
, "%d", (int)(INTVAL (x
) & 0xff));
426 /* For shift counts. The hardware ignores the upper bits of
427 any immediate, but the assembler will flag an out of range
428 shift count as an error. So we mask off the high bits
429 of the immediate here. */
433 fprintf (file
, "%d", (int)(INTVAL (x
) & 0x1f));
439 switch (GET_CODE (x
))
443 output_address (XEXP (x
, 0));
452 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
456 fprintf (file
, "%s", reg_names
[subreg_regno (x
)]);
459 /* This will only be single precision.... */
465 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
466 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
467 fprintf (file
, "0x%lx", val
);
477 mn10300_print_operand_address (file
, x
);
486 /* Output assembly language output for the address ADDR to FILE. */
489 mn10300_print_operand_address (FILE *file
, rtx addr
)
491 switch (GET_CODE (addr
))
494 mn10300_print_operand_address (file
, XEXP (addr
, 0));
498 mn10300_print_operand (file
, addr
, 0);
503 if (REG_P (XEXP (addr
, 0))
504 && REG_OK_FOR_BASE_P (XEXP (addr
, 0)))
505 base
= XEXP (addr
, 0), index
= XEXP (addr
, 1);
506 else if (REG_P (XEXP (addr
, 1))
507 && REG_OK_FOR_BASE_P (XEXP (addr
, 1)))
508 base
= XEXP (addr
, 1), index
= XEXP (addr
, 0);
511 mn10300_print_operand (file
, index
, 0);
513 mn10300_print_operand (file
, base
, 0);;
517 output_addr_const (file
, addr
);
520 output_addr_const (file
, addr
);
525 /* Implement TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA.
527 Used for PIC-specific UNSPECs. */
530 mn10300_asm_output_addr_const_extra (FILE *file
, rtx x
)
532 if (GET_CODE (x
) == UNSPEC
)
536 case UNSPEC_INT_LABEL
:
537 asm_fprintf (file
, ".%LLIL" HOST_WIDE_INT_PRINT_DEC
,
538 INTVAL (XVECEXP (x
, 0, 0)));
541 /* GLOBAL_OFFSET_TABLE or local symbols, no suffix. */
542 output_addr_const (file
, XVECEXP (x
, 0, 0));
545 output_addr_const (file
, XVECEXP (x
, 0, 0));
546 fputs ("@GOT", file
);
549 output_addr_const (file
, XVECEXP (x
, 0, 0));
550 fputs ("@GOTOFF", file
);
553 output_addr_const (file
, XVECEXP (x
, 0, 0));
554 fputs ("@PLT", file
);
556 case UNSPEC_GOTSYM_OFF
:
557 assemble_name (file
, GOT_SYMBOL_NAME
);
559 output_addr_const (file
, XVECEXP (x
, 0, 0));
571 /* Count the number of FP registers that have to be saved. */
573 fp_regs_to_save (void)
580 for (i
= FIRST_FP_REGNUM
; i
<= LAST_FP_REGNUM
; ++i
)
581 if (df_regs_ever_live_p (i
) && ! call_really_used_regs
[i
])
587 /* Print a set of registers in the format required by "movm" and "ret".
588 Register K is saved if bit K of MASK is set. The data and address
589 registers can be stored individually, but the extended registers cannot.
590 We assume that the mask already takes that into account. For instance,
591 bits 14 to 17 must have the same value. */
594 mn10300_print_reg_list (FILE *file
, int mask
)
602 for (i
= 0; i
< FIRST_EXTENDED_REGNUM
; i
++)
603 if ((mask
& (1 << i
)) != 0)
607 fputs (reg_names
[i
], file
);
611 if ((mask
& 0x3c000) != 0)
613 gcc_assert ((mask
& 0x3c000) == 0x3c000);
616 fputs ("exreg1", file
);
624 mn10300_can_use_return_insn (void)
626 /* size includes the fixed stack space needed for function calls. */
627 int size
= get_frame_size () + crtl
->outgoing_args_size
;
629 /* And space for the return pointer. */
630 size
+= crtl
->outgoing_args_size
? 4 : 0;
632 return (reload_completed
634 && !df_regs_ever_live_p (2)
635 && !df_regs_ever_live_p (3)
636 && !df_regs_ever_live_p (6)
637 && !df_regs_ever_live_p (7)
638 && !df_regs_ever_live_p (14)
639 && !df_regs_ever_live_p (15)
640 && !df_regs_ever_live_p (16)
641 && !df_regs_ever_live_p (17)
642 && fp_regs_to_save () == 0
643 && !frame_pointer_needed
);
646 /* Returns the set of live, callee-saved registers as a bitmask. The
647 callee-saved extended registers cannot be stored individually, so
648 all of them will be included in the mask if any one of them is used. */
651 mn10300_get_live_callee_saved_regs (void)
657 for (i
= 0; i
<= LAST_EXTENDED_REGNUM
; i
++)
658 if (df_regs_ever_live_p (i
) && ! call_really_used_regs
[i
])
660 if ((mask
& 0x3c000) != 0)
669 RTX_FRAME_RELATED_P (r
) = 1;
673 /* Generate an instruction that pushes several registers onto the stack.
674 Register K will be saved if bit K in MASK is set. The function does
675 nothing if MASK is zero.
677 To be compatible with the "movm" instruction, the lowest-numbered
678 register must be stored in the lowest slot. If MASK is the set
679 { R1,...,RN }, where R1...RN are ordered least first, the generated
680 instruction will have the form:
683 (set (reg:SI 9) (plus:SI (reg:SI 9) (const_int -N*4)))
684 (set (mem:SI (plus:SI (reg:SI 9)
688 (set (mem:SI (plus:SI (reg:SI 9)
693 mn10300_gen_multiple_store (int mask
)
702 /* Count how many registers need to be saved. */
704 for (i
= 0; i
<= LAST_EXTENDED_REGNUM
; i
++)
705 if ((mask
& (1 << i
)) != 0)
708 /* We need one PARALLEL element to update the stack pointer and
709 an additional element for each register that is stored. */
710 par
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (count
+ 1));
712 /* Create the instruction that updates the stack pointer. */
714 = F (gen_rtx_SET (SImode
,
716 gen_rtx_PLUS (SImode
,
718 GEN_INT (-count
* 4))));
720 /* Create each store. */
722 for (i
= LAST_EXTENDED_REGNUM
; i
>= 0; i
--)
723 if ((mask
& (1 << i
)) != 0)
725 rtx address
= gen_rtx_PLUS (SImode
,
727 GEN_INT (-pari
* 4));
728 XVECEXP(par
, 0, pari
)
729 = F (gen_rtx_SET (VOIDmode
,
730 gen_rtx_MEM (SImode
, address
),
731 gen_rtx_REG (SImode
, i
)));
740 mn10300_expand_prologue (void)
744 /* SIZE includes the fixed stack space needed for function calls. */
745 size
= get_frame_size () + crtl
->outgoing_args_size
;
746 size
+= (crtl
->outgoing_args_size
? 4 : 0);
748 /* If we use any of the callee-saved registers, save them now. */
749 mn10300_gen_multiple_store (mn10300_get_live_callee_saved_regs ());
751 if (TARGET_AM33_2
&& fp_regs_to_save ())
753 int num_regs_to_save
= fp_regs_to_save (), i
;
759 save_sp_partial_merge
,
763 unsigned int strategy_size
= (unsigned)-1, this_strategy_size
;
766 /* We have several different strategies to save FP registers.
767 We can store them using SP offsets, which is beneficial if
768 there are just a few registers to save, or we can use `a0' in
769 post-increment mode (`a0' is the only call-clobbered address
770 register that is never used to pass information to a
771 function). Furthermore, if we don't need a frame pointer, we
772 can merge the two SP adds into a single one, but this isn't
773 always beneficial; sometimes we can just split the two adds
774 so that we don't exceed a 16-bit constant size. The code
775 below will select which strategy to use, so as to generate
776 smallest code. Ties are broken in favor or shorter sequences
777 (in terms of number of instructions). */
/* Byte size of an "add #imm, An" instruction: 2 bytes for an 8-bit
   signed immediate, 4 for 16-bit, 6 otherwise.  */
#define SIZE_ADD_AX(S) (((S) >= (1 << 7) || (S) < -(1 << 7)) \
			? (((S) >= (1 << 15) || (S) < -(1 << 15)) ? 6 : 4) \
			: 2)
/* Byte size of an "add #imm, sp" instruction: 3 bytes for an 8-bit
   signed immediate, 4 for 16-bit, 6 otherwise.  */
#define SIZE_ADD_SP(S) (((S) >= (1 << 7) || (S) < -(1 << 7)) \
			? (((S) >= (1 << 15) || (S) < -(1 << 15)) ? 6 : 4) \
			: 3)
/* We add 0 * (S) in two places to promote to the type of S,
   so that all arms of the conditional have the same type.

   SIZE_FMOV_LIMIT (S, N, L, SIZE1, SIZE2, ELSE) is the total byte size
   of N "fmov" saves at offsets S, S+4, ..., assuming each save at an
   offset >= L costs SIZE1 bytes and each below L costs SIZE2; ELSE is
   used when every offset is below L.  The garbled extraction dropped
   the final arm (original line 790); it is restored here -- this is the
   second of the two "0 * (S)" promotions the comment above refers to.  */
#define SIZE_FMOV_LIMIT(S,N,L,SIZE1,SIZE2,ELSE) \
  (((S) >= (L)) ? 0 * (S) + (SIZE1) * (N) \
   : ((S) + 4 * (N) >= (L)) ? (((L) - (S)) / 4 * (SIZE2) \
			       + ((S) + 4 * (N) - (L)) / 4 * (SIZE1)) \
   : 0 * (S) + (ELSE))
/* Byte size of N "fmov fs#,(##,sp)" saves starting at offset S:
   7 bytes each for 32-bit offsets, 6 for 24-bit, 4 for 8-bit, and a
   3-byte short form for the single save at offset zero.  */
#define SIZE_FMOV_SP_(S,N) \
  (SIZE_FMOV_LIMIT ((S), (N), (1 << 24), 7, 6, \
		    SIZE_FMOV_LIMIT ((S), (N), (1 << 8), 6, 4, \
				     (S) ? 4 * (N) : 3 + 4 * ((N) - 1))))
/* As above, with S first converted to an unsigned type so negative
   offsets fall into the most expensive bucket.  */
#define SIZE_FMOV_SP(S,N) (SIZE_FMOV_SP_ ((unsigned HOST_WIDE_INT)(S), (N)))
797 /* Consider alternative save_sp_merge only if we don't need the
798 frame pointer and size is nonzero. */
799 if (! frame_pointer_needed
&& size
)
801 /* Insn: add -(size + 4 * num_regs_to_save), sp. */
802 this_strategy_size
= SIZE_ADD_SP (-(size
+ 4 * num_regs_to_save
));
803 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
804 this_strategy_size
+= SIZE_FMOV_SP (size
, num_regs_to_save
);
806 if (this_strategy_size
< strategy_size
)
808 strategy
= save_sp_merge
;
809 strategy_size
= this_strategy_size
;
813 /* Consider alternative save_sp_no_merge unconditionally. */
814 /* Insn: add -4 * num_regs_to_save, sp. */
815 this_strategy_size
= SIZE_ADD_SP (-4 * num_regs_to_save
);
816 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
817 this_strategy_size
+= SIZE_FMOV_SP (0, num_regs_to_save
);
820 /* Insn: add -size, sp. */
821 this_strategy_size
+= SIZE_ADD_SP (-size
);
824 if (this_strategy_size
< strategy_size
)
826 strategy
= save_sp_no_merge
;
827 strategy_size
= this_strategy_size
;
830 /* Consider alternative save_sp_partial_merge only if we don't
831 need a frame pointer and size is reasonably large. */
832 if (! frame_pointer_needed
&& size
+ 4 * num_regs_to_save
> 128)
834 /* Insn: add -128, sp. */
835 this_strategy_size
= SIZE_ADD_SP (-128);
836 /* Insn: fmov fs#, (##, sp), for each fs# to be saved. */
837 this_strategy_size
+= SIZE_FMOV_SP (128 - 4 * num_regs_to_save
,
841 /* Insn: add 128-size, sp. */
842 this_strategy_size
+= SIZE_ADD_SP (128 - size
);
845 if (this_strategy_size
< strategy_size
)
847 strategy
= save_sp_partial_merge
;
848 strategy_size
= this_strategy_size
;
852 /* Consider alternative save_a0_merge only if we don't need a
853 frame pointer, size is nonzero and the user hasn't
854 changed the calling conventions of a0. */
855 if (! frame_pointer_needed
&& size
856 && call_really_used_regs
[FIRST_ADDRESS_REGNUM
]
857 && ! fixed_regs
[FIRST_ADDRESS_REGNUM
])
859 /* Insn: add -(size + 4 * num_regs_to_save), sp. */
860 this_strategy_size
= SIZE_ADD_SP (-(size
+ 4 * num_regs_to_save
));
861 /* Insn: mov sp, a0. */
862 this_strategy_size
++;
865 /* Insn: add size, a0. */
866 this_strategy_size
+= SIZE_ADD_AX (size
);
868 /* Insn: fmov fs#, (a0+), for each fs# to be saved. */
869 this_strategy_size
+= 3 * num_regs_to_save
;
871 if (this_strategy_size
< strategy_size
)
873 strategy
= save_a0_merge
;
874 strategy_size
= this_strategy_size
;
878 /* Consider alternative save_a0_no_merge if the user hasn't
879 changed the calling conventions of a0. */
880 if (call_really_used_regs
[FIRST_ADDRESS_REGNUM
]
881 && ! fixed_regs
[FIRST_ADDRESS_REGNUM
])
883 /* Insn: add -4 * num_regs_to_save, sp. */
884 this_strategy_size
= SIZE_ADD_SP (-4 * num_regs_to_save
);
885 /* Insn: mov sp, a0. */
886 this_strategy_size
++;
887 /* Insn: fmov fs#, (a0+), for each fs# to be saved. */
888 this_strategy_size
+= 3 * num_regs_to_save
;
891 /* Insn: add -size, sp. */
892 this_strategy_size
+= SIZE_ADD_SP (-size
);
895 if (this_strategy_size
< strategy_size
)
897 strategy
= save_a0_no_merge
;
898 strategy_size
= this_strategy_size
;
902 /* Emit the initial SP add, common to all strategies. */
905 case save_sp_no_merge
:
906 case save_a0_no_merge
:
907 F (emit_insn (gen_addsi3 (stack_pointer_rtx
,
909 GEN_INT (-4 * num_regs_to_save
))));
913 case save_sp_partial_merge
:
914 F (emit_insn (gen_addsi3 (stack_pointer_rtx
,
917 xsize
= 128 - 4 * num_regs_to_save
;
923 F (emit_insn (gen_addsi3 (stack_pointer_rtx
,
925 GEN_INT (-(size
+ 4 * num_regs_to_save
)))));
926 /* We'll have to adjust FP register saves according to the
929 /* Since we've already created the stack frame, don't do it
930 again at the end of the function. */
938 /* Now prepare register a0, if we have decided to use it. */
942 case save_sp_no_merge
:
943 case save_sp_partial_merge
:
948 case save_a0_no_merge
:
949 reg
= gen_rtx_REG (SImode
, FIRST_ADDRESS_REGNUM
);
950 F (emit_insn (gen_movsi (reg
, stack_pointer_rtx
)));
952 F (emit_insn (gen_addsi3 (reg
, reg
, GEN_INT (xsize
))));
953 reg
= gen_rtx_POST_INC (SImode
, reg
);
960 /* Now actually save the FP registers. */
961 for (i
= FIRST_FP_REGNUM
; i
<= LAST_FP_REGNUM
; ++i
)
962 if (df_regs_ever_live_p (i
) && ! call_really_used_regs
[i
])
970 /* If we aren't using `a0', use an SP offset. */
973 addr
= gen_rtx_PLUS (SImode
,
978 addr
= stack_pointer_rtx
;
983 F (emit_insn (gen_movsf (gen_rtx_MEM (SFmode
, addr
),
984 gen_rtx_REG (SFmode
, i
))));
988 /* Now put the frame pointer into the frame pointer register. */
989 if (frame_pointer_needed
)
990 F (emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
));
992 /* Allocate stack for this frame. */
994 F (emit_insn (gen_addsi3 (stack_pointer_rtx
,
998 if (flag_pic
&& df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM
))
999 emit_insn (gen_GOTaddr2picreg ());
1003 mn10300_expand_epilogue (void)
1007 /* SIZE includes the fixed stack space needed for function calls. */
1008 size
= get_frame_size () + crtl
->outgoing_args_size
;
1009 size
+= (crtl
->outgoing_args_size
? 4 : 0);
1011 if (TARGET_AM33_2
&& fp_regs_to_save ())
1013 int num_regs_to_save
= fp_regs_to_save (), i
;
1016 /* We have several options to restore FP registers. We could
1017 load them from SP offsets, but, if there are enough FP
1018 registers to restore, we win if we use a post-increment
1021 /* If we have a frame pointer, it's the best option, because we
1022 already know it has the value we want. */
1023 if (frame_pointer_needed
)
1024 reg
= gen_rtx_REG (SImode
, FRAME_POINTER_REGNUM
);
1025 /* Otherwise, we may use `a1', since it's call-clobbered and
1026 it's never used for return values. But only do so if it's
1027 smaller than using SP offsets. */
1030 enum { restore_sp_post_adjust
,
1031 restore_sp_pre_adjust
,
1032 restore_sp_partial_adjust
,
1033 restore_a1
} strategy
;
1034 unsigned int this_strategy_size
, strategy_size
= (unsigned)-1;
1036 /* Consider using sp offsets before adjusting sp. */
1037 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1038 this_strategy_size
= SIZE_FMOV_SP (size
, num_regs_to_save
);
1039 /* If size is too large, we'll have to adjust SP with an
1041 if (size
+ 4 * num_regs_to_save
+ REG_SAVE_BYTES
> 255)
1043 /* Insn: add size + 4 * num_regs_to_save, sp. */
1044 this_strategy_size
+= SIZE_ADD_SP (size
+ 4 * num_regs_to_save
);
1046 /* If we don't have to restore any non-FP registers,
1047 we'll be able to save one byte by using rets. */
1048 if (! REG_SAVE_BYTES
)
1049 this_strategy_size
--;
1051 if (this_strategy_size
< strategy_size
)
1053 strategy
= restore_sp_post_adjust
;
1054 strategy_size
= this_strategy_size
;
1057 /* Consider using sp offsets after adjusting sp. */
1058 /* Insn: add size, sp. */
1059 this_strategy_size
= SIZE_ADD_SP (size
);
1060 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1061 this_strategy_size
+= SIZE_FMOV_SP (0, num_regs_to_save
);
1062 /* We're going to use ret to release the FP registers
1063 save area, so, no savings. */
1065 if (this_strategy_size
< strategy_size
)
1067 strategy
= restore_sp_pre_adjust
;
1068 strategy_size
= this_strategy_size
;
1071 /* Consider using sp offsets after partially adjusting sp.
1072 When size is close to 32Kb, we may be able to adjust SP
1073 with an imm16 add instruction while still using fmov
1075 if (size
+ 4 * num_regs_to_save
+ REG_SAVE_BYTES
> 255)
1077 /* Insn: add size + 4 * num_regs_to_save
1078 + REG_SAVE_BYTES - 252,sp. */
1079 this_strategy_size
= SIZE_ADD_SP (size
+ 4 * num_regs_to_save
1080 + REG_SAVE_BYTES
- 252);
1081 /* Insn: fmov (##,sp),fs#, for each fs# to be restored. */
1082 this_strategy_size
+= SIZE_FMOV_SP (252 - REG_SAVE_BYTES
1083 - 4 * num_regs_to_save
,
1085 /* We're going to use ret to release the FP registers
1086 save area, so, no savings. */
1088 if (this_strategy_size
< strategy_size
)
1090 strategy
= restore_sp_partial_adjust
;
1091 strategy_size
= this_strategy_size
;
1095 /* Consider using a1 in post-increment mode, as long as the
1096 user hasn't changed the calling conventions of a1. */
1097 if (call_really_used_regs
[FIRST_ADDRESS_REGNUM
+ 1]
1098 && ! fixed_regs
[FIRST_ADDRESS_REGNUM
+1])
1100 /* Insn: mov sp,a1. */
1101 this_strategy_size
= 1;
1104 /* Insn: add size,a1. */
1105 this_strategy_size
+= SIZE_ADD_AX (size
);
1107 /* Insn: fmov (a1+),fs#, for each fs# to be restored. */
1108 this_strategy_size
+= 3 * num_regs_to_save
;
1109 /* If size is large enough, we may be able to save a
1111 if (size
+ 4 * num_regs_to_save
+ REG_SAVE_BYTES
> 255)
1113 /* Insn: mov a1,sp. */
1114 this_strategy_size
+= 2;
1116 /* If we don't have to restore any non-FP registers,
1117 we'll be able to save one byte by using rets. */
1118 if (! REG_SAVE_BYTES
)
1119 this_strategy_size
--;
1121 if (this_strategy_size
< strategy_size
)
1123 strategy
= restore_a1
;
1124 strategy_size
= this_strategy_size
;
1130 case restore_sp_post_adjust
:
1133 case restore_sp_pre_adjust
:
1134 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1140 case restore_sp_partial_adjust
:
1141 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1143 GEN_INT (size
+ 4 * num_regs_to_save
1144 + REG_SAVE_BYTES
- 252)));
1145 size
= 252 - REG_SAVE_BYTES
- 4 * num_regs_to_save
;
1149 reg
= gen_rtx_REG (SImode
, FIRST_ADDRESS_REGNUM
+ 1);
1150 emit_insn (gen_movsi (reg
, stack_pointer_rtx
));
1152 emit_insn (gen_addsi3 (reg
, reg
, GEN_INT (size
)));
1160 /* Adjust the selected register, if any, for post-increment. */
1162 reg
= gen_rtx_POST_INC (SImode
, reg
);
1164 for (i
= FIRST_FP_REGNUM
; i
<= LAST_FP_REGNUM
; ++i
)
1165 if (df_regs_ever_live_p (i
) && ! call_really_used_regs
[i
])
1173 /* If we aren't using a post-increment register, use an
1175 addr
= gen_rtx_PLUS (SImode
,
1180 addr
= stack_pointer_rtx
;
1184 emit_insn (gen_movsf (gen_rtx_REG (SFmode
, i
),
1185 gen_rtx_MEM (SFmode
, addr
)));
1188 /* If we were using the restore_a1 strategy and the number of
1189 bytes to be released won't fit in the `ret' byte, copy `a1'
1190 to `sp', to avoid having to use `add' to adjust it. */
1191 if (! frame_pointer_needed
&& reg
&& size
+ REG_SAVE_BYTES
> 255)
1193 emit_move_insn (stack_pointer_rtx
, XEXP (reg
, 0));
1198 /* Maybe cut back the stack, except for the register save area.
1200 If the frame pointer exists, then use the frame pointer to
1203 If the stack size + register save area is more than 255 bytes,
1204 then the stack must be cut back here since the size + register
1205 save size is too big for a ret/retf instruction.
1207 Else leave it alone, it will be cut back as part of the
1208 ret/retf instruction, or there wasn't any stack to begin with.
1210 Under no circumstances should the register save area be
1211 deallocated here, that would leave a window where an interrupt
1212 could occur and trash the register save area. */
1213 if (frame_pointer_needed
)
1215 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
1218 else if (size
+ REG_SAVE_BYTES
> 255)
1220 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1226 /* Adjust the stack and restore callee-saved registers, if any. */
1227 if (size
|| df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1228 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1229 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1230 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1231 || frame_pointer_needed
)
1232 emit_jump_insn (gen_return_internal_regs
1233 (GEN_INT (size
+ REG_SAVE_BYTES
)));
1235 emit_jump_insn (gen_return_internal ());
1238 /* Recognize the PARALLEL rtx generated by mn10300_gen_multiple_store().
1239 This function is for MATCH_PARALLEL and so assumes OP is known to be
1240 parallel. If OP is a multiple store, return a mask indicating which
1241 registers it saves. Return 0 otherwise. */
1244 mn10300_store_multiple_operation (rtx op
,
1245 enum machine_mode mode ATTRIBUTE_UNUSED
)
1253 count
= XVECLEN (op
, 0);
1257 /* Check that first instruction has the form (set (sp) (plus A B)) */
1258 elt
= XVECEXP (op
, 0, 0);
1259 if (GET_CODE (elt
) != SET
1260 || (! REG_P (SET_DEST (elt
)))
1261 || REGNO (SET_DEST (elt
)) != STACK_POINTER_REGNUM
1262 || GET_CODE (SET_SRC (elt
)) != PLUS
)
1265 /* Check that A is the stack pointer and B is the expected stack size.
1266 For OP to match, each subsequent instruction should push a word onto
1267 the stack. We therefore expect the first instruction to create
1268 COUNT-1 stack slots. */
1269 elt
= SET_SRC (elt
);
1270 if ((! REG_P (XEXP (elt
, 0)))
1271 || REGNO (XEXP (elt
, 0)) != STACK_POINTER_REGNUM
1272 || (! CONST_INT_P (XEXP (elt
, 1)))
1273 || INTVAL (XEXP (elt
, 1)) != -(count
- 1) * 4)
1276 /* Now go through the rest of the vector elements. They must be
1277 ordered so that the first instruction stores the highest-numbered
1278 register to the highest stack slot and that subsequent instructions
1279 store a lower-numbered register to the slot below.
1281 LAST keeps track of the smallest-numbered register stored so far.
1282 MASK is the set of stored registers. */
1283 last
= LAST_EXTENDED_REGNUM
+ 1;
1285 for (i
= 1; i
< count
; i
++)
1287 /* Check that element i is a (set (mem M) R) and that R is valid. */
1288 elt
= XVECEXP (op
, 0, i
);
1289 if (GET_CODE (elt
) != SET
1290 || (! MEM_P (SET_DEST (elt
)))
1291 || (! REG_P (SET_SRC (elt
)))
1292 || REGNO (SET_SRC (elt
)) >= last
)
1295 /* R was OK, so provisionally add it to MASK. We return 0 in any
1296 case if the rest of the instruction has a flaw. */
1297 last
= REGNO (SET_SRC (elt
));
1298 mask
|= (1 << last
);
1300 /* Check that M has the form (plus (sp) (const_int -I*4)) */
1301 elt
= XEXP (SET_DEST (elt
), 0);
1302 if (GET_CODE (elt
) != PLUS
1303 || (! REG_P (XEXP (elt
, 0)))
1304 || REGNO (XEXP (elt
, 0)) != STACK_POINTER_REGNUM
1305 || (! CONST_INT_P (XEXP (elt
, 1)))
1306 || INTVAL (XEXP (elt
, 1)) != -i
* 4)
1310 /* All or none of the callee-saved extended registers must be in the set. */
1311 if ((mask
& 0x3c000) != 0
1312 && (mask
& 0x3c000) != 0x3c000)
1318 /* Implement TARGET_PREFERRED_RELOAD_CLASS. */
1321 mn10300_preferred_reload_class (rtx x
, reg_class_t rclass
)
1323 if (x
== stack_pointer_rtx
&& rclass
!= SP_REGS
)
1324 return ADDRESS_OR_EXTENDED_REGS
;
1327 && !HARD_REGISTER_P (x
))
1328 || (GET_CODE (x
) == SUBREG
1329 && REG_P (SUBREG_REG (x
))
1330 && !HARD_REGISTER_P (SUBREG_REG (x
))))
1331 return LIMIT_RELOAD_CLASS (GET_MODE (x
), rclass
);
1336 /* Implement TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
1339 mn10300_preferred_output_reload_class (rtx x
, reg_class_t rclass
)
1341 if (x
== stack_pointer_rtx
&& rclass
!= SP_REGS
)
1342 return ADDRESS_OR_EXTENDED_REGS
;
1347 /* What (if any) secondary registers are needed to move IN with mode
1348 MODE into a register in register class RCLASS.
1350 We might be able to simplify this. */
1353 mn10300_secondary_reload_class (enum reg_class rclass
, enum machine_mode mode
,
1358 /* Strip off any SUBREG expressions from IN. Basically we want
1359 to know if IN is a pseudo or (subreg (pseudo)) as those can
1360 turn into MEMs during reload. */
1361 while (GET_CODE (inner
) == SUBREG
)
1362 inner
= SUBREG_REG (inner
);
1364 /* Memory loads less than a full word wide can't have an
1365 address or stack pointer destination. They must use
1366 a data register as an intermediate register. */
1369 && REGNO (inner
) >= FIRST_PSEUDO_REGISTER
))
1370 && (mode
== QImode
|| mode
== HImode
)
1371 && (rclass
== ADDRESS_REGS
|| rclass
== SP_REGS
1372 || rclass
== SP_OR_ADDRESS_REGS
))
1375 return DATA_OR_EXTENDED_REGS
;
1379 /* We can't directly load sp + const_int into a data register;
1380 we must use an address register as an intermediate. */
1381 if (rclass
!= SP_REGS
1382 && rclass
!= ADDRESS_REGS
1383 && rclass
!= SP_OR_ADDRESS_REGS
1384 && rclass
!= SP_OR_EXTENDED_REGS
1385 && rclass
!= ADDRESS_OR_EXTENDED_REGS
1386 && rclass
!= SP_OR_ADDRESS_OR_EXTENDED_REGS
1387 && (in
== stack_pointer_rtx
1388 || (GET_CODE (in
) == PLUS
1389 && (XEXP (in
, 0) == stack_pointer_rtx
1390 || XEXP (in
, 1) == stack_pointer_rtx
))))
1391 return ADDRESS_REGS
;
1394 && rclass
== FP_REGS
)
1396 /* We can't load directly into an FP register from a
1397 constant address. */
1399 && CONSTANT_ADDRESS_P (XEXP (in
, 0)))
1400 return DATA_OR_EXTENDED_REGS
;
1402 /* Handle case were a pseudo may not get a hard register
1403 but has an equivalent memory location defined. */
1405 && REGNO (inner
) >= FIRST_PSEUDO_REGISTER
1406 && reg_equiv_mem
[REGNO (inner
)]
1407 && CONSTANT_ADDRESS_P (XEXP (reg_equiv_mem
[REGNO (inner
)], 0)))
1408 return DATA_OR_EXTENDED_REGS
;
1411 /* Otherwise assume no secondary reloads are needed. */
1416 mn10300_initial_offset (int from
, int to
)
1418 /* The difference between the argument pointer and the frame pointer
1419 is the size of the callee register save area. */
1420 if (from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
1422 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1423 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1424 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1425 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1426 || fp_regs_to_save ()
1427 || frame_pointer_needed
)
1428 return REG_SAVE_BYTES
1429 + 4 * fp_regs_to_save ();
1434 /* The difference between the argument pointer and the stack pointer is
1435 the sum of the size of this function's frame, the callee register save
1436 area, and the fixed stack space needed for function calls (if any). */
1437 if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
1439 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1440 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1441 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1442 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1443 || fp_regs_to_save ()
1444 || frame_pointer_needed
)
1445 return (get_frame_size () + REG_SAVE_BYTES
1446 + 4 * fp_regs_to_save ()
1447 + (crtl
->outgoing_args_size
1448 ? crtl
->outgoing_args_size
+ 4 : 0));
1450 return (get_frame_size ()
1451 + (crtl
->outgoing_args_size
1452 ? crtl
->outgoing_args_size
+ 4 : 0));
1455 /* The difference between the frame pointer and stack pointer is the sum
1456 of the size of this function's frame and the fixed stack space needed
1457 for function calls (if any). */
1458 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
1459 return (get_frame_size ()
1460 + (crtl
->outgoing_args_size
1461 ? crtl
->outgoing_args_size
+ 4 : 0));
1466 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1469 mn10300_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
1471 /* Return values > 8 bytes in length in memory. */
1472 return (int_size_in_bytes (type
) > 8
1473 || int_size_in_bytes (type
) == 0
1474 || TYPE_MODE (type
) == BLKmode
);
1477 /* Flush the argument registers to the stack for a stdarg function;
1478 return the new argument pointer. */
1480 mn10300_builtin_saveregs (void)
1483 tree fntype
= TREE_TYPE (current_function_decl
);
1484 int argadj
= ((!stdarg_p (fntype
))
1485 ? UNITS_PER_WORD
: 0);
1486 alias_set_type set
= get_varargs_alias_set ();
1489 offset
= plus_constant (crtl
->args
.arg_offset_rtx
, argadj
);
1491 offset
= crtl
->args
.arg_offset_rtx
;
1493 mem
= gen_rtx_MEM (SImode
, crtl
->args
.internal_arg_pointer
);
1494 set_mem_alias_set (mem
, set
);
1495 emit_move_insn (mem
, gen_rtx_REG (SImode
, 0));
1497 mem
= gen_rtx_MEM (SImode
,
1498 plus_constant (crtl
->args
.internal_arg_pointer
, 4));
1499 set_mem_alias_set (mem
, set
);
1500 emit_move_insn (mem
, gen_rtx_REG (SImode
, 1));
1502 return copy_to_reg (expand_binop (Pmode
, add_optab
,
1503 crtl
->args
.internal_arg_pointer
,
1504 offset
, 0, 0, OPTAB_LIB_WIDEN
));
1508 mn10300_va_start (tree valist
, rtx nextarg
)
1510 nextarg
= expand_builtin_saveregs ();
1511 std_expand_builtin_va_start (valist
, nextarg
);
1514 /* Return true when a parameter should be passed by reference. */
1517 mn10300_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
1518 enum machine_mode mode
, const_tree type
,
1519 bool named ATTRIBUTE_UNUSED
)
1521 unsigned HOST_WIDE_INT size
;
1524 size
= int_size_in_bytes (type
);
1526 size
= GET_MODE_SIZE (mode
);
1528 return (size
> 8 || size
== 0);
1531 /* Return an RTX to represent where a value with mode MODE will be returned
1532 from a function. If the result is NULL_RTX, the argument is pushed. */
1535 mn10300_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
1536 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1538 rtx result
= NULL_RTX
;
1541 /* We only support using 2 data registers as argument registers. */
1544 /* Figure out the size of the object to be passed. */
1545 if (mode
== BLKmode
)
1546 size
= int_size_in_bytes (type
);
1548 size
= GET_MODE_SIZE (mode
);
1550 cum
->nbytes
= (cum
->nbytes
+ 3) & ~3;
1552 /* Don't pass this arg via a register if all the argument registers
1554 if (cum
->nbytes
> nregs
* UNITS_PER_WORD
)
1557 /* Don't pass this arg via a register if it would be split between
1558 registers and memory. */
1559 if (type
== NULL_TREE
1560 && cum
->nbytes
+ size
> nregs
* UNITS_PER_WORD
)
1563 switch (cum
->nbytes
/ UNITS_PER_WORD
)
1566 result
= gen_rtx_REG (mode
, FIRST_ARGUMENT_REGNUM
);
1569 result
= gen_rtx_REG (mode
, FIRST_ARGUMENT_REGNUM
+ 1);
1578 /* Update the data in CUM to advance over an argument
1579 of mode MODE and data type TYPE.
1580 (TYPE is null for libcalls where that information may not be available.) */
1583 mn10300_function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
1584 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1586 cum
->nbytes
+= (mode
!= BLKmode
1587 ? (GET_MODE_SIZE (mode
) + 3) & ~3
1588 : (int_size_in_bytes (type
) + 3) & ~3);
1591 /* Return the number of bytes of registers to use for an argument passed
1592 partially in registers and partially in memory. */
1595 mn10300_arg_partial_bytes (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
1596 tree type
, bool named ATTRIBUTE_UNUSED
)
1600 /* We only support using 2 data registers as argument registers. */
1603 /* Figure out the size of the object to be passed. */
1604 if (mode
== BLKmode
)
1605 size
= int_size_in_bytes (type
);
1607 size
= GET_MODE_SIZE (mode
);
1609 cum
->nbytes
= (cum
->nbytes
+ 3) & ~3;
1611 /* Don't pass this arg via a register if all the argument registers
1613 if (cum
->nbytes
> nregs
* UNITS_PER_WORD
)
1616 if (cum
->nbytes
+ size
<= nregs
* UNITS_PER_WORD
)
1619 /* Don't pass this arg via a register if it would be split between
1620 registers and memory. */
1621 if (type
== NULL_TREE
1622 && cum
->nbytes
+ size
> nregs
* UNITS_PER_WORD
)
1625 return nregs
* UNITS_PER_WORD
- cum
->nbytes
;
1628 /* Return the location of the function's value. This will be either
1629 $d0 for integer functions, $a0 for pointers, or a PARALLEL of both
1630 $d0 and $a0 if the -mreturn-pointer-on-do flag is set. Note that
1631 we only return the PARALLEL for outgoing values; we do not want
1632 callers relying on this extra copy. */
1635 mn10300_function_value (const_tree valtype
,
1636 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
1640 enum machine_mode mode
= TYPE_MODE (valtype
);
1642 if (! POINTER_TYPE_P (valtype
))
1643 return gen_rtx_REG (mode
, FIRST_DATA_REGNUM
);
1644 else if (! TARGET_PTR_A0D0
|| ! outgoing
1645 || cfun
->returns_struct
)
1646 return gen_rtx_REG (mode
, FIRST_ADDRESS_REGNUM
);
1648 rv
= gen_rtx_PARALLEL (mode
, rtvec_alloc (2));
1650 = gen_rtx_EXPR_LIST (VOIDmode
,
1651 gen_rtx_REG (mode
, FIRST_ADDRESS_REGNUM
),
1655 = gen_rtx_EXPR_LIST (VOIDmode
,
1656 gen_rtx_REG (mode
, FIRST_DATA_REGNUM
),
1661 /* Implements TARGET_LIBCALL_VALUE. */
1664 mn10300_libcall_value (enum machine_mode mode
,
1665 const_rtx fun ATTRIBUTE_UNUSED
)
1667 return gen_rtx_REG (mode
, FIRST_DATA_REGNUM
);
1670 /* Implements FUNCTION_VALUE_REGNO_P. */
1673 mn10300_function_value_regno_p (const unsigned int regno
)
1675 return (regno
== FIRST_DATA_REGNUM
|| regno
== FIRST_ADDRESS_REGNUM
);
1678 /* Output a compare insn. */
1681 mn10300_output_cmp (rtx operand
, rtx insn
)
1686 /* We can save a byte if we can find a register which has the value
1688 temp
= PREV_INSN (insn
);
1689 while (optimize
&& temp
)
1693 /* We allow the search to go through call insns. We record
1694 the fact that we've past a CALL_INSN and reject matches which
1695 use call clobbered registers. */
1698 || GET_CODE (temp
) == BARRIER
)
1704 if (GET_CODE (temp
) == NOTE
)
1706 temp
= PREV_INSN (temp
);
1710 /* It must be an insn, see if it is a simple set. */
1711 set
= single_set (temp
);
1714 temp
= PREV_INSN (temp
);
1718 /* Are we setting a data register to zero (this does not win for
1721 If it's a call clobbered register, have we past a call?
1723 Make sure the register we find isn't the same as ourself;
1724 the mn10300 can't encode that.
1726 ??? reg_set_between_p return nonzero anytime we pass a CALL_INSN
1727 so the code to detect calls here isn't doing anything useful. */
1728 if (REG_P (SET_DEST (set
))
1729 && SET_SRC (set
) == CONST0_RTX (GET_MODE (SET_DEST (set
)))
1730 && !reg_set_between_p (SET_DEST (set
), temp
, insn
)
1731 && (REGNO_REG_CLASS (REGNO (SET_DEST (set
)))
1732 == REGNO_REG_CLASS (REGNO (operand
)))
1733 && REGNO_REG_CLASS (REGNO (SET_DEST (set
))) != EXTENDED_REGS
1734 && REGNO (SET_DEST (set
)) != REGNO (operand
)
1736 || ! call_really_used_regs
[REGNO (SET_DEST (set
))]))
1739 xoperands
[0] = operand
;
1740 xoperands
[1] = SET_DEST (set
);
1742 output_asm_insn ("cmp %1,%0", xoperands
);
1746 if (REGNO_REG_CLASS (REGNO (operand
)) == EXTENDED_REGS
1747 && REG_P (SET_DEST (set
))
1748 && SET_SRC (set
) == CONST0_RTX (GET_MODE (SET_DEST (set
)))
1749 && !reg_set_between_p (SET_DEST (set
), temp
, insn
)
1750 && (REGNO_REG_CLASS (REGNO (SET_DEST (set
)))
1751 != REGNO_REG_CLASS (REGNO (operand
)))
1752 && REGNO_REG_CLASS (REGNO (SET_DEST (set
))) == EXTENDED_REGS
1753 && REGNO (SET_DEST (set
)) != REGNO (operand
)
1755 || ! call_really_used_regs
[REGNO (SET_DEST (set
))]))
1758 xoperands
[0] = operand
;
1759 xoperands
[1] = SET_DEST (set
);
1761 output_asm_insn ("cmp %1,%0", xoperands
);
1764 temp
= PREV_INSN (temp
);
/* Similarly, but when using a zero_extract pattern for a btst where
   the source operand might end up in memory: return nonzero if the
   LEN-bit mask starting at BIT fits entirely within one byte of a
   32-bit word, so a byte-wide btst can be used.  */

int
mn10300_mask_ok_for_mem_btst (int len, int bit)
{
  unsigned int mask = 0;

  while (len > 0)
    {
      /* Use an unsigned shift: 1 << 31 would overflow a signed int
	 (undefined behavior).  */
      mask |= (1U << bit);
      bit++;
      len--;
    }

  /* MASK must fit into an 8bit value.  */
  return (((mask & 0xff) == mask)
	  || ((mask & 0xff00) == mask)
	  || ((mask & 0xff0000) == mask)
	  || ((mask & 0xff000000) == mask));
}
1790 /* Return 1 if X contains a symbolic expression. We know these
1791 expressions will have one of a few well defined forms, so
1792 we need only check those forms. */
1795 mn10300_symbolic_operand (rtx op
,
1796 enum machine_mode mode ATTRIBUTE_UNUSED
)
1798 switch (GET_CODE (op
))
1805 return ((GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
1806 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
1807 && CONST_INT_P (XEXP (op
, 1)));
1813 /* Try machine dependent ways of modifying an illegitimate address
1814 to be legitimate. If we find one, return the new valid address.
1815 This macro is used in only one place: `memory_address' in explow.c.
1817 OLDX is the address as it was before break_out_memory_refs was called.
1818 In some cases it is useful to look at this to decide what needs to be done.
1820 Normally it is always safe for this macro to do nothing. It exists to
1821 recognize opportunities to optimize the output.
1823 But on a few ports with segmented architectures and indexed addressing
1824 (mn10300, hppa) it is used to rewrite certain problematical addresses. */
1827 mn10300_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
1828 enum machine_mode mode ATTRIBUTE_UNUSED
)
1830 if (flag_pic
&& ! mn10300_legitimate_pic_operand_p (x
))
1831 x
= mn10300_legitimize_pic_address (oldx
, NULL_RTX
);
1833 /* Uh-oh. We might have an address for x[n-100000]. This needs
1834 special handling to avoid creating an indexed memory address
1835 with x-100000 as the base. */
1836 if (GET_CODE (x
) == PLUS
1837 && mn10300_symbolic_operand (XEXP (x
, 1), VOIDmode
))
1839 /* Ugly. We modify things here so that the address offset specified
1840 by the index expression is computed first, then added to x to form
1841 the entire address. */
1843 rtx regx1
, regy1
, regy2
, y
;
1845 /* Strip off any CONST. */
1847 if (GET_CODE (y
) == CONST
)
1850 if (GET_CODE (y
) == PLUS
|| GET_CODE (y
) == MINUS
)
1852 regx1
= force_reg (Pmode
, force_operand (XEXP (x
, 0), 0));
1853 regy1
= force_reg (Pmode
, force_operand (XEXP (y
, 0), 0));
1854 regy2
= force_reg (Pmode
, force_operand (XEXP (y
, 1), 0));
1855 regx1
= force_reg (Pmode
,
1856 gen_rtx_fmt_ee (GET_CODE (y
), Pmode
, regx1
,
1858 return force_reg (Pmode
, gen_rtx_PLUS (Pmode
, regx1
, regy1
));
1864 /* Convert a non-PIC address in `orig' to a PIC address using @GOT or
1865 @GOTOFF in `reg'. */
1868 mn10300_legitimize_pic_address (rtx orig
, rtx reg
)
1870 if (GET_CODE (orig
) == LABEL_REF
1871 || (GET_CODE (orig
) == SYMBOL_REF
1872 && (CONSTANT_POOL_ADDRESS_P (orig
)
1873 || ! MN10300_GLOBAL_P (orig
))))
1876 reg
= gen_reg_rtx (Pmode
);
1878 emit_insn (gen_symGOTOFF2reg (reg
, orig
));
1881 else if (GET_CODE (orig
) == SYMBOL_REF
)
1884 reg
= gen_reg_rtx (Pmode
);
1886 emit_insn (gen_symGOT2reg (reg
, orig
));
1892 /* Return zero if X references a SYMBOL_REF or LABEL_REF whose symbol
1893 isn't protected by a PIC unspec; nonzero otherwise. */
1896 mn10300_legitimate_pic_operand_p (rtx x
)
1901 if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == LABEL_REF
)
1904 if (GET_CODE (x
) == UNSPEC
1905 && (XINT (x
, 1) == UNSPEC_PIC
1906 || XINT (x
, 1) == UNSPEC_GOT
1907 || XINT (x
, 1) == UNSPEC_GOTOFF
1908 || XINT (x
, 1) == UNSPEC_PLT
1909 || XINT (x
, 1) == UNSPEC_GOTSYM_OFF
))
1912 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
1913 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
1919 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1920 if (! mn10300_legitimate_pic_operand_p (XVECEXP (x
, i
, j
)))
1923 else if (fmt
[i
] == 'e'
1924 && ! mn10300_legitimate_pic_operand_p (XEXP (x
, i
)))
1931 /* Return TRUE if the address X, taken from a (MEM:MODE X) rtx, is
1932 legitimate, and FALSE otherwise.
1934 On the mn10300, the value in the address register must be
1935 in the same memory space/segment as the effective address.
1937 This is problematical for reload since it does not understand
1938 that base+index != index+base in a memory reference.
1940 Note it is still possible to use reg+reg addressing modes,
1941 it's just much more difficult. For a discussion of a possible
1942 workaround and solution, see the comments in pa.c before the
1943 function record_unscaled_index_insn_codes. */
1946 mn10300_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1948 if (CONSTANT_ADDRESS_P (x
)
1949 && (! flag_pic
|| mn10300_legitimate_pic_operand_p (x
)))
1952 if (RTX_OK_FOR_BASE_P (x
, strict
))
1956 && GET_CODE (x
) == POST_INC
1957 && RTX_OK_FOR_BASE_P (XEXP (x
, 0), strict
)
1958 && (mode
== SImode
|| mode
== SFmode
|| mode
== HImode
))
1961 if (GET_CODE (x
) == PLUS
)
1963 rtx base
= 0, index
= 0;
1965 if (REG_P (XEXP (x
, 0))
1966 && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x
, 0)), strict
))
1969 index
= XEXP (x
, 1);
1972 if (REG_P (XEXP (x
, 1))
1973 && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x
, 1)), strict
))
1976 index
= XEXP (x
, 0);
1979 if (base
!= 0 && index
!= 0)
1981 if (CONST_INT_P (index
))
1983 if (GET_CODE (index
) == CONST
1984 && GET_CODE (XEXP (index
, 0)) != PLUS
1986 || (mn10300_legitimate_pic_operand_p (index
)
1987 && GET_MODE_SIZE (mode
) == 4)))
1995 /* Used by LEGITIMATE_CONSTANT_P(). Returns TRUE if X is a valid
1996 constant. Note that some "constants" aren't valid, such as TLS
1997 symbols and unconverted GOT-based references, so we eliminate
2001 mn10300_legitimate_constant_p (rtx x
)
2003 switch (GET_CODE (x
))
2008 if (GET_CODE (x
) == PLUS
)
2010 if (! CONST_INT_P (XEXP (x
, 1)))
2015 /* Only some unspecs are valid as "constants". */
2016 if (GET_CODE (x
) == UNSPEC
)
2018 switch (XINT (x
, 1))
2020 case UNSPEC_INT_LABEL
:
2031 /* We must have drilled down to a symbol. */
2032 if (! mn10300_symbolic_operand (x
, Pmode
))
2044 mn10300_address_cost_1 (rtx x
, int *unsig
)
2046 switch (GET_CODE (x
))
2049 switch (REGNO_REG_CLASS (REGNO (x
)))
2075 return (mn10300_address_cost_1 (XEXP (x
, 0), unsig
)
2076 + mn10300_address_cost_1 (XEXP (x
, 1), unsig
));
2081 return mn10300_address_cost (XEXP (x
, 0), !optimize_size
);
2085 return mn10300_address_cost_1 (XEXP (x
, 0), unsig
);
2088 if (INTVAL (x
) == 0)
2090 if (INTVAL (x
) + (*unsig
? 0 : 0x80) < 0x100)
2092 if (INTVAL (x
) + (*unsig
? 0 : 0x8000) < 0x10000)
2094 if (INTVAL (x
) + (*unsig
? 0 : 0x800000) < 0x1000000)
2110 mn10300_address_cost (rtx x
, bool speed ATTRIBUTE_UNUSED
)
2113 return mn10300_address_cost_1 (x
, &s
);
2117 mn10300_rtx_costs (rtx x
, int code
, int outer_code
, int *total
,
2118 bool speed ATTRIBUTE_UNUSED
)
2123 /* Zeros are extremely cheap. */
2124 if (INTVAL (x
) == 0 && (outer_code
== SET
|| outer_code
== COMPARE
))
2126 /* If it fits in 8 bits, then it's still relatively cheap. */
2127 else if (INT_8_BITS (INTVAL (x
)))
2129 /* This is the "base" cost, includes constants where either the
2130 upper or lower 16bits are all zeros. */
2131 else if (INT_16_BITS (INTVAL (x
))
2132 || (INTVAL (x
) & 0xffff) == 0
2133 || (INTVAL (x
) & 0xffff0000) == 0)
2142 /* These are more costly than a CONST_INT, but we can relax them,
2143 so they're less costly than a CONST_DOUBLE. */
2148 /* We don't optimize CONST_DOUBLEs well nor do we relax them well,
2149 so their cost is very high. */
2154 /* This is cheap, we can use btst. */
2155 if (outer_code
== COMPARE
)
2159 /* ??? This probably needs more work. */
2171 /* Check whether a constant used to initialize a DImode or DFmode can
2172 use a clr instruction. The code here must be kept in sync with
2176 mn10300_wide_const_load_uses_clr (rtx operands
[2])
2178 long val
[2] = {0, 0};
2180 if ((! REG_P (operands
[0]))
2181 || REGNO_REG_CLASS (REGNO (operands
[0])) != DATA_REGS
)
2184 switch (GET_CODE (operands
[1]))
2189 split_double (operands
[1], &low
, &high
);
2190 val
[0] = INTVAL (low
);
2191 val
[1] = INTVAL (high
);
2196 if (GET_MODE (operands
[1]) == DFmode
)
2200 REAL_VALUE_FROM_CONST_DOUBLE (rv
, operands
[1]);
2201 REAL_VALUE_TO_TARGET_DOUBLE (rv
, val
);
2203 else if (GET_MODE (operands
[1]) == VOIDmode
2204 || GET_MODE (operands
[1]) == DImode
)
2206 val
[0] = CONST_DOUBLE_LOW (operands
[1]);
2207 val
[1] = CONST_DOUBLE_HIGH (operands
[1]);
2215 return val
[0] == 0 || val
[1] == 0;
2217 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
2218 may access it using GOTOFF instead of GOT. */
2221 mn10300_encode_section_info (tree decl
, rtx rtl
, int first ATTRIBUTE_UNUSED
)
2227 symbol
= XEXP (rtl
, 0);
2228 if (GET_CODE (symbol
) != SYMBOL_REF
)
2232 SYMBOL_REF_FLAG (symbol
) = (*targetm
.binds_local_p
) (decl
);
/* Dispatch tables on the mn10300 are extremely expensive in terms of code
   and readonly data size.  So we crank up the case threshold value to
   encourage a series of if/else comparisons to implement many small switch
   statements.  In theory, this value could be increased much more if we
   were solely optimizing for space, but we keep it "reasonable" to avoid
   serious code efficiency lossage.  */

static unsigned int
mn10300_case_values_threshold (void)
{
  return 6;
}
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.  Emits the fixed
   trampoline code; the two trailing .long slots are filled in later by
   mn10300_trampoline_init with the static chain and target address.  */

static void
mn10300_asm_trampoline_template (FILE *f)
{
  fprintf (f, "\tadd -4,sp\n");
  fprintf (f, "\t.long 0x0004fffa\n");
  fprintf (f, "\tmov (0,sp),a0\n");
  fprintf (f, "\tadd 4,sp\n");
  fprintf (f, "\tmov (13,a0),a1\n");
  fprintf (f, "\tmov (17,a0),a0\n");
  fprintf (f, "\tjmp (a0)\n");
  fprintf (f, "\t.long 0\n");
  fprintf (f, "\t.long 0\n");
}
2264 /* Worker function for TARGET_TRAMPOLINE_INIT. */
2267 mn10300_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
2269 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
2272 emit_block_move (m_tramp
, assemble_trampoline_template (),
2273 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
2275 mem
= adjust_address (m_tramp
, SImode
, 0x14);
2276 emit_move_insn (mem
, chain_value
);
2277 mem
= adjust_address (m_tramp
, SImode
, 0x18);
2278 emit_move_insn (mem
, fnaddr
);
2281 /* Output the assembler code for a C++ thunk function.
2282 THUNK_DECL is the declaration for the thunk function itself, FUNCTION
2283 is the decl for the target function. DELTA is an immediate constant
2284 offset to be added to the THIS parameter. If VCALL_OFFSET is nonzero
2285 the word at the adjusted address *(*THIS' + VCALL_OFFSET) should be
2286 additionally added to THIS. Finally jump to the entry point of
2290 mn10300_asm_output_mi_thunk (FILE * file
,
2291 tree thunk_fndecl ATTRIBUTE_UNUSED
,
2292 HOST_WIDE_INT delta
,
2293 HOST_WIDE_INT vcall_offset
,
2298 /* Get the register holding the THIS parameter. Handle the case
2299 where there is a hidden first argument for a returned structure. */
2300 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
2301 _this
= reg_names
[FIRST_ARGUMENT_REGNUM
+ 1];
2303 _this
= reg_names
[FIRST_ARGUMENT_REGNUM
];
2305 fprintf (file
, "\t%s Thunk Entry Point:\n", ASM_COMMENT_START
);
2308 fprintf (file
, "\tadd %d, %s\n", (int) delta
, _this
);
2312 const char * scratch
= reg_names
[FIRST_ADDRESS_REGNUM
+ 1];
2314 fprintf (file
, "\tmov %s, %s\n", _this
, scratch
);
2315 fprintf (file
, "\tmov (%s), %s\n", scratch
, scratch
);
2316 fprintf (file
, "\tadd %d, %s\n", (int) vcall_offset
, scratch
);
2317 fprintf (file
, "\tmov (%s), %s\n", scratch
, scratch
);
2318 fprintf (file
, "\tadd %s, %s\n", scratch
, _this
);
2321 fputs ("\tjmp ", file
);
2322 assemble_name (file
, XSTR (XEXP (DECL_RTL (function
), 0), 0));
2326 /* Return true if mn10300_output_mi_thunk would be able to output the
2327 assembler code for the thunk function specified by the arguments
2328 it is passed, and false otherwise. */
2331 mn10300_can_output_mi_thunk (const_tree thunk_fndecl ATTRIBUTE_UNUSED
,
2332 HOST_WIDE_INT delta ATTRIBUTE_UNUSED
,
2333 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED
,
2334 const_tree function ATTRIBUTE_UNUSED
)
2340 mn10300_hard_regno_mode_ok (unsigned int regno
, enum machine_mode mode
)
2342 if (REGNO_REG_CLASS (regno
) == FP_REGS
2343 || REGNO_REG_CLASS (regno
) == FP_ACC_REGS
)
2344 /* Do not store integer values in FP registers. */
2345 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& ((regno
& 1) == 0);
2347 if (((regno
) & 1) == 0 || GET_MODE_SIZE (mode
) == 4)
2350 if (REGNO_REG_CLASS (regno
) == DATA_REGS
2351 || (TARGET_AM33
&& REGNO_REG_CLASS (regno
) == ADDRESS_REGS
)
2352 || REGNO_REG_CLASS (regno
) == EXTENDED_REGS
)
2353 return GET_MODE_SIZE (mode
) <= 4;
2359 mn10300_modes_tieable (enum machine_mode mode1
, enum machine_mode mode2
)
2361 if (GET_MODE_CLASS (mode1
) == MODE_FLOAT
2362 && GET_MODE_CLASS (mode2
) != MODE_FLOAT
)
2365 if (GET_MODE_CLASS (mode2
) == MODE_FLOAT
2366 && GET_MODE_CLASS (mode1
) != MODE_FLOAT
)
2371 || (GET_MODE_SIZE (mode1
) <= 4 && GET_MODE_SIZE (mode2
) <= 4))
2378 mn10300_select_cc_mode (rtx x
)
2380 return (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
) ? CC_FLOATmode
: CCmode
;
2384 is_load_insn (rtx insn
)
2386 if (GET_CODE (PATTERN (insn
)) != SET
)
2389 return MEM_P (SET_SRC (PATTERN (insn
)));
2393 is_store_insn (rtx insn
)
2395 if (GET_CODE (PATTERN (insn
)) != SET
)
2398 return MEM_P (SET_DEST (PATTERN (insn
)));
2401 /* Update scheduling costs for situations that cannot be
2402 described using the attributes and DFA machinery.
2403 DEP is the insn being scheduled.
2404 INSN is the previous insn.
2405 COST is the current cycle cost for DEP. */
2408 mn10300_adjust_sched_cost (rtx insn
, rtx link
, rtx dep
, int cost
)
2410 int timings
= get_attr_timings (insn
);
2415 if (GET_CODE (insn
) == PARALLEL
)
2416 insn
= XVECEXP (insn
, 0, 0);
2418 if (GET_CODE (dep
) == PARALLEL
)
2419 dep
= XVECEXP (dep
, 0, 0);
2421 /* For the AM34 a load instruction that follows a
2422 store instruction incurs an extra cycle of delay. */
2423 if (mn10300_tune_cpu
== PROCESSOR_AM34
2424 && is_load_insn (dep
)
2425 && is_store_insn (insn
))
2428 /* For the AM34 a non-store, non-branch FPU insn that follows
2429 another FPU insn incurs a one cycle throughput increase. */
2430 else if (mn10300_tune_cpu
== PROCESSOR_AM34
2431 && ! is_store_insn (insn
)
2433 && GET_CODE (PATTERN (dep
)) == SET
2434 && GET_CODE (PATTERN (insn
)) == SET
2435 && GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (dep
)))) == MODE_FLOAT
2436 && GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (insn
)))) == MODE_FLOAT
)
2439 /* Resolve the conflict described in section 1-7-4 of
2440 Chapter 3 of the MN103E Series Instruction Manual
2443 "When the preceeding instruction is a CPU load or
2444 store instruction, a following FPU instruction
2445 cannot be executed until the CPU completes the
2446 latency period even though there are no register
2447 or flag dependencies between them." */
2449 /* Only the AM33-2 (and later) CPUs have FPU instructions. */
2450 if (! TARGET_AM33_2
)
2453 /* If a data dependence already exists then the cost is correct. */
2454 if (REG_NOTE_KIND (link
) == 0)
2457 /* Check that the instruction about to scheduled is an FPU instruction. */
2458 if (GET_CODE (PATTERN (dep
)) != SET
)
2461 if (GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (dep
)))) != MODE_FLOAT
)
2464 /* Now check to see if the previous instruction is a load or store. */
2465 if (! is_load_insn (insn
) && ! is_store_insn (insn
))
2468 /* XXX: Verify: The text of 1-7-4 implies that the restriction
2469 only applies when an INTEGER load/store preceeds an FPU
2470 instruction, but is this true ? For now we assume that it is. */
2471 if (GET_MODE_CLASS (GET_MODE (SET_SRC (PATTERN (insn
)))) != MODE_INT
)
2474 /* Extract the latency value from the timings attribute. */
2475 return timings
< 100 ? (timings
% 10) : (timings
% 100);
2479 mn10300_conditional_register_usage (void)
2485 for (i
= FIRST_EXTENDED_REGNUM
;
2486 i
<= LAST_EXTENDED_REGNUM
; i
++)
2487 fixed_regs
[i
] = call_used_regs
[i
] = 1;
2491 for (i
= FIRST_FP_REGNUM
;
2492 i
<= LAST_FP_REGNUM
; i
++)
2493 fixed_regs
[i
] = call_used_regs
[i
] = 1;
2496 fixed_regs
[PIC_OFFSET_TABLE_REGNUM
] =
2497 call_used_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2500 /* Initialize the GCC target structure. */
2502 #undef TARGET_EXCEPT_UNWIND_INFO
2503 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
2505 #undef TARGET_ASM_ALIGNED_HI_OP
2506 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2508 #undef TARGET_LEGITIMIZE_ADDRESS
2509 #define TARGET_LEGITIMIZE_ADDRESS mn10300_legitimize_address
2511 #undef TARGET_RTX_COSTS
2512 #define TARGET_RTX_COSTS mn10300_rtx_costs
2513 #undef TARGET_ADDRESS_COST
2514 #define TARGET_ADDRESS_COST mn10300_address_cost
2516 #undef TARGET_ASM_FILE_START
2517 #define TARGET_ASM_FILE_START mn10300_file_start
2518 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
2519 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
2521 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
2522 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA mn10300_asm_output_addr_const_extra
2524 #undef TARGET_DEFAULT_TARGET_FLAGS
2525 #define TARGET_DEFAULT_TARGET_FLAGS MASK_MULT_BUG | MASK_PTR_A0D0
2526 #undef TARGET_HANDLE_OPTION
2527 #define TARGET_HANDLE_OPTION mn10300_handle_option
2528 #undef TARGET_OPTION_OVERRIDE
2529 #define TARGET_OPTION_OVERRIDE mn10300_option_override
2530 #undef TARGET_OPTION_OPTIMIZATION_TABLE
2531 #define TARGET_OPTION_OPTIMIZATION_TABLE mn10300_option_optimization_table
2533 #undef TARGET_ENCODE_SECTION_INFO
2534 #define TARGET_ENCODE_SECTION_INFO mn10300_encode_section_info
2536 #undef TARGET_PROMOTE_PROTOTYPES
2537 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
2538 #undef TARGET_RETURN_IN_MEMORY
2539 #define TARGET_RETURN_IN_MEMORY mn10300_return_in_memory
2540 #undef TARGET_PASS_BY_REFERENCE
2541 #define TARGET_PASS_BY_REFERENCE mn10300_pass_by_reference
2542 #undef TARGET_CALLEE_COPIES
2543 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
2544 #undef TARGET_ARG_PARTIAL_BYTES
2545 #define TARGET_ARG_PARTIAL_BYTES mn10300_arg_partial_bytes
2546 #undef TARGET_FUNCTION_ARG
2547 #define TARGET_FUNCTION_ARG mn10300_function_arg
2548 #undef TARGET_FUNCTION_ARG_ADVANCE
2549 #define TARGET_FUNCTION_ARG_ADVANCE mn10300_function_arg_advance
2551 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
2552 #define TARGET_EXPAND_BUILTIN_SAVEREGS mn10300_builtin_saveregs
2553 #undef TARGET_EXPAND_BUILTIN_VA_START
2554 #define TARGET_EXPAND_BUILTIN_VA_START mn10300_va_start
2556 #undef TARGET_CASE_VALUES_THRESHOLD
2557 #define TARGET_CASE_VALUES_THRESHOLD mn10300_case_values_threshold
2559 #undef TARGET_LEGITIMATE_ADDRESS_P
2560 #define TARGET_LEGITIMATE_ADDRESS_P mn10300_legitimate_address_p
2562 #undef TARGET_PREFERRED_RELOAD_CLASS
2563 #define TARGET_PREFERRED_RELOAD_CLASS mn10300_preferred_reload_class
2564 #undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
2565 #define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS mn10300_preferred_output_reload_class
2567 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
2568 #define TARGET_ASM_TRAMPOLINE_TEMPLATE mn10300_asm_trampoline_template
2569 #undef TARGET_TRAMPOLINE_INIT
2570 #define TARGET_TRAMPOLINE_INIT mn10300_trampoline_init
2572 #undef TARGET_FUNCTION_VALUE
2573 #define TARGET_FUNCTION_VALUE mn10300_function_value
2574 #undef TARGET_LIBCALL_VALUE
2575 #define TARGET_LIBCALL_VALUE mn10300_libcall_value
2577 #undef TARGET_ASM_OUTPUT_MI_THUNK
2578 #define TARGET_ASM_OUTPUT_MI_THUNK mn10300_asm_output_mi_thunk
2579 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2580 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK mn10300_can_output_mi_thunk
2582 #undef TARGET_SCHED_ADJUST_COST
2583 #define TARGET_SCHED_ADJUST_COST mn10300_adjust_sched_cost
2585 #undef TARGET_CONDITIONAL_REGISTER_USAGE
2586 #define TARGET_CONDITIONAL_REGISTER_USAGE mn10300_conditional_register_usage
2588 struct gcc_target targetm
= TARGET_INITIALIZER
;