1 /* Output routines for Sunplus S+CORE processor
2 Copyright (C) 2005, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
3 Contributed by Sunnorth.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
32 #include "diagnostic-core.h"
47 #include "target-def.h"
48 #include "integrate.h"
49 #include "langhooks.h"
/* Forward declaration: score_option_override is installed as
   TARGET_OPTION_OVERRIDE below but defined later in the file.  */
54 static void score_option_override (void);
56 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
/* Per-optimization-level defaults: turn on -fomit-frame-pointer at -O1
   and above; the OPT_LEVELS_NONE entry is the table terminator.
   NOTE(review): the surrounding "{ ... };" punctuation of this
   initializer appears truncated in this extract -- confirm against the
   original file.  */
57 static const struct default_options score_option_optimization_table
[] =
59 { OPT_LEVELS_1_PLUS
, OPT_fomit_frame_pointer
, NULL
, 1 },
60 { OPT_LEVELS_NONE
, 0, NULL
, 0 }
/* Target hook table.  Each pair first #undefs the default from
   target-def.h, then points the hook at this port's implementation.
   The values are collected into `targetm' by TARGET_INITIALIZER at the
   bottom of the file.  */
63 #undef TARGET_ASM_FILE_START
64 #define TARGET_ASM_FILE_START score_asm_file_start
66 #undef TARGET_ASM_FILE_END
67 #define TARGET_ASM_FILE_END score_asm_file_end
69 #undef TARGET_ASM_FUNCTION_PROLOGUE
70 #define TARGET_ASM_FUNCTION_PROLOGUE score_function_prologue
72 #undef TARGET_ASM_FUNCTION_EPILOGUE
73 #define TARGET_ASM_FUNCTION_EPILOGUE score_function_epilogue
/* Option handling hooks.  */
75 #undef TARGET_DEFAULT_TARGET_FLAGS
76 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
77 #undef TARGET_HANDLE_OPTION
78 #define TARGET_HANDLE_OPTION score_handle_option
79 #undef TARGET_OPTION_OVERRIDE
80 #define TARGET_OPTION_OVERRIDE score_option_override
81 #undef TARGET_OPTION_OPTIMIZATION_TABLE
82 #define TARGET_OPTION_OPTIMIZATION_TABLE score_option_optimization_table
/* Addressing, scheduling and section-placement hooks.  */
84 #undef TARGET_LEGITIMIZE_ADDRESS
85 #define TARGET_LEGITIMIZE_ADDRESS score_legitimize_address
87 #undef TARGET_SCHED_ISSUE_RATE
88 #define TARGET_SCHED_ISSUE_RATE score_issue_rate
90 #undef TARGET_ASM_SELECT_RTX_SECTION
91 #define TARGET_ASM_SELECT_RTX_SECTION score_select_rtx_section
93 #undef TARGET_IN_SMALL_DATA_P
94 #define TARGET_IN_SMALL_DATA_P score_in_small_data_p
/* Calling-convention hooks.  */
96 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
97 #define TARGET_FUNCTION_OK_FOR_SIBCALL score_function_ok_for_sibcall
99 #undef TARGET_STRICT_ARGUMENT_NAMING
100 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
102 #undef TARGET_ASM_OUTPUT_MI_THUNK
103 #define TARGET_ASM_OUTPUT_MI_THUNK score_output_mi_thunk
105 #undef TARGET_PROMOTE_FUNCTION_MODE
106 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
108 #undef TARGET_PROMOTE_PROTOTYPES
109 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
111 #undef TARGET_MUST_PASS_IN_STACK
112 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
114 #undef TARGET_ARG_PARTIAL_BYTES
115 #define TARGET_ARG_PARTIAL_BYTES score_arg_partial_bytes
117 #undef TARGET_FUNCTION_ARG
118 #define TARGET_FUNCTION_ARG score_function_arg
120 #undef TARGET_FUNCTION_ARG_ADVANCE
121 #define TARGET_FUNCTION_ARG_ADVANCE score_function_arg_advance
123 #undef TARGET_PASS_BY_REFERENCE
124 #define TARGET_PASS_BY_REFERENCE score_pass_by_reference
126 #undef TARGET_RETURN_IN_MEMORY
127 #define TARGET_RETURN_IN_MEMORY score_return_in_memory
/* Cost-model and address-validation hooks.  */
129 #undef TARGET_RTX_COSTS
130 #define TARGET_RTX_COSTS score_rtx_costs
132 #undef TARGET_ADDRESS_COST
133 #define TARGET_ADDRESS_COST score_address_cost
135 #undef TARGET_LEGITIMATE_ADDRESS_P
136 #define TARGET_LEGITIMATE_ADDRESS_P score_legitimate_address_p
138 #undef TARGET_CAN_ELIMINATE
139 #define TARGET_CAN_ELIMINATE score_can_eliminate
141 #undef TARGET_CONDITIONAL_REGISTER_USAGE
142 #define TARGET_CONDITIONAL_REGISTER_USAGE score_conditional_register_usage
/* Trampoline (nested-function thunk) hooks.  */
144 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
145 #define TARGET_ASM_TRAMPOLINE_TEMPLATE score_asm_trampoline_template
146 #undef TARGET_TRAMPOLINE_INIT
147 #define TARGET_TRAMPOLINE_INIT score_trampoline_init
/* Head of the list of symbols for which .extern directives may need to
   be emitted at end of file (see TARGET_ASM_FILE_END above).
   NOTE(review): presumably built up by score_output_external /
   score7_output_external -- confirm in the score7/score3 sources.  */
149 struct extern_list
*extern_head
= 0;
151 /* default 0 = NO_REGS */
/* Maps constraint letters (indexed by character value) to register
   classes; entries not set elsewhere stay 0, i.e. NO_REGS.  */
152 enum reg_class score_char_to_class
[256];
154 /* Implement TARGET_RETURN_IN_MEMORY. In S+core,
155 small structures are returned in a register.
156 Objects with varying size must still be returned in memory. */
/* Dispatch to the score7 or score3 implementation based on the CPU
   family selected by the -mscoreN target flags.  NOTE(review): the
   declarator ("static bool"), braces, and the default/unreachable arm
   are missing from this extract.  */
158 score_return_in_memory (const_tree type
, const_tree fndecl ATTRIBUTE_UNUSED
)
160 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
161 return score7_return_in_memory (type
, fndecl
);
162 else if (TARGET_SCORE3
)
163 return score3_return_in_memory (type
, fndecl
);
168 /* Return nonzero when an argument must be passed by reference. */
/* Implement TARGET_PASS_BY_REFERENCE.  Unlike the other hooks in this
   file this one is family-independent: it simply defers to the
   must_pass_in_stack hook (set above to must_pass_in_stack_var_size),
   so only variable-sized types are passed by reference.  */
170 score_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
171 enum machine_mode mode
, const_tree type
,
172 bool named ATTRIBUTE_UNUSED
)
174 /* If we have a variable-sized parameter, we have no choice. */
175 return targetm
.calls
.must_pass_in_stack (mode
, type
);
178 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. Generate rtl rather than asm text
179 in order to avoid duplicating too much logic from elsewhere. */
/* Dispatch the thunk generation to the score7 or score3 back end.
   NOTE(review): the final parameter line ("tree function", orig line
   183/184), the declarator, and the braces are missing from this
   extract, though `function' is clearly used in the calls below.  */
181 score_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
182 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
185 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
186 score7_output_mi_thunk (file
, thunk_fndecl
, delta
, vcall_offset
, function
);
187 else if (TARGET_SCORE3
)
188 score3_output_mi_thunk (file
, thunk_fndecl
, delta
, vcall_offset
, function
);
193 /* Implement TARGET_FUNCTION_OK_FOR_SIBCALL. */
195 score_function_ok_for_sibcall (ATTRIBUTE_UNUSED tree decl
,
196 ATTRIBUTE_UNUSED tree exp
)
201 /* Set up the stack and frame (if desired) for the function. */
/* Implement TARGET_ASM_FUNCTION_PROLOGUE: emit the textual prologue by
   delegating to the per-family routine.  SIZE is the frame size chosen
   by the middle end (unused directly here; the sub-targets recompute
   what they need).  */
203 score_function_prologue (FILE *file
, HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
205 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
206 score7_function_prologue (file
, size
);
207 else if (TARGET_SCORE3
)
208 score3_function_prologue (file
, size
);
213 /* Do any necessary cleanup after a function to restore stack, frame,
/* (comment truncated in this extract -- original continues "...and
   regs.")  Implement TARGET_ASM_FUNCTION_EPILOGUE; mirror image of
   score_function_prologue above.  */
216 score_function_epilogue (FILE *file
,
217 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
219 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
220 score7_function_epilogue (file
, size
)
;
221 else if (TARGET_SCORE3
)
222 score3_function_epilogue (file
, size
);
227 /* Implement TARGET_SCHED_ISSUE_RATE. */
229 score_issue_rate (void)
234 /* Choose the section to use for the constant rtx expression X that has
237 score_select_rtx_section (enum machine_mode mode
, rtx x
,
238 unsigned HOST_WIDE_INT align
)
240 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
241 return score7_select_rtx_section (mode
, x
, align
);
242 else if (TARGET_SCORE3
)
243 return score3_select_rtx_section (mode
, x
, align
);
248 /* Implement TARGET_IN_SMALL_DATA_P. */
250 score_in_small_data_p (const_tree decl
)
252 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
253 return score7_in_small_data_p (decl
);
254 else if (TARGET_SCORE3
)
255 return score3_in_small_data_p (decl
);
260 /* Implement TARGET_ASM_FILE_START. */
/* Emit an identifying "# Sunplus S+coreN el|eb rev=..." banner for the
   selected CPU family and endianness, then the standard file start and,
   when generating PIC, a ".set pic" directive.  NOTE(review): the
   leading "if (TARGET_SCORE5)" arm, the final "else", and the
   "if (flag_pic)" guard lines are missing from this extract; the
   else-if chain below implies them.  */
262 score_asm_file_start (void)
265 fprintf (asm_out_file
, "# Sunplus S+core5 %s rev=%s\n",
266 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
267 else if (TARGET_SCORE5U
)
268 fprintf (asm_out_file
, "# Sunplus S+core5u %s rev=%s\n",
269 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
/* 7D is tested before 7 because TARGET_SCORE7D implies TARGET_SCORE7
   (see the mask handling in score_handle_option).  */
270 else if (TARGET_SCORE7D
)
271 fprintf (asm_out_file
, "# Sunplus S+core7d %s rev=%s\n",
272 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
273 else if (TARGET_SCORE7
)
274 fprintf (asm_out_file
, "# Sunplus S+core7 %s rev=%s\n",
275 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
/* Likewise 3D before 3.  */
276 else if (TARGET_SCORE3D
)
277 fprintf (asm_out_file
, "# Sunplus S+core3d %s rev=%s\n",
278 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
279 else if (TARGET_SCORE3
)
280 fprintf (asm_out_file
, "# Sunplus S+core3 %s rev=%s\n",
281 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
/* Fallback banner when no family flag matched.  */
283 fprintf (asm_out_file
, "# Sunplus S+core unknown %s rev=%s\n",
284 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
286 default_file_start ();
289 fprintf (asm_out_file
, "\t.set pic\n");
292 /* Implement TARGET_ASM_FILE_END. When using assembler macros, emit
293 .externs for any small-data variables that turned out to be external. */
/* Family dispatch; the per-family routines walk extern_head (declared
   near the top of this file).  */
295 score_asm_file_end (void)
297 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
298 score7_asm_file_end ();
299 else if (TARGET_SCORE3
)
300 score3_asm_file_end ();
/* Union of every CPU-family selection bit in target_flags; used by
   score_handle_option to clear the old family before setting a new
   one, so the -mscoreN options are mutually exclusive.  */
305 #define MASK_ALL_CPU_BITS \
306 (MASK_SCORE5 | MASK_SCORE5U | MASK_SCORE7 | MASK_SCORE7D \
307 | MASK_SCORE3 | MASK_SCORE3D)
309 /* Implement TARGET_HANDLE_OPTION. */
/* Process the -mscore5 / -mscore5u / -mscore7 / -mscore7d / -mscore3 /
   -mscore3d and -march= options: each clears all family bits and then
   sets the bits for the requested family.  Note the "D" variants also
   set their base family bit (7D implies 7, 3D implies 3).
   NOTE(review): the switch/case scaffolding (case OPT_mscore7d: etc.),
   the braces, and the default/return statements are missing from this
   extract; the strcmp chain below handles the -march= string forms.  */
311 score_handle_option (size_t code
, const char *arg
, int value ATTRIBUTE_UNUSED
)
/* -mscore7d: select score7 + score7d.  */
316 target_flags
&= ~(MASK_ALL_CPU_BITS
);
317 target_flags
|= MASK_SCORE7
| MASK_SCORE7D
;
/* -mscore3d: select score3 + score3d.  */
321 target_flags
&= ~(MASK_ALL_CPU_BITS
);
322 target_flags
|= MASK_SCORE3
| MASK_SCORE3D
;
/* -march=<name> forms.  */
326 if (strcmp (arg
, "score5") == 0)
328 target_flags
&= ~(MASK_ALL_CPU_BITS
);
329 target_flags
|= MASK_SCORE5
;
332 else if (strcmp (arg
, "score5u") == 0)
334 target_flags
&= ~(MASK_ALL_CPU_BITS
);
335 target_flags
|= MASK_SCORE5U
;
338 else if (strcmp (arg
, "score7") == 0)
340 target_flags
&= ~(MASK_ALL_CPU_BITS
);
341 target_flags
|= MASK_SCORE7
;
344 else if (strcmp (arg
, "score7d") == 0)
346 target_flags
&= ~(MASK_ALL_CPU_BITS
);
347 target_flags
|= MASK_SCORE7
| MASK_SCORE7D
;
350 else if (strcmp (arg
, "score3") == 0)
352 target_flags
&= ~(MASK_ALL_CPU_BITS
);
353 target_flags
|= MASK_SCORE3
;
356 else if (strcmp (arg
, "score3d") == 0)
358 target_flags
&= ~(MASK_ALL_CPU_BITS
);
359 target_flags
|= MASK_SCORE3
| MASK_SCORE3D
;
370 /* Implement TARGET_OPTION_OVERRIDE hook. */
372 score_option_override (void)
374 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
375 score7_option_override ();
376 else if (TARGET_SCORE3
)
377 score3_option_override ();
/* Default when no family flag is set: fall back to the score7 setup.
   NOTE(review): the `else' keyword line preceding this call is missing
   from this extract.  */
379 score7_option_override ();
382 /* Implement REGNO_REG_CLASS macro. */
384 score_reg_class (int regno
)
386 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
387 return score7_reg_class (regno
);
388 else if (TARGET_SCORE3
)
389 return score3_reg_class (regno
);
394 /* Implement PREFERRED_RELOAD_CLASS macro. */
396 score_preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, enum reg_class rclass
)
398 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
399 return score7_preferred_reload_class (x
, rclass
);
400 else if (TARGET_SCORE3
)
401 return score3_preferred_reload_class (x
, rclass
);
406 /* Implement SECONDARY_INPUT_RELOAD_CLASS
407 and SECONDARY_OUTPUT_RELOAD_CLASS macro. */
409 score_secondary_reload_class (enum reg_class rclass
,
410 enum machine_mode mode ATTRIBUTE_UNUSED
,
413 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
414 return score7_secondary_reload_class (rclass
, mode
, x
);
415 else if (TARGET_SCORE3
)
416 return score3_secondary_reload_class (rclass
, mode
, x
);
421 /* Implement CONST_OK_FOR_LETTER_P macro. */
423 score_const_ok_for_letter_p (HOST_WIDE_INT value
, char c
)
425 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
426 return score7_const_ok_for_letter_p (value
, c
);
427 else if (TARGET_SCORE3
)
428 return score3_const_ok_for_letter_p (value
, c
);
433 /* Implement EXTRA_CONSTRAINT macro. */
435 score_extra_constraint (rtx op
, char c
)
437 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
438 return score7_extra_constraint (op
, c
);
439 else if (TARGET_SCORE3
)
440 return score3_extra_constraint (op
, c
);
445 /* Return truth value on whether or not a given hard register
446 can support a given mode. */
448 score_hard_regno_mode_ok (unsigned int regno
, enum machine_mode mode
)
450 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
451 return score7_hard_regno_mode_ok (regno
, mode
);
452 else if (TARGET_SCORE3
)
453 return score3_hard_regno_mode_ok (regno
, mode
);
458 /* We can always eliminate to the hard frame pointer. We can eliminate
459 to the stack pointer unless a frame pointer is needed. */
/* Implement TARGET_CAN_ELIMINATE.  FROM is ignored: eligibility depends
   only on the destination register TO.  */
462 score_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
464 return (to
== HARD_FRAME_POINTER_REGNUM
465 || (to
== STACK_POINTER_REGNUM
&& !frame_pointer_needed
));
468 /* Implement INITIAL_ELIMINATION_OFFSET. FROM is either the frame
469 pointer or argument pointer. TO is either the stack pointer or
470 hard frame pointer. */
472 score_initial_elimination_offset (int from
,
473 int to ATTRIBUTE_UNUSED
)
475 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
476 return score7_initial_elimination_offset (from
, to
);
477 else if (TARGET_SCORE3
)
478 return score3_initial_elimination_offset (from
, to
);
483 /* Argument support functions. */
485 /* Initialize CUMULATIVE_ARGS for a function. */
487 score_init_cumulative_args (CUMULATIVE_ARGS
*cum
,
488 tree fntype ATTRIBUTE_UNUSED
,
489 rtx libname ATTRIBUTE_UNUSED
)
491 memset (cum
, 0, sizeof (CUMULATIVE_ARGS
));
494 /* Implement TARGET_FUNCTION_ARG_ADVANCE hook. */
496 score_function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
497 const_tree type
, bool named
)
499 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
500 score7_function_arg_advance (cum
, mode
, type
, named
);
501 else if (TARGET_SCORE3
)
502 score3_function_arg_advance (cum
, mode
, type
, named
);
507 /* Implement TARGET_ARG_PARTIAL_BYTES macro. */
509 score_arg_partial_bytes (CUMULATIVE_ARGS
*cum
,
510 enum machine_mode mode
, tree type
, bool named
)
512 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
513 return score7_arg_partial_bytes (cum
, mode
, type
, named
);
514 else if (TARGET_SCORE3
)
515 return score3_arg_partial_bytes (cum
, mode
, type
, named
);
520 /* Implement TARGET_FUNCTION_ARG hook. */
522 score_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
523 const_tree type
, bool named
)
525 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
526 return score7_function_arg (cum
, mode
, type
, named
);
527 else if (TARGET_SCORE3
)
528 return score3_function_arg (cum
, mode
, type
, named
);
533 /* Implement FUNCTION_VALUE and LIBCALL_VALUE. For normal calls,
534 VALTYPE is the return type and MODE is VOIDmode. For libcalls,
535 VALTYPE is null and MODE is the mode of the return value. */
537 score_function_value (const_tree valtype
, const_tree func ATTRIBUTE_UNUSED
,
538 enum machine_mode mode
)
540 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
541 return score7_function_value (valtype
, func
, mode
);
542 else if (TARGET_SCORE3
)
543 return score3_function_value (valtype
, func
, mode
);
548 /* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE. */
550 score_asm_trampoline_template (FILE *f
)
552 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
553 score7_asm_trampoline_template (f
);
554 else if (TARGET_SCORE3
)
555 score3_asm_trampoline_template (f
);
560 /* Implement TARGET_TRAMPOLINE_INIT. */
562 score_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
564 /* ??? These two routines are identical. */
565 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
566 score7_trampoline_init (m_tramp
, fndecl
, chain_value
);
567 else if (TARGET_SCORE3
)
568 score3_trampoline_init (m_tramp
, fndecl
, chain_value
);
573 /* This function is used to implement REG_MODE_OK_FOR_BASE_P macro. */
575 score_regno_mode_ok_for_base_p (int regno
, int strict
)
577 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
578 return score7_regno_mode_ok_for_base_p (regno
, strict
);
579 else if (TARGET_SCORE3
)
580 return score3_regno_mode_ok_for_base_p (regno
, strict
);
585 /* Implement TARGET_LEGITIMIZE_ADDRESS_P. */
587 score_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
589 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
590 return score7_legitimate_address_p (mode
, x
, strict
);
591 else if (TARGET_SCORE3
)
592 return score3_legitimate_address_p (mode
, x
, strict
);
597 /* This function is used to implement LEGITIMIZE_ADDRESS. If X can
598 be legitimized in a way that the generic machinery might not expect,
599 return the new address, else return X. */
601 score_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
602 enum machine_mode mode ATTRIBUTE_UNUSED
)
604 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
605 return score7_legitimize_address (x
);
606 else if (TARGET_SCORE3
)
607 return score3_legitimize_address (x
);
612 /* Return a number assessing the cost of moving a register in class
615 score_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
616 enum reg_class from
, enum reg_class to
)
618 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
619 return score7_register_move_cost (mode
, from
, to
);
620 else if (TARGET_SCORE3
)
621 return score3_register_move_cost (mode
, from
, to
);
626 /* Implement TARGET_RTX_COSTS macro. */
628 score_rtx_costs (rtx x
, int code
, int outer_code
, int *total
,
629 bool speed ATTRIBUTE_UNUSED
)
631 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
632 return score7_rtx_costs (x
, code
, outer_code
, total
, speed
);
633 else if (TARGET_SCORE3
)
634 return score3_rtx_costs (x
, code
, outer_code
, total
, speed
);
639 /* Implement TARGET_ADDRESS_COST macro. */
641 score_address_cost (rtx addr
,
642 bool speed ATTRIBUTE_UNUSED
)
644 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
645 return score7_address_cost (addr
);
646 else if (TARGET_SCORE3
)
647 return score3_address_cost (addr
);
652 /* Implement ASM_OUTPUT_EXTERNAL macro. */
654 score_output_external (FILE *file ATTRIBUTE_UNUSED
,
655 tree decl
, const char *name
)
657 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
658 return score7_output_external (file
, decl
, name
);
659 else if (TARGET_SCORE3
)
660 return score3_output_external (file
, decl
, name
);
665 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
666 back to a previous frame. */
668 score_return_addr (int count
, rtx frame ATTRIBUTE_UNUSED
)
670 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
671 return score7_return_addr (count
, frame
);
672 else if (TARGET_SCORE3
)
673 return score3_return_addr (count
, frame
);
678 /* Implement PRINT_OPERAND macro. */
680 score_print_operand (FILE *file
, rtx op
, int c
)
682 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
683 score7_print_operand (file
, op
, c
);
684 else if (TARGET_SCORE3
)
685 score3_print_operand (file
, op
, c
);
690 /* Implement PRINT_OPERAND_ADDRESS macro. */
692 score_print_operand_address (FILE *file
, rtx x
)
694 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
695 score7_print_operand_address (file
, x
);
696 else if (TARGET_SCORE3
)
697 score3_print_operand_address (file
, x
);
702 /* Implement SELECT_CC_MODE macro. */
704 score_select_cc_mode (enum rtx_code op
, rtx x
, rtx y
)
706 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
707 return score7_select_cc_mode (op
, x
, y
);
708 else if (TARGET_SCORE3
)
709 return score3_select_cc_mode (op
, x
, y
);
714 /* Return true if X is a symbolic constant that can be calculated in
715 the same way as a bare symbol. If it is, store the type of the
716 symbol in *SYMBOL_TYPE. */
718 score_symbolic_constant_p (rtx x
, enum score_symbol_type
*symbol_type
)
720 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
721 return score7_symbolic_constant_p (x
, symbol_type
);
722 else if (TARGET_SCORE3
)
723 return score3_symbolic_constant_p (x
, symbol_type
);
728 /* Generate the prologue instructions for entry into a S+core function. */
730 score_prologue (void)
732 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
734 else if (TARGET_SCORE3
)
740 /* Generate the epilogue instructions in a S+core function. */
742 score_epilogue (int sibcall_p
)
744 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
745 score7_epilogue (sibcall_p
);
746 else if (TARGET_SCORE3
)
747 score3_epilogue (sibcall_p
);
752 /* Call and sibcall pattern all need call this function. */
754 score_call (rtx
*ops
, bool sib
)
756 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
757 score7_call (ops
, sib
);
758 else if (TARGET_SCORE3
)
759 score3_call (ops
, sib
);
764 /* Call value and sibcall value pattern all need call this function. */
766 score_call_value (rtx
*ops
, bool sib
)
768 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
769 score7_call_value (ops
, sib
);
770 else if (TARGET_SCORE3
)
771 score3_call_value (ops
, sib
);
777 score_movsicc (rtx
*ops
)
779 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
780 score7_movsicc (ops
);
781 else if (TARGET_SCORE3
)
782 score3_movsicc (ops
);
789 score_movdi (rtx
*ops
)
791 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
793 else if (TARGET_SCORE3
)
800 score_zero_extract_andi (rtx
*ops
)
802 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
803 score7_zero_extract_andi (ops
);
804 else if (TARGET_SCORE3
)
805 score3_zero_extract_andi (ops
);
810 /* Output asm insn for move. */
812 score_move (rtx
*ops
)
814 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
815 return score7_move (ops
);
816 else if (TARGET_SCORE3
)
817 return score3_move (ops
);
822 /* Output asm insn for load. */
824 score_linsn (rtx
*ops
, enum score_mem_unit unit
, bool sign
)
826 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
827 return score7_linsn (ops
, unit
, sign
);
828 else if (TARGET_SCORE3
)
829 return score3_linsn (ops
, unit
, sign
);
834 /* Output asm insn for store. */
836 score_sinsn (rtx
*ops
, enum score_mem_unit unit
)
838 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
839 return score7_sinsn (ops
, unit
);
840 else if (TARGET_SCORE3
)
841 return score3_sinsn (ops
, unit
);
846 /* Output asm insn for load immediate. */
848 score_limm (rtx
*ops
)
850 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
851 return score7_limm (ops
);
852 else if (TARGET_SCORE3
)
853 return score3_limm (ops
);
859 /* Generate add insn. */
861 score_select_add_imm (rtx
*ops
, bool set_cc
)
863 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
864 return score7_select_add_imm (ops
, set_cc
);
865 else if (TARGET_SCORE3
)
866 return score3_select_add_imm (ops
, set_cc
);
871 /* Output arith insn. */
873 score_select (rtx
*ops
, const char *inst_pre
,
874 bool commu
, const char *letter
, bool set_cc
)
876 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
877 return score7_select (ops
, inst_pre
, commu
, letter
, set_cc
);
878 else if (TARGET_SCORE3
)
879 return score3_select (ops
, inst_pre
, commu
, letter
, set_cc
);
884 /* Output switch case insn, only supported in score3. */
886 score_output_casesi (rtx
*operands
)
889 return score3_output_casesi (operands
);
894 /* Output rpush insn, only supported in score3. */
896 score_rpush (rtx
*operands
)
899 return score3_rpush (operands
);
904 /* Output rpop insn, only supported in score3. */
906 score_rpop (rtx
*operands
)
909 return score3_rpop (operands
);
914 /* Emit lcb/lce insns. */
/* Expand an unaligned word load (extzv/extv-style operand array OPS)
   into the S+core lcb ("load cache begin") / lce ("load cache end")
   pair.  Only a full-word, byte-aligned extraction is supported.
   NOTE(review): the declarator, the local declarations of dst/src/
   len/off/addr_reg (unpacked from OPS), and the early-return for the
   unsupported case are missing from this extract.  */
916 score_unaligned_load (rtx
*ops
)
924 if (INTVAL (len
) != BITS_PER_WORD
925 || (INTVAL (off
) % BITS_PER_UNIT
) != 0)
928 gcc_assert (GET_MODE_SIZE (GET_MODE (dst
)) == GET_MODE_SIZE (SImode
));
/* Address must live in a register so lcb/lce can post-update it.  */
930 addr_reg
= copy_addr_to_reg (XEXP (src
, 0));
931 emit_insn (gen_move_lcb (addr_reg
, addr_reg
));
932 emit_insn (gen_move_lce (addr_reg
, addr_reg
, dst
));
937 /* Emit scb/sce insns. */
/* Store counterpart of score_unaligned_load: expand an unaligned word
   store into the scb/sce pair.  Same restrictions (full word, byte
   aligned offset); same missing scaffolding in this extract.
   NOTE(review): "INTVAL(len" below lacks the space before '(' used by
   GNU style elsewhere in this file (cf. score_unaligned_load) -- a
   cosmetic inconsistency worth normalizing in a code change.  */
939 score_unaligned_store (rtx
*ops
)
947 if (INTVAL(len
) != BITS_PER_WORD
948 || (INTVAL(off
) % BITS_PER_UNIT
) != 0)
951 gcc_assert (GET_MODE_SIZE (GET_MODE (src
)) == GET_MODE_SIZE (SImode
));
953 addr_reg
= copy_addr_to_reg (XEXP (dst
, 0));
954 emit_insn (gen_move_scb (addr_reg
, addr_reg
, src
));
955 emit_insn (gen_move_sce (addr_reg
, addr_reg
));
960 /* If length is short, generate move insns straight. */
/* Expand a short block copy without a loop: load LENGTH/UNITS_PER_WORD
   words from SRC into fresh pseudos, store them to DST, then copy any
   sub-word remainder with move_by_pieces.  Aligned memory uses plain
   word moves; unaligned memory uses the lcb/lcw/lce (load) and
   scb/scw/sce (store) cached-access sequences with a post-incremented
   address register.  NOTE(review): the declarator, braces, and several
   local-declaration lines (reg_count, regs, i) are missing from this
   extract.  */
962 score_block_move_straight (rtx dst
, rtx src
, HOST_WIDE_INT length
)
964 HOST_WIDE_INT leftover
;
/* Bytes that do not fill a whole word.  NOTE(review): presumably a
   "length -= leftover;" line (orig 969) sits between these two
   statements, making `length' word-exact below -- confirm against the
   original file.  */
968 leftover
= length
% UNITS_PER_WORD
;
970 reg_count
= length
/ UNITS_PER_WORD
;
972 regs
= XALLOCAVEC (rtx
, reg_count
);
973 for (i
= 0; i
< reg_count
; i
++)
974 regs
[i
] = gen_reg_rtx (SImode
);
976 /* Load from src to regs. */
977 if (MEM_ALIGN (src
) >= BITS_PER_WORD
)
979 HOST_WIDE_INT offset
= 0;
980 for (i
= 0; i
< reg_count
; offset
+= UNITS_PER_WORD
, i
++)
981 emit_move_insn (regs
[i
], adjust_address (src
, SImode
, offset
));
/* Unaligned source: lcb primes the access, lcw fetches intermediate
   words, lce fetches the final word.  */
983 else if (reg_count
>= 1)
985 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
987 emit_insn (gen_move_lcb (src_reg
, src_reg
));
988 for (i
= 0; i
< (reg_count
- 1); i
++)
989 emit_insn (gen_move_lcw (src_reg
, src_reg
, regs
[i
]));
990 emit_insn (gen_move_lce (src_reg
, src_reg
, regs
[i
]));
993 /* Store regs to dest. */
994 if (MEM_ALIGN (dst
) >= BITS_PER_WORD
)
996 HOST_WIDE_INT offset
= 0;
997 for (i
= 0; i
< reg_count
; offset
+= UNITS_PER_WORD
, i
++)
998 emit_move_insn (adjust_address (dst
, SImode
, offset
), regs
[i
]);
/* Unaligned destination: scb writes the first word, scw the middle
   words, sce flushes the tail.  */
1000 else if (reg_count
>= 1)
1002 rtx dst_reg
= copy_addr_to_reg (XEXP (dst
, 0));
1004 emit_insn (gen_move_scb (dst_reg
, dst_reg
, regs
[0]));
1005 for (i
= 1; i
< reg_count
; i
++)
1006 emit_insn (gen_move_scw (dst_reg
, dst_reg
, regs
[i
]));
1007 emit_insn (gen_move_sce (dst_reg
, dst_reg
));
1010 /* Mop up any left-over bytes. */
1013 src
= adjust_address (src
, BLKmode
, length
);
1014 dst
= adjust_address (dst
, BLKmode
, length
);
1015 move_by_pieces (dst
, src
, leftover
,
1016 MIN (MEM_ALIGN (src
), MEM_ALIGN (dst
)), 0);
1020 /* Generate loop head when dst or src is unaligned. */
/* Copy exactly one word (asserted below) before the main copy loop so
   the loop body can run with a primed lcb/scb state.  Chooses the
   cached (lcb/lcw, scb) form for an unaligned side and the
   post-increment word move (lw_a/sw_a, step 4) for an aligned side.
   NOTE(review): the "if (src_unaligned) ... else ..." and
   "if (dst_unaligned) ... else ..." control lines are missing from
   this extract; the paired emits below imply them.  */
1022 score_block_move_loop_head (rtx dst_reg
, HOST_WIDE_INT dst_align
,
1023 rtx src_reg
, HOST_WIDE_INT src_align
,
1024 HOST_WIDE_INT length
)
1026 bool src_unaligned
= (src_align
< BITS_PER_WORD
);
1027 bool dst_unaligned
= (dst_align
< BITS_PER_WORD
);
1029 rtx temp
= gen_reg_rtx (SImode
);
1031 gcc_assert (length
== UNITS_PER_WORD
);
/* Unaligned-source load of the head word.  */
1035 emit_insn (gen_move_lcb (src_reg
, src_reg
));
1036 emit_insn (gen_move_lcw (src_reg
, src_reg
, temp
));
/* Aligned-source load with post-increment by 4.  */
1039 emit_insn (gen_move_lw_a (src_reg
,
1040 src_reg
, gen_int_mode (4, SImode
), temp
));
/* Unaligned-destination store of the head word.  */
1043 emit_insn (gen_move_scb (dst_reg
, dst_reg
, temp
));
/* Aligned-destination store with post-increment by 4.  */
1045 emit_insn (gen_move_sw_a (dst_reg
,
1046 dst_reg
, gen_int_mode (4, SImode
), temp
));
1049 /* Generate loop body, copy length bytes per iteration. */
/* Emit the insns for one iteration of the copy loop: load
   LENGTH/UNITS_PER_WORD words into pseudos, then store them, choosing
   the cached (lcw/scw) form for an unaligned side and the
   post-increment (lw_a/sw_a) form for an aligned side.  Both forms
   advance the address register, so repeated emission walks the
   buffers.  NOTE(review): the declarator, braces, declaration of `i',
   and the src_unaligned/dst_unaligned if/else lines are missing from
   this extract.  */
1051 score_block_move_loop_body (rtx dst_reg
, HOST_WIDE_INT dst_align
,
1052 rtx src_reg
, HOST_WIDE_INT src_align
,
1053 HOST_WIDE_INT length
)
1055 int reg_count
= length
/ UNITS_PER_WORD
;
1056 rtx
*regs
= XALLOCAVEC (rtx
, reg_count
);
1058 bool src_unaligned
= (src_align
< BITS_PER_WORD
);
1059 bool dst_unaligned
= (dst_align
< BITS_PER_WORD
);
1061 for (i
= 0; i
< reg_count
; i
++)
1062 regs
[i
] = gen_reg_rtx (SImode
);
/* Unaligned source: cached word loads.  */
1066 for (i
= 0; i
< reg_count
; i
++)
1067 emit_insn (gen_move_lcw (src_reg
, src_reg
, regs
[i
]));
/* Aligned source: post-increment word loads.  */
1071 for (i
= 0; i
< reg_count
; i
++)
1072 emit_insn (gen_move_lw_a (src_reg
,
1073 src_reg
, gen_int_mode (4, SImode
), regs
[i
]));
/* Unaligned destination: cached word stores.  */
1078 for (i
= 0; i
< reg_count
; i
++)
1079 emit_insn (gen_move_scw (dst_reg
, dst_reg
, regs
[i
]));
/* Aligned destination: post-increment word stores.  */
1083 for (i
= 0; i
< reg_count
; i
++)
1084 emit_insn (gen_move_sw_a (dst_reg
,
1085 dst_reg
, gen_int_mode (4, SImode
), regs
[i
]));
1089 /* Generate loop foot, copy the leftover bytes. */
/* After the copy loop: first copy the remaining whole words via
   score_block_move_loop_body, flush an unaligned destination with sce,
   then move the sub-word remainder -- one halfword (lhu_b/sh_b) when
   possible, then single bytes (lbu_b/sb_b).  The -4 initial adjustment
   compensates for the post-increment left pending by the unaligned
   cached sequences.  NOTE(review): declarator, braces, the declaration
   of `temp', and several guard lines (e.g. around the loop_body call
   and the sce flush) are missing from this extract.  */
1091 score_block_move_loop_foot (rtx dst_reg
, HOST_WIDE_INT dst_align
,
1092 rtx src_reg
, HOST_WIDE_INT src_align
,
1093 HOST_WIDE_INT length
)
1095 bool src_unaligned
= (src_align
< BITS_PER_WORD
);
1096 bool dst_unaligned
= (dst_align
< BITS_PER_WORD
);
1098 HOST_WIDE_INT leftover
;
1100 leftover
= length
% UNITS_PER_WORD
;
/* Whole words remaining after the loop.  */
1104 score_block_move_loop_body (dst_reg
, dst_align
,
1105 src_reg
, src_align
, length
);
/* Flush the cached-store state for an unaligned destination.  */
1108 emit_insn (gen_move_sce (dst_reg
, dst_reg
));
1112 HOST_WIDE_INT src_adj
= src_unaligned
? -4 : 0;
1113 HOST_WIDE_INT dst_adj
= dst_unaligned
? -4 : 0;
1116 gcc_assert (leftover
< UNITS_PER_WORD
);
/* Move a halfword first when both sides are at least half-word
   aligned and at least half a word remains.  */
1118 if (leftover
>= UNITS_PER_WORD
/ 2
1119 && src_align
>= BITS_PER_WORD
/ 2
1120 && dst_align
>= BITS_PER_WORD
/ 2)
1122 temp
= gen_reg_rtx (HImode
);
1123 emit_insn (gen_move_lhu_b (src_reg
, src_reg
,
1124 gen_int_mode (src_adj
, SImode
), temp
));
1125 emit_insn (gen_move_sh_b (dst_reg
, dst_reg
,
1126 gen_int_mode (dst_adj
, SImode
), temp
));
/* Subsequent byte moves step past the halfword just copied.  */
1127 leftover
-= UNITS_PER_WORD
/ 2;
1128 src_adj
= UNITS_PER_WORD
/ 2;
1129 dst_adj
= UNITS_PER_WORD
/ 2;
/* Copy the remaining bytes one at a time.  NOTE(review): the loop's
   decrement of `leftover' and the adj updates per iteration are on
   lines missing from this extract.  */
1132 while (leftover
> 0)
1134 temp
= gen_reg_rtx (QImode
);
1135 emit_insn (gen_move_lbu_b (src_reg
, src_reg
,
1136 gen_int_mode (src_adj
, SImode
), temp
));
1137 emit_insn (gen_move_sb_b (dst_reg
, dst_reg
,
1138 gen_int_mode (dst_adj
, SImode
), temp
));
/* Tuning knobs for the block-move loop: each loop iteration copies
   between MIN_MOVE_REGS and MAX_MOVE_REGS whole words.  */
1146 #define MIN_MOVE_REGS 3
1147 #define MIN_MOVE_BYTES (MIN_MOVE_REGS * UNITS_PER_WORD)
1148 #define MAX_MOVE_REGS 4
1149 #define MAX_MOVE_BYTES (MAX_MOVE_REGS * UNITS_PER_WORD)
1151 /* The length is large, generate a loop if necessary.
1152 The loop is consisted by loop head/body/foot. */
/* Top-level looped block copy: optionally emit a one-word head (when
   either side is unaligned) to prime the cached access state, pick the
   largest per-iteration size in [MIN_MOVE_BYTES, MAX_MOVE_BYTES] that
   yields at least one iteration, emit the body with a backwards
   conditional branch until dst_reg reaches final_dst, and finish with
   the foot copying the leftover.  Falls back to
   score_block_move_straight when no loop is worthwhile.
   NOTE(review): declarator, braces, the guards around head_length /
   the straight-copy fallback / the label emission, and the final
   argument of expand_simple_binop are on lines missing from this
   extract.  */
1154 score_block_move_loop (rtx dst
, rtx src
, HOST_WIDE_INT length
)
1156 HOST_WIDE_INT src_align
= MEM_ALIGN (src
);
1157 HOST_WIDE_INT dst_align
= MEM_ALIGN (dst
);
1158 HOST_WIDE_INT loop_mov_bytes
;
1159 HOST_WIDE_INT iteration
= 0;
1160 HOST_WIDE_INT head_length
= 0, leftover
;
1161 rtx label
, src_reg
, dst_reg
, final_dst
, test
;
1163 bool gen_loop_head
= (src_align
< BITS_PER_WORD
1164 || dst_align
< BITS_PER_WORD
);
/* The head consumes one word before the loop starts.  */
1167 head_length
+= UNITS_PER_WORD
;
/* Choose the largest iteration size that fits at least once.  */
1169 for (loop_mov_bytes
= MAX_MOVE_BYTES
;
1170 loop_mov_bytes
>= MIN_MOVE_BYTES
;
1171 loop_mov_bytes
-= UNITS_PER_WORD
)
1173 iteration
= (length
- head_length
) / loop_mov_bytes
;
/* No profitable loop: copy straight-line instead.  */
1179 score_block_move_straight (dst
, src
, length
);
1183 leftover
= (length
- head_length
) % loop_mov_bytes
;
/* Materialize the addresses and the loop-termination address.  */
1186 src_reg
= copy_addr_to_reg (XEXP (src
, 0));
1187 dst_reg
= copy_addr_to_reg (XEXP (dst
, 0));
1188 final_dst
= expand_simple_binop (Pmode
, PLUS
, dst_reg
, GEN_INT (length
),
1192 score_block_move_loop_head (dst_reg
, dst_align
,
1193 src_reg
, src_align
, head_length
);
1195 label
= gen_label_rtx ();
1198 score_block_move_loop_body (dst_reg
, dst_align
,
1199 src_reg
, src_align
, loop_mov_bytes
);
/* Loop back while the destination pointer has not reached the end.  */
1201 test
= gen_rtx_NE (VOIDmode
, dst_reg
, final_dst
);
1202 emit_jump_insn (gen_cbranchsi4 (test
, dst_reg
, final_dst
, label
));
1204 score_block_move_loop_foot (dst_reg
, dst_align
,
1205 src_reg
, src_align
, leftover
);
1208 /* Generate block move, for misc.md: "movmemsi". */
/* Expander entry point.  OPS is the movmemsi operand array: presumably
   ops[0] = dst, ops[1] = src, ops[2] = length, ops[3] = alignment --
   only ops[2] is visibly unpacked in this extract; confirm the dst/src
   declarations (orig lines 1212-1213) against the original file.
   Strategy: unaligned little-endian copies of at least a word use the
   cached-access paths; small constant lengths go straight-line; larger
   constant lengths use the loop unless loop unrolling is enabled.
   NOTE(review): declarator, braces, return statements, and an early
   branch (orig lines 1219-1220) are missing from this extract.  */
1210 score_block_move (rtx
*ops
)
1214 rtx length
= ops
[2];
1216 if (TARGET_LITTLE_ENDIAN
1217 && (MEM_ALIGN (src
) < BITS_PER_WORD
|| MEM_ALIGN (dst
) < BITS_PER_WORD
)
1218 && INTVAL (length
) >= UNITS_PER_WORD
)
1221 if (GET_CODE (length
) == CONST_INT
)
/* Small fixed-size copy: no loop needed.  */
1223 if (INTVAL (length
) <= 2 * MAX_MOVE_BYTES
)
1225 score_block_move_straight (dst
, src
, INTVAL (length
));
/* Larger fixed-size copy: use the loop, but not when the user asked
   for loop unrolling (the unroller would expand it anyway).  */
1228 else if (optimize
&&
1229 !(flag_unroll_loops
|| flag_unroll_all_loops
))
1231 score_block_move_loop (dst
, src
, INTVAL (length
));
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE: when not generating
   PIC, release the PIC offset table register for general allocation
   (clear both fixed_regs and call_used_regs for it).
   NOTE(review): the declarator and the "if (!flag_pic)" guard line
   (orig 1240-1241) are missing from this extract.  */
1239 score_conditional_register_usage (void)
1242 fixed_regs
[PIC_OFFSET_TABLE_REGNUM
] =
1243 call_used_regs
[PIC_OFFSET_TABLE_REGNUM
] = 0;
/* Instantiate the target hook vector from the TARGET_* macros defined
   at the top of this file.  Must be the last thing in the file so all
   hook macros are already redefined.  */
1246 struct gcc_target targetm
= TARGET_INITIALIZER
;