1 /* Output routines for Sunplus S+CORE processor
2 Copyright (C) 2005, 2007, 2008 Free Software Foundation, Inc.
3 Contributed by Sunnorth.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
47 #include "target-def.h"
48 #include "integrate.h"
49 #include "langhooks.h"
54 #undef TARGET_ASM_FILE_START
55 #define TARGET_ASM_FILE_START score_asm_file_start
57 #undef TARGET_ASM_FILE_END
58 #define TARGET_ASM_FILE_END score_asm_file_end
60 #undef TARGET_ASM_FUNCTION_PROLOGUE
61 #define TARGET_ASM_FUNCTION_PROLOGUE score_function_prologue
63 #undef TARGET_ASM_FUNCTION_EPILOGUE
64 #define TARGET_ASM_FUNCTION_EPILOGUE score_function_epilogue
66 #undef TARGET_DEFAULT_TARGET_FLAGS
67 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
68 #undef TARGET_HANDLE_OPTION
69 #define TARGET_HANDLE_OPTION score_handle_option
71 #undef TARGET_SCHED_ISSUE_RATE
72 #define TARGET_SCHED_ISSUE_RATE score_issue_rate
74 #undef TARGET_ASM_SELECT_RTX_SECTION
75 #define TARGET_ASM_SELECT_RTX_SECTION score_select_rtx_section
77 #undef TARGET_IN_SMALL_DATA_P
78 #define TARGET_IN_SMALL_DATA_P score_in_small_data_p
80 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
81 #define TARGET_FUNCTION_OK_FOR_SIBCALL score_function_ok_for_sibcall
83 #undef TARGET_STRICT_ARGUMENT_NAMING
84 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
86 #undef TARGET_ASM_OUTPUT_MI_THUNK
87 #define TARGET_ASM_OUTPUT_MI_THUNK score_output_mi_thunk
89 #undef TARGET_PROMOTE_FUNCTION_ARGS
90 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
92 #undef TARGET_PROMOTE_FUNCTION_RETURN
93 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
95 #undef TARGET_PROMOTE_PROTOTYPES
96 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
98 #undef TARGET_MUST_PASS_IN_STACK
99 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
101 #undef TARGET_ARG_PARTIAL_BYTES
102 #define TARGET_ARG_PARTIAL_BYTES score_arg_partial_bytes
104 #undef TARGET_PASS_BY_REFERENCE
105 #define TARGET_PASS_BY_REFERENCE score_pass_by_reference
107 #undef TARGET_RETURN_IN_MEMORY
108 #define TARGET_RETURN_IN_MEMORY score_return_in_memory
110 #undef TARGET_RTX_COSTS
111 #define TARGET_RTX_COSTS score_rtx_costs
113 #undef TARGET_ADDRESS_COST
114 #define TARGET_ADDRESS_COST score_address_cost
/* NOTE(review): extraction artifact -- original line numbers are embedded
   in the text and some lines appear missing.  */
/* File-scope state: head of the list of small-data externs emitted at end
   of file, the two cached comparison operands, and the constraint-letter
   to register-class map (index 0..255 by letter).  */
116 struct extern_list
*extern_head
= 0;
117 rtx cmp_op0
, cmp_op1
;
119 /* default 0 = NO_REGS */
120 enum reg_class score_char_to_class
[256];
/* NOTE(review): extraction artifact -- original line numbers are embedded
   in the text and lines (return types, braces, fall-through returns)
   appear missing.  The visible pattern throughout: each hook dispatches to
   its score7_* variant for SCORE5/5U/7/7D targets, else to score3_* for
   SCORE3.  */
122 /* Implement TARGET_RETURN_IN_MEMORY. In S+core,
123 small structures are returned in a register.
124 Objects with varying size must still be returned in memory. */
126 score_return_in_memory (tree type
, tree fndecl ATTRIBUTE_UNUSED
)
128 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
129 return score7_return_in_memory (type
, fndecl
);
130 else if (TARGET_SCORE3
)
131 return score3_return_in_memory (type
, fndecl
);
136 /* Return nonzero when an argument must be passed by reference. */
138 score_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
139 enum machine_mode mode
, tree type
,
140 bool named ATTRIBUTE_UNUSED
)
142 /* If we have a variable-sized parameter, we have no choice. */
143 return targetm
.calls
.must_pass_in_stack (mode
, type
);
146 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. Generate rtl rather than asm text
147 in order to avoid duplicating too much logic from elsewhere. */
149 score_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
150 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
153 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
154 return score7_output_mi_thunk (file
, thunk_fndecl
, delta
,
155 vcall_offset
, function
);
156 else if (TARGET_SCORE3
)
157 return score3_output_mi_thunk (file
, thunk_fndecl
, delta
,
158 vcall_offset
, function
);
162 /* Implement TARGET_FUNCTION_OK_FOR_SIBCALL. */
164 score_function_ok_for_sibcall (ATTRIBUTE_UNUSED tree decl
,
165 ATTRIBUTE_UNUSED tree exp
)
170 /* Set up the stack and frame (if desired) for the function. */
172 score_function_prologue (FILE *file
, HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
174 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
175 return score7_function_prologue (file
, size
);
176 else if (TARGET_SCORE3
)
177 return score3_function_prologue (file
, size
);
182 /* Do any necessary cleanup after a function to restore stack, frame,
185 score_function_epilogue (FILE *file
,
186 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
188 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
189 return score7_function_epilogue (file
, size
);
190 else if (TARGET_SCORE3
)
191 return score3_function_epilogue (file
, size
);
196 /* Implement TARGET_SCHED_ISSUE_RATE. */
198 score_issue_rate (void)
/* NOTE(review): extraction artifact -- original line numbers are embedded
   in the text and some lines appear missing (e.g. the TARGET_SCORE5 test
   guarding the first fprintf in score_asm_file_start).  */
203 /* Choose the section to use for the constant rtx expression X that has
206 score_select_rtx_section (enum machine_mode mode
, rtx x
,
207 unsigned HOST_WIDE_INT align
)
209 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
210 return score7_select_rtx_section (mode
, x
, align
);
211 else if (TARGET_SCORE3
)
212 return score3_select_rtx_section (mode
, x
, align
);
217 /* Implement TARGET_IN_SMALL_DATA_P. */
219 score_in_small_data_p (tree decl
)
221 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
222 return score7_in_small_data_p (decl
);
223 else if (TARGET_SCORE3
)
224 return score3_in_small_data_p (decl
);
229 /* Implement TARGET_ASM_FILE_START.  Emits a banner comment naming the
   selected CPU variant and endianness, then the default file start; the
   trailing fprintf emits ".set pic" (presumably guarded by a PIC flag --
   TODO confirm against the pristine source).  */
231 score_asm_file_start (void)
234 fprintf (asm_out_file
, "# Sunplus S+core5 %s rev=%s\n",
235 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
236 else if (TARGET_SCORE5U
)
237 fprintf (asm_out_file
, "# Sunplus S+core5u %s rev=%s\n",
238 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
239 else if (TARGET_SCORE7D
)
240 fprintf (asm_out_file
, "# Sunplus S+core7d %s rev=%s\n",
241 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
242 else if (TARGET_SCORE7
)
243 fprintf (asm_out_file
, "# Sunplus S+core7 %s rev=%s\n",
244 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
245 else if (TARGET_SCORE3D
)
246 fprintf (asm_out_file
, "# Sunplus S+core3d %s rev=%s\n",
247 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
248 else if (TARGET_SCORE3
)
249 fprintf (asm_out_file
, "# Sunplus S+core3 %s rev=%s\n",
250 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
252 fprintf (asm_out_file
, "# Sunplus S+core unknown %s rev=%s\n",
253 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
255 default_file_start ();
258 fprintf (asm_out_file
, "\t.set pic\n");
261 /* Implement TARGET_ASM_FILE_END. When using assembler macros, emit
262 .externs for any small-data variables that turned out to be external. */
264 score_asm_file_end (void)
266 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
267 return score7_asm_file_end ();
268 else if (TARGET_SCORE3
)
269 return score3_asm_file_end ();
/* Mask covering every CPU-selection bit, used to clear the old selection
   before -march switches set a new one.  */
274 #define MASK_ALL_CPU_BITS \
275 (MASK_SCORE5 | MASK_SCORE5U | MASK_SCORE7 | MASK_SCORE7D \
276 | MASK_SCORE3 | MASK_SCORE3D)
/* NOTE(review): extraction artifact -- original line numbers are embedded
   in the text and the switch/case and brace lines of this function appear
   to have been dropped; only the per-option flag updates survive.  Each
   arm clears MASK_ALL_CPU_BITS and then sets the bits for the -march=
   value matched by strcmp.  */
278 /* Implement TARGET_HANDLE_OPTION. */
280 score_handle_option (size_t code
, const char *arg
, int value ATTRIBUTE_UNUSED
)
285 target_flags
&= ~(MASK_ALL_CPU_BITS
);
286 target_flags
|= MASK_SCORE7
| MASK_SCORE7D
;
290 target_flags
&= ~(MASK_ALL_CPU_BITS
);
291 target_flags
|= MASK_SCORE3
| MASK_SCORE3D
;
295 if (strcmp (arg
, "score5") == 0)
297 target_flags
&= ~(MASK_ALL_CPU_BITS
);
298 target_flags
|= MASK_SCORE5
;
301 else if (strcmp (arg
, "score5u") == 0)
303 target_flags
&= ~(MASK_ALL_CPU_BITS
);
304 target_flags
|= MASK_SCORE5U
;
307 else if (strcmp (arg
, "score7") == 0)
309 target_flags
&= ~(MASK_ALL_CPU_BITS
);
310 target_flags
|= MASK_SCORE7
;
313 else if (strcmp (arg
, "score7d") == 0)
315 target_flags
&= ~(MASK_ALL_CPU_BITS
);
316 target_flags
|= MASK_SCORE7
| MASK_SCORE7D
;
319 else if (strcmp (arg
, "score3") == 0)
321 target_flags
&= ~(MASK_ALL_CPU_BITS
);
322 target_flags
|= MASK_SCORE3
;
325 else if (strcmp (arg
, "score3d") == 0)
327 target_flags
&= ~(MASK_ALL_CPU_BITS
);
328 target_flags
|= MASK_SCORE3
| MASK_SCORE3D
;
/* NOTE(review): extraction artifact -- original line numbers are embedded
   in the text and some lines appear missing.  All functions below are
   thin dispatchers: score7_* for SCORE5/5U/7/7D, score3_* for SCORE3.  */
339 /* Implement OVERRIDE_OPTIONS macro. */
341 score_override_options (void)
343 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
344 return score7_override_options ();
345 else if (TARGET_SCORE3
)
346 return score3_override_options ();
/* Fallback when no CPU bit is set: default to the score7 handling.  */
348 return score7_override_options ();
351 /* Implement REGNO_REG_CLASS macro. */
353 score_reg_class (int regno
)
355 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
356 return score7_reg_class (regno
);
357 else if (TARGET_SCORE3
)
358 return score3_reg_class (regno
);
363 /* Implement PREFERRED_RELOAD_CLASS macro. */
365 score_preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, enum reg_class rclass
)
367 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
368 return score7_preferred_reload_class (x
, rclass
);
369 else if (TARGET_SCORE3
)
370 return score3_preferred_reload_class (x
, rclass
);
375 /* Implement SECONDARY_INPUT_RELOAD_CLASS
376 and SECONDARY_OUTPUT_RELOAD_CLASS macro. */
378 score_secondary_reload_class (enum reg_class rclass
,
379 enum machine_mode mode ATTRIBUTE_UNUSED
,
382 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
383 return score7_secondary_reload_class (rclass
, mode
, x
);
384 else if (TARGET_SCORE3
)
385 return score3_secondary_reload_class (rclass
, mode
, x
);
390 /* Implement CONST_OK_FOR_LETTER_P macro. */
392 score_const_ok_for_letter_p (HOST_WIDE_INT value
, char c
)
394 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
395 return score7_const_ok_for_letter_p (value
, c
);
396 else if (TARGET_SCORE3
)
397 return score3_const_ok_for_letter_p (value
, c
);
402 /* Implement EXTRA_CONSTRAINT macro. */
404 score_extra_constraint (rtx op
, char c
)
406 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
407 return score7_extra_constraint (op
, c
);
408 else if (TARGET_SCORE3
)
409 return score3_extra_constraint (op
, c
);
414 /* Return truth value on whether or not a given hard register
415 can support a given mode. */
417 score_hard_regno_mode_ok (unsigned int regno
, enum machine_mode mode
)
419 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
420 return score7_hard_regno_mode_ok (regno
, mode
);
421 else if (TARGET_SCORE3
)
422 return score3_hard_regno_mode_ok (regno
, mode
);
427 /* Implement INITIAL_ELIMINATION_OFFSET. FROM is either the frame
428 pointer or argument pointer. TO is either the stack pointer or
429 hard frame pointer. */
431 score_initial_elimination_offset (int from
,
432 int to ATTRIBUTE_UNUSED
)
434 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
435 return score7_initial_elimination_offset (from
, to
);
436 else if (TARGET_SCORE3
)
437 return score3_initial_elimination_offset (from
, to
);
442 /* Argument support functions. */
444 /* Initialize CUMULATIVE_ARGS for a function.  Not a dispatcher: simply
   zero-fills the structure for both sub-targets.  */
446 score_init_cumulative_args (CUMULATIVE_ARGS
*cum
,
447 tree fntype ATTRIBUTE_UNUSED
,
448 rtx libname ATTRIBUTE_UNUSED
)
450 memset (cum
, 0, sizeof (CUMULATIVE_ARGS
));
453 /* Implement FUNCTION_ARG_ADVANCE macro. */
455 score_function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
456 tree type
, int named
)
458 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
459 return score7_function_arg_advance (cum
, mode
, type
, named
);
460 else if (TARGET_SCORE3
)
461 return score3_function_arg_advance (cum
, mode
, type
, named
);
466 /* Implement TARGET_ARG_PARTIAL_BYTES macro. */
468 score_arg_partial_bytes (CUMULATIVE_ARGS
*cum
,
469 enum machine_mode mode
, tree type
, bool named
)
471 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
472 return score7_arg_partial_bytes (cum
, mode
, type
, named
);
473 else if (TARGET_SCORE3
)
474 return score3_arg_partial_bytes (cum
, mode
, type
, named
);
479 /* Implement FUNCTION_ARG macro. */
481 score_function_arg (const CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
482 tree type
, int named
)
484 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
485 return score7_function_arg (cum
, mode
, type
, named
);
486 else if (TARGET_SCORE3
)
487 return score3_function_arg (cum
, mode
, type
, named
);
/* NOTE(review): extraction artifact -- original line numbers are embedded
   in the text and some lines appear missing.  Same dispatch pattern as
   above: score7_* for SCORE5/5U/7/7D, score3_* for SCORE3.  */
492 /* Implement FUNCTION_VALUE and LIBCALL_VALUE. For normal calls,
493 VALTYPE is the return type and MODE is VOIDmode. For libcalls,
494 VALTYPE is null and MODE is the mode of the return value. */
496 score_function_value (tree valtype
, tree func ATTRIBUTE_UNUSED
,
497 enum machine_mode mode
)
499 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
500 return score7_function_value (valtype
, func
, mode
);
501 else if (TARGET_SCORE3
)
502 return score3_function_value (valtype
, func
, mode
);
507 /* Implement INITIALIZE_TRAMPOLINE macro. */
509 score_initialize_trampoline (rtx ADDR
, rtx FUNC
, rtx CHAIN
)
511 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
512 return score7_initialize_trampoline (ADDR
, FUNC
, CHAIN
);
513 else if (TARGET_SCORE3
)
514 return score3_initialize_trampoline (ADDR
, FUNC
, CHAIN
);
519 /* This function is used to implement REG_MODE_OK_FOR_BASE_P macro. */
521 score_regno_mode_ok_for_base_p (int regno
, int strict
)
523 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
524 return score7_regno_mode_ok_for_base_p (regno
, strict
);
525 else if (TARGET_SCORE3
)
526 return score3_regno_mode_ok_for_base_p (regno
, strict
);
531 /* Implement GO_IF_LEGITIMATE_ADDRESS macro. */
533 score_address_p (enum machine_mode mode
, rtx x
, int strict
)
535 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
536 return score7_address_p (mode
, x
, strict
);
537 else if (TARGET_SCORE3
)
538 return score3_address_p (mode
, x
, strict
);
543 /* This function is used to implement LEGITIMIZE_ADDRESS. If *XLOC can
544 be legitimized in a way that the generic machinery might not expect,
545 put the new address in *XLOC and return true. */
547 score_legitimize_address (rtx
*xloc
)
549 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
550 return score7_legitimize_address (xloc
);
551 else if (TARGET_SCORE3
)
552 return score3_legitimize_address (xloc
);
557 /* Return a number assessing the cost of moving a register in class
560 score_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
561 enum reg_class from
, enum reg_class to
)
563 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
564 return score7_register_move_cost (mode
, from
, to
);
565 else if (TARGET_SCORE3
)
566 return score3_register_move_cost (mode
, from
, to
);
571 /* Implement TARGET_RTX_COSTS macro. */
573 score_rtx_costs (rtx x
, int code
, int outer_code
, int *total
,
574 bool speed ATTRIBUTE_UNUSED
)
576 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
577 return score7_rtx_costs (x
, code
, outer_code
, total
, speed
);
578 else if (TARGET_SCORE3
)
579 return score3_rtx_costs (x
, code
, outer_code
, total
, speed
);
584 /* Implement TARGET_ADDRESS_COST macro. */
586 score_address_cost (rtx addr
,
587 bool speed ATTRIBUTE_UNUSED
)
589 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
590 return score7_address_cost (addr
);
591 else if (TARGET_SCORE3
)
592 return score3_address_cost (addr
);
597 /* Implement ASM_OUTPUT_EXTERNAL macro. */
599 score_output_external (FILE *file ATTRIBUTE_UNUSED
,
600 tree decl
, const char *name
)
602 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
603 return score7_output_external (file
, decl
, name
);
604 else if (TARGET_SCORE3
)
605 return score3_output_external (file
, decl
, name
);
610 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
611 back to a previous frame. */
613 score_return_addr (int count
, rtx frame ATTRIBUTE_UNUSED
)
615 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
616 return score7_return_addr (count
, frame
);
617 else if (TARGET_SCORE3
)
618 return score3_return_addr (count
, frame
);
623 /* Implement PRINT_OPERAND macro. */
625 score_print_operand (FILE *file
, rtx op
, int c
)
627 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
628 return score7_print_operand (file
, op
, c
);
629 else if (TARGET_SCORE3
)
630 return score3_print_operand (file
, op
, c
);
635 /* Implement PRINT_OPERAND_ADDRESS macro. */
637 score_print_operand_address (FILE *file
, rtx x
)
639 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
640 return score7_print_operand_address (file
, x
);
641 else if (TARGET_SCORE3
)
642 return score3_print_operand_address (file
, x
);
647 /* Implement SELECT_CC_MODE macro. */
649 score_select_cc_mode (enum rtx_code op
, rtx x
, rtx y
)
651 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
652 return score7_select_cc_mode (op
, x
, y
);
653 else if (TARGET_SCORE3
)
654 return score3_select_cc_mode (op
, x
, y
);
659 /* Return true if X is a symbolic constant that can be calculated in
660 the same way as a bare symbol. If it is, store the type of the
661 symbol in *SYMBOL_TYPE. */
663 score_symbolic_constant_p (rtx x
, enum score_symbol_type
*symbol_type
)
665 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
666 return score7_symbolic_constant_p (x
, symbol_type
);
667 else if (TARGET_SCORE3
)
668 return score3_symbolic_constant_p (x
, symbol_type
);
/* NOTE(review): extraction artifact -- original line numbers are embedded
   in the text and some lines appear missing.  These are the expander /
   output helpers called from the machine description; same score7/score3
   dispatch pattern, except the casesi/rpush/rpop routines which only have
   score3 implementations.  */
673 /* Generate the prologue instructions for entry into a S+core function. */
675 score_prologue (void)
677 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
678 return score7_prologue ();
679 else if (TARGET_SCORE3
)
680 return score3_prologue ();
685 /* Generate the epilogue instructions in a S+core function. */
687 score_epilogue (int sibcall_p
)
689 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
690 return score7_epilogue (sibcall_p
);
691 else if (TARGET_SCORE3
)
692 return score3_epilogue (sibcall_p
);
698 score_gen_cmp (enum machine_mode mode
)
700 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
701 return score7_gen_cmp (mode
);
702 else if (TARGET_SCORE3
)
703 return score3_gen_cmp (mode
);
708 /* Call and sibcall pattern all need call this function. */
710 score_call (rtx
*ops
, bool sib
)
712 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
713 return score7_call (ops
, sib
);
714 else if (TARGET_SCORE3
)
715 return score3_call (ops
, sib
);
720 /* Call value and sibcall value pattern all need call this function. */
722 score_call_value (rtx
*ops
, bool sib
)
724 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
725 return score7_call_value (ops
, sib
);
726 else if (TARGET_SCORE3
)
727 return score3_call_value (ops
, sib
);
733 score_movsicc (rtx
*ops
)
735 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
736 return score7_movsicc (ops
);
737 else if (TARGET_SCORE3
)
738 return score3_movsicc (ops
);
745 score_movdi (rtx
*ops
)
747 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
748 return score7_movdi (ops
);
749 else if (TARGET_SCORE3
)
750 return score3_movdi (ops
);
756 score_zero_extract_andi (rtx
*ops
)
758 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
759 return score7_zero_extract_andi (ops
);
760 else if (TARGET_SCORE3
)
761 return score3_zero_extract_andi (ops
);
766 /* Output asm insn for move. */
768 score_move (rtx
*ops
)
770 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
771 return score7_move (ops
);
772 else if (TARGET_SCORE3
)
773 return score3_move (ops
);
778 /* Output asm insn for load. */
780 score_linsn (rtx
*ops
, enum score_mem_unit unit
, bool sign
)
782 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
783 return score7_linsn (ops
, unit
, sign
);
784 else if (TARGET_SCORE3
)
785 return score3_linsn (ops
, unit
, sign
);
790 /* Output asm insn for store. */
792 score_sinsn (rtx
*ops
, enum score_mem_unit unit
)
794 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
795 return score7_sinsn (ops
, unit
);
796 else if (TARGET_SCORE3
)
797 return score3_sinsn (ops
, unit
);
802 /* Output asm insn for load immediate. */
804 score_limm (rtx
*ops
)
806 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
807 return score7_limm (ops
);
808 else if (TARGET_SCORE3
)
809 return score3_limm (ops
);
815 /* Generate add insn. */
817 score_select_add_imm (rtx
*ops
, bool set_cc
)
819 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
820 return score7_select_add_imm (ops
, set_cc
);
821 else if (TARGET_SCORE3
)
822 return score3_select_add_imm (ops
, set_cc
);
827 /* Output arith insn. */
829 score_select (rtx
*ops
, const char *inst_pre
,
830 bool commu
, const char *letter
, bool set_cc
)
832 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
833 return score7_select (ops
, inst_pre
, commu
, letter
, set_cc
);
834 else if (TARGET_SCORE3
)
835 return score3_select (ops
, inst_pre
, commu
, letter
, set_cc
);
840 /* Output switch case insn, only supported in score3. */
842 score_output_casesi (rtx
*operands
)
845 return score3_output_casesi (operands
);
850 /* Output rpush insn, only supported in score3. */
852 score_rpush (rtx
*operands
)
855 return score3_rpush (operands
);
860 /* Output rpop insn, only supported in score3. */
862 score_rpop (rtx
*operands
)
865 return score3_rpop (operands
);
/* NOTE(review): extraction artifact -- original line numbers are embedded
   in the text; declarations of locals such as dst/src/len/off/addr_reg,
   braces, and early returns appear to have been dropped.  */
870 /* Emit lcb/lce insns.  Handles an unaligned SImode load: the length must
   be exactly BITS_PER_WORD and the bit offset byte-aligned, otherwise the
   (dropped) early-exit path is taken.  */
872 score_unaligned_load (rtx
*ops
)
880 if (INTVAL (len
) != BITS_PER_WORD
881 || (INTVAL (off
) % BITS_PER_UNIT
) != 0)
884 gcc_assert (GET_MODE_SIZE (GET_MODE (dst
)) == GET_MODE_SIZE (SImode
));
886 addr_reg
= copy_addr_to_reg (XEXP (src
, 0));
887 emit_insn (gen_move_lcb (addr_reg
, addr_reg
));
888 emit_insn (gen_move_lce (addr_reg
, addr_reg
, dst
));
893 /* Emit scb/sce insns.  Mirror of the unaligned load above for stores.  */
895 score_unaligned_store (rtx
*ops
)
903 if (INTVAL(len
) != BITS_PER_WORD
904 || (INTVAL(off
) % BITS_PER_UNIT
) != 0)
907 gcc_assert (GET_MODE_SIZE (GET_MODE (src
)) == GET_MODE_SIZE (SImode
));
909 addr_reg
= copy_addr_to_reg (XEXP (dst
, 0));
910 emit_insn (gen_move_scb (addr_reg
, addr_reg
, src
));
911 emit_insn (gen_move_sce (addr_reg
, addr_reg
));
916 /* If length is short, generate move insns straight.  Loads the whole
   block into word registers, stores them out, then moves the sub-word
   leftover with move_by_pieces.  Aligned memory uses plain word moves;
   unaligned memory uses the lcb/lcw/lce (load) and scb/scw/sce (store)
   sequences.  */
918 score_block_move_straight (rtx dst
, rtx src
, HOST_WIDE_INT length
)
920 HOST_WIDE_INT leftover
;
924 leftover
= length
% UNITS_PER_WORD
;
926 reg_count
= length
/ UNITS_PER_WORD
;
928 regs
= XALLOCAVEC (rtx
, reg_count
);
929 for (i
= 0; i
< reg_count
; i
++)
930 regs
[i
] = gen_reg_rtx (SImode
);
932 /* Load from src to regs. */
933 if (MEM_ALIGN (src
) >= BITS_PER_WORD
)
935 HOST_WIDE_INT offset
= 0;
936 for (i
= 0; i
< reg_count
; offset
+= UNITS_PER_WORD
, i
++)
937 emit_move_insn (regs
[i
], adjust_address (src
, SImode
, offset
));
939 else if (reg_count
>= 1)
941 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
943 emit_insn (gen_move_lcb (src_reg
, src_reg
));
944 for (i
= 0; i
< (reg_count
- 1); i
++)
945 emit_insn (gen_move_lcw (src_reg
, src_reg
, regs
[i
]));
946 emit_insn (gen_move_lce (src_reg
, src_reg
, regs
[i
]));
949 /* Store regs to dest. */
950 if (MEM_ALIGN (dst
) >= BITS_PER_WORD
)
952 HOST_WIDE_INT offset
= 0;
953 for (i
= 0; i
< reg_count
; offset
+= UNITS_PER_WORD
, i
++)
954 emit_move_insn (adjust_address (dst
, SImode
, offset
), regs
[i
]);
956 else if (reg_count
>= 1)
958 rtx dst_reg
= copy_addr_to_reg (XEXP (dst
, 0));
960 emit_insn (gen_move_scb (dst_reg
, dst_reg
, regs
[0]));
961 for (i
= 1; i
< reg_count
; i
++)
962 emit_insn (gen_move_scw (dst_reg
, dst_reg
, regs
[i
]));
963 emit_insn (gen_move_sce (dst_reg
, dst_reg
));
966 /* Mop up any left-over bytes. */
969 src
= adjust_address (src
, BLKmode
, length
);
970 dst
= adjust_address (dst
, BLKmode
, length
);
971 move_by_pieces (dst
, src
, leftover
,
972 MIN (MEM_ALIGN (src
), MEM_ALIGN (dst
)), 0);
/* NOTE(review): extraction artifact -- original line numbers are embedded
   in the text; braces and some conditional lines appear dropped.  */
976 /* Generate loop head when dst or src is unaligned.  Copies exactly one
   word through TEMP, choosing lcb/lcw vs. a plain post-increment word
   load per source alignment, and scb vs. post-increment word store per
   destination alignment.  */
978 score_block_move_loop_head (rtx dst_reg
, HOST_WIDE_INT dst_align
,
979 rtx src_reg
, HOST_WIDE_INT src_align
,
980 HOST_WIDE_INT length
)
982 bool src_unaligned
= (src_align
< BITS_PER_WORD
);
983 bool dst_unaligned
= (dst_align
< BITS_PER_WORD
);
985 rtx temp
= gen_reg_rtx (SImode
);
987 gcc_assert (length
== UNITS_PER_WORD
);
991 emit_insn (gen_move_lcb (src_reg
, src_reg
));
992 emit_insn (gen_move_lcw (src_reg
, src_reg
, temp
));
995 emit_insn (gen_move_lw_a (src_reg
,
996 src_reg
, gen_int_mode (4, SImode
), temp
));
999 emit_insn (gen_move_scb (dst_reg
, dst_reg
, temp
));
1001 emit_insn (gen_move_sw_a (dst_reg
,
1002 dst_reg
, gen_int_mode (4, SImode
), temp
));
1005 /* Generate loop body, copy length bytes per iteration.  Loads
   length/UNITS_PER_WORD words into fresh registers then stores them,
   using lcw/scw when the respective side is unaligned and
   post-increment word moves otherwise.  */
1007 score_block_move_loop_body (rtx dst_reg
, HOST_WIDE_INT dst_align
,
1008 rtx src_reg
, HOST_WIDE_INT src_align
,
1009 HOST_WIDE_INT length
)
1011 int reg_count
= length
/ UNITS_PER_WORD
;
1012 rtx
*regs
= XALLOCAVEC (rtx
, reg_count
);
1014 bool src_unaligned
= (src_align
< BITS_PER_WORD
);
1015 bool dst_unaligned
= (dst_align
< BITS_PER_WORD
);
1017 for (i
= 0; i
< reg_count
; i
++)
1018 regs
[i
] = gen_reg_rtx (SImode
);
1022 for (i
= 0; i
< reg_count
; i
++)
1023 emit_insn (gen_move_lcw (src_reg
, src_reg
, regs
[i
]));
1027 for (i
= 0; i
< reg_count
; i
++)
1028 emit_insn (gen_move_lw_a (src_reg
,
1029 src_reg
, gen_int_mode (4, SImode
), regs
[i
]));
1034 for (i
= 0; i
< reg_count
; i
++)
1035 emit_insn (gen_move_scw (dst_reg
, dst_reg
, regs
[i
]));
1039 for (i
= 0; i
< reg_count
; i
++)
1040 emit_insn (gen_move_sw_a (dst_reg
,
1041 dst_reg
, gen_int_mode (4, SImode
), regs
[i
]));
1045 /* Generate loop foot, copy the leftover bytes.  Whole leftover words go
   through the loop body; a trailing sce closes an unaligned destination
   sequence.  Remaining sub-word bytes move as one halfword (when both
   sides are at least half-word aligned) followed by single bytes, with
   the first access compensating the earlier post-increment via a -4
   adjustment on the unaligned side.  */
1047 score_block_move_loop_foot (rtx dst_reg
, HOST_WIDE_INT dst_align
,
1048 rtx src_reg
, HOST_WIDE_INT src_align
,
1049 HOST_WIDE_INT length
)
1051 bool src_unaligned
= (src_align
< BITS_PER_WORD
);
1052 bool dst_unaligned
= (dst_align
< BITS_PER_WORD
);
1054 HOST_WIDE_INT leftover
;
1056 leftover
= length
% UNITS_PER_WORD
;
1060 score_block_move_loop_body (dst_reg
, dst_align
,
1061 src_reg
, src_align
, length
);
1064 emit_insn (gen_move_sce (dst_reg
, dst_reg
));
1068 HOST_WIDE_INT src_adj
= src_unaligned
? -4 : 0;
1069 HOST_WIDE_INT dst_adj
= dst_unaligned
? -4 : 0;
1072 gcc_assert (leftover
< UNITS_PER_WORD
);
1074 if (leftover
>= UNITS_PER_WORD
/ 2
1075 && src_align
>= BITS_PER_WORD
/ 2
1076 && dst_align
>= BITS_PER_WORD
/ 2)
1078 temp
= gen_reg_rtx (HImode
);
1079 emit_insn (gen_move_lhu_b (src_reg
, src_reg
,
1080 gen_int_mode (src_adj
, SImode
), temp
));
1081 emit_insn (gen_move_sh_b (dst_reg
, dst_reg
,
1082 gen_int_mode (dst_adj
, SImode
), temp
));
1083 leftover
-= UNITS_PER_WORD
/ 2;
1084 src_adj
= UNITS_PER_WORD
/ 2;
1085 dst_adj
= UNITS_PER_WORD
/ 2;
1088 while (leftover
> 0)
1090 temp
= gen_reg_rtx (QImode
);
1091 emit_insn (gen_move_lbu_b (src_reg
, src_reg
,
1092 gen_int_mode (src_adj
, SImode
), temp
));
1093 emit_insn (gen_move_sb_b (dst_reg
, dst_reg
,
1094 gen_int_mode (dst_adj
, SImode
), temp
));
/* NOTE(review): extraction artifact -- original line numbers are embedded
   in the text; decrementing lines, labels and braces appear dropped.  */
/* Loop sizing: move between MIN_MOVE_REGS and MAX_MOVE_REGS words per
   loop iteration.  */
1102 #define MIN_MOVE_REGS 3
1103 #define MIN_MOVE_BYTES (MIN_MOVE_REGS * UNITS_PER_WORD)
1104 #define MAX_MOVE_REGS 4
1105 #define MAX_MOVE_BYTES (MAX_MOVE_REGS * UNITS_PER_WORD)
1107 /* The length is large, generate a loop if necessary.
1108 The loop is consisted by loop head/body/foot.  Picks the largest
   per-iteration byte count (MAX down to MIN) that yields a nonzero
   iteration count; if none does, falls back to a straight move.  The
   loop exit compares the advancing dst_reg against the precomputed
   final destination address.  */
1110 score_block_move_loop (rtx dst
, rtx src
, HOST_WIDE_INT length
)
1112 HOST_WIDE_INT src_align
= MEM_ALIGN (src
);
1113 HOST_WIDE_INT dst_align
= MEM_ALIGN (dst
);
1114 HOST_WIDE_INT loop_mov_bytes
;
1115 HOST_WIDE_INT iteration
= 0;
1116 HOST_WIDE_INT head_length
= 0, leftover
;
1117 rtx label
, src_reg
, dst_reg
, final_dst
;
1119 bool gen_loop_head
= (src_align
< BITS_PER_WORD
1120 || dst_align
< BITS_PER_WORD
);
1123 head_length
+= UNITS_PER_WORD
;
1125 for (loop_mov_bytes
= MAX_MOVE_BYTES
;
1126 loop_mov_bytes
>= MIN_MOVE_BYTES
;
1127 loop_mov_bytes
-= UNITS_PER_WORD
)
1129 iteration
= (length
- head_length
) / loop_mov_bytes
;
1135 score_block_move_straight (dst
, src
, length
);
1139 leftover
= (length
- head_length
) % loop_mov_bytes
;
1142 src_reg
= copy_addr_to_reg (XEXP (src
, 0));
1143 dst_reg
= copy_addr_to_reg (XEXP (dst
, 0));
1144 final_dst
= expand_simple_binop (Pmode
, PLUS
, dst_reg
, GEN_INT (length
),
1148 score_block_move_loop_head (dst_reg
, dst_align
,
1149 src_reg
, src_align
, head_length
);
1151 label
= gen_label_rtx ();
1154 score_block_move_loop_body (dst_reg
, dst_align
,
1155 src_reg
, src_align
, loop_mov_bytes
);
1157 emit_insn (gen_cmpsi (dst_reg
, final_dst
));
1158 emit_jump_insn (gen_bne (label
));
1160 score_block_move_loop_foot (dst_reg
, dst_align
,
1161 src_reg
, src_align
, leftover
);
1164 /* Generate block move, for misc.md: "movmemsi".  Dispatch: unaligned
   little-endian moves of at least a word go through the lcb/lcw-based
   paths; short constant lengths (<= 2 * MAX_MOVE_BYTES) use the straight
   move; otherwise, when optimizing without loop unrolling, the loop
   variant.  */
1166 score_block_move (rtx
*ops
)
1170 rtx length
= ops
[2];
1172 if (TARGET_LITTLE_ENDIAN
1173 && (MEM_ALIGN (src
) < BITS_PER_WORD
|| MEM_ALIGN (dst
) < BITS_PER_WORD
)
1174 && INTVAL (length
) >= UNITS_PER_WORD
)
1177 if (GET_CODE (length
) == CONST_INT
)
1179 if (INTVAL (length
) <= 2 * MAX_MOVE_BYTES
)
1181 score_block_move_straight (dst
, src
, INTVAL (length
));
1184 else if (optimize
&&
1185 !(flag_unroll_loops
|| flag_unroll_all_loops
))
1187 score_block_move_loop (dst
, src
, INTVAL (length
));
1194 struct gcc_target targetm
= TARGET_INITIALIZER
;