/* Output routines for Sunplus S+CORE processor
   Copyright (C) 2005, 2007 Free Software Foundation, Inc.
   Contributed by Sunnorth.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "recog.h"
#include "output.h"
#include "tree.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "flags.h"
#include "target.h"
#include "target-def.h"
#include "integrate.h"
#include "langhooks.h"
/* NOTE(review): the extraction dropped several #include lines between the
   visible ones (original lines 20-52); the list above restores the ones the
   surviving code demonstrably needs (fprintf/asm_out_file, rtx emission,
   target hooks, flag_pic).  Confirm against the project's score.c.  */
/* Target hook initializers.  Every hook is pointed at a score_*
   dispatcher defined below; each dispatcher forwards to the score3 or
   score7 implementation selected by the -march/-mscore* flags.  */

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START           score_asm_file_start

#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END             score_asm_file_end

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE    score_function_prologue

#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE    score_function_epilogue

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS     TARGET_DEFAULT

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION            score_handle_option

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE         score_issue_rate

#undef TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION   score_select_rtx_section

#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P          score_in_small_data_p

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL  score_function_ok_for_sibcall

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING   hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK      score_output_mi_thunk

#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS    hook_bool_tree_true

#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN  hook_bool_tree_true

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES       hook_bool_tree_true

#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK       must_pass_in_stack_var_size

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES        score_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE        score_pass_by_reference

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY         score_return_in_memory

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS                score_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST             score_address_cost
115 struct extern_list
*extern_head
= 0;
116 rtx cmp_op0
, cmp_op1
;
118 /* default 0 = NO_REGS */
119 enum reg_class score_char_to_class
[256];
121 /* Implement TARGET_RETURN_IN_MEMORY. In S+core,
122 small structures are returned in a register.
123 Objects with varying size must still be returned in memory. */
125 score_return_in_memory (tree type
, tree fndecl ATTRIBUTE_UNUSED
)
127 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
128 return score7_return_in_memory (type
, fndecl
);
129 else if (TARGET_SCORE3
)
130 return score3_return_in_memory (type
, fndecl
);
135 /* Return nonzero when an argument must be passed by reference. */
137 score_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
138 enum machine_mode mode
, tree type
,
139 bool named ATTRIBUTE_UNUSED
)
141 /* If we have a variable-sized parameter, we have no choice. */
142 return targetm
.calls
.must_pass_in_stack (mode
, type
);
145 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. Generate rtl rather than asm text
146 in order to avoid duplicating too much logic from elsewhere. */
148 score_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
149 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
152 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
153 return score7_output_mi_thunk (file
, thunk_fndecl
, delta
,
154 vcall_offset
, function
);
155 else if (TARGET_SCORE3
)
156 return score3_output_mi_thunk (file
, thunk_fndecl
, delta
,
157 vcall_offset
, function
);
161 /* Implement TARGET_FUNCTION_OK_FOR_SIBCALL. */
163 score_function_ok_for_sibcall (ATTRIBUTE_UNUSED tree decl
,
164 ATTRIBUTE_UNUSED tree exp
)
169 /* Set up the stack and frame (if desired) for the function. */
171 score_function_prologue (FILE *file
, HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
173 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
174 return score7_function_prologue (file
, size
);
175 else if (TARGET_SCORE3
)
176 return score3_function_prologue (file
, size
);
181 /* Do any necessary cleanup after a function to restore stack, frame,
184 score_function_epilogue (FILE *file
,
185 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
187 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
188 return score7_function_epilogue (file
, size
);
189 else if (TARGET_SCORE3
)
190 return score3_function_epilogue (file
, size
);
195 /* Implement TARGET_SCHED_ISSUE_RATE. */
197 score_issue_rate (void)
202 /* Choose the section to use for the constant rtx expression X that has
205 score_select_rtx_section (enum machine_mode mode
, rtx x
,
206 unsigned HOST_WIDE_INT align
)
208 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
209 return score7_select_rtx_section (mode
, x
, align
);
210 else if (TARGET_SCORE3
)
211 return score3_select_rtx_section (mode
, x
, align
);
216 /* Implement TARGET_IN_SMALL_DATA_P. */
218 score_in_small_data_p (tree decl
)
220 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
221 return score7_in_small_data_p (decl
);
222 else if (TARGET_SCORE3
)
223 return score3_in_small_data_p (decl
);
228 /* Implement TARGET_ASM_FILE_START. */
230 score_asm_file_start (void)
233 fprintf (asm_out_file
, "# Sunplus S+core5 %s rev=%s\n",
234 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
235 else if (TARGET_SCORE5U
)
236 fprintf (asm_out_file
, "# Sunplus S+core5u %s rev=%s\n",
237 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
238 else if (TARGET_SCORE7D
)
239 fprintf (asm_out_file
, "# Sunplus S+core7d %s rev=%s\n",
240 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
241 else if (TARGET_SCORE7
)
242 fprintf (asm_out_file
, "# Sunplus S+core7 %s rev=%s\n",
243 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
244 else if (TARGET_SCORE3D
)
245 fprintf (asm_out_file
, "# Sunplus S+core3d %s rev=%s\n",
246 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
247 else if (TARGET_SCORE3
)
248 fprintf (asm_out_file
, "# Sunplus S+core3 %s rev=%s\n",
249 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
251 fprintf (asm_out_file
, "# Sunplus S+core unknown %s rev=%s\n",
252 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
254 default_file_start ();
257 fprintf (asm_out_file
, "\t.set pic\n");
260 /* Implement TARGET_ASM_FILE_END. When using assembler macros, emit
261 .externs for any small-data variables that turned out to be external. */
263 score_asm_file_end (void)
265 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
266 return score7_asm_file_end ();
267 else if (TARGET_SCORE3
)
268 return score3_asm_file_end ();
/* All CPU-selection bits, cleared before a new core is selected so the
   -mscore*/-march= options are mutually exclusive.  */
#define MASK_ALL_CPU_BITS \
  (MASK_SCORE5 | MASK_SCORE5U | MASK_SCORE7 | MASK_SCORE7D \
   | MASK_SCORE3 | MASK_SCORE3D)

/* Implement TARGET_HANDLE_OPTION.  CODE is the option being processed,
   ARG its argument (for -march=).  Returns true if the option is valid.
   NOTE(review): the OPT_* enumerators below come from the generated
   score.opt header and were reconstructed — confirm the exact names.  */
static bool
score_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case OPT_mscore7d:
      target_flags &= ~(MASK_ALL_CPU_BITS);
      target_flags |= MASK_SCORE7 | MASK_SCORE7D;
      return true;

    case OPT_mscore3d:
      target_flags &= ~(MASK_ALL_CPU_BITS);
      target_flags |= MASK_SCORE3 | MASK_SCORE3D;
      return true;

    case OPT_march_:
      if (strcmp (arg, "score5") == 0)
        {
          target_flags &= ~(MASK_ALL_CPU_BITS);
          target_flags |= MASK_SCORE5;
          return true;
        }
      else if (strcmp (arg, "score5u") == 0)
        {
          target_flags &= ~(MASK_ALL_CPU_BITS);
          target_flags |= MASK_SCORE5U;
          return true;
        }
      else if (strcmp (arg, "score7") == 0)
        {
          target_flags &= ~(MASK_ALL_CPU_BITS);
          target_flags |= MASK_SCORE7;
          return true;
        }
      else if (strcmp (arg, "score7d") == 0)
        {
          target_flags &= ~(MASK_ALL_CPU_BITS);
          target_flags |= MASK_SCORE7 | MASK_SCORE7D;
          return true;
        }
      else if (strcmp (arg, "score3") == 0)
        {
          target_flags &= ~(MASK_ALL_CPU_BITS);
          target_flags |= MASK_SCORE3;
          return true;
        }
      else if (strcmp (arg, "score3d") == 0)
        {
          target_flags &= ~(MASK_ALL_CPU_BITS);
          target_flags |= MASK_SCORE3 | MASK_SCORE3D;
          return true;
        }
      else
        return false;

    default:
      return true;
    }
}
338 /* Implement OVERRIDE_OPTIONS macro. */
340 score_override_options (void)
342 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
343 return score7_override_options ();
344 else if (TARGET_SCORE3
)
345 return score3_override_options ();
347 return score7_override_options ();
350 /* Implement REGNO_REG_CLASS macro. */
352 score_reg_class (int regno
)
354 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
355 return score7_reg_class (regno
);
356 else if (TARGET_SCORE3
)
357 return score3_reg_class (regno
);
362 /* Implement PREFERRED_RELOAD_CLASS macro. */
364 score_preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, enum reg_class
class)
366 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
367 return score7_preferred_reload_class (x
, class);
368 else if (TARGET_SCORE3
)
369 return score3_preferred_reload_class (x
, class);
374 /* Implement SECONDARY_INPUT_RELOAD_CLASS
375 and SECONDARY_OUTPUT_RELOAD_CLASS macro. */
377 score_secondary_reload_class (enum reg_class
class,
378 enum machine_mode mode ATTRIBUTE_UNUSED
,
381 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
382 return score7_secondary_reload_class (class, mode
, x
);
383 else if (TARGET_SCORE3
)
384 return score3_secondary_reload_class (class, mode
, x
);
389 /* Implement CONST_OK_FOR_LETTER_P macro. */
391 score_const_ok_for_letter_p (HOST_WIDE_INT value
, char c
)
393 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
394 return score7_const_ok_for_letter_p (value
, c
);
395 else if (TARGET_SCORE3
)
396 return score3_const_ok_for_letter_p (value
, c
);
401 /* Implement EXTRA_CONSTRAINT macro. */
403 score_extra_constraint (rtx op
, char c
)
405 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
406 return score7_extra_constraint (op
, c
);
407 else if (TARGET_SCORE3
)
408 return score3_extra_constraint (op
, c
);
413 /* Return truth value on whether or not a given hard register
414 can support a given mode. */
416 score_hard_regno_mode_ok (unsigned int regno
, enum machine_mode mode
)
418 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
419 return score7_hard_regno_mode_ok (regno
, mode
);
420 else if (TARGET_SCORE3
)
421 return score3_hard_regno_mode_ok (regno
, mode
);
426 /* Implement INITIAL_ELIMINATION_OFFSET. FROM is either the frame
427 pointer or argument pointer. TO is either the stack pointer or
428 hard frame pointer. */
430 score_initial_elimination_offset (int from
,
431 int to ATTRIBUTE_UNUSED
)
433 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
434 return score7_initial_elimination_offset (from
, to
);
435 else if (TARGET_SCORE3
)
436 return score3_initial_elimination_offset (from
, to
);
441 /* Argument support functions. */
443 /* Initialize CUMULATIVE_ARGS for a function. */
445 score_init_cumulative_args (CUMULATIVE_ARGS
*cum
,
446 tree fntype ATTRIBUTE_UNUSED
,
447 rtx libname ATTRIBUTE_UNUSED
)
449 memset (cum
, 0, sizeof (CUMULATIVE_ARGS
));
452 /* Implement FUNCTION_ARG_ADVANCE macro. */
454 score_function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
455 tree type
, int named
)
457 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
458 return score7_function_arg_advance (cum
, mode
, type
, named
);
459 else if (TARGET_SCORE3
)
460 return score3_function_arg_advance (cum
, mode
, type
, named
);
465 /* Implement TARGET_ARG_PARTIAL_BYTES macro. */
467 score_arg_partial_bytes (CUMULATIVE_ARGS
*cum
,
468 enum machine_mode mode
, tree type
, bool named
)
470 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
471 return score7_arg_partial_bytes (cum
, mode
, type
, named
);
472 else if (TARGET_SCORE3
)
473 return score3_arg_partial_bytes (cum
, mode
, type
, named
);
478 /* Implement FUNCTION_ARG macro. */
480 score_function_arg (const CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
481 tree type
, int named
)
483 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
484 return score7_function_arg (cum
, mode
, type
, named
);
485 else if (TARGET_SCORE3
)
486 return score3_function_arg (cum
, mode
, type
, named
);
491 /* Implement FUNCTION_VALUE and LIBCALL_VALUE. For normal calls,
492 VALTYPE is the return type and MODE is VOIDmode. For libcalls,
493 VALTYPE is null and MODE is the mode of the return value. */
495 score_function_value (tree valtype
, tree func ATTRIBUTE_UNUSED
,
496 enum machine_mode mode
)
498 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
499 return score7_function_value (valtype
, func
, mode
);
500 else if (TARGET_SCORE3
)
501 return score3_function_value (valtype
, func
, mode
);
506 /* Implement INITIALIZE_TRAMPOLINE macro. */
508 score_initialize_trampoline (rtx ADDR
, rtx FUNC
, rtx CHAIN
)
510 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
511 return score7_initialize_trampoline (ADDR
, FUNC
, CHAIN
);
512 else if (TARGET_SCORE3
)
513 return score3_initialize_trampoline (ADDR
, FUNC
, CHAIN
);
518 /* This function is used to implement REG_MODE_OK_FOR_BASE_P macro. */
520 score_regno_mode_ok_for_base_p (int regno
, int strict
)
522 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
523 return score7_regno_mode_ok_for_base_p (regno
, strict
);
524 else if (TARGET_SCORE3
)
525 return score3_regno_mode_ok_for_base_p (regno
, strict
);
530 /* Implement GO_IF_LEGITIMATE_ADDRESS macro. */
532 score_address_p (enum machine_mode mode
, rtx x
, int strict
)
534 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
535 return score7_address_p (mode
, x
, strict
);
536 else if (TARGET_SCORE3
)
537 return score3_address_p (mode
, x
, strict
);
542 /* This function is used to implement LEGITIMIZE_ADDRESS. If *XLOC can
543 be legitimized in a way that the generic machinery might not expect,
544 put the new address in *XLOC and return true. */
546 score_legitimize_address (rtx
*xloc
)
548 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
549 return score7_legitimize_address (xloc
);
550 else if (TARGET_SCORE3
)
551 return score3_legitimize_address (xloc
);
556 /* Return a number assessing the cost of moving a register in class
559 score_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
560 enum reg_class from
, enum reg_class to
)
562 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
563 return score7_register_move_cost (mode
, from
, to
);
564 else if (TARGET_SCORE3
)
565 return score3_register_move_cost (mode
, from
, to
);
570 /* Implement TARGET_RTX_COSTS macro. */
572 score_rtx_costs (rtx x
, int code
, int outer_code
, int *total
)
574 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
575 return score7_rtx_costs (x
, code
, outer_code
, total
);
576 else if (TARGET_SCORE3
)
577 return score3_rtx_costs (x
, code
, outer_code
, total
);
582 /* Implement TARGET_ADDRESS_COST macro. */
584 score_address_cost (rtx addr
)
586 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
587 return score7_address_cost (addr
);
588 else if (TARGET_SCORE3
)
589 return score3_address_cost (addr
);
594 /* Implement ASM_OUTPUT_EXTERNAL macro. */
596 score_output_external (FILE *file ATTRIBUTE_UNUSED
,
597 tree decl
, const char *name
)
599 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
600 return score7_output_external (file
, decl
, name
);
601 else if (TARGET_SCORE3
)
602 return score3_output_external (file
, decl
, name
);
607 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
608 back to a previous frame. */
610 score_return_addr (int count
, rtx frame ATTRIBUTE_UNUSED
)
612 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
613 return score7_return_addr (count
, frame
);
614 else if (TARGET_SCORE3
)
615 return score3_return_addr (count
, frame
);
620 /* Implement PRINT_OPERAND macro. */
622 score_print_operand (FILE *file
, rtx op
, int c
)
624 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
625 return score7_print_operand (file
, op
, c
);
626 else if (TARGET_SCORE3
)
627 return score3_print_operand (file
, op
, c
);
632 /* Implement PRINT_OPERAND_ADDRESS macro. */
634 score_print_operand_address (FILE *file
, rtx x
)
636 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
637 return score7_print_operand_address (file
, x
);
638 else if (TARGET_SCORE3
)
639 return score3_print_operand_address (file
, x
);
644 /* Implement SELECT_CC_MODE macro. */
646 score_select_cc_mode (enum rtx_code op
, rtx x
, rtx y
)
648 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
649 return score7_select_cc_mode (op
, x
, y
);
650 else if (TARGET_SCORE3
)
651 return score3_select_cc_mode (op
, x
, y
);
656 /* Return true if X is a symbolic constant that can be calculated in
657 the same way as a bare symbol. If it is, store the type of the
658 symbol in *SYMBOL_TYPE. */
660 score_symbolic_constant_p (rtx x
, enum score_symbol_type
*symbol_type
)
662 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
663 return score7_symbolic_constant_p (x
, symbol_type
);
664 else if (TARGET_SCORE3
)
665 return score3_symbolic_constant_p (x
, symbol_type
);
670 /* Generate the prologue instructions for entry into a S+core function. */
672 score_prologue (void)
674 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
675 return score7_prologue ();
676 else if (TARGET_SCORE3
)
677 return score3_prologue ();
682 /* Generate the epilogue instructions in a S+core function. */
684 score_epilogue (int sibcall_p
)
686 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
687 return score7_epilogue (sibcall_p
);
688 else if (TARGET_SCORE3
)
689 return score3_epilogue (sibcall_p
);
695 score_gen_cmp (enum machine_mode mode
)
697 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
698 return score7_gen_cmp (mode
);
699 else if (TARGET_SCORE3
)
700 return score3_gen_cmp (mode
);
705 /* Call and sibcall pattern all need call this function. */
707 score_call (rtx
*ops
, bool sib
)
709 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
710 return score7_call (ops
, sib
);
711 else if (TARGET_SCORE3
)
712 return score3_call (ops
, sib
);
717 /* Call value and sibcall value pattern all need call this function. */
719 score_call_value (rtx
*ops
, bool sib
)
721 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
722 return score7_call_value (ops
, sib
);
723 else if (TARGET_SCORE3
)
724 return score3_call_value (ops
, sib
);
730 score_movsicc (rtx
*ops
)
732 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
733 return score7_movsicc (ops
);
734 else if (TARGET_SCORE3
)
735 return score3_movsicc (ops
);
742 score_movdi (rtx
*ops
)
744 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
745 return score7_movdi (ops
);
746 else if (TARGET_SCORE3
)
747 return score3_movdi (ops
);
753 score_zero_extract_andi (rtx
*ops
)
755 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
756 return score7_zero_extract_andi (ops
);
757 else if (TARGET_SCORE3
)
758 return score3_zero_extract_andi (ops
);
763 /* Output asm insn for move. */
765 score_move (rtx
*ops
)
767 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
768 return score7_move (ops
);
769 else if (TARGET_SCORE3
)
770 return score3_move (ops
);
775 /* Output asm insn for load. */
777 score_linsn (rtx
*ops
, enum score_mem_unit unit
, bool sign
)
779 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
780 return score7_linsn (ops
, unit
, sign
);
781 else if (TARGET_SCORE3
)
782 return score3_linsn (ops
, unit
, sign
);
787 /* Output asm insn for store. */
789 score_sinsn (rtx
*ops
, enum score_mem_unit unit
)
791 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
792 return score7_sinsn (ops
, unit
);
793 else if (TARGET_SCORE3
)
794 return score3_sinsn (ops
, unit
);
799 /* Output asm insn for load immediate. */
801 score_limm (rtx
*ops
)
803 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
804 return score7_limm (ops
);
805 else if (TARGET_SCORE3
)
806 return score3_limm (ops
);
812 /* Generate add insn. */
814 score_select_add_imm (rtx
*ops
, bool set_cc
)
816 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
817 return score7_select_add_imm (ops
, set_cc
);
818 else if (TARGET_SCORE3
)
819 return score3_select_add_imm (ops
, set_cc
);
824 /* Output arith insn. */
826 score_select (rtx
*ops
, const char *inst_pre
,
827 bool commu
, const char *letter
, bool set_cc
)
829 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
830 return score7_select (ops
, inst_pre
, commu
, letter
, set_cc
);
831 else if (TARGET_SCORE3
)
832 return score3_select (ops
, inst_pre
, commu
, letter
, set_cc
);
837 /* Output switch case insn, only supported in score3. */
839 score_output_casesi (rtx
*operands
)
842 return score3_output_casesi (operands
);
847 /* Output rpush insn, only supported in score3. */
849 score_rpush (rtx
*operands
)
852 return score3_rpush (operands
);
857 /* Output rpop insn, only supported in score3. */
859 score_rpop (rtx
*operands
)
862 return score3_rpop (operands
);
867 /* Emit lcb/lce insns. */
869 score_unaligned_load (rtx
*ops
)
877 if (INTVAL (len
) != BITS_PER_WORD
878 || (INTVAL (off
) % BITS_PER_UNIT
) != 0)
881 gcc_assert (GET_MODE_SIZE (GET_MODE (dst
)) == GET_MODE_SIZE (SImode
));
883 addr_reg
= copy_addr_to_reg (XEXP (src
, 0));
884 emit_insn (gen_move_lcb (addr_reg
, addr_reg
));
885 emit_insn (gen_move_lce (addr_reg
, addr_reg
, dst
));
890 /* Emit scb/sce insns. */
892 score_unaligned_store (rtx
*ops
)
900 if (INTVAL(len
) != BITS_PER_WORD
901 || (INTVAL(off
) % BITS_PER_UNIT
) != 0)
904 gcc_assert (GET_MODE_SIZE (GET_MODE (src
)) == GET_MODE_SIZE (SImode
));
906 addr_reg
= copy_addr_to_reg (XEXP (dst
, 0));
907 emit_insn (gen_move_scb (addr_reg
, addr_reg
, src
));
908 emit_insn (gen_move_sce (addr_reg
, addr_reg
));
913 /* If length is short, generate move insns straight. */
915 score_block_move_straight (rtx dst
, rtx src
, HOST_WIDE_INT length
)
917 HOST_WIDE_INT leftover
;
921 leftover
= length
% UNITS_PER_WORD
;
923 reg_count
= length
/ UNITS_PER_WORD
;
925 regs
= alloca (sizeof (rtx
) * reg_count
);
926 for (i
= 0; i
< reg_count
; i
++)
927 regs
[i
] = gen_reg_rtx (SImode
);
929 /* Load from src to regs. */
930 if (MEM_ALIGN (src
) >= BITS_PER_WORD
)
932 HOST_WIDE_INT offset
= 0;
933 for (i
= 0; i
< reg_count
; offset
+= UNITS_PER_WORD
, i
++)
934 emit_move_insn (regs
[i
], adjust_address (src
, SImode
, offset
));
936 else if (reg_count
>= 1)
938 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
940 emit_insn (gen_move_lcb (src_reg
, src_reg
));
941 for (i
= 0; i
< (reg_count
- 1); i
++)
942 emit_insn (gen_move_lcw (src_reg
, src_reg
, regs
[i
]));
943 emit_insn (gen_move_lce (src_reg
, src_reg
, regs
[i
]));
946 /* Store regs to dest. */
947 if (MEM_ALIGN (dst
) >= BITS_PER_WORD
)
949 HOST_WIDE_INT offset
= 0;
950 for (i
= 0; i
< reg_count
; offset
+= UNITS_PER_WORD
, i
++)
951 emit_move_insn (adjust_address (dst
, SImode
, offset
), regs
[i
]);
953 else if (reg_count
>= 1)
955 rtx dst_reg
= copy_addr_to_reg (XEXP (dst
, 0));
957 emit_insn (gen_move_scb (dst_reg
, dst_reg
, regs
[0]));
958 for (i
= 1; i
< reg_count
; i
++)
959 emit_insn (gen_move_scw (dst_reg
, dst_reg
, regs
[i
]));
960 emit_insn (gen_move_sce (dst_reg
, dst_reg
));
963 /* Mop up any left-over bytes. */
966 src
= adjust_address (src
, BLKmode
, length
);
967 dst
= adjust_address (dst
, BLKmode
, length
);
968 move_by_pieces (dst
, src
, leftover
,
969 MIN (MEM_ALIGN (src
), MEM_ALIGN (dst
)), 0);
973 /* Generate loop head when dst or src is unaligned. */
975 score_block_move_loop_head (rtx dst_reg
, HOST_WIDE_INT dst_align
,
976 rtx src_reg
, HOST_WIDE_INT src_align
,
977 HOST_WIDE_INT length
)
979 bool src_unaligned
= (src_align
< BITS_PER_WORD
);
980 bool dst_unaligned
= (dst_align
< BITS_PER_WORD
);
982 rtx temp
= gen_reg_rtx (SImode
);
984 gcc_assert (length
== UNITS_PER_WORD
);
988 emit_insn (gen_move_lcb (src_reg
, src_reg
));
989 emit_insn (gen_move_lcw (src_reg
, src_reg
, temp
));
992 emit_insn (gen_move_lw_a (src_reg
,
993 src_reg
, gen_int_mode (4, SImode
), temp
));
996 emit_insn (gen_move_scb (dst_reg
, dst_reg
, temp
));
998 emit_insn (gen_move_sw_a (dst_reg
,
999 dst_reg
, gen_int_mode (4, SImode
), temp
));
1002 /* Generate loop body, copy length bytes per iteration. */
1004 score_block_move_loop_body (rtx dst_reg
, HOST_WIDE_INT dst_align
,
1005 rtx src_reg
, HOST_WIDE_INT src_align
,
1006 HOST_WIDE_INT length
)
1008 int reg_count
= length
/ UNITS_PER_WORD
;
1009 rtx
*regs
= alloca (sizeof (rtx
) * reg_count
);
1011 bool src_unaligned
= (src_align
< BITS_PER_WORD
);
1012 bool dst_unaligned
= (dst_align
< BITS_PER_WORD
);
1014 for (i
= 0; i
< reg_count
; i
++)
1015 regs
[i
] = gen_reg_rtx (SImode
);
1019 for (i
= 0; i
< reg_count
; i
++)
1020 emit_insn (gen_move_lcw (src_reg
, src_reg
, regs
[i
]));
1024 for (i
= 0; i
< reg_count
; i
++)
1025 emit_insn (gen_move_lw_a (src_reg
,
1026 src_reg
, gen_int_mode (4, SImode
), regs
[i
]));
1031 for (i
= 0; i
< reg_count
; i
++)
1032 emit_insn (gen_move_scw (dst_reg
, dst_reg
, regs
[i
]));
1036 for (i
= 0; i
< reg_count
; i
++)
1037 emit_insn (gen_move_sw_a (dst_reg
,
1038 dst_reg
, gen_int_mode (4, SImode
), regs
[i
]));
1042 /* Generate loop foot, copy the leftover bytes. */
1044 score_block_move_loop_foot (rtx dst_reg
, HOST_WIDE_INT dst_align
,
1045 rtx src_reg
, HOST_WIDE_INT src_align
,
1046 HOST_WIDE_INT length
)
1048 bool src_unaligned
= (src_align
< BITS_PER_WORD
);
1049 bool dst_unaligned
= (dst_align
< BITS_PER_WORD
);
1051 HOST_WIDE_INT leftover
;
1053 leftover
= length
% UNITS_PER_WORD
;
1057 score_block_move_loop_body (dst_reg
, dst_align
,
1058 src_reg
, src_align
, length
);
1061 emit_insn (gen_move_sce (dst_reg
, dst_reg
));
1065 HOST_WIDE_INT src_adj
= src_unaligned
? -4 : 0;
1066 HOST_WIDE_INT dst_adj
= dst_unaligned
? -4 : 0;
1069 gcc_assert (leftover
< UNITS_PER_WORD
);
1071 if (leftover
>= UNITS_PER_WORD
/ 2
1072 && src_align
>= BITS_PER_WORD
/ 2
1073 && dst_align
>= BITS_PER_WORD
/ 2)
1075 temp
= gen_reg_rtx (HImode
);
1076 emit_insn (gen_move_lhu_b (src_reg
, src_reg
,
1077 gen_int_mode (src_adj
, SImode
), temp
));
1078 emit_insn (gen_move_sh_b (dst_reg
, dst_reg
,
1079 gen_int_mode (dst_adj
, SImode
), temp
));
1080 leftover
-= UNITS_PER_WORD
/ 2;
1081 src_adj
= UNITS_PER_WORD
/ 2;
1082 dst_adj
= UNITS_PER_WORD
/ 2;
1085 while (leftover
> 0)
1087 temp
= gen_reg_rtx (QImode
);
1088 emit_insn (gen_move_lbu_b (src_reg
, src_reg
,
1089 gen_int_mode (src_adj
, SImode
), temp
));
1090 emit_insn (gen_move_sb_b (dst_reg
, dst_reg
,
1091 gen_int_mode (dst_adj
, SImode
), temp
));
1099 #define MIN_MOVE_REGS 3
1100 #define MIN_MOVE_BYTES (MIN_MOVE_REGS * UNITS_PER_WORD)
1101 #define MAX_MOVE_REGS 4
1102 #define MAX_MOVE_BYTES (MAX_MOVE_REGS * UNITS_PER_WORD)
1104 /* The length is large, generate a loop if necessary.
1105 The loop is consisted by loop head/body/foot. */
1107 score_block_move_loop (rtx dst
, rtx src
, HOST_WIDE_INT length
)
1109 HOST_WIDE_INT src_align
= MEM_ALIGN (src
);
1110 HOST_WIDE_INT dst_align
= MEM_ALIGN (dst
);
1111 HOST_WIDE_INT loop_mov_bytes
;
1112 HOST_WIDE_INT iteration
= 0;
1113 HOST_WIDE_INT head_length
= 0, leftover
;
1114 rtx label
, src_reg
, dst_reg
, final_dst
;
1116 bool gen_loop_head
= (src_align
< BITS_PER_WORD
1117 || dst_align
< BITS_PER_WORD
);
1120 head_length
+= UNITS_PER_WORD
;
1122 for (loop_mov_bytes
= MAX_MOVE_BYTES
;
1123 loop_mov_bytes
>= MIN_MOVE_BYTES
;
1124 loop_mov_bytes
-= UNITS_PER_WORD
)
1126 iteration
= (length
- head_length
) / loop_mov_bytes
;
1132 score_block_move_straight (dst
, src
, length
);
1136 leftover
= (length
- head_length
) % loop_mov_bytes
;
1139 src_reg
= copy_addr_to_reg (XEXP (src
, 0));
1140 dst_reg
= copy_addr_to_reg (XEXP (dst
, 0));
1141 final_dst
= expand_simple_binop (Pmode
, PLUS
, dst_reg
, GEN_INT (length
),
1145 score_block_move_loop_head (dst_reg
, dst_align
,
1146 src_reg
, src_align
, head_length
);
1148 label
= gen_label_rtx ();
1151 score_block_move_loop_body (dst_reg
, dst_align
,
1152 src_reg
, src_align
, loop_mov_bytes
);
1154 emit_insn (gen_cmpsi (dst_reg
, final_dst
));
1155 emit_jump_insn (gen_bne (label
));
1157 score_block_move_loop_foot (dst_reg
, dst_align
,
1158 src_reg
, src_align
, leftover
);
1161 /* Generate block move, for misc.md: "movmemsi". */
1163 score_block_move (rtx
*ops
)
1167 rtx length
= ops
[2];
1169 if (TARGET_LITTLE_ENDIAN
1170 && (MEM_ALIGN (src
) < BITS_PER_WORD
|| MEM_ALIGN (dst
) < BITS_PER_WORD
)
1171 && INTVAL (length
) >= UNITS_PER_WORD
)
1174 if (GET_CODE (length
) == CONST_INT
)
1176 if (INTVAL (length
) <= 2 * MAX_MOVE_BYTES
)
1178 score_block_move_straight (dst
, src
, INTVAL (length
));
1181 else if (optimize
&&
1182 !(flag_unroll_loops
|| flag_unroll_all_loops
))
1184 score_block_move_loop (dst
, src
, INTVAL (length
));
1191 struct gcc_target targetm
= TARGET_INITIALIZER
;