1 /* Output routines for Sunplus S+CORE processor
2 Copyright (C) 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
3 Contributed by Sunnorth.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
46 #include "target-def.h"
47 #include "integrate.h"
48 #include "langhooks.h"
/* Initialize the GCC target structure: override the default hook
   implementations with the S+core-specific ones defined in this file.  */

#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START           score_asm_file_start

#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END             score_asm_file_end

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE    score_function_prologue

#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE    score_function_epilogue

#undef  TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS     TARGET_DEFAULT
#undef  TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION            score_handle_option

#undef  TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS       score_legitimize_address

#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE         score_issue_rate

#undef  TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION   score_select_rtx_section

#undef  TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P          score_in_small_data_p

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL  score_function_ok_for_sibcall

#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING   hook_bool_CUMULATIVE_ARGS_true

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK      score_output_mi_thunk

#undef  TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE    default_promote_function_mode_always_promote
#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES       hook_bool_tree_true

#undef  TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK       must_pass_in_stack_var_size

#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES        score_arg_partial_bytes

#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE        score_pass_by_reference

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY         score_return_in_memory

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS                score_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST             score_address_cost

#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P     score_legitimate_address_p

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE            score_can_eliminate

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE  score_asm_trampoline_template
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT          score_trampoline_init
126 struct extern_list
*extern_head
= 0;
128 /* default 0 = NO_REGS */
129 enum reg_class score_char_to_class
[256];
131 /* Implement TARGET_RETURN_IN_MEMORY. In S+core,
132 small structures are returned in a register.
133 Objects with varying size must still be returned in memory. */
135 score_return_in_memory (tree type
, tree fndecl ATTRIBUTE_UNUSED
)
137 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
138 return score7_return_in_memory (type
, fndecl
);
139 else if (TARGET_SCORE3
)
140 return score3_return_in_memory (type
, fndecl
);
145 /* Return nonzero when an argument must be passed by reference. */
147 score_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
148 enum machine_mode mode
, tree type
,
149 bool named ATTRIBUTE_UNUSED
)
151 /* If we have a variable-sized parameter, we have no choice. */
152 return targetm
.calls
.must_pass_in_stack (mode
, type
);
155 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. Generate rtl rather than asm text
156 in order to avoid duplicating too much logic from elsewhere. */
158 score_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
159 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
162 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
163 return score7_output_mi_thunk (file
, thunk_fndecl
, delta
,
164 vcall_offset
, function
);
165 else if (TARGET_SCORE3
)
166 return score3_output_mi_thunk (file
, thunk_fndecl
, delta
,
167 vcall_offset
, function
);
171 /* Implement TARGET_FUNCTION_OK_FOR_SIBCALL. */
173 score_function_ok_for_sibcall (ATTRIBUTE_UNUSED tree decl
,
174 ATTRIBUTE_UNUSED tree exp
)
179 /* Set up the stack and frame (if desired) for the function. */
181 score_function_prologue (FILE *file
, HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
183 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
184 return score7_function_prologue (file
, size
);
185 else if (TARGET_SCORE3
)
186 return score3_function_prologue (file
, size
);
191 /* Do any necessary cleanup after a function to restore stack, frame,
194 score_function_epilogue (FILE *file
,
195 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
197 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
198 return score7_function_epilogue (file
, size
);
199 else if (TARGET_SCORE3
)
200 return score3_function_epilogue (file
, size
);
205 /* Implement TARGET_SCHED_ISSUE_RATE. */
207 score_issue_rate (void)
212 /* Choose the section to use for the constant rtx expression X that has
215 score_select_rtx_section (enum machine_mode mode
, rtx x
,
216 unsigned HOST_WIDE_INT align
)
218 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
219 return score7_select_rtx_section (mode
, x
, align
);
220 else if (TARGET_SCORE3
)
221 return score3_select_rtx_section (mode
, x
, align
);
226 /* Implement TARGET_IN_SMALL_DATA_P. */
228 score_in_small_data_p (tree decl
)
230 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
231 return score7_in_small_data_p (decl
);
232 else if (TARGET_SCORE3
)
233 return score3_in_small_data_p (decl
);
238 /* Implement TARGET_ASM_FILE_START. */
240 score_asm_file_start (void)
243 fprintf (asm_out_file
, "# Sunplus S+core5 %s rev=%s\n",
244 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
245 else if (TARGET_SCORE5U
)
246 fprintf (asm_out_file
, "# Sunplus S+core5u %s rev=%s\n",
247 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
248 else if (TARGET_SCORE7D
)
249 fprintf (asm_out_file
, "# Sunplus S+core7d %s rev=%s\n",
250 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
251 else if (TARGET_SCORE7
)
252 fprintf (asm_out_file
, "# Sunplus S+core7 %s rev=%s\n",
253 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
254 else if (TARGET_SCORE3D
)
255 fprintf (asm_out_file
, "# Sunplus S+core3d %s rev=%s\n",
256 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
257 else if (TARGET_SCORE3
)
258 fprintf (asm_out_file
, "# Sunplus S+core3 %s rev=%s\n",
259 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
261 fprintf (asm_out_file
, "# Sunplus S+core unknown %s rev=%s\n",
262 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
264 default_file_start ();
267 fprintf (asm_out_file
, "\t.set pic\n");
270 /* Implement TARGET_ASM_FILE_END. When using assembler macros, emit
271 .externs for any small-data variables that turned out to be external. */
273 score_asm_file_end (void)
275 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
276 return score7_asm_file_end ();
277 else if (TARGET_SCORE3
)
278 return score3_asm_file_end ();
283 #define MASK_ALL_CPU_BITS \
284 (MASK_SCORE5 | MASK_SCORE5U | MASK_SCORE7 | MASK_SCORE7D \
285 | MASK_SCORE3 | MASK_SCORE3D)
287 /* Implement TARGET_HANDLE_OPTION. */
289 score_handle_option (size_t code
, const char *arg
, int value ATTRIBUTE_UNUSED
)
294 target_flags
&= ~(MASK_ALL_CPU_BITS
);
295 target_flags
|= MASK_SCORE7
| MASK_SCORE7D
;
299 target_flags
&= ~(MASK_ALL_CPU_BITS
);
300 target_flags
|= MASK_SCORE3
| MASK_SCORE3D
;
304 if (strcmp (arg
, "score5") == 0)
306 target_flags
&= ~(MASK_ALL_CPU_BITS
);
307 target_flags
|= MASK_SCORE5
;
310 else if (strcmp (arg
, "score5u") == 0)
312 target_flags
&= ~(MASK_ALL_CPU_BITS
);
313 target_flags
|= MASK_SCORE5U
;
316 else if (strcmp (arg
, "score7") == 0)
318 target_flags
&= ~(MASK_ALL_CPU_BITS
);
319 target_flags
|= MASK_SCORE7
;
322 else if (strcmp (arg
, "score7d") == 0)
324 target_flags
&= ~(MASK_ALL_CPU_BITS
);
325 target_flags
|= MASK_SCORE7
| MASK_SCORE7D
;
328 else if (strcmp (arg
, "score3") == 0)
330 target_flags
&= ~(MASK_ALL_CPU_BITS
);
331 target_flags
|= MASK_SCORE3
;
334 else if (strcmp (arg
, "score3d") == 0)
336 target_flags
&= ~(MASK_ALL_CPU_BITS
);
337 target_flags
|= MASK_SCORE3
| MASK_SCORE3D
;
348 /* Implement OVERRIDE_OPTIONS macro. */
350 score_override_options (void)
352 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
353 return score7_override_options ();
354 else if (TARGET_SCORE3
)
355 return score3_override_options ();
357 return score7_override_options ();
360 /* Implement REGNO_REG_CLASS macro. */
362 score_reg_class (int regno
)
364 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
365 return score7_reg_class (regno
);
366 else if (TARGET_SCORE3
)
367 return score3_reg_class (regno
);
372 /* Implement PREFERRED_RELOAD_CLASS macro. */
374 score_preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, enum reg_class rclass
)
376 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
377 return score7_preferred_reload_class (x
, rclass
);
378 else if (TARGET_SCORE3
)
379 return score3_preferred_reload_class (x
, rclass
);
384 /* Implement SECONDARY_INPUT_RELOAD_CLASS
385 and SECONDARY_OUTPUT_RELOAD_CLASS macro. */
387 score_secondary_reload_class (enum reg_class rclass
,
388 enum machine_mode mode ATTRIBUTE_UNUSED
,
391 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
392 return score7_secondary_reload_class (rclass
, mode
, x
);
393 else if (TARGET_SCORE3
)
394 return score3_secondary_reload_class (rclass
, mode
, x
);
399 /* Implement CONST_OK_FOR_LETTER_P macro. */
401 score_const_ok_for_letter_p (HOST_WIDE_INT value
, char c
)
403 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
404 return score7_const_ok_for_letter_p (value
, c
);
405 else if (TARGET_SCORE3
)
406 return score3_const_ok_for_letter_p (value
, c
);
411 /* Implement EXTRA_CONSTRAINT macro. */
413 score_extra_constraint (rtx op
, char c
)
415 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
416 return score7_extra_constraint (op
, c
);
417 else if (TARGET_SCORE3
)
418 return score3_extra_constraint (op
, c
);
423 /* Return truth value on whether or not a given hard register
424 can support a given mode. */
426 score_hard_regno_mode_ok (unsigned int regno
, enum machine_mode mode
)
428 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
429 return score7_hard_regno_mode_ok (regno
, mode
);
430 else if (TARGET_SCORE3
)
431 return score3_hard_regno_mode_ok (regno
, mode
);
436 /* We can always eliminate to the hard frame pointer. We can eliminate
437 to the stack pointer unless a frame pointer is needed. */
440 score_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
442 return (to
== HARD_FRAME_POINTER_REGNUM
443 || (to
== STACK_POINTER_REGNUM
&& !frame_pointer_needed
));
446 /* Implement INITIAL_ELIMINATION_OFFSET. FROM is either the frame
447 pointer or argument pointer. TO is either the stack pointer or
448 hard frame pointer. */
450 score_initial_elimination_offset (int from
,
451 int to ATTRIBUTE_UNUSED
)
453 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
454 return score7_initial_elimination_offset (from
, to
);
455 else if (TARGET_SCORE3
)
456 return score3_initial_elimination_offset (from
, to
);
461 /* Argument support functions. */
463 /* Initialize CUMULATIVE_ARGS for a function. */
465 score_init_cumulative_args (CUMULATIVE_ARGS
*cum
,
466 tree fntype ATTRIBUTE_UNUSED
,
467 rtx libname ATTRIBUTE_UNUSED
)
469 memset (cum
, 0, sizeof (CUMULATIVE_ARGS
));
472 /* Implement FUNCTION_ARG_ADVANCE macro. */
474 score_function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
475 tree type
, int named
)
477 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
478 return score7_function_arg_advance (cum
, mode
, type
, named
);
479 else if (TARGET_SCORE3
)
480 return score3_function_arg_advance (cum
, mode
, type
, named
);
485 /* Implement TARGET_ARG_PARTIAL_BYTES macro. */
487 score_arg_partial_bytes (CUMULATIVE_ARGS
*cum
,
488 enum machine_mode mode
, tree type
, bool named
)
490 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
491 return score7_arg_partial_bytes (cum
, mode
, type
, named
);
492 else if (TARGET_SCORE3
)
493 return score3_arg_partial_bytes (cum
, mode
, type
, named
);
498 /* Implement FUNCTION_ARG macro. */
500 score_function_arg (const CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
501 tree type
, int named
)
503 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
504 return score7_function_arg (cum
, mode
, type
, named
);
505 else if (TARGET_SCORE3
)
506 return score3_function_arg (cum
, mode
, type
, named
);
511 /* Implement FUNCTION_VALUE and LIBCALL_VALUE. For normal calls,
512 VALTYPE is the return type and MODE is VOIDmode. For libcalls,
513 VALTYPE is null and MODE is the mode of the return value. */
515 score_function_value (tree valtype
, tree func ATTRIBUTE_UNUSED
,
516 enum machine_mode mode
)
518 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
519 return score7_function_value (valtype
, func
, mode
);
520 else if (TARGET_SCORE3
)
521 return score3_function_value (valtype
, func
, mode
);
526 /* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE. */
528 score_asm_trampoline_template (FILE *f
)
530 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
531 return score7_asm_trampoline_template (f
);
532 else if (TARGET_SCORE3
)
533 return score3_asm_trampoline_template (f
);
538 /* Implement TARGET_TRAMPOLINE_INIT. */
540 score_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
542 /* ??? These two routines are identical. */
543 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
544 return score7_trampoline_init (m_tramp
, fndecl
, chain_value
);
545 else if (TARGET_SCORE3
)
546 return score3_trampoline_init (m_tramp
, fndecl
, chain_value
);
551 /* This function is used to implement REG_MODE_OK_FOR_BASE_P macro. */
553 score_regno_mode_ok_for_base_p (int regno
, int strict
)
555 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
556 return score7_regno_mode_ok_for_base_p (regno
, strict
);
557 else if (TARGET_SCORE3
)
558 return score3_regno_mode_ok_for_base_p (regno
, strict
);
563 /* Implement TARGET_LEGITIMIZE_ADDRESS_P. */
565 score_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
567 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
568 return score7_legitimate_address_p (mode
, x
, strict
);
569 else if (TARGET_SCORE3
)
570 return score3_legitimate_address_p (mode
, x
, strict
);
575 /* This function is used to implement LEGITIMIZE_ADDRESS. If X can
576 be legitimized in a way that the generic machinery might not expect,
577 return the new address, else return X. */
579 score_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
580 enum machine_mode mode ATTRIBUTE_UNUSED
)
582 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
583 return score7_legitimize_address (x
);
584 else if (TARGET_SCORE3
)
585 return score3_legitimize_address (x
);
590 /* Return a number assessing the cost of moving a register in class
593 score_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
594 enum reg_class from
, enum reg_class to
)
596 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
597 return score7_register_move_cost (mode
, from
, to
);
598 else if (TARGET_SCORE3
)
599 return score3_register_move_cost (mode
, from
, to
);
604 /* Implement TARGET_RTX_COSTS macro. */
606 score_rtx_costs (rtx x
, int code
, int outer_code
, int *total
,
607 bool speed ATTRIBUTE_UNUSED
)
609 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
610 return score7_rtx_costs (x
, code
, outer_code
, total
, speed
);
611 else if (TARGET_SCORE3
)
612 return score3_rtx_costs (x
, code
, outer_code
, total
, speed
);
617 /* Implement TARGET_ADDRESS_COST macro. */
619 score_address_cost (rtx addr
,
620 bool speed ATTRIBUTE_UNUSED
)
622 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
623 return score7_address_cost (addr
);
624 else if (TARGET_SCORE3
)
625 return score3_address_cost (addr
);
630 /* Implement ASM_OUTPUT_EXTERNAL macro. */
632 score_output_external (FILE *file ATTRIBUTE_UNUSED
,
633 tree decl
, const char *name
)
635 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
636 return score7_output_external (file
, decl
, name
);
637 else if (TARGET_SCORE3
)
638 return score3_output_external (file
, decl
, name
);
643 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
644 back to a previous frame. */
646 score_return_addr (int count
, rtx frame ATTRIBUTE_UNUSED
)
648 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
649 return score7_return_addr (count
, frame
);
650 else if (TARGET_SCORE3
)
651 return score3_return_addr (count
, frame
);
656 /* Implement PRINT_OPERAND macro. */
658 score_print_operand (FILE *file
, rtx op
, int c
)
660 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
661 return score7_print_operand (file
, op
, c
);
662 else if (TARGET_SCORE3
)
663 return score3_print_operand (file
, op
, c
);
668 /* Implement PRINT_OPERAND_ADDRESS macro. */
670 score_print_operand_address (FILE *file
, rtx x
)
672 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
673 return score7_print_operand_address (file
, x
);
674 else if (TARGET_SCORE3
)
675 return score3_print_operand_address (file
, x
);
680 /* Implement SELECT_CC_MODE macro. */
682 score_select_cc_mode (enum rtx_code op
, rtx x
, rtx y
)
684 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
685 return score7_select_cc_mode (op
, x
, y
);
686 else if (TARGET_SCORE3
)
687 return score3_select_cc_mode (op
, x
, y
);
692 /* Return true if X is a symbolic constant that can be calculated in
693 the same way as a bare symbol. If it is, store the type of the
694 symbol in *SYMBOL_TYPE. */
696 score_symbolic_constant_p (rtx x
, enum score_symbol_type
*symbol_type
)
698 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
699 return score7_symbolic_constant_p (x
, symbol_type
);
700 else if (TARGET_SCORE3
)
701 return score3_symbolic_constant_p (x
, symbol_type
);
706 /* Generate the prologue instructions for entry into a S+core function. */
708 score_prologue (void)
710 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
711 return score7_prologue ();
712 else if (TARGET_SCORE3
)
713 return score3_prologue ();
718 /* Generate the epilogue instructions in a S+core function. */
720 score_epilogue (int sibcall_p
)
722 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
723 return score7_epilogue (sibcall_p
);
724 else if (TARGET_SCORE3
)
725 return score3_epilogue (sibcall_p
);
730 /* Call and sibcall pattern all need call this function. */
732 score_call (rtx
*ops
, bool sib
)
734 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
735 return score7_call (ops
, sib
);
736 else if (TARGET_SCORE3
)
737 return score3_call (ops
, sib
);
742 /* Call value and sibcall value pattern all need call this function. */
744 score_call_value (rtx
*ops
, bool sib
)
746 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
747 return score7_call_value (ops
, sib
);
748 else if (TARGET_SCORE3
)
749 return score3_call_value (ops
, sib
);
755 score_movsicc (rtx
*ops
)
757 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
758 return score7_movsicc (ops
);
759 else if (TARGET_SCORE3
)
760 return score3_movsicc (ops
);
767 score_movdi (rtx
*ops
)
769 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
770 return score7_movdi (ops
);
771 else if (TARGET_SCORE3
)
772 return score3_movdi (ops
);
778 score_zero_extract_andi (rtx
*ops
)
780 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
781 return score7_zero_extract_andi (ops
);
782 else if (TARGET_SCORE3
)
783 return score3_zero_extract_andi (ops
);
788 /* Output asm insn for move. */
790 score_move (rtx
*ops
)
792 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
793 return score7_move (ops
);
794 else if (TARGET_SCORE3
)
795 return score3_move (ops
);
800 /* Output asm insn for load. */
802 score_linsn (rtx
*ops
, enum score_mem_unit unit
, bool sign
)
804 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
805 return score7_linsn (ops
, unit
, sign
);
806 else if (TARGET_SCORE3
)
807 return score3_linsn (ops
, unit
, sign
);
812 /* Output asm insn for store. */
814 score_sinsn (rtx
*ops
, enum score_mem_unit unit
)
816 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
817 return score7_sinsn (ops
, unit
);
818 else if (TARGET_SCORE3
)
819 return score3_sinsn (ops
, unit
);
824 /* Output asm insn for load immediate. */
826 score_limm (rtx
*ops
)
828 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
829 return score7_limm (ops
);
830 else if (TARGET_SCORE3
)
831 return score3_limm (ops
);
837 /* Generate add insn. */
839 score_select_add_imm (rtx
*ops
, bool set_cc
)
841 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
842 return score7_select_add_imm (ops
, set_cc
);
843 else if (TARGET_SCORE3
)
844 return score3_select_add_imm (ops
, set_cc
);
849 /* Output arith insn. */
851 score_select (rtx
*ops
, const char *inst_pre
,
852 bool commu
, const char *letter
, bool set_cc
)
854 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
855 return score7_select (ops
, inst_pre
, commu
, letter
, set_cc
);
856 else if (TARGET_SCORE3
)
857 return score3_select (ops
, inst_pre
, commu
, letter
, set_cc
);
862 /* Output switch case insn, only supported in score3. */
864 score_output_casesi (rtx
*operands
)
867 return score3_output_casesi (operands
);
872 /* Output rpush insn, only supported in score3. */
874 score_rpush (rtx
*operands
)
877 return score3_rpush (operands
);
882 /* Output rpop insn, only supported in score3. */
884 score_rpop (rtx
*operands
)
887 return score3_rpop (operands
);
892 /* Emit lcb/lce insns. */
894 score_unaligned_load (rtx
*ops
)
902 if (INTVAL (len
) != BITS_PER_WORD
903 || (INTVAL (off
) % BITS_PER_UNIT
) != 0)
906 gcc_assert (GET_MODE_SIZE (GET_MODE (dst
)) == GET_MODE_SIZE (SImode
));
908 addr_reg
= copy_addr_to_reg (XEXP (src
, 0));
909 emit_insn (gen_move_lcb (addr_reg
, addr_reg
));
910 emit_insn (gen_move_lce (addr_reg
, addr_reg
, dst
));
915 /* Emit scb/sce insns. */
917 score_unaligned_store (rtx
*ops
)
925 if (INTVAL(len
) != BITS_PER_WORD
926 || (INTVAL(off
) % BITS_PER_UNIT
) != 0)
929 gcc_assert (GET_MODE_SIZE (GET_MODE (src
)) == GET_MODE_SIZE (SImode
));
931 addr_reg
= copy_addr_to_reg (XEXP (dst
, 0));
932 emit_insn (gen_move_scb (addr_reg
, addr_reg
, src
));
933 emit_insn (gen_move_sce (addr_reg
, addr_reg
));
938 /* If length is short, generate move insns straight. */
940 score_block_move_straight (rtx dst
, rtx src
, HOST_WIDE_INT length
)
942 HOST_WIDE_INT leftover
;
946 leftover
= length
% UNITS_PER_WORD
;
948 reg_count
= length
/ UNITS_PER_WORD
;
950 regs
= XALLOCAVEC (rtx
, reg_count
);
951 for (i
= 0; i
< reg_count
; i
++)
952 regs
[i
] = gen_reg_rtx (SImode
);
954 /* Load from src to regs. */
955 if (MEM_ALIGN (src
) >= BITS_PER_WORD
)
957 HOST_WIDE_INT offset
= 0;
958 for (i
= 0; i
< reg_count
; offset
+= UNITS_PER_WORD
, i
++)
959 emit_move_insn (regs
[i
], adjust_address (src
, SImode
, offset
));
961 else if (reg_count
>= 1)
963 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
965 emit_insn (gen_move_lcb (src_reg
, src_reg
));
966 for (i
= 0; i
< (reg_count
- 1); i
++)
967 emit_insn (gen_move_lcw (src_reg
, src_reg
, regs
[i
]));
968 emit_insn (gen_move_lce (src_reg
, src_reg
, regs
[i
]));
971 /* Store regs to dest. */
972 if (MEM_ALIGN (dst
) >= BITS_PER_WORD
)
974 HOST_WIDE_INT offset
= 0;
975 for (i
= 0; i
< reg_count
; offset
+= UNITS_PER_WORD
, i
++)
976 emit_move_insn (adjust_address (dst
, SImode
, offset
), regs
[i
]);
978 else if (reg_count
>= 1)
980 rtx dst_reg
= copy_addr_to_reg (XEXP (dst
, 0));
982 emit_insn (gen_move_scb (dst_reg
, dst_reg
, regs
[0]));
983 for (i
= 1; i
< reg_count
; i
++)
984 emit_insn (gen_move_scw (dst_reg
, dst_reg
, regs
[i
]));
985 emit_insn (gen_move_sce (dst_reg
, dst_reg
));
988 /* Mop up any left-over bytes. */
991 src
= adjust_address (src
, BLKmode
, length
);
992 dst
= adjust_address (dst
, BLKmode
, length
);
993 move_by_pieces (dst
, src
, leftover
,
994 MIN (MEM_ALIGN (src
), MEM_ALIGN (dst
)), 0);
998 /* Generate loop head when dst or src is unaligned. */
1000 score_block_move_loop_head (rtx dst_reg
, HOST_WIDE_INT dst_align
,
1001 rtx src_reg
, HOST_WIDE_INT src_align
,
1002 HOST_WIDE_INT length
)
1004 bool src_unaligned
= (src_align
< BITS_PER_WORD
);
1005 bool dst_unaligned
= (dst_align
< BITS_PER_WORD
);
1007 rtx temp
= gen_reg_rtx (SImode
);
1009 gcc_assert (length
== UNITS_PER_WORD
);
1013 emit_insn (gen_move_lcb (src_reg
, src_reg
));
1014 emit_insn (gen_move_lcw (src_reg
, src_reg
, temp
));
1017 emit_insn (gen_move_lw_a (src_reg
,
1018 src_reg
, gen_int_mode (4, SImode
), temp
));
1021 emit_insn (gen_move_scb (dst_reg
, dst_reg
, temp
));
1023 emit_insn (gen_move_sw_a (dst_reg
,
1024 dst_reg
, gen_int_mode (4, SImode
), temp
));
1027 /* Generate loop body, copy length bytes per iteration. */
1029 score_block_move_loop_body (rtx dst_reg
, HOST_WIDE_INT dst_align
,
1030 rtx src_reg
, HOST_WIDE_INT src_align
,
1031 HOST_WIDE_INT length
)
1033 int reg_count
= length
/ UNITS_PER_WORD
;
1034 rtx
*regs
= XALLOCAVEC (rtx
, reg_count
);
1036 bool src_unaligned
= (src_align
< BITS_PER_WORD
);
1037 bool dst_unaligned
= (dst_align
< BITS_PER_WORD
);
1039 for (i
= 0; i
< reg_count
; i
++)
1040 regs
[i
] = gen_reg_rtx (SImode
);
1044 for (i
= 0; i
< reg_count
; i
++)
1045 emit_insn (gen_move_lcw (src_reg
, src_reg
, regs
[i
]));
1049 for (i
= 0; i
< reg_count
; i
++)
1050 emit_insn (gen_move_lw_a (src_reg
,
1051 src_reg
, gen_int_mode (4, SImode
), regs
[i
]));
1056 for (i
= 0; i
< reg_count
; i
++)
1057 emit_insn (gen_move_scw (dst_reg
, dst_reg
, regs
[i
]));
1061 for (i
= 0; i
< reg_count
; i
++)
1062 emit_insn (gen_move_sw_a (dst_reg
,
1063 dst_reg
, gen_int_mode (4, SImode
), regs
[i
]));
1067 /* Generate loop foot, copy the leftover bytes. */
1069 score_block_move_loop_foot (rtx dst_reg
, HOST_WIDE_INT dst_align
,
1070 rtx src_reg
, HOST_WIDE_INT src_align
,
1071 HOST_WIDE_INT length
)
1073 bool src_unaligned
= (src_align
< BITS_PER_WORD
);
1074 bool dst_unaligned
= (dst_align
< BITS_PER_WORD
);
1076 HOST_WIDE_INT leftover
;
1078 leftover
= length
% UNITS_PER_WORD
;
1082 score_block_move_loop_body (dst_reg
, dst_align
,
1083 src_reg
, src_align
, length
);
1086 emit_insn (gen_move_sce (dst_reg
, dst_reg
));
1090 HOST_WIDE_INT src_adj
= src_unaligned
? -4 : 0;
1091 HOST_WIDE_INT dst_adj
= dst_unaligned
? -4 : 0;
1094 gcc_assert (leftover
< UNITS_PER_WORD
);
1096 if (leftover
>= UNITS_PER_WORD
/ 2
1097 && src_align
>= BITS_PER_WORD
/ 2
1098 && dst_align
>= BITS_PER_WORD
/ 2)
1100 temp
= gen_reg_rtx (HImode
);
1101 emit_insn (gen_move_lhu_b (src_reg
, src_reg
,
1102 gen_int_mode (src_adj
, SImode
), temp
));
1103 emit_insn (gen_move_sh_b (dst_reg
, dst_reg
,
1104 gen_int_mode (dst_adj
, SImode
), temp
));
1105 leftover
-= UNITS_PER_WORD
/ 2;
1106 src_adj
= UNITS_PER_WORD
/ 2;
1107 dst_adj
= UNITS_PER_WORD
/ 2;
1110 while (leftover
> 0)
1112 temp
= gen_reg_rtx (QImode
);
1113 emit_insn (gen_move_lbu_b (src_reg
, src_reg
,
1114 gen_int_mode (src_adj
, SImode
), temp
));
1115 emit_insn (gen_move_sb_b (dst_reg
, dst_reg
,
1116 gen_int_mode (dst_adj
, SImode
), temp
));
1124 #define MIN_MOVE_REGS 3
1125 #define MIN_MOVE_BYTES (MIN_MOVE_REGS * UNITS_PER_WORD)
1126 #define MAX_MOVE_REGS 4
1127 #define MAX_MOVE_BYTES (MAX_MOVE_REGS * UNITS_PER_WORD)
1129 /* The length is large, generate a loop if necessary.
1130 The loop is consisted by loop head/body/foot. */
1132 score_block_move_loop (rtx dst
, rtx src
, HOST_WIDE_INT length
)
1134 HOST_WIDE_INT src_align
= MEM_ALIGN (src
);
1135 HOST_WIDE_INT dst_align
= MEM_ALIGN (dst
);
1136 HOST_WIDE_INT loop_mov_bytes
;
1137 HOST_WIDE_INT iteration
= 0;
1138 HOST_WIDE_INT head_length
= 0, leftover
;
1139 rtx label
, src_reg
, dst_reg
, final_dst
, test
;
1141 bool gen_loop_head
= (src_align
< BITS_PER_WORD
1142 || dst_align
< BITS_PER_WORD
);
1145 head_length
+= UNITS_PER_WORD
;
1147 for (loop_mov_bytes
= MAX_MOVE_BYTES
;
1148 loop_mov_bytes
>= MIN_MOVE_BYTES
;
1149 loop_mov_bytes
-= UNITS_PER_WORD
)
1151 iteration
= (length
- head_length
) / loop_mov_bytes
;
1157 score_block_move_straight (dst
, src
, length
);
1161 leftover
= (length
- head_length
) % loop_mov_bytes
;
1164 src_reg
= copy_addr_to_reg (XEXP (src
, 0));
1165 dst_reg
= copy_addr_to_reg (XEXP (dst
, 0));
1166 final_dst
= expand_simple_binop (Pmode
, PLUS
, dst_reg
, GEN_INT (length
),
1170 score_block_move_loop_head (dst_reg
, dst_align
,
1171 src_reg
, src_align
, head_length
);
1173 label
= gen_label_rtx ();
1176 score_block_move_loop_body (dst_reg
, dst_align
,
1177 src_reg
, src_align
, loop_mov_bytes
);
1179 test
= gen_rtx_NE (VOIDmode
, dst_reg
, final_dst
);
1180 emit_jump_insn (gen_cbranchsi4 (test
, dst_reg
, final_dst
, label
));
1182 score_block_move_loop_foot (dst_reg
, dst_align
,
1183 src_reg
, src_align
, leftover
);
1186 /* Generate block move, for misc.md: "movmemsi". */
1188 score_block_move (rtx
*ops
)
1192 rtx length
= ops
[2];
1194 if (TARGET_LITTLE_ENDIAN
1195 && (MEM_ALIGN (src
) < BITS_PER_WORD
|| MEM_ALIGN (dst
) < BITS_PER_WORD
)
1196 && INTVAL (length
) >= UNITS_PER_WORD
)
1199 if (GET_CODE (length
) == CONST_INT
)
1201 if (INTVAL (length
) <= 2 * MAX_MOVE_BYTES
)
1203 score_block_move_straight (dst
, src
, INTVAL (length
));
1206 else if (optimize
&&
1207 !(flag_unroll_loops
|| flag_unroll_all_loops
))
1209 score_block_move_loop (dst
, src
, INTVAL (length
));
1216 struct gcc_target targetm
= TARGET_INITIALIZER
;