1 /* Output routines for Sunplus S+CORE processor
2 Copyright (C) 2005, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
3 Contributed by Sunnorth.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-attr.h"
32 #include "diagnostic-core.h"
47 #include "target-def.h"
48 #include "integrate.h"
49 #include "langhooks.h"
/* Initialize the GCC target structure: install the S+core
   implementation of each generic target hook.  */
#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START           score_asm_file_start

#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END             score_asm_file_end

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE    score_function_prologue

#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE    score_function_epilogue

#undef  TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS     TARGET_DEFAULT
#undef  TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION            score_handle_option

#undef  TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS       score_legitimize_address

#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE         score_issue_rate

#undef  TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION   score_select_rtx_section

#undef  TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P          score_in_small_data_p

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL  score_function_ok_for_sibcall

#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING   hook_bool_CUMULATIVE_ARGS_true

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK      score_output_mi_thunk

#undef  TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE    default_promote_function_mode_always_promote

#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES       hook_bool_tree_true

#undef  TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK       must_pass_in_stack_var_size

#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES        score_arg_partial_bytes

#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE        score_pass_by_reference

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY         score_return_in_memory

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS                score_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST             score_address_cost

#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P     score_legitimate_address_p

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE            score_can_eliminate

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE  score_asm_trampoline_template
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT          score_trampoline_init
127 struct extern_list
*extern_head
= 0;
129 /* default 0 = NO_REGS */
130 enum reg_class score_char_to_class
[256];
132 /* Implement TARGET_RETURN_IN_MEMORY. In S+core,
133 small structures are returned in a register.
134 Objects with varying size must still be returned in memory. */
136 score_return_in_memory (tree type
, tree fndecl ATTRIBUTE_UNUSED
)
138 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
139 return score7_return_in_memory (type
, fndecl
);
140 else if (TARGET_SCORE3
)
141 return score3_return_in_memory (type
, fndecl
);
146 /* Return nonzero when an argument must be passed by reference. */
148 score_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
149 enum machine_mode mode
, tree type
,
150 bool named ATTRIBUTE_UNUSED
)
152 /* If we have a variable-sized parameter, we have no choice. */
153 return targetm
.calls
.must_pass_in_stack (mode
, type
);
156 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. Generate rtl rather than asm text
157 in order to avoid duplicating too much logic from elsewhere. */
159 score_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
160 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
163 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
164 return score7_output_mi_thunk (file
, thunk_fndecl
, delta
,
165 vcall_offset
, function
);
166 else if (TARGET_SCORE3
)
167 return score3_output_mi_thunk (file
, thunk_fndecl
, delta
,
168 vcall_offset
, function
);
172 /* Implement TARGET_FUNCTION_OK_FOR_SIBCALL. */
174 score_function_ok_for_sibcall (ATTRIBUTE_UNUSED tree decl
,
175 ATTRIBUTE_UNUSED tree exp
)
180 /* Set up the stack and frame (if desired) for the function. */
182 score_function_prologue (FILE *file
, HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
184 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
185 return score7_function_prologue (file
, size
);
186 else if (TARGET_SCORE3
)
187 return score3_function_prologue (file
, size
);
192 /* Do any necessary cleanup after a function to restore stack, frame,
195 score_function_epilogue (FILE *file
,
196 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
198 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
199 return score7_function_epilogue (file
, size
);
200 else if (TARGET_SCORE3
)
201 return score3_function_epilogue (file
, size
);
/* Implement TARGET_SCHED_ISSUE_RATE.  */
static int
score_issue_rate (void)
{
  /* NOTE(review): body lost in extraction; S+core is single-issue, so the
     conventional value is 1 — confirm against repository.  */
  return 1;
}
213 /* Choose the section to use for the constant rtx expression X that has
216 score_select_rtx_section (enum machine_mode mode
, rtx x
,
217 unsigned HOST_WIDE_INT align
)
219 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
220 return score7_select_rtx_section (mode
, x
, align
);
221 else if (TARGET_SCORE3
)
222 return score3_select_rtx_section (mode
, x
, align
);
227 /* Implement TARGET_IN_SMALL_DATA_P. */
229 score_in_small_data_p (tree decl
)
231 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
232 return score7_in_small_data_p (decl
);
233 else if (TARGET_SCORE3
)
234 return score3_in_small_data_p (decl
);
239 /* Implement TARGET_ASM_FILE_START. */
241 score_asm_file_start (void)
244 fprintf (asm_out_file
, "# Sunplus S+core5 %s rev=%s\n",
245 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
246 else if (TARGET_SCORE5U
)
247 fprintf (asm_out_file
, "# Sunplus S+core5u %s rev=%s\n",
248 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
249 else if (TARGET_SCORE7D
)
250 fprintf (asm_out_file
, "# Sunplus S+core7d %s rev=%s\n",
251 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
252 else if (TARGET_SCORE7
)
253 fprintf (asm_out_file
, "# Sunplus S+core7 %s rev=%s\n",
254 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
255 else if (TARGET_SCORE3D
)
256 fprintf (asm_out_file
, "# Sunplus S+core3d %s rev=%s\n",
257 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
258 else if (TARGET_SCORE3
)
259 fprintf (asm_out_file
, "# Sunplus S+core3 %s rev=%s\n",
260 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
262 fprintf (asm_out_file
, "# Sunplus S+core unknown %s rev=%s\n",
263 TARGET_LITTLE_ENDIAN
? "el" : "eb", SCORE_GCC_VERSION
);
265 default_file_start ();
268 fprintf (asm_out_file
, "\t.set pic\n");
271 /* Implement TARGET_ASM_FILE_END. When using assembler macros, emit
272 .externs for any small-data variables that turned out to be external. */
274 score_asm_file_end (void)
276 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
277 return score7_asm_file_end ();
278 else if (TARGET_SCORE3
)
279 return score3_asm_file_end ();
/* Union of every CPU-selection flag bit; used to clear the current CPU
   choice before installing a new one in score_handle_option.  */
#define MASK_ALL_CPU_BITS \
  (MASK_SCORE5 | MASK_SCORE5U | MASK_SCORE7 | MASK_SCORE7D \
   | MASK_SCORE3 | MASK_SCORE3D)
288 /* Implement TARGET_HANDLE_OPTION. */
290 score_handle_option (size_t code
, const char *arg
, int value
)
295 g_switch_value
= value
;
300 target_flags
&= ~(MASK_ALL_CPU_BITS
);
301 target_flags
|= MASK_SCORE7
| MASK_SCORE7D
;
305 target_flags
&= ~(MASK_ALL_CPU_BITS
);
306 target_flags
|= MASK_SCORE3
| MASK_SCORE3D
;
310 if (strcmp (arg
, "score5") == 0)
312 target_flags
&= ~(MASK_ALL_CPU_BITS
);
313 target_flags
|= MASK_SCORE5
;
316 else if (strcmp (arg
, "score5u") == 0)
318 target_flags
&= ~(MASK_ALL_CPU_BITS
);
319 target_flags
|= MASK_SCORE5U
;
322 else if (strcmp (arg
, "score7") == 0)
324 target_flags
&= ~(MASK_ALL_CPU_BITS
);
325 target_flags
|= MASK_SCORE7
;
328 else if (strcmp (arg
, "score7d") == 0)
330 target_flags
&= ~(MASK_ALL_CPU_BITS
);
331 target_flags
|= MASK_SCORE7
| MASK_SCORE7D
;
334 else if (strcmp (arg
, "score3") == 0)
336 target_flags
&= ~(MASK_ALL_CPU_BITS
);
337 target_flags
|= MASK_SCORE3
;
340 else if (strcmp (arg
, "score3d") == 0)
342 target_flags
&= ~(MASK_ALL_CPU_BITS
);
343 target_flags
|= MASK_SCORE3
| MASK_SCORE3D
;
354 /* Implement OVERRIDE_OPTIONS macro. */
356 score_override_options (void)
358 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
359 return score7_override_options ();
360 else if (TARGET_SCORE3
)
361 return score3_override_options ();
363 return score7_override_options ();
366 /* Implement REGNO_REG_CLASS macro. */
368 score_reg_class (int regno
)
370 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
371 return score7_reg_class (regno
);
372 else if (TARGET_SCORE3
)
373 return score3_reg_class (regno
);
378 /* Implement PREFERRED_RELOAD_CLASS macro. */
380 score_preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, enum reg_class rclass
)
382 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
383 return score7_preferred_reload_class (x
, rclass
);
384 else if (TARGET_SCORE3
)
385 return score3_preferred_reload_class (x
, rclass
);
390 /* Implement SECONDARY_INPUT_RELOAD_CLASS
391 and SECONDARY_OUTPUT_RELOAD_CLASS macro. */
393 score_secondary_reload_class (enum reg_class rclass
,
394 enum machine_mode mode ATTRIBUTE_UNUSED
,
397 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
398 return score7_secondary_reload_class (rclass
, mode
, x
);
399 else if (TARGET_SCORE3
)
400 return score3_secondary_reload_class (rclass
, mode
, x
);
405 /* Implement CONST_OK_FOR_LETTER_P macro. */
407 score_const_ok_for_letter_p (HOST_WIDE_INT value
, char c
)
409 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
410 return score7_const_ok_for_letter_p (value
, c
);
411 else if (TARGET_SCORE3
)
412 return score3_const_ok_for_letter_p (value
, c
);
417 /* Implement EXTRA_CONSTRAINT macro. */
419 score_extra_constraint (rtx op
, char c
)
421 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
422 return score7_extra_constraint (op
, c
);
423 else if (TARGET_SCORE3
)
424 return score3_extra_constraint (op
, c
);
429 /* Return truth value on whether or not a given hard register
430 can support a given mode. */
432 score_hard_regno_mode_ok (unsigned int regno
, enum machine_mode mode
)
434 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
435 return score7_hard_regno_mode_ok (regno
, mode
);
436 else if (TARGET_SCORE3
)
437 return score3_hard_regno_mode_ok (regno
, mode
);
442 /* We can always eliminate to the hard frame pointer. We can eliminate
443 to the stack pointer unless a frame pointer is needed. */
446 score_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
448 return (to
== HARD_FRAME_POINTER_REGNUM
449 || (to
== STACK_POINTER_REGNUM
&& !frame_pointer_needed
));
452 /* Implement INITIAL_ELIMINATION_OFFSET. FROM is either the frame
453 pointer or argument pointer. TO is either the stack pointer or
454 hard frame pointer. */
456 score_initial_elimination_offset (int from
,
457 int to ATTRIBUTE_UNUSED
)
459 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
460 return score7_initial_elimination_offset (from
, to
);
461 else if (TARGET_SCORE3
)
462 return score3_initial_elimination_offset (from
, to
);
467 /* Argument support functions. */
469 /* Initialize CUMULATIVE_ARGS for a function. */
471 score_init_cumulative_args (CUMULATIVE_ARGS
*cum
,
472 tree fntype ATTRIBUTE_UNUSED
,
473 rtx libname ATTRIBUTE_UNUSED
)
475 memset (cum
, 0, sizeof (CUMULATIVE_ARGS
));
478 /* Implement FUNCTION_ARG_ADVANCE macro. */
480 score_function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
481 tree type
, int named
)
483 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
484 return score7_function_arg_advance (cum
, mode
, type
, named
);
485 else if (TARGET_SCORE3
)
486 return score3_function_arg_advance (cum
, mode
, type
, named
);
491 /* Implement TARGET_ARG_PARTIAL_BYTES macro. */
493 score_arg_partial_bytes (CUMULATIVE_ARGS
*cum
,
494 enum machine_mode mode
, tree type
, bool named
)
496 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
497 return score7_arg_partial_bytes (cum
, mode
, type
, named
);
498 else if (TARGET_SCORE3
)
499 return score3_arg_partial_bytes (cum
, mode
, type
, named
);
504 /* Implement FUNCTION_ARG macro. */
506 score_function_arg (const CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
507 tree type
, int named
)
509 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
510 return score7_function_arg (cum
, mode
, type
, named
);
511 else if (TARGET_SCORE3
)
512 return score3_function_arg (cum
, mode
, type
, named
);
517 /* Implement FUNCTION_VALUE and LIBCALL_VALUE. For normal calls,
518 VALTYPE is the return type and MODE is VOIDmode. For libcalls,
519 VALTYPE is null and MODE is the mode of the return value. */
521 score_function_value (tree valtype
, tree func ATTRIBUTE_UNUSED
,
522 enum machine_mode mode
)
524 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
525 return score7_function_value (valtype
, func
, mode
);
526 else if (TARGET_SCORE3
)
527 return score3_function_value (valtype
, func
, mode
);
532 /* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE. */
534 score_asm_trampoline_template (FILE *f
)
536 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
537 return score7_asm_trampoline_template (f
);
538 else if (TARGET_SCORE3
)
539 return score3_asm_trampoline_template (f
);
544 /* Implement TARGET_TRAMPOLINE_INIT. */
546 score_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
548 /* ??? These two routines are identical. */
549 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
550 return score7_trampoline_init (m_tramp
, fndecl
, chain_value
);
551 else if (TARGET_SCORE3
)
552 return score3_trampoline_init (m_tramp
, fndecl
, chain_value
);
557 /* This function is used to implement REG_MODE_OK_FOR_BASE_P macro. */
559 score_regno_mode_ok_for_base_p (int regno
, int strict
)
561 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
562 return score7_regno_mode_ok_for_base_p (regno
, strict
);
563 else if (TARGET_SCORE3
)
564 return score3_regno_mode_ok_for_base_p (regno
, strict
);
569 /* Implement TARGET_LEGITIMIZE_ADDRESS_P. */
571 score_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
573 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
574 return score7_legitimate_address_p (mode
, x
, strict
);
575 else if (TARGET_SCORE3
)
576 return score3_legitimate_address_p (mode
, x
, strict
);
581 /* This function is used to implement LEGITIMIZE_ADDRESS. If X can
582 be legitimized in a way that the generic machinery might not expect,
583 return the new address, else return X. */
585 score_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
586 enum machine_mode mode ATTRIBUTE_UNUSED
)
588 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
589 return score7_legitimize_address (x
);
590 else if (TARGET_SCORE3
)
591 return score3_legitimize_address (x
);
596 /* Return a number assessing the cost of moving a register in class
599 score_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
600 enum reg_class from
, enum reg_class to
)
602 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
603 return score7_register_move_cost (mode
, from
, to
);
604 else if (TARGET_SCORE3
)
605 return score3_register_move_cost (mode
, from
, to
);
610 /* Implement TARGET_RTX_COSTS macro. */
612 score_rtx_costs (rtx x
, int code
, int outer_code
, int *total
,
613 bool speed ATTRIBUTE_UNUSED
)
615 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
616 return score7_rtx_costs (x
, code
, outer_code
, total
, speed
);
617 else if (TARGET_SCORE3
)
618 return score3_rtx_costs (x
, code
, outer_code
, total
, speed
);
623 /* Implement TARGET_ADDRESS_COST macro. */
625 score_address_cost (rtx addr
,
626 bool speed ATTRIBUTE_UNUSED
)
628 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
629 return score7_address_cost (addr
);
630 else if (TARGET_SCORE3
)
631 return score3_address_cost (addr
);
636 /* Implement ASM_OUTPUT_EXTERNAL macro. */
638 score_output_external (FILE *file ATTRIBUTE_UNUSED
,
639 tree decl
, const char *name
)
641 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
642 return score7_output_external (file
, decl
, name
);
643 else if (TARGET_SCORE3
)
644 return score3_output_external (file
, decl
, name
);
649 /* Implement RETURN_ADDR_RTX. Note, we do not support moving
650 back to a previous frame. */
652 score_return_addr (int count
, rtx frame ATTRIBUTE_UNUSED
)
654 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
655 return score7_return_addr (count
, frame
);
656 else if (TARGET_SCORE3
)
657 return score3_return_addr (count
, frame
);
662 /* Implement PRINT_OPERAND macro. */
664 score_print_operand (FILE *file
, rtx op
, int c
)
666 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
667 return score7_print_operand (file
, op
, c
);
668 else if (TARGET_SCORE3
)
669 return score3_print_operand (file
, op
, c
);
674 /* Implement PRINT_OPERAND_ADDRESS macro. */
676 score_print_operand_address (FILE *file
, rtx x
)
678 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
679 return score7_print_operand_address (file
, x
);
680 else if (TARGET_SCORE3
)
681 return score3_print_operand_address (file
, x
);
686 /* Implement SELECT_CC_MODE macro. */
688 score_select_cc_mode (enum rtx_code op
, rtx x
, rtx y
)
690 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
691 return score7_select_cc_mode (op
, x
, y
);
692 else if (TARGET_SCORE3
)
693 return score3_select_cc_mode (op
, x
, y
);
698 /* Return true if X is a symbolic constant that can be calculated in
699 the same way as a bare symbol. If it is, store the type of the
700 symbol in *SYMBOL_TYPE. */
702 score_symbolic_constant_p (rtx x
, enum score_symbol_type
*symbol_type
)
704 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
705 return score7_symbolic_constant_p (x
, symbol_type
);
706 else if (TARGET_SCORE3
)
707 return score3_symbolic_constant_p (x
, symbol_type
);
712 /* Generate the prologue instructions for entry into a S+core function. */
714 score_prologue (void)
716 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
717 return score7_prologue ();
718 else if (TARGET_SCORE3
)
719 return score3_prologue ();
724 /* Generate the epilogue instructions in a S+core function. */
726 score_epilogue (int sibcall_p
)
728 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
729 return score7_epilogue (sibcall_p
);
730 else if (TARGET_SCORE3
)
731 return score3_epilogue (sibcall_p
);
736 /* Call and sibcall pattern all need call this function. */
738 score_call (rtx
*ops
, bool sib
)
740 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
741 return score7_call (ops
, sib
);
742 else if (TARGET_SCORE3
)
743 return score3_call (ops
, sib
);
748 /* Call value and sibcall value pattern all need call this function. */
750 score_call_value (rtx
*ops
, bool sib
)
752 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
753 return score7_call_value (ops
, sib
);
754 else if (TARGET_SCORE3
)
755 return score3_call_value (ops
, sib
);
761 score_movsicc (rtx
*ops
)
763 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
764 return score7_movsicc (ops
);
765 else if (TARGET_SCORE3
)
766 return score3_movsicc (ops
);
773 score_movdi (rtx
*ops
)
775 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
776 return score7_movdi (ops
);
777 else if (TARGET_SCORE3
)
778 return score3_movdi (ops
);
784 score_zero_extract_andi (rtx
*ops
)
786 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
787 return score7_zero_extract_andi (ops
);
788 else if (TARGET_SCORE3
)
789 return score3_zero_extract_andi (ops
);
794 /* Output asm insn for move. */
796 score_move (rtx
*ops
)
798 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
799 return score7_move (ops
);
800 else if (TARGET_SCORE3
)
801 return score3_move (ops
);
806 /* Output asm insn for load. */
808 score_linsn (rtx
*ops
, enum score_mem_unit unit
, bool sign
)
810 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
811 return score7_linsn (ops
, unit
, sign
);
812 else if (TARGET_SCORE3
)
813 return score3_linsn (ops
, unit
, sign
);
818 /* Output asm insn for store. */
820 score_sinsn (rtx
*ops
, enum score_mem_unit unit
)
822 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
823 return score7_sinsn (ops
, unit
);
824 else if (TARGET_SCORE3
)
825 return score3_sinsn (ops
, unit
);
830 /* Output asm insn for load immediate. */
832 score_limm (rtx
*ops
)
834 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
835 return score7_limm (ops
);
836 else if (TARGET_SCORE3
)
837 return score3_limm (ops
);
843 /* Generate add insn. */
845 score_select_add_imm (rtx
*ops
, bool set_cc
)
847 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
848 return score7_select_add_imm (ops
, set_cc
);
849 else if (TARGET_SCORE3
)
850 return score3_select_add_imm (ops
, set_cc
);
855 /* Output arith insn. */
857 score_select (rtx
*ops
, const char *inst_pre
,
858 bool commu
, const char *letter
, bool set_cc
)
860 if (TARGET_SCORE5
|| TARGET_SCORE5U
|| TARGET_SCORE7
|| TARGET_SCORE7D
)
861 return score7_select (ops
, inst_pre
, commu
, letter
, set_cc
);
862 else if (TARGET_SCORE3
)
863 return score3_select (ops
, inst_pre
, commu
, letter
, set_cc
);
868 /* Output switch case insn, only supported in score3. */
870 score_output_casesi (rtx
*operands
)
873 return score3_output_casesi (operands
);
878 /* Output rpush insn, only supported in score3. */
880 score_rpush (rtx
*operands
)
883 return score3_rpush (operands
);
888 /* Output rpop insn, only supported in score3. */
890 score_rpop (rtx
*operands
)
893 return score3_rpop (operands
);
898 /* Emit lcb/lce insns. */
900 score_unaligned_load (rtx
*ops
)
908 if (INTVAL (len
) != BITS_PER_WORD
909 || (INTVAL (off
) % BITS_PER_UNIT
) != 0)
912 gcc_assert (GET_MODE_SIZE (GET_MODE (dst
)) == GET_MODE_SIZE (SImode
));
914 addr_reg
= copy_addr_to_reg (XEXP (src
, 0));
915 emit_insn (gen_move_lcb (addr_reg
, addr_reg
));
916 emit_insn (gen_move_lce (addr_reg
, addr_reg
, dst
));
921 /* Emit scb/sce insns. */
923 score_unaligned_store (rtx
*ops
)
931 if (INTVAL(len
) != BITS_PER_WORD
932 || (INTVAL(off
) % BITS_PER_UNIT
) != 0)
935 gcc_assert (GET_MODE_SIZE (GET_MODE (src
)) == GET_MODE_SIZE (SImode
));
937 addr_reg
= copy_addr_to_reg (XEXP (dst
, 0));
938 emit_insn (gen_move_scb (addr_reg
, addr_reg
, src
));
939 emit_insn (gen_move_sce (addr_reg
, addr_reg
));
944 /* If length is short, generate move insns straight. */
946 score_block_move_straight (rtx dst
, rtx src
, HOST_WIDE_INT length
)
948 HOST_WIDE_INT leftover
;
952 leftover
= length
% UNITS_PER_WORD
;
954 reg_count
= length
/ UNITS_PER_WORD
;
956 regs
= XALLOCAVEC (rtx
, reg_count
);
957 for (i
= 0; i
< reg_count
; i
++)
958 regs
[i
] = gen_reg_rtx (SImode
);
960 /* Load from src to regs. */
961 if (MEM_ALIGN (src
) >= BITS_PER_WORD
)
963 HOST_WIDE_INT offset
= 0;
964 for (i
= 0; i
< reg_count
; offset
+= UNITS_PER_WORD
, i
++)
965 emit_move_insn (regs
[i
], adjust_address (src
, SImode
, offset
));
967 else if (reg_count
>= 1)
969 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
971 emit_insn (gen_move_lcb (src_reg
, src_reg
));
972 for (i
= 0; i
< (reg_count
- 1); i
++)
973 emit_insn (gen_move_lcw (src_reg
, src_reg
, regs
[i
]));
974 emit_insn (gen_move_lce (src_reg
, src_reg
, regs
[i
]));
977 /* Store regs to dest. */
978 if (MEM_ALIGN (dst
) >= BITS_PER_WORD
)
980 HOST_WIDE_INT offset
= 0;
981 for (i
= 0; i
< reg_count
; offset
+= UNITS_PER_WORD
, i
++)
982 emit_move_insn (adjust_address (dst
, SImode
, offset
), regs
[i
]);
984 else if (reg_count
>= 1)
986 rtx dst_reg
= copy_addr_to_reg (XEXP (dst
, 0));
988 emit_insn (gen_move_scb (dst_reg
, dst_reg
, regs
[0]));
989 for (i
= 1; i
< reg_count
; i
++)
990 emit_insn (gen_move_scw (dst_reg
, dst_reg
, regs
[i
]));
991 emit_insn (gen_move_sce (dst_reg
, dst_reg
));
994 /* Mop up any left-over bytes. */
997 src
= adjust_address (src
, BLKmode
, length
);
998 dst
= adjust_address (dst
, BLKmode
, length
);
999 move_by_pieces (dst
, src
, leftover
,
1000 MIN (MEM_ALIGN (src
), MEM_ALIGN (dst
)), 0);
1004 /* Generate loop head when dst or src is unaligned. */
1006 score_block_move_loop_head (rtx dst_reg
, HOST_WIDE_INT dst_align
,
1007 rtx src_reg
, HOST_WIDE_INT src_align
,
1008 HOST_WIDE_INT length
)
1010 bool src_unaligned
= (src_align
< BITS_PER_WORD
);
1011 bool dst_unaligned
= (dst_align
< BITS_PER_WORD
);
1013 rtx temp
= gen_reg_rtx (SImode
);
1015 gcc_assert (length
== UNITS_PER_WORD
);
1019 emit_insn (gen_move_lcb (src_reg
, src_reg
));
1020 emit_insn (gen_move_lcw (src_reg
, src_reg
, temp
));
1023 emit_insn (gen_move_lw_a (src_reg
,
1024 src_reg
, gen_int_mode (4, SImode
), temp
));
1027 emit_insn (gen_move_scb (dst_reg
, dst_reg
, temp
));
1029 emit_insn (gen_move_sw_a (dst_reg
,
1030 dst_reg
, gen_int_mode (4, SImode
), temp
));
1033 /* Generate loop body, copy length bytes per iteration. */
1035 score_block_move_loop_body (rtx dst_reg
, HOST_WIDE_INT dst_align
,
1036 rtx src_reg
, HOST_WIDE_INT src_align
,
1037 HOST_WIDE_INT length
)
1039 int reg_count
= length
/ UNITS_PER_WORD
;
1040 rtx
*regs
= XALLOCAVEC (rtx
, reg_count
);
1042 bool src_unaligned
= (src_align
< BITS_PER_WORD
);
1043 bool dst_unaligned
= (dst_align
< BITS_PER_WORD
);
1045 for (i
= 0; i
< reg_count
; i
++)
1046 regs
[i
] = gen_reg_rtx (SImode
);
1050 for (i
= 0; i
< reg_count
; i
++)
1051 emit_insn (gen_move_lcw (src_reg
, src_reg
, regs
[i
]));
1055 for (i
= 0; i
< reg_count
; i
++)
1056 emit_insn (gen_move_lw_a (src_reg
,
1057 src_reg
, gen_int_mode (4, SImode
), regs
[i
]));
1062 for (i
= 0; i
< reg_count
; i
++)
1063 emit_insn (gen_move_scw (dst_reg
, dst_reg
, regs
[i
]));
1067 for (i
= 0; i
< reg_count
; i
++)
1068 emit_insn (gen_move_sw_a (dst_reg
,
1069 dst_reg
, gen_int_mode (4, SImode
), regs
[i
]));
1073 /* Generate loop foot, copy the leftover bytes. */
1075 score_block_move_loop_foot (rtx dst_reg
, HOST_WIDE_INT dst_align
,
1076 rtx src_reg
, HOST_WIDE_INT src_align
,
1077 HOST_WIDE_INT length
)
1079 bool src_unaligned
= (src_align
< BITS_PER_WORD
);
1080 bool dst_unaligned
= (dst_align
< BITS_PER_WORD
);
1082 HOST_WIDE_INT leftover
;
1084 leftover
= length
% UNITS_PER_WORD
;
1088 score_block_move_loop_body (dst_reg
, dst_align
,
1089 src_reg
, src_align
, length
);
1092 emit_insn (gen_move_sce (dst_reg
, dst_reg
));
1096 HOST_WIDE_INT src_adj
= src_unaligned
? -4 : 0;
1097 HOST_WIDE_INT dst_adj
= dst_unaligned
? -4 : 0;
1100 gcc_assert (leftover
< UNITS_PER_WORD
);
1102 if (leftover
>= UNITS_PER_WORD
/ 2
1103 && src_align
>= BITS_PER_WORD
/ 2
1104 && dst_align
>= BITS_PER_WORD
/ 2)
1106 temp
= gen_reg_rtx (HImode
);
1107 emit_insn (gen_move_lhu_b (src_reg
, src_reg
,
1108 gen_int_mode (src_adj
, SImode
), temp
));
1109 emit_insn (gen_move_sh_b (dst_reg
, dst_reg
,
1110 gen_int_mode (dst_adj
, SImode
), temp
));
1111 leftover
-= UNITS_PER_WORD
/ 2;
1112 src_adj
= UNITS_PER_WORD
/ 2;
1113 dst_adj
= UNITS_PER_WORD
/ 2;
1116 while (leftover
> 0)
1118 temp
= gen_reg_rtx (QImode
);
1119 emit_insn (gen_move_lbu_b (src_reg
, src_reg
,
1120 gen_int_mode (src_adj
, SImode
), temp
));
1121 emit_insn (gen_move_sb_b (dst_reg
, dst_reg
,
1122 gen_int_mode (dst_adj
, SImode
), temp
));
/* Bounds on how many registers (words) one loop iteration of the block
   move may transfer.  */
#define MIN_MOVE_REGS 3
#define MIN_MOVE_BYTES (MIN_MOVE_REGS * UNITS_PER_WORD)
#define MAX_MOVE_REGS 4
#define MAX_MOVE_BYTES (MAX_MOVE_REGS * UNITS_PER_WORD)
1135 /* The length is large, generate a loop if necessary.
1136 The loop is consisted by loop head/body/foot. */
1138 score_block_move_loop (rtx dst
, rtx src
, HOST_WIDE_INT length
)
1140 HOST_WIDE_INT src_align
= MEM_ALIGN (src
);
1141 HOST_WIDE_INT dst_align
= MEM_ALIGN (dst
);
1142 HOST_WIDE_INT loop_mov_bytes
;
1143 HOST_WIDE_INT iteration
= 0;
1144 HOST_WIDE_INT head_length
= 0, leftover
;
1145 rtx label
, src_reg
, dst_reg
, final_dst
, test
;
1147 bool gen_loop_head
= (src_align
< BITS_PER_WORD
1148 || dst_align
< BITS_PER_WORD
);
1151 head_length
+= UNITS_PER_WORD
;
1153 for (loop_mov_bytes
= MAX_MOVE_BYTES
;
1154 loop_mov_bytes
>= MIN_MOVE_BYTES
;
1155 loop_mov_bytes
-= UNITS_PER_WORD
)
1157 iteration
= (length
- head_length
) / loop_mov_bytes
;
1163 score_block_move_straight (dst
, src
, length
);
1167 leftover
= (length
- head_length
) % loop_mov_bytes
;
1170 src_reg
= copy_addr_to_reg (XEXP (src
, 0));
1171 dst_reg
= copy_addr_to_reg (XEXP (dst
, 0));
1172 final_dst
= expand_simple_binop (Pmode
, PLUS
, dst_reg
, GEN_INT (length
),
1176 score_block_move_loop_head (dst_reg
, dst_align
,
1177 src_reg
, src_align
, head_length
);
1179 label
= gen_label_rtx ();
1182 score_block_move_loop_body (dst_reg
, dst_align
,
1183 src_reg
, src_align
, loop_mov_bytes
);
1185 test
= gen_rtx_NE (VOIDmode
, dst_reg
, final_dst
);
1186 emit_jump_insn (gen_cbranchsi4 (test
, dst_reg
, final_dst
, label
));
1188 score_block_move_loop_foot (dst_reg
, dst_align
,
1189 src_reg
, src_align
, leftover
);
1192 /* Generate block move, for misc.md: "movmemsi". */
1194 score_block_move (rtx
*ops
)
1198 rtx length
= ops
[2];
1200 if (TARGET_LITTLE_ENDIAN
1201 && (MEM_ALIGN (src
) < BITS_PER_WORD
|| MEM_ALIGN (dst
) < BITS_PER_WORD
)
1202 && INTVAL (length
) >= UNITS_PER_WORD
)
1205 if (GET_CODE (length
) == CONST_INT
)
1207 if (INTVAL (length
) <= 2 * MAX_MOVE_BYTES
)
1209 score_block_move_straight (dst
, src
, INTVAL (length
));
1212 else if (optimize
&&
1213 !(flag_unroll_loops
|| flag_unroll_all_loops
))
1215 score_block_move_loop (dst
, src
, INTVAL (length
));
1222 struct gcc_target targetm
= TARGET_INITIALIZER
;