1 /* Target definitions for the MorphoRISC1
2 Copyright (C) 2005 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
36 #include "integrate.h"
45 #include "insn-flags.h"
49 #include "target-def.h"
50 #include "basic-block.h"
52 /* Frame pointer register mask. */
53 #define FP_MASK (1 << (GPR_FP))
55 /* Link register mask. */
56 #define LINK_MASK (1 << (GPR_LINK))
58 /* Given a SIZE in bytes, advance to the next word. */
59 #define ROUND_ADVANCE(SIZE) (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
61 /* A C structure for machine-specific, per-function data.
62 This is added to the cfun structure. */
63 struct machine_function
GTY(())
65 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
66 int ra_needs_full_frame
;
67 struct rtx_def
* eh_stack_adjust
;
68 int interrupt_handler
;
/* Define the information needed to generate branch and scc insns.
   This is stored from the compare operation.  Both operands are
   recorded here so the branch expander can build the comparison.  */
struct rtx_def * mt_compare_op0;
struct rtx_def * mt_compare_op1;
77 /* Current frame information calculated by compute_frame_size. */
78 struct mt_frame_info current_frame_info
;
80 /* Zero structure to initialize current_frame_info. */
81 struct mt_frame_info zero_frame_info
;
83 /* mt doesn't have unsigned compares need a library call for this. */
84 struct rtx_def
* mt_ucmpsi3_libcall
;
86 static int mt_flag_delayed_branch
;
90 mt_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED
,
91 int incoming ATTRIBUTE_UNUSED
)
93 return gen_rtx_REG (Pmode
, RETVAL_REGNUM
);
96 /* Implement RETURN_ADDR_RTX. */
98 mt_return_addr_rtx (int count
)
103 return get_hard_reg_initial_val (Pmode
, GPR_LINK
);
/* Number of nops that must be emitted between the previously output
   instruction and the next one to avoid a pipeline hazard, together
   with a human-readable reason (printed as an asm comment).  */
static int mt_nops_required = 0;
static const char * mt_nop_reasons = "";
112 /* Implement ASM_OUTPUT_OPCODE. */
114 mt_asm_output_opcode (FILE *f ATTRIBUTE_UNUSED
, const char *ptr
)
116 if (mt_nops_required
)
117 fprintf (f
, ";# need %d nops because of %s\n\t",
118 mt_nops_required
, mt_nop_reasons
);
120 while (mt_nops_required
)
122 fprintf (f
, "nop\n\t");
129 /* Given an insn, return whether it's a memory operation or a branch
130 operation, otherwise return TYPE_ARITH. */
131 static enum attr_type
132 mt_get_attr_type (rtx complete_insn
)
134 rtx insn
= PATTERN (complete_insn
);
136 if (JUMP_P (complete_insn
))
138 if (CALL_P (complete_insn
))
141 if (GET_CODE (insn
) != SET
)
144 if (SET_DEST (insn
) == pc_rtx
)
147 if (GET_CODE (SET_DEST (insn
)) == MEM
)
150 if (GET_CODE (SET_SRC (insn
)) == MEM
)
156 /* A helper routine for insn_dependent_p called through note_stores. */
159 insn_dependent_p_1 (rtx x
, rtx pat ATTRIBUTE_UNUSED
, void *data
)
161 rtx
* pinsn
= (rtx
*) data
;
163 if (*pinsn
&& reg_mentioned_p (x
, *pinsn
))
167 /* Return true if anything in insn X is (anti,output,true)
168 dependent on anything in insn Y. */
171 insn_dependent_p (rtx x
, rtx y
)
175 if (! INSN_P (x
) || ! INSN_P (y
))
179 note_stores (PATTERN (x
), insn_dependent_p_1
, &tmp
);
184 note_stores (PATTERN (y
), insn_dependent_p_1
, &tmp
);
185 return (tmp
== NULL_RTX
);
189 /* Return true if anything in insn X is true dependent on anything in
192 insn_true_dependent_p (rtx x
, rtx y
)
196 if (! INSN_P (x
) || ! INSN_P (y
))
200 note_stores (PATTERN (x
), insn_dependent_p_1
, &tmp
);
201 return (tmp
== NULL_RTX
);
204 /* The following determines the number of nops that need to be
205 inserted between the previous instructions and current instruction
206 to avoid pipeline hazards on the mt processor. Remember that
207 the function is not called for asm insns. */
210 mt_final_prescan_insn (rtx insn
,
211 rtx
* opvec ATTRIBUTE_UNUSED
,
212 int noperands ATTRIBUTE_UNUSED
)
215 enum attr_type prev_attr
;
217 mt_nops_required
= 0;
220 /* ms2 constraints are dealt with in reorg. */
224 /* Only worry about real instructions. */
228 /* Find the previous real instructions. */
229 for (prev_i
= PREV_INSN (insn
);
231 && (! INSN_P (prev_i
)
232 || GET_CODE (PATTERN (prev_i
)) == USE
233 || GET_CODE (PATTERN (prev_i
)) == CLOBBER
);
234 prev_i
= PREV_INSN (prev_i
))
236 /* If we meet a barrier, there is no flow through here. */
237 if (BARRIER_P (prev_i
))
241 /* If there isn't one then there is nothing that we need do. */
242 if (prev_i
== NULL
|| ! INSN_P (prev_i
))
245 prev_attr
= mt_get_attr_type (prev_i
);
247 /* Delayed branch slots already taken care of by delay branch scheduling. */
248 if (prev_attr
== TYPE_BRANCH
)
251 switch (mt_get_attr_type (insn
))
255 /* Avoid consecutive memory operation. */
256 if ((prev_attr
== TYPE_LOAD
|| prev_attr
== TYPE_STORE
)
257 && TARGET_MS1_64_001
)
259 mt_nops_required
= 1;
260 mt_nop_reasons
= "consecutive mem ops";
266 /* One cycle of delay is required between load
267 and the dependent arithmetic instruction. */
268 if (prev_attr
== TYPE_LOAD
269 && insn_true_dependent_p (prev_i
, insn
))
271 mt_nops_required
= 1;
272 mt_nop_reasons
= "load->arith dependency delay";
277 if (insn_dependent_p (prev_i
, insn
))
279 if (prev_attr
== TYPE_ARITH
&& TARGET_MS1_64_001
)
281 /* One cycle of delay between arith
282 instructions and branch dependent on arith. */
283 mt_nops_required
= 1;
284 mt_nop_reasons
= "arith->branch dependency delay";
286 else if (prev_attr
== TYPE_LOAD
)
288 /* Two cycles of delay are required
289 between load and dependent branch. */
290 if (TARGET_MS1_64_001
)
291 mt_nops_required
= 2;
293 mt_nops_required
= 1;
294 mt_nop_reasons
= "load->branch dependency delay";
300 fatal_insn ("mt_final_prescan_insn, invalid insn #1", insn
);
305 /* Print debugging information for a frame. */
307 mt_debug_stack (struct mt_frame_info
* info
)
313 error ("info pointer NULL");
317 fprintf (stderr
, "\nStack information for function %s:\n",
318 ((current_function_decl
&& DECL_NAME (current_function_decl
))
319 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
322 fprintf (stderr
, "\ttotal_size = %d\n", info
->total_size
);
323 fprintf (stderr
, "\tpretend_size = %d\n", info
->pretend_size
);
324 fprintf (stderr
, "\targs_size = %d\n", info
->args_size
);
325 fprintf (stderr
, "\textra_size = %d\n", info
->extra_size
);
326 fprintf (stderr
, "\treg_size = %d\n", info
->reg_size
);
327 fprintf (stderr
, "\tvar_size = %d\n", info
->var_size
);
328 fprintf (stderr
, "\tframe_size = %d\n", info
->frame_size
);
329 fprintf (stderr
, "\treg_mask = 0x%x\n", info
->reg_mask
);
330 fprintf (stderr
, "\tsave_fp = %d\n", info
->save_fp
);
331 fprintf (stderr
, "\tsave_lr = %d\n", info
->save_lr
);
332 fprintf (stderr
, "\tinitialized = %d\n", info
->initialized
);
333 fprintf (stderr
, "\tsaved registers =");
335 /* Print out reg_mask in a more readable format. */
336 for (regno
= GPR_R0
; regno
<= GPR_LAST
; regno
++)
337 if ( (1 << regno
) & info
->reg_mask
)
338 fprintf (stderr
, " %s", reg_names
[regno
]);
344 /* Print a memory address as an operand to reference that memory location. */
347 mt_print_operand_simple_address (FILE * file
, rtx addr
)
350 error ("PRINT_OPERAND_ADDRESS, null pointer");
353 switch (GET_CODE (addr
))
356 fprintf (file
, "%s, #0", reg_names
[REGNO (addr
)]);
363 rtx arg0
= XEXP (addr
, 0);
364 rtx arg1
= XEXP (addr
, 1);
366 if (GET_CODE (arg0
) == REG
)
370 if (GET_CODE (offset
) == REG
)
371 fatal_insn ("PRINT_OPERAND_ADDRESS, 2 regs", addr
);
374 else if (GET_CODE (arg1
) == REG
)
375 reg
= arg1
, offset
= arg0
;
376 else if (CONSTANT_P (arg0
) && CONSTANT_P (arg1
))
378 fprintf (file
, "%s, #", reg_names
[GPR_R0
]);
379 output_addr_const (file
, addr
);
382 fprintf (file
, "%s, #", reg_names
[REGNO (reg
)]);
383 output_addr_const (file
, offset
);
391 output_addr_const (file
, addr
);
395 fatal_insn ("PRINT_OPERAND_ADDRESS, invalid insn #1", addr
);
400 /* Implement PRINT_OPERAND_ADDRESS. */
402 mt_print_operand_address (FILE * file
, rtx addr
)
404 if (GET_CODE (addr
) == AND
405 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
406 && INTVAL (XEXP (addr
, 1)) == -3)
407 mt_print_operand_simple_address (file
, XEXP (addr
, 0));
409 mt_print_operand_simple_address (file
, addr
);
412 /* Implement PRINT_OPERAND. */
414 mt_print_operand (FILE * file
, rtx x
, int code
)
419 /* Output a nop if there's nothing for the delay slot. */
420 if (dbr_sequence_length () == 0)
421 fputs ("\n\tnop", file
);
425 fprintf(file
, "#%%hi16(");
426 output_addr_const (file
, x
);
431 fprintf(file
, "#%%lo16(");
432 output_addr_const (file
, x
);
437 fprintf(file
, "#%ld", ~INTVAL (x
));
441 if (GET_CODE (x
) == CONST_INT
&& INTVAL (x
) == 0)
443 fputs (reg_names
[GPR_R0
], file
);
452 /* output_operand_lossage ("mt_print_operand: unknown code"); */
453 fprintf (file
, "unknown code");
457 switch (GET_CODE (x
))
460 fputs (reg_names
[REGNO (x
)], file
);
465 fprintf(file
, "#%ld", INTVAL (x
));
469 mt_print_operand_address(file
, XEXP (x
,0));
474 output_addr_const (file
, x
);
478 fprintf(file
, "Uknown code: %d", GET_CODE (x
));
485 /* Implement INIT_CUMULATIVE_ARGS. */
487 mt_init_cumulative_args (CUMULATIVE_ARGS
* cum
, tree fntype
, rtx libname
,
488 tree fndecl ATTRIBUTE_UNUSED
, int incoming
)
492 if (TARGET_DEBUG_ARG
)
494 fprintf (stderr
, "\nmt_init_cumulative_args:");
497 fputs (" incoming", stderr
);
501 tree ret_type
= TREE_TYPE (fntype
);
502 fprintf (stderr
, " return = %s,",
503 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
506 if (libname
&& GET_CODE (libname
) == SYMBOL_REF
)
507 fprintf (stderr
, " libname = %s", XSTR (libname
, 0));
509 if (cfun
->returns_struct
)
510 fprintf (stderr
, " return-struct");
516 /* Compute the slot number to pass an argument in.
517 Returns the slot number or -1 if passing on the stack.
519 CUM is a variable of type CUMULATIVE_ARGS which gives info about
520 the preceding args and about the function being called.
521 MODE is the argument's machine mode.
522 TYPE is the data type of the argument (as a tree).
523 This is null for libcalls where that information may
525 NAMED is nonzero if this argument is a named parameter
526 (otherwise it is an extra parameter matching an ellipsis).
527 INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG.
528 *PREGNO records the register number to use if scalar type. */
531 mt_function_arg_slotno (const CUMULATIVE_ARGS
* cum
,
532 enum machine_mode mode
,
534 int named ATTRIBUTE_UNUSED
,
535 int incoming_p ATTRIBUTE_UNUSED
,
538 int regbase
= FIRST_ARG_REGNUM
;
541 if (mode
== VOIDmode
|| targetm
.calls
.must_pass_in_stack (mode
, type
))
544 if (slotno
>= MT_NUM_ARG_REGS
)
547 * pregno
= regbase
+ slotno
;
552 /* Implement FUNCTION_ARG. */
554 mt_function_arg (const CUMULATIVE_ARGS
* cum
,
555 enum machine_mode mode
,
563 slotno
= mt_function_arg_slotno (cum
, mode
, type
, named
, incoming_p
, ®no
);
568 reg
= gen_rtx_REG (mode
, regno
);
573 /* Implement FUNCTION_ARG_ADVANCE. */
575 mt_function_arg_advance (CUMULATIVE_ARGS
* cum
,
576 enum machine_mode mode
,
577 tree type ATTRIBUTE_UNUSED
,
582 /* We pass 0 for incoming_p here, it doesn't matter. */
583 slotno
= mt_function_arg_slotno (cum
, mode
, type
, named
, 0, ®no
);
585 * cum
+= (mode
!= BLKmode
586 ? ROUND_ADVANCE (GET_MODE_SIZE (mode
))
587 : ROUND_ADVANCE (int_size_in_bytes (type
)));
589 if (TARGET_DEBUG_ARG
)
591 "mt_function_arg_advance: words = %2d, mode = %4s, named = %d, size = %3d\n",
592 *cum
, GET_MODE_NAME (mode
), named
,
593 (*cum
) * UNITS_PER_WORD
);
596 /* Implement hook TARGET_ARG_PARTIAL_BYTES.
598 Returns the number of bytes at the beginning of an argument that
599 must be put in registers. The value must be zero for arguments
600 that are passed entirely in registers or that are entirely pushed
603 mt_arg_partial_bytes (CUMULATIVE_ARGS
* pcum
,
604 enum machine_mode mode
,
606 bool named ATTRIBUTE_UNUSED
)
612 words
= ((int_size_in_bytes (type
) + UNITS_PER_WORD
- 1)
615 words
= (GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
617 if (! targetm
.calls
.pass_by_reference (&cum
, mode
, type
, named
)
618 && cum
< MT_NUM_ARG_REGS
619 && (cum
+ words
) > MT_NUM_ARG_REGS
)
621 int bytes
= (MT_NUM_ARG_REGS
- cum
) * UNITS_PER_WORD
;
624 fprintf (stderr
, "function_arg_partial_nregs = %d\n", bytes
);
632 /* Implement TARGET_PASS_BY_REFERENCE hook. */
634 mt_pass_by_reference (CUMULATIVE_ARGS
* cum ATTRIBUTE_UNUSED
,
635 enum machine_mode mode ATTRIBUTE_UNUSED
,
637 bool named ATTRIBUTE_UNUSED
)
639 return (type
&& int_size_in_bytes (type
) > 4 * UNITS_PER_WORD
);
642 /* Implement FUNCTION_ARG_BOUNDARY. */
644 mt_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED
,
645 tree type ATTRIBUTE_UNUSED
)
647 return BITS_PER_WORD
;
650 /* Implement REG_OK_FOR_BASE_P. */
652 mt_reg_ok_for_base_p (rtx x
, int strict
)
655 return (((unsigned) REGNO (x
)) < FIRST_PSEUDO_REGISTER
);
659 /* Helper function of mt_legitimate_address_p. Return true if XINSN
660 is a simple address, otherwise false. */
662 mt_legitimate_simple_address_p (enum machine_mode mode ATTRIBUTE_UNUSED
,
663 rtx xinsn
, int strict
)
667 fprintf (stderr
, "\n========== GO_IF_LEGITIMATE_ADDRESS, %sstrict\n",
668 strict
? "" : "not ");
672 if (GET_CODE (xinsn
) == REG
&& mt_reg_ok_for_base_p (xinsn
, strict
))
675 if (GET_CODE (xinsn
) == PLUS
676 && GET_CODE (XEXP (xinsn
, 0)) == REG
677 && mt_reg_ok_for_base_p (XEXP (xinsn
, 0), strict
)
678 && GET_CODE (XEXP (xinsn
, 1)) == CONST_INT
679 && SMALL_INT (XEXP (xinsn
, 1)))
686 /* Helper function of GO_IF_LEGITIMATE_ADDRESS. Return non-zero if
687 XINSN is a legitimate address on MT. */
689 mt_legitimate_address_p (enum machine_mode mode
, rtx xinsn
, int strict
)
691 if (mt_legitimate_simple_address_p (mode
, xinsn
, strict
))
695 && GET_CODE (xinsn
) == AND
696 && GET_CODE (XEXP (xinsn
, 1)) == CONST_INT
697 && INTVAL (XEXP (xinsn
, 1)) == -3)
698 return mt_legitimate_simple_address_p (mode
, XEXP (xinsn
, 0), strict
);
703 /* Return truth value of whether OP can be used as an operands where a
704 register or 16 bit unsigned integer is needed. */
707 uns_arith_operand (rtx op
, enum machine_mode mode
)
709 if (GET_CODE (op
) == CONST_INT
&& SMALL_INT_UNSIGNED (op
))
712 return register_operand (op
, mode
);
715 /* Return truth value of whether OP can be used as an operands where a
716 16 bit integer is needed. */
719 arith_operand (rtx op
, enum machine_mode mode
)
721 if (GET_CODE (op
) == CONST_INT
&& SMALL_INT (op
))
724 return register_operand (op
, mode
);
727 /* Return truth value of whether OP is a register or the constant 0. */
730 reg_or_0_operand (rtx op
, enum machine_mode mode
)
732 switch (GET_CODE (op
))
735 return INTVAL (op
) == 0;
739 return register_operand (op
, mode
);
748 /* Return truth value of whether OP is a constant that requires two
749 loads to put in a register. */
752 big_const_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
754 if (GET_CODE (op
) == CONST_INT
&& CONST_OK_FOR_LETTER_P (INTVAL (op
), 'M'))
760 /* Return truth value of whether OP is a constant that require only
761 one load to put in a register. */
764 single_const_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
766 if (big_const_operand (op
, mode
)
767 || GET_CODE (op
) == CONST
768 || GET_CODE (op
) == LABEL_REF
769 || GET_CODE (op
) == SYMBOL_REF
)
775 /* True if the current function is an interrupt handler
776 (either via #pragma or an attribute specification). */
777 int interrupt_handler
;
778 enum processor_type mt_cpu
;
780 static struct machine_function
*
781 mt_init_machine_status (void)
783 struct machine_function
*f
;
785 f
= ggc_alloc_cleared (sizeof (struct machine_function
));
790 /* Implement OVERRIDE_OPTIONS. */
792 mt_override_options (void)
794 if (mt_cpu_string
!= NULL
)
796 if (!strcmp (mt_cpu_string
, "ms1-64-001"))
797 mt_cpu
= PROCESSOR_MS1_64_001
;
798 else if (!strcmp (mt_cpu_string
, "ms1-16-002"))
799 mt_cpu
= PROCESSOR_MS1_16_002
;
800 else if (!strcmp (mt_cpu_string
, "ms1-16-003"))
801 mt_cpu
= PROCESSOR_MS1_16_003
;
802 else if (!strcmp (mt_cpu_string
, "ms2"))
803 mt_cpu
= PROCESSOR_MS2
;
805 error ("bad value (%s) for -march= switch", mt_cpu_string
);
808 mt_cpu
= PROCESSOR_MS1_16_002
;
812 flag_omit_frame_pointer
= 0;
816 /* We do delayed branch filling in machine dependent reorg */
817 mt_flag_delayed_branch
= flag_delayed_branch
;
818 flag_delayed_branch
= 0;
820 init_machine_status
= mt_init_machine_status
;
823 /* Do what is necessary for `va_start'. We look at the current function
824 to determine if stdarg or varargs is used and return the address of the
825 first unnamed parameter. */
828 mt_setup_incoming_varargs (CUMULATIVE_ARGS
*cum
,
829 enum machine_mode mode ATTRIBUTE_UNUSED
,
830 tree type ATTRIBUTE_UNUSED
,
831 int *pretend_size
, int no_rtl
)
834 int regs
= MT_NUM_ARG_REGS
- *cum
;
836 *pretend_size
= regs
< 0 ? 0 : GET_MODE_SIZE (SImode
) * regs
;
841 for (regno
= *cum
; regno
< MT_NUM_ARG_REGS
; regno
++)
843 rtx reg
= gen_rtx_REG (SImode
, FIRST_ARG_REGNUM
+ regno
);
844 rtx slot
= gen_rtx_PLUS (Pmode
,
845 gen_rtx_REG (SImode
, ARG_POINTER_REGNUM
),
846 GEN_INT (UNITS_PER_WORD
* regno
));
848 emit_move_insn (gen_rtx_MEM (SImode
, slot
), reg
);
852 /* Returns the number of bytes offset between the frame pointer and the stack
853 pointer for the current function. SIZE is the number of bytes of space
854 needed for local variables. */
857 mt_compute_frame_size (int size
)
860 unsigned int total_size
;
861 unsigned int var_size
;
862 unsigned int args_size
;
863 unsigned int pretend_size
;
864 unsigned int extra_size
;
865 unsigned int reg_size
;
866 unsigned int frame_size
;
867 unsigned int reg_mask
;
870 args_size
= current_function_outgoing_args_size
;
871 pretend_size
= current_function_pretend_args_size
;
872 extra_size
= FIRST_PARM_OFFSET (0);
873 total_size
= extra_size
+ pretend_size
+ args_size
+ var_size
;
877 /* Calculate space needed for registers. */
878 for (regno
= GPR_R0
; regno
<= GPR_LAST
; regno
++)
880 if (MUST_SAVE_REGISTER (regno
))
882 reg_size
+= UNITS_PER_WORD
;
883 reg_mask
|= 1 << regno
;
887 current_frame_info
.save_fp
= (regs_ever_live
[GPR_FP
]
888 || frame_pointer_needed
889 || interrupt_handler
);
890 current_frame_info
.save_lr
= (regs_ever_live
[GPR_LINK
]
892 || interrupt_handler
);
894 reg_size
+= (current_frame_info
.save_fp
+ current_frame_info
.save_lr
)
896 total_size
+= reg_size
;
897 total_size
= ((total_size
+ 3) & ~3);
899 frame_size
= total_size
;
901 /* Save computed information. */
902 current_frame_info
.pretend_size
= pretend_size
;
903 current_frame_info
.var_size
= var_size
;
904 current_frame_info
.args_size
= args_size
;
905 current_frame_info
.reg_size
= reg_size
;
906 current_frame_info
.frame_size
= args_size
+ var_size
;
907 current_frame_info
.total_size
= total_size
;
908 current_frame_info
.extra_size
= extra_size
;
909 current_frame_info
.reg_mask
= reg_mask
;
910 current_frame_info
.initialized
= reload_completed
;
915 /* Emit code to save REG in stack offset pointed to by MEM.
916 STACK_OFFSET is the offset from the SP where the save will happen.
917 This function sets the REG_FRAME_RELATED_EXPR note accordingly. */
919 mt_emit_save_restore (enum save_direction direction
,
920 rtx reg
, rtx mem
, int stack_offset
)
922 if (direction
== FROM_PROCESSOR_TO_MEM
)
926 insn
= emit_move_insn (mem
, reg
);
927 RTX_FRAME_RELATED_P (insn
) = 1;
930 (REG_FRAME_RELATED_EXPR
,
931 gen_rtx_SET (VOIDmode
,
933 gen_rtx_PLUS (SImode
,
935 GEN_INT (stack_offset
))),
940 emit_move_insn (reg
, mem
);
944 /* Emit code to save the frame pointer in the prologue and restore
945 frame pointer in epilogue. */
948 mt_emit_save_fp (enum save_direction direction
,
949 struct mt_frame_info info
)
952 int reg_mask
= info
.reg_mask
& ~(FP_MASK
| LINK_MASK
);
953 int offset
= info
.total_size
;
954 int stack_offset
= info
.total_size
;
956 /* If there is nothing to save, get out now. */
957 if (! info
.save_fp
&& ! info
.save_lr
&& ! reg_mask
)
960 /* If offset doesn't fit in a 15-bit signed integer,
961 uses a scratch registers to get a smaller offset. */
962 if (CONST_OK_FOR_LETTER_P(offset
, 'O'))
963 base_reg
= stack_pointer_rtx
;
966 /* Use the scratch register R9 that holds old stack pointer. */
967 base_reg
= gen_rtx_REG (SImode
, GPR_R9
);
973 offset
-= UNITS_PER_WORD
;
974 stack_offset
-= UNITS_PER_WORD
;
976 (direction
, gen_rtx_REG (SImode
, GPR_FP
),
978 gen_rtx_PLUS (SImode
, base_reg
, GEN_INT (offset
))),
983 /* Emit code to save registers in the prologue and restore register
987 mt_emit_save_regs (enum save_direction direction
,
988 struct mt_frame_info info
)
992 int reg_mask
= info
.reg_mask
& ~(FP_MASK
| LINK_MASK
);
993 int offset
= info
.total_size
;
994 int stack_offset
= info
.total_size
;
996 /* If there is nothing to save, get out now. */
997 if (! info
.save_fp
&& ! info
.save_lr
&& ! reg_mask
)
1000 /* If offset doesn't fit in a 15-bit signed integer,
1001 uses a scratch registers to get a smaller offset. */
1002 if (CONST_OK_FOR_LETTER_P(offset
, 'O'))
1003 base_reg
= stack_pointer_rtx
;
1006 /* Use the scratch register R9 that holds old stack pointer. */
1007 base_reg
= gen_rtx_REG (SImode
, GPR_R9
);
1013 /* This just records the space for it, the actual move generated in
1014 mt_emit_save_fp (). */
1015 offset
-= UNITS_PER_WORD
;
1016 stack_offset
-= UNITS_PER_WORD
;
1021 offset
-= UNITS_PER_WORD
;
1022 stack_offset
-= UNITS_PER_WORD
;
1023 mt_emit_save_restore
1024 (direction
, gen_rtx_REG (SImode
, GPR_LINK
),
1025 gen_rtx_MEM (SImode
,
1026 gen_rtx_PLUS (SImode
, base_reg
, GEN_INT (offset
))),
1030 /* Save any needed call-saved regs. */
1031 for (regno
= GPR_R0
; regno
<= GPR_LAST
; regno
++)
1033 if ((reg_mask
& (1 << regno
)) != 0)
1035 offset
-= UNITS_PER_WORD
;
1036 stack_offset
-= UNITS_PER_WORD
;
1037 mt_emit_save_restore
1038 (direction
, gen_rtx_REG (SImode
, regno
),
1039 gen_rtx_MEM (SImode
,
1040 gen_rtx_PLUS (SImode
, base_reg
, GEN_INT (offset
))),
1046 /* Return true if FUNC is a function with the 'interrupt' attribute. */
1048 mt_interrupt_function_p (tree func
)
1052 if (TREE_CODE (func
) != FUNCTION_DECL
)
1055 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
1056 return a
!= NULL_TREE
;
1059 /* Generate prologue code. */
1061 mt_expand_prologue (void)
1064 unsigned int frame_size
;
1066 if (mt_interrupt_function_p (current_function_decl
))
1068 interrupt_handler
= 1;
1070 cfun
->machine
->interrupt_handler
= 1;
1073 mt_compute_frame_size (get_frame_size ());
1075 if (TARGET_DEBUG_STACK
)
1076 mt_debug_stack (¤t_frame_info
);
1078 /* Compute size of stack adjustment. */
1079 frame_size
= current_frame_info
.total_size
;
1081 /* If offset doesn't fit in a 15-bit signed integer,
1082 uses a scratch registers to get a smaller offset. */
1083 if (CONST_OK_FOR_LETTER_P(frame_size
, 'O'))
1084 size_rtx
= GEN_INT (frame_size
);
1087 /* We do not have any scratch registers. */
1088 gcc_assert (!interrupt_handler
);
1090 size_rtx
= gen_rtx_REG (SImode
, GPR_R9
);
1091 insn
= emit_move_insn (size_rtx
, GEN_INT (frame_size
& 0xffff0000));
1092 insn
= emit_insn (gen_iorsi3 (size_rtx
, size_rtx
,
1093 GEN_INT (frame_size
& 0x0000ffff)));
1096 /* Allocate stack for this frame. */
1097 /* Make stack adjustment and use scratch register if constant too
1098 large to fit as immediate. */
1101 insn
= emit_insn (gen_subsi3 (stack_pointer_rtx
,
1104 RTX_FRAME_RELATED_P (insn
) = 1;
1106 = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
1107 gen_rtx_SET (VOIDmode
,
1109 gen_rtx_MINUS (SImode
,
1111 GEN_INT (frame_size
))),
1115 /* Set R9 to point to old sp if required for access to register save
1117 if ( current_frame_info
.reg_size
!= 0
1118 && !CONST_OK_FOR_LETTER_P (frame_size
, 'O'))
1119 emit_insn (gen_addsi3 (size_rtx
, size_rtx
, stack_pointer_rtx
));
1121 /* Save the frame pointer. */
1122 mt_emit_save_fp (FROM_PROCESSOR_TO_MEM
, current_frame_info
);
1124 /* Now put the frame pointer into the frame pointer register. */
1125 if (frame_pointer_needed
)
1127 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
1128 RTX_FRAME_RELATED_P (insn
) = 1;
1131 /* Save the registers. */
1132 mt_emit_save_regs (FROM_PROCESSOR_TO_MEM
, current_frame_info
);
1134 /* If we are profiling, make sure no instructions are scheduled before
1135 the call to mcount. */
1137 emit_insn (gen_blockage ());
1140 /* Implement EPILOGUE_USES. */
1142 mt_epilogue_uses (int regno
)
1144 if (cfun
->machine
&& cfun
->machine
->interrupt_handler
&& reload_completed
)
1146 return regno
== GPR_LINK
;
1149 /* Generate epilogue. EH_MODE is NORMAL_EPILOGUE when generating a
1150 function epilogue, or EH_EPILOGUE when generating an EH
1153 mt_expand_epilogue (enum epilogue_type eh_mode
)
1156 unsigned frame_size
;
1158 mt_compute_frame_size (get_frame_size ());
1160 if (TARGET_DEBUG_STACK
)
1161 mt_debug_stack (& current_frame_info
);
1163 /* Compute size of stack adjustment. */
1164 frame_size
= current_frame_info
.total_size
;
1166 /* If offset doesn't fit in a 15-bit signed integer,
1167 uses a scratch registers to get a smaller offset. */
1168 if (CONST_OK_FOR_LETTER_P(frame_size
, 'O'))
1169 size_rtx
= GEN_INT (frame_size
);
1172 /* We do not have any scratch registers. */
1173 gcc_assert (!interrupt_handler
);
1175 size_rtx
= gen_rtx_REG (SImode
, GPR_R9
);
1176 insn
= emit_move_insn (size_rtx
, GEN_INT (frame_size
& 0xffff0000));
1177 insn
= emit_insn (gen_iorsi3 (size_rtx
, size_rtx
,
1178 GEN_INT (frame_size
& 0x0000ffff)));
1179 /* Set R9 to point to old sp if required for access to register
1181 emit_insn (gen_addsi3 (size_rtx
, size_rtx
, stack_pointer_rtx
));
1184 /* Restore sp if there was some possible change to it. */
1185 if (frame_pointer_needed
)
1186 insn
= emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
1188 /* Restore the registers. */
1189 mt_emit_save_fp (FROM_MEM_TO_PROCESSOR
, current_frame_info
);
1190 mt_emit_save_regs (FROM_MEM_TO_PROCESSOR
, current_frame_info
);
1192 /* Make stack adjustment and use scratch register if constant too
1193 large to fit as immediate. */
1196 if (CONST_OK_FOR_LETTER_P(frame_size
, 'O'))
1197 /* Can handle this with simple add. */
1198 insn
= emit_insn (gen_addsi3 (stack_pointer_rtx
,
1202 /* Scratch reg R9 has the old sp value. */
1203 insn
= emit_move_insn (stack_pointer_rtx
,
1204 gen_rtx_REG (SImode
, GPR_R9
));
1207 = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
1208 gen_rtx_SET (VOIDmode
,
1210 gen_rtx_PLUS (SImode
,
1212 GEN_INT (frame_size
))),
1216 if (cfun
->machine
&& cfun
->machine
->eh_stack_adjust
!= NULL_RTX
)
1217 /* Perform the additional bump for __throw. */
1218 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1220 cfun
->machine
->eh_stack_adjust
));
1222 /* Generate the appropriate return. */
1223 if (eh_mode
== EH_EPILOGUE
)
1225 emit_jump_insn (gen_eh_return_internal ());
1228 else if (interrupt_handler
)
1229 emit_jump_insn (gen_return_interrupt_internal ());
1231 emit_jump_insn (gen_return_internal ());
1233 /* Reset state info for each function. */
1234 interrupt_handler
= 0;
1235 current_frame_info
= zero_frame_info
;
1237 cfun
->machine
->eh_stack_adjust
= NULL_RTX
;
1241 /* Generate code for the "eh_return" pattern. */
1243 mt_expand_eh_return (rtx
* operands
)
1245 if (GET_CODE (operands
[0]) != REG
1246 || REGNO (operands
[0]) != EH_RETURN_STACKADJ_REGNO
)
1248 rtx sp
= EH_RETURN_STACKADJ_RTX
;
1250 emit_move_insn (sp
, operands
[0]);
1254 emit_insn (gen_eh_epilogue (operands
[0]));
1257 /* Generate code for the "eh_epilogue" pattern. */
1259 mt_emit_eh_epilogue (rtx
* operands ATTRIBUTE_UNUSED
)
1261 cfun
->machine
->eh_stack_adjust
= EH_RETURN_STACKADJ_RTX
; /* operands[0]; */
1262 mt_expand_epilogue (EH_EPILOGUE
);
1265 /* Handle an "interrupt" attribute. */
1267 mt_handle_interrupt_attribute (tree
* node
,
1269 tree args ATTRIBUTE_UNUSED
,
1270 int flags ATTRIBUTE_UNUSED
,
1271 bool * no_add_attrs
)
1273 if (TREE_CODE (*node
) != FUNCTION_DECL
)
1275 warning (OPT_Wattributes
,
1276 "%qs attribute only applies to functions",
1277 IDENTIFIER_POINTER (name
));
1278 *no_add_attrs
= true;
1284 /* Table of machine attributes. */
1285 const struct attribute_spec mt_attribute_table
[] =
1287 /* name, min, max, decl?, type?, func?, handler */
1288 { "interrupt", 0, 0, false, false, false, mt_handle_interrupt_attribute
},
1289 { NULL
, 0, 0, false, false, false, NULL
}
1292 /* Implement INITIAL_ELIMINATION_OFFSET. */
1294 mt_initial_elimination_offset (int from
, int to
)
1296 mt_compute_frame_size (get_frame_size ());
1298 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
1301 else if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
1302 return current_frame_info
.total_size
;
1304 else if (from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
1305 return current_frame_info
.total_size
;
1311 /* Generate a compare for CODE. Return a brand-new rtx that
1312 represents the result of the compare. */
1315 mt_generate_compare (enum rtx_code code
, rtx op0
, rtx op1
)
1317 rtx scratch0
, scratch1
, const_scratch
;
1325 /* Need to adjust ranges for faking unsigned compares. */
1326 scratch0
= gen_reg_rtx (SImode
);
1327 scratch1
= gen_reg_rtx (SImode
);
1328 const_scratch
= force_reg (SImode
, GEN_INT(MT_MIN_INT
));
1329 emit_insn (gen_addsi3 (scratch0
, const_scratch
, op0
));
1330 emit_insn (gen_addsi3 (scratch1
, const_scratch
, op1
));
1338 /* Adjust compare operator to fake unsigned compares. */
1354 /* Generate the actual compare. */
1355 return gen_rtx_fmt_ee (code
, VOIDmode
, scratch0
, scratch1
);
1358 /* Emit a branch of kind CODE to location LOC. */
1361 mt_emit_cbranch (enum rtx_code code
, rtx loc
, rtx op0
, rtx op1
)
1363 rtx condition_rtx
, loc_ref
;
1365 if (! reg_or_0_operand (op0
, SImode
))
1366 op0
= copy_to_mode_reg (SImode
, op0
);
1368 if (! reg_or_0_operand (op1
, SImode
))
1369 op1
= copy_to_mode_reg (SImode
, op1
);
1371 condition_rtx
= mt_generate_compare (code
, op0
, op1
);
1372 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
1373 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
1374 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
1378 /* Subfunction of the following function. Update the flags of any MEM
1379 found in part of X. */
1382 mt_set_memflags_1 (rtx x
, int in_struct_p
, int volatile_p
)
1386 switch (GET_CODE (x
))
1390 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1391 mt_set_memflags_1 (XVECEXP (x
, 0, i
), in_struct_p
, volatile_p
);
1395 mt_set_memflags_1 (PATTERN (x
), in_struct_p
, volatile_p
);
1399 mt_set_memflags_1 (SET_DEST (x
), in_struct_p
, volatile_p
);
1400 mt_set_memflags_1 (SET_SRC (x
), in_struct_p
, volatile_p
);
1404 MEM_IN_STRUCT_P (x
) = in_struct_p
;
1405 MEM_VOLATILE_P (x
) = volatile_p
;
1406 /* Sadly, we cannot use alias sets because the extra aliasing
1407 produced by the AND interferes. Given that two-byte quantities
1408 are the only thing we would be able to differentiate anyway,
1409 there does not seem to be any point in convoluting the early
1410 out of the alias check. */
1411 /* set_mem_alias_set (x, alias_set); */
1419 /* Look for any MEMs in the current sequence of insns and set the
1420 in-struct, unchanging, and volatile flags from the flags in REF.
1421 If REF is not a MEM, don't do anything. */
1424 mt_set_memflags (rtx ref
)
1427 int in_struct_p
, volatile_p
;
1429 if (GET_CODE (ref
) != MEM
)
1432 in_struct_p
= MEM_IN_STRUCT_P (ref
);
1433 volatile_p
= MEM_VOLATILE_P (ref
);
1435 /* This is only called from mt.md, after having had something
1436 generated from one of the insn patterns. So if everything is
1437 zero, the pattern is already up-to-date. */
1438 if (! in_struct_p
&& ! volatile_p
)
1441 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1442 mt_set_memflags_1 (insn
, in_struct_p
, volatile_p
);
1445 /* Implement SECONDARY_RELOAD_CLASS. */
1447 mt_secondary_reload_class (enum reg_class
class ATTRIBUTE_UNUSED
,
1448 enum machine_mode mode
,
1451 if ((mode
== QImode
&& (!TARGET_BYTE_ACCESS
)) || mode
== HImode
)
1453 if (GET_CODE (x
) == MEM
1454 || (GET_CODE (x
) == REG
&& true_regnum (x
) == -1)
1455 || (GET_CODE (x
) == SUBREG
1456 && (GET_CODE (SUBREG_REG (x
)) == MEM
1457 || (GET_CODE (SUBREG_REG (x
)) == REG
1458 && true_regnum (SUBREG_REG (x
)) == -1))))
1459 return GENERAL_REGS
;
1465 /* Handle FUNCTION_VALUE, FUNCTION_OUTGOING_VALUE, and LIBCALL_VALUE
1468 mt_function_value (tree valtype
, enum machine_mode mode
, tree func_decl ATTRIBUTE_UNUSED
)
1470 if ((mode
) == DImode
|| (mode
) == DFmode
)
1471 return gen_rtx_MEM (mode
, gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
));
1474 mode
= TYPE_MODE (valtype
);
1476 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
1479 /* Split a move into two smaller pieces.
1480 MODE indicates the reduced mode. OPERANDS[0] is the original destination
1481 OPERANDS[1] is the original src. The new destinations are
1482 OPERANDS[2] and OPERANDS[4], while the new sources are OPERANDS[3]
1486 mt_split_words (enum machine_mode nmode
,
1487 enum machine_mode omode
,
1490 rtx dl
,dh
; /* src/dest pieces. */
1492 int move_high_first
= 0; /* Assume no overlap. */
1494 switch (GET_CODE (operands
[0])) /* Dest. */
1498 if ((GET_CODE (operands
[1]) == REG
1499 || GET_CODE (operands
[1]) == SUBREG
)
1500 && true_regnum (operands
[0]) <= true_regnum (operands
[1]))
1501 move_high_first
= 1;
1503 if (GET_CODE (operands
[0]) == SUBREG
)
1505 dl
= gen_rtx_SUBREG (nmode
, SUBREG_REG (operands
[0]),
1506 SUBREG_BYTE (operands
[0]) + GET_MODE_SIZE (nmode
));
1507 dh
= gen_rtx_SUBREG (nmode
, SUBREG_REG (operands
[0]), SUBREG_BYTE (operands
[0]));
1509 else if (GET_CODE (operands
[0]) == REG
&& ! IS_PSEUDO_P (operands
[0]))
1511 int r
= REGNO (operands
[0]);
1512 dh
= gen_rtx_REG (nmode
, r
);
1513 dl
= gen_rtx_REG (nmode
, r
+ HARD_REGNO_NREGS (r
, nmode
));
1517 dh
= gen_rtx_SUBREG (nmode
, operands
[0], 0);
1518 dl
= gen_rtx_SUBREG (nmode
, operands
[0], GET_MODE_SIZE (nmode
));
1523 switch (GET_CODE (XEXP (operands
[0], 0)))
1529 dl
= operand_subword (operands
[0],
1530 GET_MODE_SIZE (nmode
)/UNITS_PER_WORD
,
1532 dh
= operand_subword (operands
[0], 0, 0, omode
);
1539 switch (GET_CODE (operands
[1]))
1542 if (! IS_PSEUDO_P (operands
[1]))
1544 int r
= REGNO (operands
[1]);
1546 sh
= gen_rtx_REG (nmode
, r
);
1547 sl
= gen_rtx_REG (nmode
, r
+ HARD_REGNO_NREGS (r
, nmode
));
1551 sh
= gen_rtx_SUBREG (nmode
, operands
[1], 0);
1552 sl
= gen_rtx_SUBREG (nmode
, operands
[1], GET_MODE_SIZE (nmode
));
1557 if (operands
[1] == const0_rtx
)
1558 sh
= sl
= const0_rtx
;
1560 split_double (operands
[1], & sh
, & sl
);
1564 if (operands
[1] == const0_rtx
)
1565 sh
= sl
= const0_rtx
;
1582 sl
= gen_rtx_SUBREG (nmode
,
1583 SUBREG_REG (operands
[1]),
1584 SUBREG_BYTE (operands
[1]) + GET_MODE_SIZE (nmode
));
1585 sh
= gen_rtx_SUBREG (nmode
,
1586 SUBREG_REG (operands
[1]),
1587 SUBREG_BYTE (operands
[1]));
1591 switch (GET_CODE (XEXP (operands
[1], 0)))
1598 sl
= operand_subword (operands
[1],
1599 GET_MODE_SIZE (nmode
)/UNITS_PER_WORD
,
1601 sh
= operand_subword (operands
[1], 0, 0, omode
);
1603 /* Check if the DF load is going to clobber the register
1604 used for the address, and if so make sure that is going
1605 to be the second move. */
1606 if (GET_CODE (dl
) == REG
1608 == true_regnum (XEXP (XEXP (sl
, 0 ), 0)))
1609 move_high_first
= 1;
1616 if (move_high_first
)
1633 /* Implement TARGET_MUST_PASS_IN_STACK hook. */
1635 mt_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED
, tree type
)
1637 return (((type
) != 0
1638 && (TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
1639 || TREE_ADDRESSABLE (type
))));
1642 /* Increment the counter for the number of loop instructions in the
1643 current function. */
1645 void mt_add_loop (void)
1647 cfun
->machine
->has_loops
++;
1651 /* Maxium loop nesting depth. */
1652 #define MAX_LOOP_DEPTH 4
1653 /* Maxium size of a loop (allows some headroom for delayed branch slot
1655 #define MAX_LOOP_LENGTH (200 * 4)
1657 /* We need to keep a vector of loops */
1658 typedef struct loop_info
*loop_info
;
1659 DEF_VEC_P (loop_info
);
1660 DEF_VEC_ALLOC_P (loop_info
,heap
);
1662 /* Information about a loop we have found (or are in the process of
1664 struct loop_info
GTY (())
1666 /* loop number, for dumps */
1669 /* Predecessor block of the loop. This is the one that falls into
1670 the loop and contains the initialization instruction. */
1671 basic_block predecessor
;
1673 /* First block in the loop. This is the one branched to by the dbnz
1677 /* Last block in the loop (the one with the dbnz insn */
1680 /* The successor block of the loop. This is the one the dbnz insn
1682 basic_block successor
;
1684 /* The dbnz insn. */
1687 /* The initialization insn. */
1690 /* The new initialization instruction. */
1693 /* The new ending instruction. */
1696 /* The new label placed at the end of the loop. */
1699 /* The nesting depth of the loop. Set to -1 for a bad loop. */
1702 /* The length of the loop. */
1705 /* Next loop in the graph. */
1706 struct loop_info
*next
;
1708 /* Vector of blocks only within the loop, (excluding those within
1710 VEC (basic_block
,heap
) *blocks
;
1712 /* Vector of inner loops within this loop */
1713 VEC (loop_info
,heap
) *loops
;
1716 /* Information used during loop detection. */
1717 typedef struct loop_work
GTY(())
1719 /* Basic block to be scanned. */
1722 /* Loop it will be within. */
1727 DEF_VEC_O (loop_work
);
1728 DEF_VEC_ALLOC_O (loop_work
,heap
);
1730 /* Determine the nesting and length of LOOP. Return false if the loop
1734 mt_loop_nesting (loop_info loop
)
1738 int inner_depth
= 0;
1742 /* Make sure we only have one entry point. */
1743 if (EDGE_COUNT (loop
->head
->preds
) == 2)
1745 loop
->predecessor
= EDGE_PRED (loop
->head
, 0)->src
;
1746 if (loop
->predecessor
== loop
->tail
)
1747 /* We wanted the other predecessor. */
1748 loop
->predecessor
= EDGE_PRED (loop
->head
, 1)->src
;
1750 /* We can only place a loop insn on a fall through edge of a
1751 single exit block. */
1752 if (EDGE_COUNT (loop
->predecessor
->succs
) != 1
1753 || !(EDGE_SUCC (loop
->predecessor
, 0)->flags
& EDGE_FALLTHRU
))
1754 loop
->predecessor
= NULL
;
1757 /* Mark this loop as bad for now. */
1759 if (loop
->predecessor
)
1761 for (ix
= 0; VEC_iterate (loop_info
, loop
->loops
, ix
++, inner
);)
1764 mt_loop_nesting (inner
);
1766 if (inner
->depth
< 0)
1772 if (inner_depth
< inner
->depth
)
1773 inner_depth
= inner
->depth
;
1774 loop
->length
+= inner
->length
;
1777 /* Set the proper loop depth, if it was good. */
1778 if (inner_depth
>= 0)
1779 loop
->depth
= inner_depth
+ 1;
1782 return (loop
->depth
> 0
1783 && loop
->predecessor
1784 && loop
->depth
< MAX_LOOP_DEPTH
1785 && loop
->length
< MAX_LOOP_LENGTH
);
1788 /* Determine the length of block BB. */
1791 mt_block_length (basic_block bb
)
1796 for (insn
= BB_HEAD (bb
);
1797 insn
!= NEXT_INSN (BB_END (bb
));
1798 insn
= NEXT_INSN (insn
))
1804 /* Calls are not allowed in loops. */
1805 length
= MAX_LOOP_LENGTH
+ 1;
1809 length
+= get_attr_length (insn
);
1814 /* Scan the blocks of LOOP (and its inferiors) looking for uses of
1815 REG. Return true, if we find any. Don't count the loop's dbnz
1816 insn if it matches DBNZ. */
1819 mt_scan_loop (loop_info loop
, rtx reg
, rtx dbnz
)
1825 for (ix
= 0; VEC_iterate (basic_block
, loop
->blocks
, ix
, bb
); ix
++)
1829 for (insn
= BB_HEAD (bb
);
1830 insn
!= NEXT_INSN (BB_END (bb
));
1831 insn
= NEXT_INSN (insn
))
1837 if (reg_mentioned_p (reg
, PATTERN (insn
)))
1841 for (ix
= 0; VEC_iterate (loop_info
, loop
->loops
, ix
, inner
); ix
++)
1842 if (mt_scan_loop (inner
, reg
, NULL_RTX
))
1848 /* MS2 has a loop instruction which needs to be placed just before the
1849 loop. It indicates the end of the loop and specifies the number of
1850 loop iterations. It can be nested with an automatically maintained
1851 stack of counter and end address registers. It's an ideal
1852 candidate for doloop. Unfortunately, gcc presumes that loops
1853 always end with an explicit instruction, and the doloop_begin
1854 instruction is not a flow control instruction so it can be
1855 scheduled earlier than just before the start of the loop. To make
1856 matters worse, the optimization pipeline can duplicate loop exit
1857 and entrance blocks and fails to track abnormally exiting loops.
1858 Thus we cannot simply use doloop.
1860 What we do is emit a dbnz pattern for the doloop optimization, and
1861 let that be optimized as normal. Then in machine dependent reorg
1862 we have to repeat the loop searching algorithm. We use the
1863 flow graph to find closed loops ending in a dbnz insn. We then try
1864 and convert it to use the loop instruction. The conditions are,
1866 * the loop has no abnormal exits, duplicated end conditions or
1867 duplicated entrance blocks
1869 * the loop counter register is only used in the dbnz instruction
1872 * we can find the instruction setting the initial value of the loop
1875 * the loop is not executed more than 65535 times. (This might be
1876 changed to 2^32-1, and would therefore allow variable initializers.)
1878 * the loop is not nested more than 4 deep 5) there are no
1879 subroutine calls in the loop. */
1882 mt_reorg_loops (FILE *dump_file
)
1885 loop_info loops
= NULL
;
1889 VEC (loop_work
,heap
) *works
= VEC_alloc (loop_work
,heap
,20);
1893 bool replaced
= false;
1895 /* Find all the possible loop tails. This means searching for every
1896 dbnz instruction. For each one found, create a loop_info
1897 structure and add the head block to the work list. */
1900 rtx tail
= BB_END (bb
);
1902 while (GET_CODE (tail
) == NOTE
)
1903 tail
= PREV_INSN (tail
);
1906 if (recog_memoized (tail
) == CODE_FOR_decrement_and_branch_until_zero
)
1908 /* A possible loop end */
1910 loop
= XNEW (struct loop_info
);
1914 loop
->head
= BRANCH_EDGE (bb
)->dest
;
1915 loop
->successor
= FALLTHRU_EDGE (bb
)->dest
;
1916 loop
->predecessor
= NULL
;
1919 loop
->length
= mt_block_length (bb
);
1920 loop
->blocks
= VEC_alloc (basic_block
, heap
, 20);
1921 VEC_quick_push (basic_block
, loop
->blocks
, bb
);
1923 loop
->loop_no
= nloops
++;
1925 loop
->init
= loop
->end_label
= NULL_RTX
;
1926 loop
->loop_init
= loop
->loop_end
= NULL_RTX
;
1928 work
= VEC_safe_push (loop_work
, heap
, works
, NULL
);
1929 work
->block
= loop
->head
;
1936 fprintf (dump_file
, ";; potential loop %d ending at\n",
1938 print_rtl_single (dump_file
, tail
);
1943 /* Now find all the closed loops.
1944 until work list empty,
1945 if block's auxptr is set
1947 if block's loop's start != block
1950 append block's loop's fallthrough block to worklist
1951 increment this loop's depth
1952 else if block is exit block
1956 for each target of block
1958 while (VEC_iterate (loop_work
, works
, dwork
++, work
))
1962 if (bb
== EXIT_BLOCK_PTR
)
1963 /* We've reached the exit block. The loop must be bad. */
1967 /* We've not seen this block before. Add it to the loop's
1968 list and then add each successor to the work list. */
1970 loop
->length
+= mt_block_length (bb
);
1971 VEC_safe_push (basic_block
, heap
, loop
->blocks
, bb
);
1972 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1974 if (!VEC_space (loop_work
, works
, 1))
1978 VEC_block_remove (loop_work
, works
, 0, dwork
);
1982 VEC_reserve (loop_work
, heap
, works
, 1);
1984 work
= VEC_quick_push (loop_work
, works
, NULL
);
1985 work
->block
= EDGE_SUCC (bb
, ei
.index
)->dest
;
1989 else if (bb
->aux
!= loop
)
1991 /* We've seen this block in a different loop. If it's not
1992 the other loop's head, then this loop must be bad.
1993 Otherwise, the other loop might be a nested loop, so
1994 continue from that loop's successor. */
1995 loop_info other
= bb
->aux
;
1997 if (other
->head
!= bb
)
2001 VEC_safe_push (loop_info
, heap
, loop
->loops
, other
);
2002 work
= VEC_safe_push (loop_work
, heap
, works
, NULL
);
2004 work
->block
= other
->successor
;
2008 VEC_free (loop_work
, heap
, works
);
2010 /* Now optimize the loops. */
2011 for (loop
= loops
; loop
; loop
= loop
->next
)
2013 rtx iter_reg
, insn
, init_insn
;
2014 rtx init_val
, loop_end
, loop_init
, end_label
, head_label
;
2016 if (!mt_loop_nesting (loop
))
2019 fprintf (dump_file
, ";; loop %d is bad\n", loop
->loop_no
);
2023 /* Get the loop iteration register. */
2024 iter_reg
= SET_DEST (XVECEXP (PATTERN (loop
->dbnz
), 0, 1));
2026 if (!REG_P (iter_reg
))
2030 fprintf (dump_file
, ";; loop %d has spilled iteration count\n",
2035 /* Look for the initializing insn */
2036 init_insn
= NULL_RTX
;
2037 for (insn
= BB_END (loop
->predecessor
);
2038 insn
!= PREV_INSN (BB_HEAD (loop
->predecessor
));
2039 insn
= PREV_INSN (insn
))
2043 if (reg_mentioned_p (iter_reg
, PATTERN (insn
)))
2045 rtx set
= single_set (insn
);
2047 if (set
&& rtx_equal_p (iter_reg
, SET_DEST (set
)))
2056 fprintf (dump_file
, ";; loop %d has no initializer\n",
2062 fprintf (dump_file
, ";; loop %d initialized by\n",
2064 print_rtl_single (dump_file
, init_insn
);
2067 init_val
= PATTERN (init_insn
);
2068 if (GET_CODE (init_val
) == SET
)
2069 init_val
= SET_SRC (init_val
);
2070 if (GET_CODE (init_val
) != CONST_INT
|| INTVAL (init_val
) >= 65535)
2073 fprintf (dump_file
, ";; loop %d has complex initializer\n",
2078 /* Scan all the blocks to make sure they don't use iter_reg. */
2079 if (mt_scan_loop (loop
, iter_reg
, loop
->dbnz
))
2082 fprintf (dump_file
, ";; loop %d uses iterator\n",
2087 /* The loop is good for replacement. */
2089 /* loop is 1 based, dbnz is zero based. */
2090 init_val
= GEN_INT (INTVAL (init_val
) + 1);
2092 iter_reg
= gen_rtx_REG (SImode
, LOOP_FIRST
+ loop
->depth
- 1);
2093 end_label
= gen_label_rtx ();
2094 head_label
= XEXP (SET_SRC (XVECEXP (PATTERN (loop
->dbnz
), 0, 0)), 1);
2095 loop_end
= gen_loop_end (iter_reg
, head_label
);
2096 loop_init
= gen_loop_init (iter_reg
, init_val
, end_label
);
2097 loop
->init
= init_insn
;
2098 loop
->end_label
= end_label
;
2099 loop
->loop_init
= loop_init
;
2100 loop
->loop_end
= loop_end
;
2105 fprintf (dump_file
, ";; replacing loop %d initializer with\n",
2107 print_rtl_single (dump_file
, loop
->loop_init
);
2108 fprintf (dump_file
, ";; replacing loop %d terminator with\n",
2110 print_rtl_single (dump_file
, loop
->loop_end
);
2114 /* Now apply the optimizations. Do it this way so we don't mess up
2115 the flow graph half way through. */
2116 for (loop
= loops
; loop
; loop
= loop
->next
)
2117 if (loop
->loop_init
)
2119 emit_jump_insn_after (loop
->loop_init
, BB_END (loop
->predecessor
));
2120 delete_insn (loop
->init
);
2121 emit_label_before (loop
->end_label
, loop
->dbnz
);
2122 emit_jump_insn_before (loop
->loop_end
, loop
->dbnz
);
2123 delete_insn (loop
->dbnz
);
2126 /* Free up the loop structures */
2131 VEC_free (loop_info
, heap
, loop
->loops
);
2132 VEC_free (basic_block
, heap
, loop
->blocks
);
2136 if (replaced
&& dump_file
)
2138 fprintf (dump_file
, ";; Replaced loops\n");
2139 print_rtl (dump_file
, get_insns ());
2143 /* Structures to hold branch information during reorg. */
2144 typedef struct branch_info
2146 rtx insn
; /* The branch insn. */
2148 struct branch_info
*next
;
2151 typedef struct label_info
2153 rtx label
; /* The label. */
2154 branch_info
*branches
; /* branches to this label. */
2155 struct label_info
*next
;
2158 /* Chain of labels found in current function, used during reorg. */
2159 static label_info
*mt_labels
;
2161 /* If *X is a label, add INSN to the list of branches for that
2165 mt_add_branches (rtx
*x
, void *insn
)
2167 if (GET_CODE (*x
) == LABEL_REF
)
2169 branch_info
*branch
= xmalloc (sizeof (*branch
));
2170 rtx label
= XEXP (*x
, 0);
2173 for (info
= mt_labels
; info
; info
= info
->next
)
2174 if (info
->label
== label
)
2179 info
= xmalloc (sizeof (*info
));
2180 info
->next
= mt_labels
;
2183 info
->label
= label
;
2184 info
->branches
= NULL
;
2187 branch
->next
= info
->branches
;
2188 info
->branches
= branch
;
2189 branch
->insn
= insn
;
2194 /* If BRANCH has a filled delay slot, check if INSN is dependent upon
2195 it. If so, undo the delay slot fill. Returns the next insn, if
2196 we patch out the branch. Returns the branch insn, if we cannot
2197 patch out the branch (due to anti-dependency in the delay slot).
2198 In that case, the caller must insert nops at the branch target. */
2201 mt_check_delay_slot (rtx branch
, rtx insn
)
2208 gcc_assert (GET_CODE (PATTERN (branch
)) == SEQUENCE
);
2209 if (INSN_DELETED_P (branch
))
2211 slot
= XVECEXP (PATTERN (branch
), 0, 1);
2213 tmp
= PATTERN (insn
);
2214 note_stores (PATTERN (slot
), insn_dependent_p_1
, &tmp
);
2216 /* Not dependent. */
2219 /* Undo the delay slot. */
2220 jmp
= XVECEXP (PATTERN (branch
), 0, 0);
2222 tmp
= PATTERN (jmp
);
2223 note_stores (PATTERN (slot
), insn_dependent_p_1
, &tmp
);
2225 /* Anti dependent. */
2228 p
= PREV_INSN (branch
);
2229 NEXT_INSN (p
) = slot
;
2230 PREV_INSN (slot
) = p
;
2231 NEXT_INSN (slot
) = jmp
;
2232 PREV_INSN (jmp
) = slot
;
2233 NEXT_INSN (jmp
) = branch
;
2234 PREV_INSN (branch
) = jmp
;
2235 XVECEXP (PATTERN (branch
), 0, 0) = NULL_RTX
;
2236 XVECEXP (PATTERN (branch
), 0, 1) = NULL_RTX
;
2237 delete_insn (branch
);
2241 /* Insert nops to satisfy pipeline constraints. We only deal with ms2
2242 constraints here. Earlier CPUs are dealt with by inserting nops with
2243 final_prescan (but that can lead to inferior code, and is
2244 impractical with ms2's JAL hazard).
2246 ms2 dynamic constraints
2247 1) a load and a following use must be separated by one insn
2248 2) an insn and a following dependent call must be separated by two insns
2250 only arith insns are placed in delay slots so #1 cannot happen with
2251 a load in a delay slot. #2 can happen with an arith insn in the
2255 mt_reorg_hazard (void)
2259 /* Find all the branches */
2260 for (insn
= get_insns ();
2262 insn
= NEXT_INSN (insn
))
2269 jmp
= PATTERN (insn
);
2271 if (GET_CODE (jmp
) != SEQUENCE
)
2272 /* If it's not got a filled delay slot, then it can't
2276 jmp
= XVECEXP (jmp
, 0, 0);
2278 if (recog_memoized (jmp
) == CODE_FOR_tablejump
)
2279 for (jmp
= XEXP (XEXP (XVECEXP (PATTERN (jmp
), 0, 1), 0), 0);
2280 !JUMP_TABLE_DATA_P (jmp
);
2281 jmp
= NEXT_INSN (jmp
))
2284 for_each_rtx (&PATTERN (jmp
), mt_add_branches
, insn
);
2287 /* Now scan for dependencies. */
2288 for (insn
= get_insns ();
2289 insn
&& !INSN_P (insn
);
2290 insn
= NEXT_INSN (insn
))
2298 enum attr_type attr
;
2300 gcc_assert (INSN_P (insn
) && !INSN_DELETED_P (insn
));
2301 for (next
= NEXT_INSN (insn
);
2303 next
= NEXT_INSN (next
))
2307 if (GET_CODE (PATTERN (next
)) != USE
)
2312 if (GET_CODE (PATTERN (insn
)) == SEQUENCE
)
2313 jmp
= XVECEXP (PATTERN (insn
), 0, 0);
2315 attr
= recog_memoized (jmp
) >= 0 ? get_attr_type (jmp
) : TYPE_UNKNOWN
;
2317 if (next
&& attr
== TYPE_LOAD
)
2319 /* A load. See if NEXT is dependent, and if so insert a
2322 tmp
= PATTERN (next
);
2323 if (GET_CODE (tmp
) == SEQUENCE
)
2324 tmp
= PATTERN (XVECEXP (tmp
, 0, 0));
2325 note_stores (PATTERN (insn
), insn_dependent_p_1
, &tmp
);
2327 emit_insn_after (gen_nop (), insn
);
2330 if (attr
== TYPE_CALL
)
2332 /* A call. Make sure we're not dependent on either of the
2333 previous two dynamic instructions. */
2337 rtx rescan
= NULL_RTX
;
2339 for (count
= 2; count
&& !nops
;)
2343 prev
= PREV_INSN (prev
);
2346 /* If we reach the start of the function, we must
2347 presume the caller set the address in the delay
2348 slot of the call instruction. */
2353 if (BARRIER_P (prev
))
2357 /* Look at branches to this label. */
2359 branch_info
*branch
;
2361 for (label
= mt_labels
;
2363 label
= label
->next
)
2364 if (label
->label
== prev
)
2366 for (branch
= label
->branches
;
2368 branch
= branch
->next
)
2370 tmp
= mt_check_delay_slot (branch
->insn
, jmp
);
2372 if (tmp
== branch
->insn
)
2378 if (tmp
&& branch
->insn
== next
)
2385 if (!INSN_P (prev
) || GET_CODE (PATTERN (prev
)) == USE
)
2388 if (GET_CODE (PATTERN (prev
)) == SEQUENCE
)
2390 /* Look at the delay slot. */
2391 tmp
= mt_check_delay_slot (prev
, jmp
);
2397 type
= (INSN_CODE (prev
) >= 0 ? get_attr_type (prev
)
2399 if (type
== TYPE_CALL
|| type
== TYPE_BRANCH
)
2402 if (type
== TYPE_LOAD
2403 || type
== TYPE_ARITH
2404 || type
== TYPE_COMPLEX
)
2406 tmp
= PATTERN (jmp
);
2407 note_stores (PATTERN (prev
), insn_dependent_p_1
, &tmp
);
2415 if (INSN_CODE (prev
) >= 0)
2420 for (next
= NEXT_INSN (rescan
);
2421 next
&& !INSN_P (next
);
2422 next
= NEXT_INSN (next
))
2425 emit_insn_before (gen_nop (), insn
);
2429 /* Free the data structures. */
2432 label_info
*label
= mt_labels
;
2433 branch_info
*branch
, *next
;
2435 mt_labels
= label
->next
;
2436 for (branch
= label
->branches
; branch
; branch
= next
)
2438 next
= branch
->next
;
2445 /* Fixup the looping instructions, do delayed branch scheduling, fixup
2446 scheduling hazards. */
2449 mt_machine_reorg (void)
2451 if (cfun
->machine
->has_loops
&& TARGET_MS2
)
2452 mt_reorg_loops (dump_file
);
2454 if (mt_flag_delayed_branch
)
2455 dbr_schedule (get_insns ());
2459 /* Force all instructions to be split into their final form. */
2460 split_all_insns_noflow ();
2465 /* Initialize the GCC target structure. */
2466 const struct attribute_spec mt_attribute_table
[];
2468 #undef TARGET_ATTRIBUTE_TABLE
2469 #define TARGET_ATTRIBUTE_TABLE mt_attribute_table
2470 #undef TARGET_STRUCT_VALUE_RTX
2471 #define TARGET_STRUCT_VALUE_RTX mt_struct_value_rtx
2472 #undef TARGET_PROMOTE_PROTOTYPES
2473 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
2474 #undef TARGET_PASS_BY_REFERENCE
2475 #define TARGET_PASS_BY_REFERENCE mt_pass_by_reference
2476 #undef TARGET_MUST_PASS_IN_STACK
2477 #define TARGET_MUST_PASS_IN_STACK mt_pass_in_stack
2478 #undef TARGET_ARG_PARTIAL_BYTES
2479 #define TARGET_ARG_PARTIAL_BYTES mt_arg_partial_bytes
2480 #undef TARGET_SETUP_INCOMING_VARARGS
2481 #define TARGET_SETUP_INCOMING_VARARGS mt_setup_incoming_varargs
2482 #undef TARGET_MACHINE_DEPENDENT_REORG
2483 #define TARGET_MACHINE_DEPENDENT_REORG mt_machine_reorg
2485 struct gcc_target targetm
= TARGET_INITIALIZER
;