/* old-autovect-branch/gcc/config/mt/mt.c */
/* Target definitions for the MorphoRISC1
   Copyright (C) 2005 Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the Free
   Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
   02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "recog.h"
#include "toplev.h"
#include "output.h"
#include "integrate.h"
#include "tree.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "tm_p.h"
#include "ggc.h"
#include "insn-flags.h"
#include "obstack.h"
#include "except.h"
#include "target.h"
#include "target-def.h"
#include "basic-block.h"
/* Frame pointer register mask.  */
#define FP_MASK (1 << (GPR_FP))

/* Link register mask.  */
#define LINK_MASK (1 << (GPR_LINK))

/* First GPR.  */
#define MT_INT_ARG_FIRST 1

/* Given a SIZE in bytes, advance to the next word.  */
#define ROUND_ADVANCE(SIZE) (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
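/* A worked example (illustrative, assuming the MT's 4-byte words, so
   UNITS_PER_WORD == 4): ROUND_ADVANCE (1) through ROUND_ADVANCE (4)
   all yield 1, while ROUND_ADVANCE (5) yields 2; argument sizes are
   thus rounded up to a whole number of words.  */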
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  struct rtx_def * eh_stack_adjust;
  int interrupt_handler;
  int has_loops;
};

/* Define the information needed to generate branch and scc insns.
   This is stored from the compare operation.  */
struct rtx_def * mt_compare_op0;
struct rtx_def * mt_compare_op1;

/* Current frame information calculated by compute_frame_size.  */
struct mt_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
struct mt_frame_info zero_frame_info;

/* The MT doesn't have unsigned compares; we need a library call for
   this.  */
struct rtx_def * mt_ucmpsi3_libcall;

static int mt_flag_delayed_branch;
static rtx
mt_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
		     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, RETVAL_REGNUM);
}

/* Implement RETURN_ADDR_RTX.  */
rtx
mt_return_addr_rtx (int count)
{
  if (count != 0)
    return NULL_RTX;

  return get_hard_reg_initial_val (Pmode, GPR_LINK);
}

/* The following variable value indicates the number of nops required
   between the current instruction and the next instruction to avoid
   any pipeline hazards.  */
static int mt_nops_required = 0;
static const char * mt_nop_reasons = "";

/* Implement ASM_OUTPUT_OPCODE.  */
const char *
mt_asm_output_opcode (FILE *f ATTRIBUTE_UNUSED, const char *ptr)
{
  if (mt_nops_required)
    fprintf (f, ";# need %d nops because of %s\n\t",
	     mt_nops_required, mt_nop_reasons);

  while (mt_nops_required)
    {
      fprintf (f, "or r0, r0, r0\n\t");
      --mt_nops_required;
    }

  return ptr;
}
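/* For instance, if the prescan below decided that two nops are needed
   (mt_nops_required == 2, mt_nop_reasons == "load->branch dependency
   delay"), the assembly emitted ahead of the next opcode would be:

	;# need 2 nops because of load->branch dependency delay
	or r0, r0, r0
	or r0, r0, r0

   followed by the opcode itself ("or r0, r0, r0" being the MT nop
   idiom, since r0 reads as zero).  */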
/* Given an insn, return whether it's a memory operation or a branch
   operation, otherwise return TYPE_ARITH.  */
static enum attr_type
mt_get_attr_type (rtx complete_insn)
{
  rtx insn = PATTERN (complete_insn);

  if (JUMP_P (complete_insn))
    return TYPE_BRANCH;
  if (CALL_P (complete_insn))
    return TYPE_BRANCH;

  if (GET_CODE (insn) != SET)
    return TYPE_ARITH;

  if (SET_DEST (insn) == pc_rtx)
    return TYPE_BRANCH;

  if (GET_CODE (SET_DEST (insn)) == MEM)
    return TYPE_STORE;

  if (GET_CODE (SET_SRC (insn)) == MEM)
    return TYPE_LOAD;

  return TYPE_ARITH;
}

/* A helper routine for insn_dependent_p called through note_stores.  */

static void
insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  rtx * pinsn = (rtx *) data;

  if (*pinsn && reg_mentioned_p (x, *pinsn))
    *pinsn = NULL_RTX;
}
/* Return true if anything in insn X is (anti,output,true)
   dependent on anything in insn Y.  */

static bool
insn_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    return 0;

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return true;

  tmp = PATTERN (x);
  note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
  return (tmp == NULL_RTX);
}

/* Return true if anything in insn X is true dependent on anything in
   insn Y.  */
static bool
insn_true_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    return 0;

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  return (tmp == NULL_RTX);
}
/* The following determines the number of nops that need to be
   inserted between the previous instructions and current instruction
   to avoid pipeline hazards on the mt processor.  Remember that
   the function is not called for asm insns.  */

void
mt_final_prescan_insn (rtx insn,
		       rtx * opvec ATTRIBUTE_UNUSED,
		       int noperands ATTRIBUTE_UNUSED)
{
  rtx prev_i;
  enum attr_type prev_attr;

  mt_nops_required = 0;
  mt_nop_reasons = "";

  /* ms2 constraints are dealt with in reorg.  */
  if (TARGET_MS2)
    return;

  /* Only worry about real instructions.  */
  if (! INSN_P (insn))
    return;

  /* Find the previous real instruction.  */
  for (prev_i = PREV_INSN (insn);
       prev_i != NULL
	 && (! INSN_P (prev_i)
	     || GET_CODE (PATTERN (prev_i)) == USE
	     || GET_CODE (PATTERN (prev_i)) == CLOBBER);
       prev_i = PREV_INSN (prev_i))
    {
      /* If we meet a barrier, there is no flow through here.  */
      if (BARRIER_P (prev_i))
	return;
    }

  /* If there isn't one then there is nothing that we need do.  */
  if (prev_i == NULL || ! INSN_P (prev_i))
    return;

  prev_attr = mt_get_attr_type (prev_i);

  /* Delayed branch slots are already taken care of by delayed branch
     scheduling.  */
  if (prev_attr == TYPE_BRANCH)
    return;

  switch (mt_get_attr_type (insn))
    {
    case TYPE_LOAD:
    case TYPE_STORE:
      /* Avoid consecutive memory operations.  */
      if ((prev_attr == TYPE_LOAD || prev_attr == TYPE_STORE)
	  && TARGET_MS1_64_001)
	{
	  mt_nops_required = 1;
	  mt_nop_reasons = "consecutive mem ops";
	}
      /* Drop through.  */

    case TYPE_ARITH:
    case TYPE_COMPLEX:
      /* One cycle of delay is required between load
	 and the dependent arithmetic instruction.  */
      if (prev_attr == TYPE_LOAD
	  && insn_true_dependent_p (prev_i, insn))
	{
	  mt_nops_required = 1;
	  mt_nop_reasons = "load->arith dependency delay";
	}
      break;

    case TYPE_BRANCH:
      if (insn_dependent_p (prev_i, insn))
	{
	  if (prev_attr == TYPE_ARITH && TARGET_MS1_64_001)
	    {
	      /* One cycle of delay between arith
		 instructions and branch dependent on arith.  */
	      mt_nops_required = 1;
	      mt_nop_reasons = "arith->branch dependency delay";
	    }
	  else if (prev_attr == TYPE_LOAD)
	    {
	      /* Two cycles of delay are required
		 between load and dependent branch.  */
	      if (TARGET_MS1_64_001)
		mt_nops_required = 2;
	      else
		mt_nops_required = 1;
	      mt_nop_reasons = "load->branch dependency delay";
	    }
	}
      break;

    default:
      fatal_insn ("mt_final_prescan_insn, invalid insn #1", insn);
      break;
    }
}
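/* An illustrative hazard sequence (schematic mnemonics, not taken
   from the MT assembler manual): on ms1-64-001, a load followed by a
   branch that uses the loaded value, e.g.

	ldw  r5, r6, #0		; load into r5
	brne r5, r0, label	; branch on r5

   would make the prescan above request two nops between the two
   instructions, while a load feeding an arithmetic insn needs one.  */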
/* Print debugging information for a frame.  */
static void
mt_debug_stack (struct mt_frame_info * info)
{
  int regno;

  if (!info)
    {
      error ("info pointer NULL");
      gcc_unreachable ();
    }

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  fprintf (stderr, "\ttotal_size = %d\n", info->total_size);
  fprintf (stderr, "\tpretend_size = %d\n", info->pretend_size);
  fprintf (stderr, "\targs_size = %d\n", info->args_size);
  fprintf (stderr, "\textra_size = %d\n", info->extra_size);
  fprintf (stderr, "\treg_size = %d\n", info->reg_size);
  fprintf (stderr, "\tvar_size = %d\n", info->var_size);
  fprintf (stderr, "\tframe_size = %d\n", info->frame_size);
  fprintf (stderr, "\treg_mask = 0x%x\n", info->reg_mask);
  fprintf (stderr, "\tsave_fp = %d\n", info->save_fp);
  fprintf (stderr, "\tsave_lr = %d\n", info->save_lr);
  fprintf (stderr, "\tinitialized = %d\n", info->initialized);
  fprintf (stderr, "\tsaved registers =");

  /* Print out reg_mask in a more readable format.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    if ((1 << regno) & info->reg_mask)
      fprintf (stderr, " %s", reg_names[regno]);

  putc ('\n', stderr);
  fflush (stderr);
}
/* Print a memory address as an operand to reference that memory location.  */

static void
mt_print_operand_simple_address (FILE * file, rtx addr)
{
  if (!addr)
    error ("PRINT_OPERAND_ADDRESS, null pointer");

  else
    switch (GET_CODE (addr))
      {
      case REG:
	fprintf (file, "%s, #0", reg_names [REGNO (addr)]);
	break;

      case PLUS:
	{
	  rtx reg = 0;
	  rtx offset = 0;
	  rtx arg0 = XEXP (addr, 0);
	  rtx arg1 = XEXP (addr, 1);

	  if (GET_CODE (arg0) == REG)
	    {
	      reg = arg0;
	      offset = arg1;
	      if (GET_CODE (offset) == REG)
		fatal_insn ("PRINT_OPERAND_ADDRESS, 2 regs", addr);
	    }

	  else if (GET_CODE (arg1) == REG)
	    reg = arg1, offset = arg0;
	  else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
	    {
	      fprintf (file, "%s, #", reg_names [GPR_R0]);
	      output_addr_const (file, addr);
	      break;
	    }
	  fprintf (file, "%s, #", reg_names [REGNO (reg)]);
	  output_addr_const (file, offset);
	  break;
	}

      case LABEL_REF:
      case SYMBOL_REF:
      case CONST_INT:
      case CONST:
	output_addr_const (file, addr);
	break;

      default:
	fatal_insn ("PRINT_OPERAND_ADDRESS, invalid insn #1", addr);
	break;
      }
}
/* Implement PRINT_OPERAND_ADDRESS.  */
void
mt_print_operand_address (FILE * file, rtx addr)
{
  if (GET_CODE (addr) == AND
      && GET_CODE (XEXP (addr, 1)) == CONST_INT
      && INTVAL (XEXP (addr, 1)) == -3)
    mt_print_operand_simple_address (file, XEXP (addr, 0));
  else
    mt_print_operand_simple_address (file, addr);
}
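/* A sketch of the output syntax this produces (register spellings
   come from reg_names in mt.h, so the exact names are an assumption
   here): a plain register address (reg:SI 5) prints as "r5, #0", a
   (plus (reg:SI 5) (const_int 8)) prints as "r5, #8", and a bare
   symbol or constant goes through output_addr_const.  An
   (and ... (const_int -3)) wrapper, which mt_legitimate_address_p
   below accepts for SImode accesses, is stripped before printing.  */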
/* Implement PRINT_OPERAND.  */
void
mt_print_operand (FILE * file, rtx x, int code)
{
  switch (code)
    {
    case '#':
      /* Output a nop if there's nothing for the delay slot.  */
      if (dbr_sequence_length () == 0)
	fputs ("\n\tor r0, r0, r0", file);
      return;

    case 'H':
      fprintf (file, "#%%hi16(");
      output_addr_const (file, x);
      fprintf (file, ")");
      return;

    case 'L':
      fprintf (file, "#%%lo16(");
      output_addr_const (file, x);
      fprintf (file, ")");
      return;

    case 'N':
      fprintf (file, "#%ld", ~INTVAL (x));
      return;

    case 'z':
      if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
	{
	  fputs (reg_names[GPR_R0], file);
	  return;
	}

    case 0:
      /* Handled below.  */
      break;

    default:
      /* output_operand_lossage ("mt_print_operand: unknown code"); */
      fprintf (file, "unknown code");
      return;
    }

  switch (GET_CODE (x))
    {
    case REG:
      fputs (reg_names [REGNO (x)], file);
      break;

    case CONST:
    case CONST_INT:
      fprintf (file, "#%ld", INTVAL (x));
      break;

    case MEM:
      mt_print_operand_address (file, XEXP (x, 0));
      break;

    case LABEL_REF:
    case SYMBOL_REF:
      output_addr_const (file, x);
      break;

    default:
      fprintf (file, "Unknown code: %d", GET_CODE (x));
      break;
    }

  return;
}
/* Implement INIT_CUMULATIVE_ARGS.  */
void
mt_init_cumulative_args (CUMULATIVE_ARGS * cum, tree fntype, rtx libname,
			 tree fndecl ATTRIBUTE_UNUSED, int incoming)
{
  *cum = 0;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\nmt_init_cumulative_args:");

      if (incoming)
	fputs (" incoming", stderr);

      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);
	  fprintf (stderr, " return = %s,",
		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
	}

      if (libname && GET_CODE (libname) == SYMBOL_REF)
	fprintf (stderr, " libname = %s", XSTR (libname, 0));

      if (cfun->returns_struct)
	fprintf (stderr, " return-struct");

      putc ('\n', stderr);
    }
}
/* Compute the slot number to pass an argument in.
   Returns the slot number or -1 if passing on the stack.

   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).
   INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG.
   *PREGNO records the register number to use if scalar type.  */

static int
mt_function_arg_slotno (const CUMULATIVE_ARGS * cum,
			enum machine_mode mode,
			tree type,
			int named ATTRIBUTE_UNUSED,
			int incoming_p ATTRIBUTE_UNUSED,
			int * pregno)
{
  int regbase = MT_INT_ARG_FIRST;
  int slotno = * cum;

  if (mode == VOIDmode || targetm.calls.must_pass_in_stack (mode, type))
    return -1;

  if (slotno >= MT_NUM_ARG_REGS)
    return -1;

  * pregno = regbase + slotno;

  return slotno;
}
/* Implement FUNCTION_ARG.  */
rtx
mt_function_arg (const CUMULATIVE_ARGS * cum,
		 enum machine_mode mode,
		 tree type,
		 int named,
		 int incoming_p)
{
  int slotno, regno;
  rtx reg;

  slotno = mt_function_arg_slotno (cum, mode, type, named, incoming_p, &regno);

  if (slotno == -1)
    reg = NULL_RTX;
  else
    reg = gen_rtx_REG (mode, regno);

  return reg;
}
/* Implement FUNCTION_ARG_ADVANCE.  */
void
mt_function_arg_advance (CUMULATIVE_ARGS * cum,
			 enum machine_mode mode,
			 tree type ATTRIBUTE_UNUSED,
			 int named)
{
  int slotno, regno;

  /* We pass 0 for incoming_p here, it doesn't matter.  */
  slotno = mt_function_arg_slotno (cum, mode, type, named, 0, &regno);

  * cum += (mode != BLKmode
	    ? ROUND_ADVANCE (GET_MODE_SIZE (mode))
	    : ROUND_ADVANCE (int_size_in_bytes (type)));

  if (TARGET_DEBUG_ARG)
    fprintf (stderr,
	     "mt_function_arg_advance: words = %2d, mode = %4s, named = %d, size = %3d\n",
	     *cum, GET_MODE_NAME (mode), named,
	     (*cum) * UNITS_PER_WORD);
}
/* Implement hook TARGET_ARG_PARTIAL_BYTES.

   Returns the number of bytes at the beginning of an argument that
   must be put in registers.  The value must be zero for arguments
   that are passed entirely in registers or that are entirely pushed
   on the stack.  */
static int
mt_arg_partial_bytes (CUMULATIVE_ARGS * pcum,
		      enum machine_mode mode,
		      tree type,
		      bool named ATTRIBUTE_UNUSED)
{
  int cum = * pcum;
  int words;

  if (mode == BLKmode)
    words = ((int_size_in_bytes (type) + UNITS_PER_WORD - 1)
	     / UNITS_PER_WORD);
  else
    words = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (! targetm.calls.pass_by_reference (&cum, mode, type, named)
      && cum < MT_NUM_ARG_REGS
      && (cum + words) > MT_NUM_ARG_REGS)
    {
      int bytes = (MT_NUM_ARG_REGS - cum) * UNITS_PER_WORD;

      if (TARGET_DEBUG)
	fprintf (stderr, "function_arg_partial_nregs = %d\n", bytes);
      return bytes;
    }

  return 0;
}
/* Implement TARGET_PASS_BY_REFERENCE hook.  */
static bool
mt_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
		      enum machine_mode mode ATTRIBUTE_UNUSED,
		      tree type,
		      bool named ATTRIBUTE_UNUSED)
{
  return (type && int_size_in_bytes (type) > 4 * UNITS_PER_WORD);
}

/* Implement FUNCTION_ARG_BOUNDARY.  */
int
mt_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
			  tree type ATTRIBUTE_UNUSED)
{
  return BITS_PER_WORD;
}
/* Implement REG_OK_FOR_BASE_P.  */
int
mt_reg_ok_for_base_p (rtx x, int strict)
{
  if (strict)
    return (((unsigned) REGNO (x)) < FIRST_PSEUDO_REGISTER);
  return 1;
}
/* Helper function of mt_legitimate_address_p.  Return true if XINSN
   is a simple address, otherwise false.  */
static bool
mt_legitimate_simple_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
				rtx xinsn, int strict)
{
  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\n========== GO_IF_LEGITIMATE_ADDRESS, %sstrict\n",
	       strict ? "" : "not ");
      debug_rtx (xinsn);
    }

  if (GET_CODE (xinsn) == REG && mt_reg_ok_for_base_p (xinsn, strict))
    return true;

  if (GET_CODE (xinsn) == PLUS
      && GET_CODE (XEXP (xinsn, 0)) == REG
      && mt_reg_ok_for_base_p (XEXP (xinsn, 0), strict)
      && GET_CODE (XEXP (xinsn, 1)) == CONST_INT
      && SMALL_INT (XEXP (xinsn, 1)))
    return true;

  return false;
}
/* Helper function of GO_IF_LEGITIMATE_ADDRESS.  Return nonzero if
   XINSN is a legitimate address on MT.  */
int
mt_legitimate_address_p (enum machine_mode mode, rtx xinsn, int strict)
{
  if (mt_legitimate_simple_address_p (mode, xinsn, strict))
    return 1;

  if ((mode) == SImode
      && GET_CODE (xinsn) == AND
      && GET_CODE (XEXP (xinsn, 1)) == CONST_INT
      && INTVAL (XEXP (xinsn, 1)) == -3)
    return mt_legitimate_simple_address_p (mode, XEXP (xinsn, 0), strict);
  else
    return 0;
}
/* Return truth value of whether OP can be used as an operand where a
   register or 16-bit unsigned integer is needed.  */
int
uns_arith_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT && SMALL_INT_UNSIGNED (op))
    return 1;

  return register_operand (op, mode);
}

/* Return truth value of whether OP can be used as an operand where a
   16-bit integer is needed.  */
int
arith_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT && SMALL_INT (op))
    return 1;

  return register_operand (op, mode);
}

/* Return truth value of whether OP is a register or the constant 0.  */
int
reg_or_0_operand (rtx op, enum machine_mode mode)
{
  switch (GET_CODE (op))
    {
    case CONST_INT:
      return INTVAL (op) == 0;

    case REG:
    case SUBREG:
      return register_operand (op, mode);

    default:
      break;
    }

  return 0;
}

/* Return truth value of whether OP is a constant that requires two
   loads to put in a register.  */
int
big_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) == CONST_INT && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
    return 1;

  return 0;
}

/* Return truth value of whether OP is a constant that requires only
   one load to put in a register.  */
int
single_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (big_const_operand (op, mode)
      || GET_CODE (op) == CONST
      || GET_CODE (op) == LABEL_REF
      || GET_CODE (op) == SYMBOL_REF)
    return 0;

  return 1;
}
/* True if the current function is an interrupt handler
   (either via #pragma or an attribute specification).  */
int interrupt_handler;
enum processor_type mt_cpu;

static struct machine_function *
mt_init_machine_status (void)
{
  struct machine_function *f;

  f = ggc_alloc_cleared (sizeof (struct machine_function));

  return f;
}
/* Implement OVERRIDE_OPTIONS.  */
void
mt_override_options (void)
{
  if (mt_cpu_string != NULL)
    {
      if (!strcmp (mt_cpu_string, "ms1-64-001"))
	mt_cpu = PROCESSOR_MS1_64_001;
      else if (!strcmp (mt_cpu_string, "ms1-16-002"))
	mt_cpu = PROCESSOR_MS1_16_002;
      else if (!strcmp (mt_cpu_string, "ms1-16-003"))
	mt_cpu = PROCESSOR_MS1_16_003;
      else if (!strcmp (mt_cpu_string, "ms2"))
	mt_cpu = PROCESSOR_MS2;
      else
	error ("bad value (%s) for -march= switch", mt_cpu_string);
    }
  else
    mt_cpu = PROCESSOR_MS1_16_002;

  if (flag_exceptions)
    {
      flag_omit_frame_pointer = 0;
      flag_gcse = 0;
    }

  /* We do delayed branch filling in machine dependent reorg.  */
  mt_flag_delayed_branch = flag_delayed_branch;
  flag_delayed_branch = 0;

  init_machine_status = mt_init_machine_status;
}
/* Do what is necessary for `va_start'.  We look at the current function
   to determine if stdarg or varargs is used and return the address of the
   first unnamed parameter.  */

static rtx
mt_builtin_saveregs (void)
{
  int first_reg = 0;
  rtx address;
  int regno;

  for (regno = first_reg; regno < MT_NUM_ARG_REGS; regno ++)
    emit_move_insn (gen_rtx_MEM (word_mode,
				 gen_rtx_PLUS (Pmode,
					       gen_rtx_REG (SImode, ARG_POINTER_REGNUM),
					       GEN_INT (UNITS_PER_WORD * regno))),
		    gen_rtx_REG (word_mode,
				 MT_INT_ARG_FIRST + regno));

  address = gen_rtx_PLUS (Pmode,
			  gen_rtx_REG (SImode, ARG_POINTER_REGNUM),
			  GEN_INT (UNITS_PER_WORD * first_reg));
  return address;
}

/* Implement `va_start'.  */

void
mt_va_start (tree valist, rtx nextarg)
{
  mt_builtin_saveregs ();
  std_expand_builtin_va_start (valist, nextarg);
}
/* Returns the number of bytes offset between the frame pointer and the stack
   pointer for the current function.  SIZE is the number of bytes of space
   needed for local variables.  */

unsigned int
mt_compute_frame_size (int size)
{
  int regno;
  unsigned int total_size;
  unsigned int var_size;
  unsigned int args_size;
  unsigned int pretend_size;
  unsigned int extra_size;
  unsigned int reg_size;
  unsigned int frame_size;
  unsigned int reg_mask;

  var_size = size;
  args_size = current_function_outgoing_args_size;
  pretend_size = current_function_pretend_args_size;
  extra_size = FIRST_PARM_OFFSET (0);
  total_size = extra_size + pretend_size + args_size + var_size;
  reg_size = 0;
  reg_mask = 0;

  /* Calculate space needed for registers.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    {
      if (MUST_SAVE_REGISTER (regno))
	{
	  reg_size += UNITS_PER_WORD;
	  reg_mask |= 1 << regno;
	}
    }

  current_frame_info.save_fp = (regs_ever_live [GPR_FP]
				|| frame_pointer_needed
				|| interrupt_handler);
  current_frame_info.save_lr = (regs_ever_live [GPR_LINK]
				|| profile_flag
				|| interrupt_handler);

  reg_size += (current_frame_info.save_fp + current_frame_info.save_lr)
    * UNITS_PER_WORD;
  total_size += reg_size;
  total_size = ((total_size + 3) & ~3);

  frame_size = total_size;

  /* Save computed information.  */
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.var_size = var_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_size = reg_size;
  current_frame_info.frame_size = args_size + var_size;
  current_frame_info.total_size = total_size;
  current_frame_info.extra_size = extra_size;
  current_frame_info.reg_mask = reg_mask;
  current_frame_info.initialized = reload_completed;

  return total_size;
}
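/* A worked example of the computation above (hypothetical numbers):
   with 10 bytes of locals (var_size = 10), no outgoing or pretend
   args, extra_size = 0, two call-saved registers plus FP and LR to
   save (reg_size = 16), total_size = 10 + 16 = 26, which the
   "(total_size + 3) & ~3" step rounds up to 28, keeping the stack
   word aligned.  */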
/* Emit code to save REG in stack offset pointed to by MEM.
   STACK_OFFSET is the offset from the SP where the save will happen.
   This function sets the REG_FRAME_RELATED_EXPR note accordingly.  */
static void
mt_emit_save_restore (enum save_direction direction,
		      rtx reg, rtx mem, int stack_offset)
{
  if (direction == FROM_PROCESSOR_TO_MEM)
    {
      rtx insn;

      insn = emit_move_insn (mem, reg);
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST
	(REG_FRAME_RELATED_EXPR,
	 gen_rtx_SET (VOIDmode,
		      gen_rtx_MEM (SImode,
				   gen_rtx_PLUS (SImode,
						 stack_pointer_rtx,
						 GEN_INT (stack_offset))),
		      reg),
	 REG_NOTES (insn));
    }
  else
    emit_move_insn (reg, mem);
}
/* Emit code to save the frame pointer in the prologue and restore the
   frame pointer in the epilogue.  */

static void
mt_emit_save_fp (enum save_direction direction,
		 struct mt_frame_info info)
{
  rtx base_reg;
  int reg_mask = info.reg_mask & ~(FP_MASK | LINK_MASK);
  int offset = info.total_size;
  int stack_offset = info.total_size;

  /* If there is nothing to save, get out now.  */
  if (! info.save_fp && ! info.save_lr && ! reg_mask)
    return;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (offset, 'O'))
    base_reg = stack_pointer_rtx;
  else
    {
      /* Use the scratch register R9 that holds the old stack pointer.  */
      base_reg = gen_rtx_REG (SImode, GPR_R9);
      offset = 0;
    }

  if (info.save_fp)
    {
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
      mt_emit_save_restore
	(direction, gen_rtx_REG (SImode, GPR_FP),
	 gen_rtx_MEM (SImode,
		      gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
	 stack_offset);
    }
}
/* Emit code to save registers in the prologue and restore registers
   in the epilogue.  */

static void
mt_emit_save_regs (enum save_direction direction,
		   struct mt_frame_info info)
{
  rtx base_reg;
  int regno;
  int reg_mask = info.reg_mask & ~(FP_MASK | LINK_MASK);
  int offset = info.total_size;
  int stack_offset = info.total_size;

  /* If there is nothing to save, get out now.  */
  if (! info.save_fp && ! info.save_lr && ! reg_mask)
    return;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (offset, 'O'))
    base_reg = stack_pointer_rtx;
  else
    {
      /* Use the scratch register R9 that holds the old stack pointer.  */
      base_reg = gen_rtx_REG (SImode, GPR_R9);
      offset = 0;
    }

  if (info.save_fp)
    {
      /* This just records the space for it; the actual move is generated
	 in mt_emit_save_fp ().  */
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
    }

  if (info.save_lr)
    {
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
      mt_emit_save_restore
	(direction, gen_rtx_REG (SImode, GPR_LINK),
	 gen_rtx_MEM (SImode,
		      gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
	 stack_offset);
    }

  /* Save any needed call-saved regs.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    {
      if ((reg_mask & (1 << regno)) != 0)
	{
	  offset -= UNITS_PER_WORD;
	  stack_offset -= UNITS_PER_WORD;
	  mt_emit_save_restore
	    (direction, gen_rtx_REG (SImode, regno),
	     gen_rtx_MEM (SImode,
			  gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
	     stack_offset);
	}
    }
}
/* Return true if FUNC is a function with the 'interrupt' attribute.  */
static bool
mt_interrupt_function_p (tree func)
{
  tree a;

  if (TREE_CODE (func) != FUNCTION_DECL)
    return false;

  a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
  return a != NULL_TREE;
}
/* Generate prologue code.  */
void
mt_expand_prologue (void)
{
  rtx size_rtx, insn;
  unsigned int frame_size;

  if (mt_interrupt_function_p (current_function_decl))
    {
      interrupt_handler = 1;
      if (cfun->machine)
	cfun->machine->interrupt_handler = 1;
    }

  mt_compute_frame_size (get_frame_size ());

  if (TARGET_DEBUG_STACK)
    mt_debug_stack (&current_frame_info);

  /* Compute size of stack adjustment.  */
  frame_size = current_frame_info.total_size;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    size_rtx = GEN_INT (frame_size);
  else
    {
      /* We do not have any scratch registers.  */
      gcc_assert (!interrupt_handler);

      size_rtx = gen_rtx_REG (SImode, GPR_R9);
      insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
      insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
				    GEN_INT (frame_size & 0x0000ffff)));
    }

  /* Allocate stack for this frame.  */
  /* Make stack adjustment and use scratch register if constant too
     large to fit as immediate.  */
  if (frame_size)
    {
      insn = emit_insn (gen_subsi3 (stack_pointer_rtx,
				    stack_pointer_rtx,
				    size_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			     gen_rtx_SET (VOIDmode,
					  stack_pointer_rtx,
					  gen_rtx_MINUS (SImode,
							 stack_pointer_rtx,
							 GEN_INT (frame_size))),
			     REG_NOTES (insn));
    }

  /* Set R9 to point to old sp if required for access to register save
     area.  */
  if (current_frame_info.reg_size != 0
      && !CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));

  /* Save the frame pointer.  */
  mt_emit_save_fp (FROM_PROCESSOR_TO_MEM, current_frame_info);

  /* Now put the frame pointer into the frame pointer register.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Save the registers.  */
  mt_emit_save_regs (FROM_PROCESSOR_TO_MEM, current_frame_info);

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  */
  if (profile_flag)
    emit_insn (gen_blockage ());
}
/* Implement EPILOGUE_USES.  */
int
mt_epilogue_uses (int regno)
{
  if (cfun->machine && cfun->machine->interrupt_handler && reload_completed)
    return 1;
  return regno == GPR_LINK;
}
/* Generate epilogue.  EH_MODE is NORMAL_EPILOGUE when generating a
   function epilogue, or EH_EPILOGUE when generating an EH
   epilogue.  */
void
mt_expand_epilogue (enum epilogue_type eh_mode)
{
  rtx size_rtx, insn;
  unsigned frame_size;

  mt_compute_frame_size (get_frame_size ());

  if (TARGET_DEBUG_STACK)
    mt_debug_stack (& current_frame_info);

  /* Compute size of stack adjustment.  */
  frame_size = current_frame_info.total_size;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    size_rtx = GEN_INT (frame_size);
  else
    {
      /* We do not have any scratch registers.  */
      gcc_assert (!interrupt_handler);

      size_rtx = gen_rtx_REG (SImode, GPR_R9);
      insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
      insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
				    GEN_INT (frame_size & 0x0000ffff)));
      /* Set R9 to point to old sp if required for access to register
	 save area.  */
      emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));
    }

  /* Restore sp if there was some possible change to it.  */
  if (frame_pointer_needed)
    insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);

  /* Restore the registers.  */
  mt_emit_save_fp (FROM_MEM_TO_PROCESSOR, current_frame_info);
  mt_emit_save_regs (FROM_MEM_TO_PROCESSOR, current_frame_info);

  /* Make stack adjustment and use scratch register if constant too
     large to fit as immediate.  */
  if (frame_size)
    {
      if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
	/* Can handle this with simple add.  */
	insn = emit_insn (gen_addsi3 (stack_pointer_rtx,
				      stack_pointer_rtx,
				      size_rtx));
      else
	/* Scratch reg R9 has the old sp value.  */
	insn = emit_move_insn (stack_pointer_rtx,
			       gen_rtx_REG (SImode, GPR_R9));

      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			     gen_rtx_SET (VOIDmode,
					  stack_pointer_rtx,
					  gen_rtx_PLUS (SImode,
							stack_pointer_rtx,
							GEN_INT (frame_size))),
			     REG_NOTES (insn));
    }

  if (cfun->machine && cfun->machine->eh_stack_adjust != NULL_RTX)
    /* Perform the additional bump for __throw.  */
    emit_insn (gen_addsi3 (stack_pointer_rtx,
			   stack_pointer_rtx,
			   cfun->machine->eh_stack_adjust));

  /* Generate the appropriate return.  */
  if (eh_mode == EH_EPILOGUE)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_return_interrupt_internal ());
  else
    emit_jump_insn (gen_return_internal ());

  /* Reset state info for each function.  */
  interrupt_handler = 0;
  current_frame_info = zero_frame_info;
  if (cfun->machine)
    cfun->machine->eh_stack_adjust = NULL_RTX;
}
/* Generate code for the "eh_return" pattern.  */
void
mt_expand_eh_return (rtx * operands)
{
  if (GET_CODE (operands[0]) != REG
      || REGNO (operands[0]) != EH_RETURN_STACKADJ_REGNO)
    {
      rtx sp = EH_RETURN_STACKADJ_RTX;

      emit_move_insn (sp, operands[0]);
      operands[0] = sp;
    }

  emit_insn (gen_eh_epilogue (operands[0]));
}

/* Generate code for the "eh_epilogue" pattern.  */
void
mt_emit_eh_epilogue (rtx * operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = EH_RETURN_STACKADJ_RTX; /* operands[0]; */
  mt_expand_epilogue (EH_EPILOGUE);
}
/* Handle an "interrupt" attribute.  */
static tree
mt_handle_interrupt_attribute (tree * node,
			       tree name,
			       tree args ATTRIBUTE_UNUSED,
			       int flags ATTRIBUTE_UNUSED,
			       bool * no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes,
	       "%qs attribute only applies to functions",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Table of machine attributes.  */
const struct attribute_spec mt_attribute_table[] =
{
  /* name,       min, max, decl?, type?, func?, handler  */
  { "interrupt", 0,   0,   false, false, false, mt_handle_interrupt_attribute },
  { NULL,        0,   0,   false, false, false, NULL }
};
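/* For reference (not part of the original file), user code would
   request the attribute handled above with a declaration like:

	void my_handler (void) __attribute__ ((interrupt));

   which makes mt_interrupt_function_p return true for my_handler and
   so forces the FP/LR saves and the interrupt return sequence.  */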
/* Implement INITIAL_ELIMINATION_OFFSET.  */
int
mt_initial_elimination_offset (int from, int to)
{
  mt_compute_frame_size (get_frame_size ());

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;

  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return current_frame_info.total_size;

  else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return current_frame_info.total_size;

  else
    gcc_unreachable ();
}
/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  */

static rtx
mt_generate_compare (enum rtx_code code, rtx op0, rtx op1)
{
  rtx scratch0, scratch1, const_scratch;

  switch (code)
    {
    case GTU:
    case LTU:
    case GEU:
    case LEU:
      /* Need to adjust ranges for faking unsigned compares.  */
      scratch0 = gen_reg_rtx (SImode);
      scratch1 = gen_reg_rtx (SImode);
      const_scratch = force_reg (SImode, GEN_INT (MT_MIN_INT));
      emit_insn (gen_addsi3 (scratch0, const_scratch, op0));
      emit_insn (gen_addsi3 (scratch1, const_scratch, op1));
      break;
    default:
      scratch0 = op0;
      scratch1 = op1;
      break;
    }

  /* Adjust compare operator to fake unsigned compares.  */
  switch (code)
    {
    case GTU:
      code = GT; break;
    case LTU:
      code = LT; break;
    case GEU:
      code = GE; break;
    case LEU:
      code = LE; break;
    default:
      /* do nothing */
      break;
    }

  /* Generate the actual compare.  */
  return gen_rtx_fmt_ee (code, VOIDmode, scratch0, scratch1);
}
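/* The bias trick above works because adding MT_MIN_INT (presumably
   0x80000000) to both operands maps the unsigned number line onto the
   signed one while preserving order.  A concrete check, assuming
   32-bit values: for a = 0xffffffff and b = 1, GTU (a, b) should
   hold; after biasing, a becomes 0x7fffffff (INT_MAX) and b becomes
   0x80000001 (a large negative number), so the signed GT compare is
   indeed true.  */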
/* Emit a branch of kind CODE to location LOC.  */

void
mt_emit_cbranch (enum rtx_code code, rtx loc, rtx op0, rtx op1)
{
  rtx condition_rtx, loc_ref;

  if (! reg_or_0_operand (op0, SImode))
    op0 = copy_to_mode_reg (SImode, op0);

  if (! reg_or_0_operand (op1, SImode))
    op1 = copy_to_mode_reg (SImode, op1);

  condition_rtx = mt_generate_compare (code, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
						     loc_ref, pc_rtx)));
}
/* Subfunction of the following function.  Update the flags of any MEM
   found in part of X.  */

static void
mt_set_memflags_1 (rtx x, int in_struct_p, int volatile_p)
{
  int i;

  switch (GET_CODE (x))
    {
    case SEQUENCE:
    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	mt_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p);
      break;

    case INSN:
      mt_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p);
      break;

    case SET:
      mt_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p);
      mt_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p);
      break;

    case MEM:
      MEM_IN_STRUCT_P (x) = in_struct_p;
      MEM_VOLATILE_P (x) = volatile_p;
      /* Sadly, we cannot use alias sets because the extra aliasing
	 produced by the AND interferes.  Given that two-byte quantities
	 are the only thing we would be able to differentiate anyway,
	 there does not seem to be any point in convoluting the early
	 out of the alias check.  */
      /* set_mem_alias_set (x, alias_set); */
      break;

    default:
      break;
    }
}
/* Look for any MEMs in the current sequence of insns and set the
   in-struct, unchanging, and volatile flags from the flags in REF.
   If REF is not a MEM, don't do anything.  */

void
mt_set_memflags (rtx ref)
{
  rtx insn;
  int in_struct_p, volatile_p;

  if (GET_CODE (ref) != MEM)
    return;

  in_struct_p = MEM_IN_STRUCT_P (ref);
  volatile_p = MEM_VOLATILE_P (ref);

  /* This is only called from mt.md, after having had something
     generated from one of the insn patterns.  So if everything is
     zero, the pattern is already up-to-date.  */
  if (! in_struct_p && ! volatile_p)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    mt_set_memflags_1 (insn, in_struct_p, volatile_p);
}
/* Implement SECONDARY_RELOAD_CLASS.  */
enum reg_class
mt_secondary_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
			   enum machine_mode mode,
			   rtx x)
{
  if ((mode == QImode && (!TARGET_BYTE_ACCESS)) || mode == HImode)
    {
      if (GET_CODE (x) == MEM
	  || (GET_CODE (x) == REG && true_regnum (x) == -1)
	  || (GET_CODE (x) == SUBREG
	      && (GET_CODE (SUBREG_REG (x)) == MEM
		  || (GET_CODE (SUBREG_REG (x)) == REG
		      && true_regnum (SUBREG_REG (x)) == -1))))
	return GENERAL_REGS;
    }

  return NO_REGS;
}
/* Handle FUNCTION_VALUE, FUNCTION_OUTGOING_VALUE, and LIBCALL_VALUE
   macros.  */
rtx
mt_function_value (tree valtype, enum machine_mode mode, tree func_decl ATTRIBUTE_UNUSED)
{
  if ((mode) == DImode || (mode) == DFmode)
    return gen_rtx_MEM (mode, gen_rtx_REG (mode, RETURN_VALUE_REGNUM));

  if (valtype)
    mode = TYPE_MODE (valtype);

  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
/* Split a move into two smaller pieces.
   MODE indicates the reduced mode.  OPERANDS[0] is the original destination,
   OPERANDS[1] is the original src.  The new destinations are
   OPERANDS[2] and OPERANDS[4], while the new sources are OPERANDS[3]
   and OPERANDS[5].  */

void
mt_split_words (enum machine_mode nmode,
		enum machine_mode omode,
		rtx *operands)
{
  rtx dl, dh;	/* src/dest pieces.  */
  rtx sl, sh;
  int move_high_first = 0;	/* Assume no overlap.  */

  switch (GET_CODE (operands[0])) /* Dest.  */
    {
    case SUBREG:
    case REG:
      if ((GET_CODE (operands[1]) == REG
	   || GET_CODE (operands[1]) == SUBREG)
	  && true_regnum (operands[0]) <= true_regnum (operands[1]))
	move_high_first = 1;

      if (GET_CODE (operands[0]) == SUBREG)
	{
	  dl = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]),
			       SUBREG_BYTE (operands[0]) + GET_MODE_SIZE (nmode));
	  dh = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]), SUBREG_BYTE (operands[0]));
	}
      else if (GET_CODE (operands[0]) == REG && ! IS_PSEUDO_P (operands[0]))
	{
	  int r = REGNO (operands[0]);
	  dh = gen_rtx_REG (nmode, r);
	  dl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
	}
      else
	{
	  dh = gen_rtx_SUBREG (nmode, operands[0], 0);
	  dl = gen_rtx_SUBREG (nmode, operands[0], GET_MODE_SIZE (nmode));
	}
      break;

    case MEM:
      switch (GET_CODE (XEXP (operands[0], 0)))
	{
	case POST_INC:
	case POST_DEC:
	  gcc_unreachable ();
	default:
	  dl = operand_subword (operands[0],
				GET_MODE_SIZE (nmode)/UNITS_PER_WORD,
				0, omode);
	  dh = operand_subword (operands[0], 0, 0, omode);
	}
      break;
    default:
      gcc_unreachable ();
    }

  switch (GET_CODE (operands[1]))
    {
    case REG:
      if (! IS_PSEUDO_P (operands[1]))
	{
	  int r = REGNO (operands[1]);

	  sh = gen_rtx_REG (nmode, r);
	  sl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
	}
      else
	{
	  sh = gen_rtx_SUBREG (nmode, operands[1], 0);
	  sl = gen_rtx_SUBREG (nmode, operands[1], GET_MODE_SIZE (nmode));
	}
      break;

    case CONST_DOUBLE:
      if (operands[1] == const0_rtx)
	sh = sl = const0_rtx;
      else
	split_double (operands[1], & sh, & sl);
      break;

    case CONST_INT:
      if (operands[1] == const0_rtx)
	sh = sl = const0_rtx;
      else
	{
	  int vl, vh;

	  switch (nmode)
	    {
	    default:
	      gcc_unreachable ();
	    }

	  sl = GEN_INT (vl);
	  sh = GEN_INT (vh);
	}
      break;

    case SUBREG:
      sl = gen_rtx_SUBREG (nmode,
			   SUBREG_REG (operands[1]),
			   SUBREG_BYTE (operands[1]) + GET_MODE_SIZE (nmode));
      sh = gen_rtx_SUBREG (nmode,
			   SUBREG_REG (operands[1]),
			   SUBREG_BYTE (operands[1]));
      break;

    case MEM:
      switch (GET_CODE (XEXP (operands[1], 0)))
	{
	case POST_DEC:
	case POST_INC:
	  gcc_unreachable ();
	  break;
	default:
	  sl = operand_subword (operands[1],
				GET_MODE_SIZE (nmode)/UNITS_PER_WORD,
				0, omode);
	  sh = operand_subword (operands[1], 0, 0, omode);

	  /* Check if the DF load is going to clobber the register
	     used for the address, and if so make sure that is going
	     to be the second move.  */
	  if (GET_CODE (dl) == REG
	      && true_regnum (dl)
		 == true_regnum (XEXP (XEXP (sl, 0), 0)))
	    move_high_first = 1;
	}
      break;
    default:
      gcc_unreachable ();
    }

  if (move_high_first)
    {
      operands[2] = dh;
      operands[3] = sh;
      operands[4] = dl;
      operands[5] = sl;
    }
  else
    {
      operands[2] = dl;
      operands[3] = sl;
      operands[4] = dh;
      operands[5] = sh;
    }
  return;
}
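/* An illustrative overlap case (hypothetical hard registers): for a
   DImode copy from r4/r5 into r5/r6, the low word of the destination
   must be written first, r6 := r5 then r5 := r4; writing the high
   word first would clobber r5 while it still holds the source's low
   word.  The true_regnum comparison above chooses the safe order.  */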
/* Implement TARGET_MUST_PASS_IN_STACK hook.  */
static bool
mt_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED, tree type)
{
  return (((type) != 0
	   && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	       || TREE_ADDRESSABLE (type))));
}
/* Increment the counter for the number of loop instructions in the
   current function.  */

void mt_add_loop (void)
{
  cfun->machine->has_loops++;
}


/* Maximum loop nesting depth.  */
#define MAX_LOOP_DEPTH 4
/* Maximum size of a loop (allows some headroom for delayed branch slot
   filling).  */
#define MAX_LOOP_LENGTH (200 * 4)

/* We need to keep a vector of basic blocks.  */
DEF_VEC_P (basic_block);
DEF_VEC_ALLOC_P (basic_block,heap);

/* And a vector of loops.  */
typedef struct loop_info *loop_info;
DEF_VEC_P (loop_info);
DEF_VEC_ALLOC_P (loop_info,heap);
/* Information about a loop we have found (or are in the process of
   finding).  */
struct loop_info GTY (())
{
  /* loop number, for dumps */
  int loop_no;

  /* Predecessor block of the loop.  This is the one that falls into
     the loop and contains the initialization instruction.  */
  basic_block predecessor;

  /* First block in the loop.  This is the one branched to by the dbnz
     insn.  */
  basic_block head;

  /* Last block in the loop (the one with the dbnz insn).  */
  basic_block tail;

  /* The successor block of the loop.  This is the one the dbnz insn
     falls into.  */
  basic_block successor;

  /* The dbnz insn.  */
  rtx dbnz;

  /* The initialization insn.  */
  rtx init;

  /* The new initialization instruction.  */
  rtx loop_init;

  /* The new ending instruction.  */
  rtx loop_end;

  /* The new label placed at the end of the loop.  */
  rtx end_label;

  /* The nesting depth of the loop.  Set to -1 for a bad loop.  */
  int depth;

  /* The length of the loop.  */
  int length;

  /* Next loop in the graph.  */
  struct loop_info *next;

  /* Vector of blocks only within the loop (excluding those within
     inner loops).  */
  VEC (basic_block,heap) *blocks;

  /* Vector of inner loops within this loop.  */
  VEC (loop_info,heap) *loops;
};

/* Information used during loop detection.  */
typedef struct loop_work GTY(())
{
  /* Basic block to be scanned.  */
  basic_block block;

  /* Loop it will be within.  */
  loop_info loop;
} loop_work;

/* Work list.  */
DEF_VEC_O (loop_work);
DEF_VEC_ALLOC_O (loop_work,heap);
/* Determine the nesting and length of LOOP.  Return false if the loop
   is bad.  */

static bool
mt_loop_nesting (loop_info loop)
{
  loop_info inner;
  unsigned ix;
  int inner_depth = 0;

  if (!loop->depth)
    {
      /* Make sure we only have one entry point.  */
      if (EDGE_COUNT (loop->head->preds) == 2)
	{
	  loop->predecessor = EDGE_PRED (loop->head, 0)->src;
	  if (loop->predecessor == loop->tail)
	    /* We wanted the other predecessor.  */
	    loop->predecessor = EDGE_PRED (loop->head, 1)->src;

	  /* We can only place a loop insn on a fall through edge of a
	     single exit block.  */
	  if (EDGE_COUNT (loop->predecessor->succs) != 1
	      || !(EDGE_SUCC (loop->predecessor, 0)->flags & EDGE_FALLTHRU))
	    loop->predecessor = NULL;
	}

      /* Mark this loop as bad for now.  */
      loop->depth = -1;
      if (loop->predecessor)
	{
	  for (ix = 0; VEC_iterate (loop_info, loop->loops, ix++, inner);)
	    {
	      if (!inner->depth)
		mt_loop_nesting (inner);

	      if (inner->depth < 0)
		{
		  inner_depth = -1;
		  break;
		}

	      if (inner_depth < inner->depth)
		inner_depth = inner->depth;
	      loop->length += inner->length;
	    }

	  /* Set the proper loop depth, if it was good.  */
	  if (inner_depth >= 0)
	    loop->depth = inner_depth + 1;
	}
    }
  return (loop->depth > 0
	  && loop->predecessor
	  && loop->depth < MAX_LOOP_DEPTH
	  && loop->length < MAX_LOOP_LENGTH);
}
/* Determine the length of block BB.  */

static int
mt_block_length (basic_block bb)
{
  int length = 0;
  rtx insn;

  for (insn = BB_HEAD (bb);
       insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (!INSN_P (insn))
	continue;
      if (CALL_P (insn))
	{
	  /* Calls are not allowed in loops.  */
	  length = MAX_LOOP_LENGTH + 1;
	  break;
	}

      length += get_attr_length (insn);
    }
  return length;
}
/* Scan the blocks of LOOP (and its inferiors) looking for uses of
   REG.  Return true, if we find any.  Don't count the loop's dbnz
   insn if it matches DBNZ.  */

static bool
mt_scan_loop (loop_info loop, rtx reg, rtx dbnz)
{
  unsigned ix;
  loop_info inner;
  basic_block bb;

  for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
    {
      rtx insn;

      for (insn = BB_HEAD (bb);
	   insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  if (!INSN_P (insn))
	    continue;
	  if (insn == dbnz)
	    continue;
	  if (reg_mentioned_p (reg, PATTERN (insn)))
	    return true;
	}
    }
  for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
    if (mt_scan_loop (inner, reg, NULL_RTX))
      return true;

  return false;
}
/* MS2 has a loop instruction which needs to be placed just before the
   loop.  It indicates the end of the loop and specifies the number of
   loop iterations.  It can be nested with an automatically maintained
   stack of counter and end address registers.  It's an ideal
   candidate for doloop.  Unfortunately, gcc presumes that loops
   always end with an explicit instruction, and the doloop_begin
   instruction is not a flow control instruction so it can be
   scheduled earlier than just before the start of the loop.  To make
   matters worse, the optimization pipeline can duplicate loop exit
   and entrance blocks and fails to track abnormally exiting loops.
   Thus we cannot simply use doloop.

   What we do is emit a dbnz pattern for the doloop optimization, and
   let that be optimized as normal.  Then in machine dependent reorg
   we have to repeat the loop searching algorithm.  We use the
   flow graph to find closed loops ending in a dbnz insn.  We then try
   and convert it to use the loop instruction.  The conditions are,

   * the loop has no abnormal exits, duplicated end conditions or
     duplicated entrance blocks

   * the loop counter register is only used in the dbnz instruction
     within the loop

   * we can find the instruction setting the initial value of the loop
     counter

   * the loop is not executed more than 65535 times.  (This might be
     changed to 2^32-1, and would therefore allow variable initializers.)

   * the loop is not nested more than 4 deep

   * there are no subroutine calls in the loop.  */
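/* Schematically (illustrative pseudo-assembly, not exact MT syntax),
   the transformation below turns a counted loop of the form

	     mov   rC, #N		; init_insn
	head:
	     ...body...
	     dbnz  rC, head		; decrement and branch

   into the hardware-loop form

	     loop  #N+1, end_label	; loop_init, in the predecessor
	     ...body...
	end_label:			; loop_end marks the boundary

   with the counter moved into one of the dedicated LOOP_FIRST
   registers; N+1 appears because the loop insn is 1-based while dbnz
   is zero-based, as noted in mt_reorg_loops below.  */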
static void
mt_reorg_loops (FILE *dump_file)
{
  basic_block bb;
  loop_info loops = NULL;
  loop_info loop;
  int nloops = 0;
  unsigned dwork = 0;
  VEC (loop_work,heap) *works = VEC_alloc (loop_work,heap,20);
  loop_work *work;
  edge e;
  edge_iterator ei;
  bool replaced = false;

  /* Find all the possible loop tails.  This means searching for every
     dbnz instruction.  For each one found, create a loop_info
     structure and add the head block to the work list.  */
  FOR_EACH_BB (bb)
    {
      rtx tail = BB_END (bb);

      while (GET_CODE (tail) == NOTE)
	tail = PREV_INSN (tail);

      bb->aux = NULL;
      if (recog_memoized (tail) == CODE_FOR_decrement_and_branch_until_zero)
	{
	  /* A possible loop end.  */

	  loop = XNEW (struct loop_info);
	  loop->next = loops;
	  loops = loop;
	  loop->tail = bb;
	  loop->head = BRANCH_EDGE (bb)->dest;
	  loop->successor = FALLTHRU_EDGE (bb)->dest;
	  loop->predecessor = NULL;
	  loop->dbnz = tail;
	  loop->depth = 0;
	  loop->length = mt_block_length (bb);
	  loop->blocks = VEC_alloc (basic_block, heap, 20);
	  VEC_quick_push (basic_block, loop->blocks, bb);
	  loop->loops = NULL;
	  loop->loop_no = nloops++;

	  loop->init = loop->end_label = NULL_RTX;
	  loop->loop_init = loop->loop_end = NULL_RTX;

	  work = VEC_safe_push (loop_work, heap, works, NULL);
	  work->block = loop->head;
	  work->loop = loop;

	  bb->aux = loop;

	  if (dump_file)
	    {
	      fprintf (dump_file, ";; potential loop %d ending at\n",
		       loop->loop_no);
	      print_rtl_single (dump_file, tail);
	    }
	}
    }
  /* Now find all the closed loops.
     until work list empty,
      if block's auxptr is set
	if != loop slot
	  if block's loop's start != block
	    mark loop as bad
	  else
	    append block's loop's fallthrough block to worklist
	    increment this loop's depth
      else if block is exit block
	mark loop as bad
      else
	set auxptr
	for each target of block
	  add to worklist */
  while (VEC_iterate (loop_work, works, dwork++, work))
    {
      loop = work->loop;
      bb = work->block;
      if (bb == EXIT_BLOCK_PTR)
	/* We've reached the exit block.  The loop must be bad.  */
	loop->depth = -1;
      else if (!bb->aux)
	{
	  /* We've not seen this block before.  Add it to the loop's
	     list and then add each successor to the work list.  */
	  bb->aux = loop;
	  loop->length += mt_block_length (bb);
	  VEC_safe_push (basic_block, heap, loop->blocks, bb);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      if (!VEC_space (loop_work, works, 1))
		{
		  if (dwork)
		    {
		      VEC_block_remove (loop_work, works, 0, dwork);
		      dwork = 0;
		    }
		  else
		    VEC_reserve (loop_work, heap, works, 1);
		}
	      work = VEC_quick_push (loop_work, works, NULL);
	      work->block = EDGE_SUCC (bb, ei.index)->dest;
	      work->loop = loop;
	    }
	}
      else if (bb->aux != loop)
	{
	  /* We've seen this block in a different loop.  If it's not
	     the other loop's head, then this loop must be bad.
	     Otherwise, the other loop might be a nested loop, so
	     continue from that loop's successor.  */
	  loop_info other = bb->aux;

	  if (other->head != bb)
	    loop->depth = -1;
	  else
	    {
	      VEC_safe_push (loop_info, heap, loop->loops, other);
	      work = VEC_safe_push (loop_work, heap, works, NULL);
	      work->loop = loop;
	      work->block = other->successor;
	    }
	}
    }
  VEC_free (loop_work, heap, works);
  /* Now optimize the loops.  */
  for (loop = loops; loop; loop = loop->next)
    {
      rtx iter_reg, insn, init_insn;
      rtx init_val, loop_end, loop_init, end_label, head_label;

      if (!mt_loop_nesting (loop))
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);
	  continue;
	}

      /* Get the loop iteration register.  */
      iter_reg = SET_DEST (XVECEXP (PATTERN (loop->dbnz), 0, 1));

      if (!REG_P (iter_reg))
	{
	  /* Spilled.  */
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d has spilled iteration count\n",
		     loop->loop_no);
	  continue;
	}

      /* Look for the initializing insn.  */
      init_insn = NULL_RTX;
      for (insn = BB_END (loop->predecessor);
	   insn != PREV_INSN (BB_HEAD (loop->predecessor));
	   insn = PREV_INSN (insn))
	{
	  if (!INSN_P (insn))
	    continue;
	  if (reg_mentioned_p (iter_reg, PATTERN (insn)))
	    {
	      rtx set = single_set (insn);

	      if (set && rtx_equal_p (iter_reg, SET_DEST (set)))
		init_insn = insn;
	      break;
	    }
	}

      if (!init_insn)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d has no initializer\n",
		     loop->loop_no);
	  continue;
	}

      if (dump_file)
	{
	  fprintf (dump_file, ";; loop %d initialized by\n",
		   loop->loop_no);
	  print_rtl_single (dump_file, init_insn);
	}

      init_val = PATTERN (init_insn);
      if (GET_CODE (init_val) == SET)
	init_val = SET_SRC (init_val);
      if (GET_CODE (init_val) != CONST_INT || INTVAL (init_val) >= 65535)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d has complex initializer\n",
		     loop->loop_no);
	  continue;
	}

      /* Scan all the blocks to make sure they don't use iter_reg.  */
      if (mt_scan_loop (loop, iter_reg, loop->dbnz))
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d uses iterator\n",
		     loop->loop_no);
	  continue;
	}

      /* The loop is good for replacement.  */

      /* loop is 1 based, dbnz is zero based.  */
      init_val = GEN_INT (INTVAL (init_val) + 1);

      iter_reg = gen_rtx_REG (SImode, LOOP_FIRST + loop->depth - 1);
      end_label = gen_label_rtx ();
      head_label = XEXP (SET_SRC (XVECEXP (PATTERN (loop->dbnz), 0, 0)), 1);
      loop_end = gen_loop_end (iter_reg, head_label);
      loop_init = gen_loop_init (iter_reg, init_val, end_label);
      loop->init = init_insn;
      loop->end_label = end_label;
      loop->loop_init = loop_init;
      loop->loop_end = loop_end;
      replaced = true;

      if (dump_file)
	{
	  fprintf (dump_file, ";; replacing loop %d initializer with\n",
		   loop->loop_no);
	  print_rtl_single (dump_file, loop->loop_init);
	  fprintf (dump_file, ";; replacing loop %d terminator with\n",
		   loop->loop_no);
	  print_rtl_single (dump_file, loop->loop_end);
	}
    }

  /* Now apply the optimizations.  Do it this way so we don't mess up
     the flow graph half way through.  */
  for (loop = loops; loop; loop = loop->next)
    if (loop->loop_init)
      {
	emit_jump_insn_after (loop->loop_init, BB_END (loop->predecessor));
	delete_insn (loop->init);
	emit_label_before (loop->end_label, loop->dbnz);
	emit_jump_insn_before (loop->loop_end, loop->dbnz);
	delete_insn (loop->dbnz);
      }

  /* Free up the loop structures.  */
  while (loops)
    {
      loop = loops;
      loops = loop->next;
      VEC_free (loop_info, heap, loop->loops);
      VEC_free (basic_block, heap, loop->blocks);
      XDELETE (loop);
    }

  if (replaced && dump_file)
    {
      fprintf (dump_file, ";; Replaced loops\n");
      print_rtl (dump_file, get_insns ());
    }
}
2156 /* Structures to hold branch information during reorg. */
2157 typedef struct branch_info
2159 rtx insn; /* The branch insn. */
2161 struct branch_info *next;
2162 } branch_info;
2164 typedef struct label_info
2166 rtx label; /* The label. */
2167 branch_info *branches; /* branches to this label. */
2168 struct label_info *next;
2169 } label_info;
2171 /* Chain of labels found in current function, used during reorg. */
2172 static label_info *mt_labels;
/* If *X is a label, add INSN to the list of branches for that
   label.  */

static int
mt_add_branches (rtx *x, void *insn)
{
  if (GET_CODE (*x) == LABEL_REF)
    {
      branch_info *branch = xmalloc (sizeof (*branch));
      rtx label = XEXP (*x, 0);
      label_info *info;

      for (info = mt_labels; info; info = info->next)
        if (info->label == label)
          break;

      if (!info)
        {
          info = xmalloc (sizeof (*info));
          info->next = mt_labels;
          mt_labels = info;

          info->label = label;
          info->branches = NULL;
        }

      branch->next = info->branches;
      info->branches = branch;
      branch->insn = insn;
    }
  return 0;
}
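
/* mt_add_branches is only ever used as a for_each_rtx callback;
   returning zero tells for_each_rtx to keep walking the rest of the
   expression, so every LABEL_REF in a pattern gets recorded.  */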
/* If BRANCH has a filled delay slot, check if INSN is dependent upon
   it.  If so, undo the delay slot fill.  Returns the next insn, if
   we patch out the branch.  Returns the branch insn, if we cannot
   patch out the branch (due to anti-dependency in the delay slot).
   In that case, the caller must insert nops at the branch target.  */

static rtx
mt_check_delay_slot (rtx branch, rtx insn)
{
  rtx slot;
  rtx tmp;
  rtx p;
  rtx jmp;

  gcc_assert (GET_CODE (PATTERN (branch)) == SEQUENCE);
  if (INSN_DELETED_P (branch))
    return NULL_RTX;
  slot = XVECEXP (PATTERN (branch), 0, 1);

  tmp = PATTERN (insn);
  note_stores (PATTERN (slot), insn_dependent_p_1, &tmp);
  if (tmp)
    /* Not dependent.  */
    return NULL_RTX;

  /* Undo the delay slot.  */
  jmp = XVECEXP (PATTERN (branch), 0, 0);

  tmp = PATTERN (jmp);
  note_stores (PATTERN (slot), insn_dependent_p_1, &tmp);
  if (!tmp)
    /* Anti dependent: the jump itself uses something the slot insn
       sets, so the slot insn cannot be hoisted above the jump.  */
    return branch;
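
  /* Splice the slot insn out of the SEQUENCE and back into the insn
     stream immediately before the jump: afterwards the chain reads
     ... -> slot -> jmp -> branch -> ..., the SEQUENCE is emptied, and
     deleting BRANCH leaves the slot insn and the bare jump behind.  */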
  p = PREV_INSN (branch);
  NEXT_INSN (p) = slot;
  PREV_INSN (slot) = p;
  NEXT_INSN (slot) = jmp;
  PREV_INSN (jmp) = slot;
  NEXT_INSN (jmp) = branch;
  PREV_INSN (branch) = jmp;
  XVECEXP (PATTERN (branch), 0, 0) = NULL_RTX;
  XVECEXP (PATTERN (branch), 0, 1) = NULL_RTX;
  delete_insn (branch);
  return jmp;
}
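
/* Callers use the returned insn to re-find their place: if the branch
   that got patched out happened to be the caller's NEXT insn, the
   stream has changed underneath it and must be rescanned starting from
   the returned jump.  */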
/* Insert nops to satisfy pipeline constraints.  We only deal with ms2
   constraints here.  Earlier CPUs are dealt with by inserting nops with
   final_prescan (but that can lead to inferior code, and is
   impractical with ms2's JAL hazard).

   ms2 dynamic constraints:
   1) a load and a following use must be separated by one insn
   2) an insn and a following dependent call must be separated by two insns

   Only arith insns are placed in delay slots, so #1 cannot happen with
   a load in a delay slot.  #2 can happen with an arith insn in the
   delay slot.  */
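
/* A worked example of the two constraints (hypothetical register
   numbers): if a load writes r9, the insn issued immediately after it
   must not read r9 -- one nop is needed between them.  If an insn
   writes r9 and a call two or fewer dynamic insns later depends on r9,
   nops must pad the gap out to two insns.  */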
static void
mt_reorg_hazard (void)
{
  rtx insn, next;

  /* Find all the branches.  */
  for (insn = get_insns ();
       insn;
       insn = NEXT_INSN (insn))
    {
      rtx jmp;

      if (!INSN_P (insn))
        continue;

      jmp = PATTERN (insn);

      if (GET_CODE (jmp) != SEQUENCE)
        /* If it does not have a filled delay slot, it cannot
           conflict.  */
        continue;

      jmp = XVECEXP (jmp, 0, 0);
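
      /* A tablejump's targets live in the ADDR_VEC of its jump table,
         not in the jump pattern itself, so walk forward to the table
         data and record the labels listed there instead.  */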
      if (recog_memoized (jmp) == CODE_FOR_tablejump)
        for (jmp = XEXP (XEXP (XVECEXP (PATTERN (jmp), 0, 1), 0), 0);
             !JUMP_TABLE_DATA_P (jmp);
             jmp = NEXT_INSN (jmp))
          continue;

      for_each_rtx (&PATTERN (jmp), mt_add_branches, insn);
    }
  /* Now scan for dependencies.  */
  for (insn = get_insns ();
       insn && !INSN_P (insn);
       insn = NEXT_INSN (insn))
    continue;

  for (;
       insn;
       insn = next)
    {
      rtx jmp, tmp;
      enum attr_type attr;

      gcc_assert (INSN_P (insn) && !INSN_DELETED_P (insn));
      for (next = NEXT_INSN (insn);
           next && !INSN_P (next);
           next = NEXT_INSN (next))
        continue;

      jmp = insn;
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        jmp = XVECEXP (PATTERN (insn), 0, 0);

      attr = recog_memoized (jmp) >= 0 ? get_attr_type (jmp) : TYPE_UNKNOWN;

      if (next && attr == TYPE_LOAD)
        {
          /* A load.  See if NEXT is dependent, and if so insert a
             nop.  */
          tmp = PATTERN (next);
          if (GET_CODE (tmp) == SEQUENCE)
            tmp = PATTERN (XVECEXP (tmp, 0, 0));
          note_stores (PATTERN (insn), insn_dependent_p_1, &tmp);
          if (!tmp)
            emit_insn_after (gen_nop (), insn);
        }
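
      /* Calls are the harder case: the two previous dynamic
         instructions matter, so we walk backwards, and whenever we hit
         a label we must also inspect every branch that can reach it,
         using the table built above.  */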
      if (attr == TYPE_CALL)
        {
          /* A call.  Make sure we're not dependent on either of the
             previous two dynamic instructions.  */
          int nops = 0;
          int count;
          rtx prev = insn;
          rtx rescan = NULL_RTX;

          for (count = 2; count && !nops;)
            {
              int type;

              prev = PREV_INSN (prev);
              if (!prev)
                {
                  /* If we reach the start of the function, we must
                     presume the caller set the address in the delay
                     slot of the call instruction.  */
                  nops = count;
                  break;
                }

              if (BARRIER_P (prev))
                break;
              if (LABEL_P (prev))
                {
                  /* Look at branches to this label.  */
                  label_info *label;
                  branch_info *branch;

                  for (label = mt_labels;
                       label;
                       label = label->next)
                    if (label->label == prev)
                      {
                        for (branch = label->branches;
                             branch;
                             branch = branch->next)
                          {
                            tmp = mt_check_delay_slot (branch->insn, jmp);

                            if (tmp == branch->insn)
                              {
                                nops = count;
                                break;
                              }

                            if (tmp && branch->insn == next)
                              rescan = tmp;
                          }
                        break;
                      }
                  continue;
                }

              if (!INSN_P (prev))
                continue;

              if (GET_CODE (PATTERN (prev)) == SEQUENCE)
                {
                  /* Look at the delay slot.  */
                  tmp = mt_check_delay_slot (prev, jmp);
                  if (tmp == prev)
                    nops = count;
                  break;
                }

              type = (INSN_CODE (prev) >= 0 ? get_attr_type (prev)
                      : TYPE_COMPLEX);
              if (type == TYPE_CALL || type == TYPE_BRANCH)
                break;

              if (type == TYPE_LOAD
                  || type == TYPE_ARITH
                  || type == TYPE_COMPLEX)
                {
                  tmp = PATTERN (jmp);
                  note_stores (PATTERN (prev), insn_dependent_p_1, &tmp);
                  if (!tmp)
                    {
                      nops = count;
                      break;
                    }
                }

              if (INSN_CODE (prev) >= 0)
                {
                  rtx set = single_set (prev);

                  /* A noop set will get deleted in a later split pass,
                     so we can't count on it for hazard avoidance.  */
                  if (!set || !set_noop_p (set))
                    count--;
                }
            }

          if (rescan)
            for (next = NEXT_INSN (rescan);
                 next && !INSN_P (next);
                 next = NEXT_INSN (next))
              continue;
          while (nops--)
            emit_insn_before (gen_nop (), insn);
        }
    }
  /* Free the data structures.  */
  while (mt_labels)
    {
      label_info *label = mt_labels;
      branch_info *branch, *next;

      mt_labels = label->next;
      for (branch = label->branches; branch; branch = next)
        {
          next = branch->next;
          free (branch);
        }
      free (label);
    }
}
/* Fix up the looping instructions, do delayed-branch scheduling, and
   fix up scheduling hazards.  */

static void
mt_machine_reorg (void)
{
  if (cfun->machine->has_loops && TARGET_MS2)
    mt_reorg_loops (dump_file);

  if (mt_flag_delayed_branch)
    dbr_schedule (get_insns (), dump_file);

  if (TARGET_MS2)
    mt_reorg_hazard ();
}
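
/* The ordering above matters: loop conversion runs first so that
   dbr_schedule sees the final loop insns, and mt_reorg_hazard must run
   after dbr_schedule because it inspects the SEQUENCEs (filled delay
   slots) that delayed-branch scheduling creates.  */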
/* Initialize the GCC target structure.  */
const struct attribute_spec mt_attribute_table[];

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE          mt_attribute_table
#undef  TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX         mt_struct_value_rtx
#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES       hook_bool_tree_true
#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE        mt_pass_by_reference
#undef  TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK       mt_pass_in_stack
#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES        mt_arg_partial_bytes
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG  mt_machine_reorg

struct gcc_target targetm = TARGET_INITIALIZER;
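
/* gt-mt.h is generated by gengtype and provides the garbage-collector
   root tables for the GTY(())-marked data in this file.  */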
#include "gt-mt.h"