/* Target definitions for the MorphoRISC1
   Copyright (C) 2005 Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the Free
   Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
   02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "recog.h"
#include "toplev.h"
#include "output.h"
#include "integrate.h"
#include "tree.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "tm_p.h"
#include "ggc.h"
#include "insn-flags.h"
#include "obstack.h"
#include "except.h"
#include "target.h"
#include "target-def.h"
#include "basic-block.h"

/* Frame pointer register mask.  */
#define FP_MASK    (1 << (GPR_FP))

/* Link register mask.  */
#define LINK_MASK  (1 << (GPR_LINK))

/* Given a SIZE in bytes, advance to the next word.  */
#define ROUND_ADVANCE(SIZE) (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
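
/* Illustrative sketch (not part of the original source): ROUND_ADVANCE
   rounds a byte count up to whole words.  */
#if 0 /* Example only, assuming UNITS_PER_WORD == 4; not compiled.  */
static int
round_advance_example (void)
{
  /* 3 bytes -> 1 word, 5 bytes -> 2 words, 8 bytes -> 2 words.  */
  return ROUND_ADVANCE (3) + ROUND_ADVANCE (5) + ROUND_ADVANCE (8); /* == 5.  */
}
#endif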

/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  struct rtx_def * eh_stack_adjust;
  int interrupt_handler;
  int has_loops;
};

/* Define the information needed to generate branch and scc insns.
   This is stored from the compare operation.  */
struct rtx_def * mt_compare_op0;
struct rtx_def * mt_compare_op1;

/* Current frame information calculated by compute_frame_size.  */
struct mt_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
struct mt_frame_info zero_frame_info;

/* mt doesn't have unsigned compares; we need a library call for
   this.  */
struct rtx_def * mt_ucmpsi3_libcall;

static int mt_flag_delayed_branch;

static rtx
mt_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
		     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, RETVAL_REGNUM);
}

/* Implement RETURN_ADDR_RTX.  */
rtx
mt_return_addr_rtx (int count)
{
  if (count != 0)
    return NULL_RTX;

  return get_hard_reg_initial_val (Pmode, GPR_LINK);
}

/* The following variable value indicates the number of nops required
   between the current instruction and the next instruction to avoid
   any pipeline hazards.  */
static int mt_nops_required = 0;
static const char * mt_nop_reasons = "";

/* Implement ASM_OUTPUT_OPCODE.  */
const char *
mt_asm_output_opcode (FILE *f ATTRIBUTE_UNUSED, const char *ptr)
{
  if (mt_nops_required)
    fprintf (f, ";# need %d nops because of %s\n\t",
	     mt_nops_required, mt_nop_reasons);

  while (mt_nops_required)
    {
      fprintf (f, "nop\n\t");
      --mt_nops_required;
    }

  return ptr;
}
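
/* Illustrative sketch (not from the original source; the mnemonics and
   register names are assumed).  When mt_final_prescan_insn below has
   requested one nop for a load->arith dependency, the output stream
   produced through this hook looks roughly like:

	ldw  r1, r2, #0
	;# need 1 nops because of load->arith dependency delay
	nop
	add  r3, r1, r4
*/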

/* Given an insn, return whether it's a memory operation or a branch
   operation, otherwise return TYPE_ARITH.  */
static enum attr_type
mt_get_attr_type (rtx complete_insn)
{
  rtx insn = PATTERN (complete_insn);

  if (JUMP_P (complete_insn))
    return TYPE_BRANCH;
  if (CALL_P (complete_insn))
    return TYPE_BRANCH;

  if (GET_CODE (insn) != SET)
    return TYPE_ARITH;

  if (SET_DEST (insn) == pc_rtx)
    return TYPE_BRANCH;

  if (GET_CODE (SET_DEST (insn)) == MEM)
    return TYPE_STORE;

  if (GET_CODE (SET_SRC (insn)) == MEM)
    return TYPE_LOAD;

  return TYPE_ARITH;
}

/* A helper routine for insn_dependent_p called through note_stores.  */

static void
insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  rtx * pinsn = (rtx *) data;

  if (*pinsn && reg_mentioned_p (x, *pinsn))
    *pinsn = NULL_RTX;
}

/* Return true if anything in insn X is (anti,output,true)
   dependent on anything in insn Y.  */

static bool
insn_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    return 0;

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return true;

  tmp = PATTERN (x);
  note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
  return (tmp == NULL_RTX);
}

/* Return true if anything in insn X is true dependent on anything in
   insn Y.  */
static bool
insn_true_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    return 0;

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  return (tmp == NULL_RTX);
}

/* The following determines the number of nops that need to be
   inserted between the previous instructions and current instruction
   to avoid pipeline hazards on the mt processor.  Remember that
   the function is not called for asm insns.  */

void
mt_final_prescan_insn (rtx insn,
		       rtx * opvec ATTRIBUTE_UNUSED,
		       int noperands ATTRIBUTE_UNUSED)
{
  rtx prev_i;
  enum attr_type prev_attr;

  mt_nops_required = 0;
  mt_nop_reasons = "";

  /* ms2 constraints are dealt with in reorg.  */
  if (TARGET_MS2)
    return;

  /* Only worry about real instructions.  */
  if (! INSN_P (insn))
    return;

  /* Find the previous real instructions.  */
  for (prev_i = PREV_INSN (insn);
       prev_i != NULL
	 && (! INSN_P (prev_i)
	     || GET_CODE (PATTERN (prev_i)) == USE
	     || GET_CODE (PATTERN (prev_i)) == CLOBBER);
       prev_i = PREV_INSN (prev_i))
    {
      /* If we meet a barrier, there is no flow through here.  */
      if (BARRIER_P (prev_i))
	return;
    }

  /* If there isn't one then there is nothing that we need do.  */
  if (prev_i == NULL || ! INSN_P (prev_i))
    return;

  prev_attr = mt_get_attr_type (prev_i);

  /* Delayed branch slots already taken care of by delay branch scheduling.  */
  if (prev_attr == TYPE_BRANCH)
    return;

  switch (mt_get_attr_type (insn))
    {
    case TYPE_LOAD:
    case TYPE_STORE:
      /* Avoid consecutive memory operations.  */
      if ((prev_attr == TYPE_LOAD || prev_attr == TYPE_STORE)
	  && TARGET_MS1_64_001)
	{
	  mt_nops_required = 1;
	  mt_nop_reasons = "consecutive mem ops";
	}
      /* Drop through.  */

    case TYPE_ARITH:
    case TYPE_COMPLEX:
      /* One cycle of delay is required between load
	 and the dependent arithmetic instruction.  */
      if (prev_attr == TYPE_LOAD
	  && insn_true_dependent_p (prev_i, insn))
	{
	  mt_nops_required = 1;
	  mt_nop_reasons = "load->arith dependency delay";
	}
      break;

    case TYPE_BRANCH:
      if (insn_dependent_p (prev_i, insn))
	{
	  if (prev_attr == TYPE_ARITH && TARGET_MS1_64_001)
	    {
	      /* One cycle of delay between arith
		 instructions and branch dependent on arith.  */
	      mt_nops_required = 1;
	      mt_nop_reasons = "arith->branch dependency delay";
	    }
	  else if (prev_attr == TYPE_LOAD)
	    {
	      /* Two cycles of delay are required
		 between load and dependent branch.  */
	      if (TARGET_MS1_64_001)
		mt_nops_required = 2;
	      else
		mt_nops_required = 1;
	      mt_nop_reasons = "load->branch dependency delay";
	    }
	}
      break;

    default:
      fatal_insn ("mt_final_prescan_insn, invalid insn #1", insn);
      break;
    }
}
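
/* Illustrative sketch (not from the original source; mnemonics are
   assumed).  On ms1-64-001, a branch that depends on the result of the
   immediately preceding load needs two dead cycles, so the prescan
   above asks for two nops:

	ldw	r1, r2, #0
	nop
	nop
	brcond	r1, label	; branch reading r1
*/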

/* Print debugging information for a frame.  */
static void
mt_debug_stack (struct mt_frame_info * info)
{
  int regno;

  if (!info)
    {
      error ("info pointer NULL");
      gcc_unreachable ();
    }

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  fprintf (stderr, "\ttotal_size = %d\n", info->total_size);
  fprintf (stderr, "\tpretend_size = %d\n", info->pretend_size);
  fprintf (stderr, "\targs_size = %d\n", info->args_size);
  fprintf (stderr, "\textra_size = %d\n", info->extra_size);
  fprintf (stderr, "\treg_size = %d\n", info->reg_size);
  fprintf (stderr, "\tvar_size = %d\n", info->var_size);
  fprintf (stderr, "\tframe_size = %d\n", info->frame_size);
  fprintf (stderr, "\treg_mask = 0x%x\n", info->reg_mask);
  fprintf (stderr, "\tsave_fp = %d\n", info->save_fp);
  fprintf (stderr, "\tsave_lr = %d\n", info->save_lr);
  fprintf (stderr, "\tinitialized = %d\n", info->initialized);
  fprintf (stderr, "\tsaved registers =");

  /* Print out reg_mask in a more readable format.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    if ((1 << regno) & info->reg_mask)
      fprintf (stderr, " %s", reg_names[regno]);

  putc ('\n', stderr);
  fflush (stderr);
}

/* Print a memory address as an operand to reference that memory location.  */

static void
mt_print_operand_simple_address (FILE * file, rtx addr)
{
  if (!addr)
    error ("PRINT_OPERAND_ADDRESS, null pointer");

  else
    switch (GET_CODE (addr))
      {
      case REG:
	fprintf (file, "%s, #0", reg_names [REGNO (addr)]);
	break;

      case PLUS:
	{
	  rtx reg = 0;
	  rtx offset = 0;
	  rtx arg0 = XEXP (addr, 0);
	  rtx arg1 = XEXP (addr, 1);

	  if (GET_CODE (arg0) == REG)
	    {
	      reg = arg0;
	      offset = arg1;
	      if (GET_CODE (offset) == REG)
		fatal_insn ("PRINT_OPERAND_ADDRESS, 2 regs", addr);
	    }

	  else if (GET_CODE (arg1) == REG)
	    reg = arg1, offset = arg0;
	  else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
	    {
	      fprintf (file, "%s, #", reg_names [GPR_R0]);
	      output_addr_const (file, addr);
	      break;
	    }
	  fprintf (file, "%s, #", reg_names [REGNO (reg)]);
	  output_addr_const (file, offset);
	  break;
	}

      case LABEL_REF:
      case SYMBOL_REF:
      case CONST_INT:
      case CONST:
	output_addr_const (file, addr);
	break;

      default:
	fatal_insn ("PRINT_OPERAND_ADDRESS, invalid insn #1", addr);
	break;
      }
}

/* Implement PRINT_OPERAND_ADDRESS.  */
void
mt_print_operand_address (FILE * file, rtx addr)
{
  if (GET_CODE (addr) == AND
      && GET_CODE (XEXP (addr, 1)) == CONST_INT
      && INTVAL (XEXP (addr, 1)) == -3)
    mt_print_operand_simple_address (file, XEXP (addr, 0));
  else
    mt_print_operand_simple_address (file, addr);
}

/* Implement PRINT_OPERAND.  */
void
mt_print_operand (FILE * file, rtx x, int code)
{
  switch (code)
    {
    case '#':
      /* Output a nop if there's nothing for the delay slot.  */
      if (dbr_sequence_length () == 0)
	fputs ("\n\tnop", file);
      return;

    case 'H':
      fprintf (file, "#%%hi16(");
      output_addr_const (file, x);
      fprintf (file, ")");
      return;

    case 'L':
      fprintf (file, "#%%lo16(");
      output_addr_const (file, x);
      fprintf (file, ")");
      return;

    case 'N':
      fprintf (file, "#%ld", ~INTVAL (x));
      return;

    case 'z':
      if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
	{
	  fputs (reg_names[GPR_R0], file);
	  return;
	}

    case 0:
      /* Handled below.  */
      break;

    default:
      /* output_operand_lossage ("mt_print_operand: unknown code"); */
      fprintf (file, "unknown code");
      return;
    }

  switch (GET_CODE (x))
    {
    case REG:
      fputs (reg_names [REGNO (x)], file);
      break;

    case CONST:
    case CONST_INT:
      fprintf (file, "#%ld", INTVAL (x));
      break;

    case MEM:
      mt_print_operand_address (file, XEXP (x, 0));
      break;

    case LABEL_REF:
    case SYMBOL_REF:
      output_addr_const (file, x);
      break;

    default:
      fprintf (file, "Unknown code: %d", GET_CODE (x));
      break;
    }

  return;
}

/* Implement INIT_CUMULATIVE_ARGS.  */
void
mt_init_cumulative_args (CUMULATIVE_ARGS * cum, tree fntype, rtx libname,
			 tree fndecl ATTRIBUTE_UNUSED, int incoming)
{
  *cum = 0;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\nmt_init_cumulative_args:");

      if (incoming)
	fputs (" incoming", stderr);

      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);
	  fprintf (stderr, " return = %s,",
		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
	}

      if (libname && GET_CODE (libname) == SYMBOL_REF)
	fprintf (stderr, " libname = %s", XSTR (libname, 0));

      if (cfun->returns_struct)
	fprintf (stderr, " return-struct");

      putc ('\n', stderr);
    }
}

/* Compute the slot number to pass an argument in.
   Returns the slot number or -1 if passing on the stack.

   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).
   INCOMING_P is zero for FUNCTION_ARG, nonzero for FUNCTION_INCOMING_ARG.
   *PREGNO records the register number to use if scalar type.  */

static int
mt_function_arg_slotno (const CUMULATIVE_ARGS * cum,
			enum machine_mode mode,
			tree type,
			int named ATTRIBUTE_UNUSED,
			int incoming_p ATTRIBUTE_UNUSED,
			int * pregno)
{
  int regbase = FIRST_ARG_REGNUM;
  int slotno  = * cum;

  if (mode == VOIDmode || targetm.calls.must_pass_in_stack (mode, type))
    return -1;

  if (slotno >= MT_NUM_ARG_REGS)
    return -1;

  * pregno = regbase + slotno;

  return slotno;
}
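
/* Illustrative sketch (not from the original source): the first
   MT_NUM_ARG_REGS word-sized arguments land in consecutive argument
   registers starting at FIRST_ARG_REGNUM; the rest go on the stack.  */
#if 0 /* Example only; not compiled.  */
static void
arg_slot_example (void)
{
  CUMULATIVE_ARGS cum = 0;	/* No argument words used yet.  */
  int regno;

  /* First SImode argument: slot 0, register FIRST_ARG_REGNUM.  */
  mt_function_arg_slotno (&cum, SImode, NULL_TREE, 1, 0, &regno);

  cum = MT_NUM_ARG_REGS;	/* All argument registers consumed.  */
  /* Any further argument now yields -1: it is passed on the stack.  */
  mt_function_arg_slotno (&cum, SImode, NULL_TREE, 1, 0, &regno);
}
#endif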

/* Implement FUNCTION_ARG.  */
rtx
mt_function_arg (const CUMULATIVE_ARGS * cum,
		 enum machine_mode mode,
		 tree type,
		 int named,
		 int incoming_p)
{
  int slotno, regno;
  rtx reg;

  slotno = mt_function_arg_slotno (cum, mode, type, named, incoming_p, &regno);

  if (slotno == -1)
    reg = NULL_RTX;
  else
    reg = gen_rtx_REG (mode, regno);

  return reg;
}

/* Implement FUNCTION_ARG_ADVANCE.  */
void
mt_function_arg_advance (CUMULATIVE_ARGS * cum,
			 enum machine_mode mode,
			 tree type ATTRIBUTE_UNUSED,
			 int named)
{
  int slotno, regno;

  /* We pass 0 for incoming_p here, it doesn't matter.  */
  slotno = mt_function_arg_slotno (cum, mode, type, named, 0, &regno);

  * cum += (mode != BLKmode
	    ? ROUND_ADVANCE (GET_MODE_SIZE (mode))
	    : ROUND_ADVANCE (int_size_in_bytes (type)));

  if (TARGET_DEBUG_ARG)
    fprintf (stderr,
	     "mt_function_arg_advance: words = %2d, mode = %4s, named = %d, size = %3d\n",
	     *cum, GET_MODE_NAME (mode), named,
	     (*cum) * UNITS_PER_WORD);
}

/* Implement hook TARGET_ARG_PARTIAL_BYTES.

   Returns the number of bytes at the beginning of an argument that
   must be put in registers.  The value must be zero for arguments
   that are passed entirely in registers or that are entirely pushed
   on the stack.  */
static int
mt_arg_partial_bytes (CUMULATIVE_ARGS * pcum,
		      enum machine_mode mode,
		      tree type,
		      bool named ATTRIBUTE_UNUSED)
{
  int cum = * pcum;
  int words;

  if (mode == BLKmode)
    words = ((int_size_in_bytes (type) + UNITS_PER_WORD - 1)
	     / UNITS_PER_WORD);
  else
    words = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (! targetm.calls.pass_by_reference (&cum, mode, type, named)
      && cum < MT_NUM_ARG_REGS
      && (cum + words) > MT_NUM_ARG_REGS)
    {
      int bytes = (MT_NUM_ARG_REGS - cum) * UNITS_PER_WORD;

      if (TARGET_DEBUG)
	fprintf (stderr, "function_arg_partial_nregs = %d\n", bytes);
      return bytes;
    }

  return 0;
}
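
/* Worked example (not from the original source): assuming
   MT_NUM_ARG_REGS == 4 and UNITS_PER_WORD == 4, a two-word (DImode)
   argument arriving when three argument registers are already in use
   (cum == 3) straddles the boundary: one word fits in the last
   register, so the hook returns (4 - 3) * 4 == 4 bytes, and the
   remaining word is pushed on the stack.  */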

/* Implement TARGET_PASS_BY_REFERENCE hook.  */
static bool
mt_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
		      enum machine_mode mode ATTRIBUTE_UNUSED,
		      tree type,
		      bool named ATTRIBUTE_UNUSED)
{
  return (type && int_size_in_bytes (type) > 4 * UNITS_PER_WORD);
}

/* Implement FUNCTION_ARG_BOUNDARY.  */
int
mt_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
			  tree type ATTRIBUTE_UNUSED)
{
  return BITS_PER_WORD;
}

/* Implement REG_OK_FOR_BASE_P.  */
int
mt_reg_ok_for_base_p (rtx x, int strict)
{
  if (strict)
    return (((unsigned) REGNO (x)) < FIRST_PSEUDO_REGISTER);
  return 1;
}

/* Helper function of mt_legitimate_address_p.  Return true if XINSN
   is a simple address, otherwise false.  */
static bool
mt_legitimate_simple_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
				rtx xinsn, int strict)
{
  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\n==========  GO_IF_LEGITIMATE_ADDRESS, %sstrict\n",
	       strict ? "" : "not ");
      debug_rtx (xinsn);
    }

  if (GET_CODE (xinsn) == REG && mt_reg_ok_for_base_p (xinsn, strict))
    return true;

  if (GET_CODE (xinsn) == PLUS
      && GET_CODE (XEXP (xinsn, 0)) == REG
      && mt_reg_ok_for_base_p (XEXP (xinsn, 0), strict)
      && GET_CODE (XEXP (xinsn, 1)) == CONST_INT
      && SMALL_INT (XEXP (xinsn, 1)))
    return true;

  return false;
}


/* Helper function of GO_IF_LEGITIMATE_ADDRESS.  Return nonzero if
   XINSN is a legitimate address on MT.  */
int
mt_legitimate_address_p (enum machine_mode mode, rtx xinsn, int strict)
{
  if (mt_legitimate_simple_address_p (mode, xinsn, strict))
    return 1;

  if ((mode) == SImode
      && GET_CODE (xinsn) == AND
      && GET_CODE (XEXP (xinsn, 1)) == CONST_INT
      && INTVAL (XEXP (xinsn, 1)) == -3)
    return mt_legitimate_simple_address_p (mode, XEXP (xinsn, 0), strict);
  else
    return 0;
}
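
/* Illustrative sketch (not from the original source): the address
   forms this target accepts are a bare base register, or base plus a
   small constant; SImode additionally allows the (and ... (const_int
   -3)) form that mt_print_operand_address strips before printing.
   In RTL:

     (reg R)				accepted
     (plus (reg R) (const_int 12))	accepted if 12 satisfies SMALL_INT
     (and (reg R) (const_int -3))	accepted for SImode only
     (plus (reg R) (reg S))		rejected (two registers)
*/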

/* Return truth value of whether OP can be used as an operand where a
   register or 16-bit unsigned integer is needed.  */

int
uns_arith_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT && SMALL_INT_UNSIGNED (op))
    return 1;

  return register_operand (op, mode);
}

/* Return truth value of whether OP can be used as an operand where a
   16-bit integer is needed.  */

int
arith_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT && SMALL_INT (op))
    return 1;

  return register_operand (op, mode);
}

/* Return truth value of whether OP is a register or the constant 0.  */

int
reg_or_0_operand (rtx op, enum machine_mode mode)
{
  switch (GET_CODE (op))
    {
    case CONST_INT:
      return INTVAL (op) == 0;

    case REG:
    case SUBREG:
      return register_operand (op, mode);

    default:
      break;
    }

  return 0;
}

/* Return truth value of whether OP is a constant that requires two
   loads to put in a register.  */

int
big_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) == CONST_INT && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
    return 1;

  return 0;
}

/* Return truth value of whether OP is a constant that requires only
   one load to put in a register.  */

int
single_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (big_const_operand (op, mode)
      || GET_CODE (op) == CONST
      || GET_CODE (op) == LABEL_REF
      || GET_CODE (op) == SYMBOL_REF)
    return 0;

  return 1;
}

/* True if the current function is an interrupt handler
   (either via #pragma or an attribute specification).  */
int interrupt_handler;
enum processor_type mt_cpu;


static struct machine_function *
mt_init_machine_status (void)
{
  struct machine_function *f;

  f = ggc_alloc_cleared (sizeof (struct machine_function));

  return f;
}

/* Implement OVERRIDE_OPTIONS.  */
void
mt_override_options (void)
{
  if (mt_cpu_string != NULL)
    {
      if (!strcmp (mt_cpu_string, "ms1-64-001"))
	mt_cpu = PROCESSOR_MS1_64_001;
      else if (!strcmp (mt_cpu_string, "ms1-16-002"))
	mt_cpu = PROCESSOR_MS1_16_002;
      else if (!strcmp (mt_cpu_string, "ms1-16-003"))
	mt_cpu = PROCESSOR_MS1_16_003;
      else if (!strcmp (mt_cpu_string, "ms2"))
	mt_cpu = PROCESSOR_MS2;
      else
	error ("bad value (%s) for -march= switch", mt_cpu_string);
    }
  else
    mt_cpu = PROCESSOR_MS1_16_002;

  if (flag_exceptions)
    {
      flag_omit_frame_pointer = 0;
      flag_gcse = 0;
    }

  /* We do delayed branch filling in machine dependent reorg.  */
  mt_flag_delayed_branch = flag_delayed_branch;
  flag_delayed_branch = 0;

  init_machine_status = mt_init_machine_status;
}

/* Do what is necessary for `va_start'.  We look at the current function
   to determine if stdarg or varargs is used and return the address of the
   first unnamed parameter.  */

static void
mt_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
			   enum machine_mode mode ATTRIBUTE_UNUSED,
			   tree type ATTRIBUTE_UNUSED,
			   int *pretend_size, int no_rtl)
{
  int regno;
  int regs = MT_NUM_ARG_REGS - *cum;

  *pretend_size = regs < 0 ? 0 : GET_MODE_SIZE (SImode) * regs;

  if (no_rtl)
    return;

  for (regno = *cum; regno < MT_NUM_ARG_REGS; regno++)
    {
      rtx reg = gen_rtx_REG (SImode, FIRST_ARG_REGNUM + regno);
      rtx slot = gen_rtx_PLUS (Pmode,
			       gen_rtx_REG (SImode, ARG_POINTER_REGNUM),
			       GEN_INT (UNITS_PER_WORD * regno));

      emit_move_insn (gen_rtx_MEM (SImode, slot), reg);
    }
}

/* Returns the number of bytes offset between the frame pointer and the stack
   pointer for the current function.  SIZE is the number of bytes of space
   needed for local variables.  */

unsigned int
mt_compute_frame_size (int size)
{
  int regno;
  unsigned int total_size;
  unsigned int var_size;
  unsigned int args_size;
  unsigned int pretend_size;
  unsigned int extra_size;
  unsigned int reg_size;
  unsigned int frame_size;
  unsigned int reg_mask;

  var_size = size;
  args_size = current_function_outgoing_args_size;
  pretend_size = current_function_pretend_args_size;
  extra_size = FIRST_PARM_OFFSET (0);
  total_size = extra_size + pretend_size + args_size + var_size;
  reg_size = 0;
  reg_mask = 0;

  /* Calculate space needed for registers.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    {
      if (MUST_SAVE_REGISTER (regno))
	{
	  reg_size += UNITS_PER_WORD;
	  reg_mask |= 1 << regno;
	}
    }

  current_frame_info.save_fp = (regs_ever_live [GPR_FP]
				|| frame_pointer_needed
				|| interrupt_handler);
  current_frame_info.save_lr = (regs_ever_live [GPR_LINK]
				|| profile_flag
				|| interrupt_handler);

  reg_size += (current_frame_info.save_fp + current_frame_info.save_lr)
	      * UNITS_PER_WORD;
  total_size += reg_size;
  total_size = ((total_size + 3) & ~3);

  frame_size = total_size;

  /* Save computed information.  */
  current_frame_info.pretend_size = pretend_size;
  current_frame_info.var_size = var_size;
  current_frame_info.args_size = args_size;
  current_frame_info.reg_size = reg_size;
  current_frame_info.frame_size = args_size + var_size;
  current_frame_info.total_size = total_size;
  current_frame_info.extra_size = extra_size;
  current_frame_info.reg_mask = reg_mask;
  current_frame_info.initialized = reload_completed;

  return total_size;
}
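
/* Rough frame sketch (illustrative, not from the original source).
   The save routines below address the register save area at offsets
   just under total_size from the new sp, i.e. just below the incoming
   sp:

	old sp ->  +---------------------------+
		   | saved fp, lr, call-saved  |  reg_size
		   | regs (per reg_mask)       |
		   | locals and outgoing args, |  var_size + args_size
		   | pretend and extra space   |  pretend_size + extra_size
	new sp ->  +---------------------------+

   total_size is the sum of the pieces, rounded up to a multiple of 4.  */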

/* Emit code to save REG in stack offset pointed to by MEM.
   STACK_OFFSET is the offset from the SP where the save will happen.
   This function sets the REG_FRAME_RELATED_EXPR note accordingly.  */
static void
mt_emit_save_restore (enum save_direction direction,
		      rtx reg, rtx mem, int stack_offset)
{
  if (direction == FROM_PROCESSOR_TO_MEM)
    {
      rtx insn;

      insn = emit_move_insn (mem, reg);
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST
	(REG_FRAME_RELATED_EXPR,
	 gen_rtx_SET (VOIDmode,
		      gen_rtx_MEM (SImode,
				   gen_rtx_PLUS (SImode,
						 stack_pointer_rtx,
						 GEN_INT (stack_offset))),
		      reg),
	 REG_NOTES (insn));
    }
  else
    emit_move_insn (reg, mem);
}


/* Emit code to save the frame pointer in the prologue and restore
   frame pointer in epilogue.  */

static void
mt_emit_save_fp (enum save_direction direction,
		 struct mt_frame_info info)
{
  rtx base_reg;
  int reg_mask = info.reg_mask & ~(FP_MASK | LINK_MASK);
  int offset = info.total_size;
  int stack_offset = info.total_size;

  /* If there is nothing to save, get out now.  */
  if (! info.save_fp && ! info.save_lr && ! reg_mask)
    return;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (offset, 'O'))
    base_reg = stack_pointer_rtx;
  else
    {
      /* Use the scratch register R9 that holds old stack pointer.  */
      base_reg = gen_rtx_REG (SImode, GPR_R9);
      offset = 0;
    }

  if (info.save_fp)
    {
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
      mt_emit_save_restore
	(direction, gen_rtx_REG (SImode, GPR_FP),
	 gen_rtx_MEM (SImode,
		      gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
	 stack_offset);
    }
}

/* Emit code to save registers in the prologue and restore registers
   in the epilogue.  */

static void
mt_emit_save_regs (enum save_direction direction,
		   struct mt_frame_info info)
{
  rtx base_reg;
  int regno;
  int reg_mask = info.reg_mask & ~(FP_MASK | LINK_MASK);
  int offset = info.total_size;
  int stack_offset = info.total_size;

  /* If there is nothing to save, get out now.  */
  if (! info.save_fp && ! info.save_lr && ! reg_mask)
    return;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (offset, 'O'))
    base_reg = stack_pointer_rtx;
  else
    {
      /* Use the scratch register R9 that holds old stack pointer.  */
      base_reg = gen_rtx_REG (SImode, GPR_R9);
      offset = 0;
    }

  if (info.save_fp)
    {
      /* This just records the space for it, the actual move is generated
	 in mt_emit_save_fp ().  */
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
    }

  if (info.save_lr)
    {
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
      mt_emit_save_restore
	(direction, gen_rtx_REG (SImode, GPR_LINK),
	 gen_rtx_MEM (SImode,
		      gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
	 stack_offset);
    }

  /* Save any needed call-saved regs.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    {
      if ((reg_mask & (1 << regno)) != 0)
	{
	  offset -= UNITS_PER_WORD;
	  stack_offset -= UNITS_PER_WORD;
	  mt_emit_save_restore
	    (direction, gen_rtx_REG (SImode, regno),
	     gen_rtx_MEM (SImode,
			  gen_rtx_PLUS (SImode, base_reg, GEN_INT (offset))),
	     stack_offset);
	}
    }
}

/* Return true if FUNC is a function with the 'interrupt' attribute.  */
static bool
mt_interrupt_function_p (tree func)
{
  tree a;

  if (TREE_CODE (func) != FUNCTION_DECL)
    return false;

  a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
  return a != NULL_TREE;
}

/* Generate prologue code.  */
void
mt_expand_prologue (void)
{
  rtx size_rtx, insn;
  unsigned int frame_size;

  if (mt_interrupt_function_p (current_function_decl))
    {
      interrupt_handler = 1;
      if (cfun->machine)
	cfun->machine->interrupt_handler = 1;
    }

  mt_compute_frame_size (get_frame_size ());

  if (TARGET_DEBUG_STACK)
    mt_debug_stack (&current_frame_info);

  /* Compute size of stack adjustment.  */
  frame_size = current_frame_info.total_size;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    size_rtx = GEN_INT (frame_size);
  else
    {
      /* We do not have any scratch registers.  */
      gcc_assert (!interrupt_handler);

      size_rtx = gen_rtx_REG (SImode, GPR_R9);
      insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
      insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
				    GEN_INT (frame_size & 0x0000ffff)));
    }

  /* Allocate stack for this frame.  */
  /* Make stack adjustment and use scratch register if constant too
     large to fit as immediate.  */
  if (frame_size)
    {
      insn = emit_insn (gen_subsi3 (stack_pointer_rtx,
				    stack_pointer_rtx,
				    size_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			     gen_rtx_SET (VOIDmode,
					  stack_pointer_rtx,
					  gen_rtx_MINUS (SImode,
							 stack_pointer_rtx,
							 GEN_INT (frame_size))),
			     REG_NOTES (insn));
    }

  /* Set R9 to point to old sp if required for access to register save
     area.  */
  if (current_frame_info.reg_size != 0
      && !CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));

  /* Save the frame pointer.  */
  mt_emit_save_fp (FROM_PROCESSOR_TO_MEM, current_frame_info);

  /* Now put the frame pointer into the frame pointer register.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Save the registers.  */
  mt_emit_save_regs (FROM_PROCESSOR_TO_MEM, current_frame_info);

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  */
  if (profile_flag)
    emit_insn (gen_blockage ());
}

/* Implement EPILOGUE_USES.  */
int
mt_epilogue_uses (int regno)
{
  if (cfun->machine && cfun->machine->interrupt_handler && reload_completed)
    return 1;
  return regno == GPR_LINK;
}

/* Generate epilogue.  EH_MODE is NORMAL_EPILOGUE when generating a
   function epilogue, or EH_EPILOGUE when generating an EH
   epilogue.  */
void
mt_expand_epilogue (enum epilogue_type eh_mode)
{
  rtx size_rtx, insn;
  unsigned frame_size;

  mt_compute_frame_size (get_frame_size ());

  if (TARGET_DEBUG_STACK)
    mt_debug_stack (& current_frame_info);

  /* Compute size of stack adjustment.  */
  frame_size = current_frame_info.total_size;

  /* If the offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    size_rtx = GEN_INT (frame_size);
  else
    {
      /* We do not have any scratch registers.  */
      gcc_assert (!interrupt_handler);

      size_rtx = gen_rtx_REG (SImode, GPR_R9);
      insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
      insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
				    GEN_INT (frame_size & 0x0000ffff)));
      /* Set R9 to point to old sp if required for access to register
	 save area.  */
      emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));
    }

  /* Restore sp if there was some possible change to it.  */
  if (frame_pointer_needed)
    insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);

  /* Restore the registers.  */
  mt_emit_save_fp (FROM_MEM_TO_PROCESSOR, current_frame_info);
  mt_emit_save_regs (FROM_MEM_TO_PROCESSOR, current_frame_info);

  /* Make stack adjustment and use scratch register if constant too
     large to fit as immediate.  */
  if (frame_size)
    {
      if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
	/* Can handle this with simple add.  */
	insn = emit_insn (gen_addsi3 (stack_pointer_rtx,
				      stack_pointer_rtx,
				      size_rtx));
      else
	/* Scratch reg R9 has the old sp value.  */
	insn = emit_move_insn (stack_pointer_rtx,
			       gen_rtx_REG (SImode, GPR_R9));

      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			     gen_rtx_SET (VOIDmode,
					  stack_pointer_rtx,
					  gen_rtx_PLUS (SImode,
							stack_pointer_rtx,
							GEN_INT (frame_size))),
			     REG_NOTES (insn));
    }

  if (cfun->machine && cfun->machine->eh_stack_adjust != NULL_RTX)
    /* Perform the additional bump for __throw.  */
    emit_insn (gen_addsi3 (stack_pointer_rtx,
			   stack_pointer_rtx,
			   cfun->machine->eh_stack_adjust));

  /* Generate the appropriate return.  */
  if (eh_mode == EH_EPILOGUE)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_return_interrupt_internal ());
  else
    emit_jump_insn (gen_return_internal ());

  /* Reset state info for each function.  */
  interrupt_handler = 0;
  current_frame_info = zero_frame_info;
  if (cfun->machine)
    cfun->machine->eh_stack_adjust = NULL_RTX;
}

/* Generate code for the "eh_return" pattern.  */
void
mt_expand_eh_return (rtx * operands)
{
  if (GET_CODE (operands[0]) != REG
      || REGNO (operands[0]) != EH_RETURN_STACKADJ_REGNO)
    {
      rtx sp = EH_RETURN_STACKADJ_RTX;

      emit_move_insn (sp, operands[0]);
      operands[0] = sp;
    }

  emit_insn (gen_eh_epilogue (operands[0]));
}

/* Generate code for the "eh_epilogue" pattern.  */
void
mt_emit_eh_epilogue (rtx * operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = EH_RETURN_STACKADJ_RTX; /* operands[0]; */
  mt_expand_epilogue (EH_EPILOGUE);
}

/* Handle an "interrupt" attribute.  */
static tree
mt_handle_interrupt_attribute (tree * node,
			       tree name,
			       tree args ATTRIBUTE_UNUSED,
			       int flags ATTRIBUTE_UNUSED,
			       bool * no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes,
	       "%qs attribute only applies to functions",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Table of machine attributes.  */
const struct attribute_spec mt_attribute_table[] =
{
  /* name,	 min, max, decl?, type?, func?, handler  */
  { "interrupt", 0,   0,   false, false, false, mt_handle_interrupt_attribute },
  { NULL,	 0,   0,   false, false, false, NULL }
};

/* Implement INITIAL_ELIMINATION_OFFSET.  */
int
mt_initial_elimination_offset (int from, int to)
{
  mt_compute_frame_size (get_frame_size ());

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;

  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return current_frame_info.total_size;

  else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return current_frame_info.total_size;

  else
    gcc_unreachable ();
}

/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  */

static rtx
mt_generate_compare (enum rtx_code code, rtx op0, rtx op1)
{
  rtx scratch0, scratch1, const_scratch;

  switch (code)
    {
    case GTU:
    case LTU:
    case GEU:
    case LEU:
      /* Need to adjust ranges for faking unsigned compares.  */
      scratch0 = gen_reg_rtx (SImode);
      scratch1 = gen_reg_rtx (SImode);
      const_scratch = force_reg (SImode, GEN_INT (MT_MIN_INT));
      emit_insn (gen_addsi3 (scratch0, const_scratch, op0));
      emit_insn (gen_addsi3 (scratch1, const_scratch, op1));
      break;
    default:
      scratch0 = op0;
      scratch1 = op1;
      break;
    }

  /* Adjust compare operator to fake unsigned compares.  */
  switch (code)
    {
    case GTU:
      code = GT; break;
    case LTU:
      code = LT; break;
    case GEU:
      code = GE; break;
    case LEU:
      code = LE; break;
    default:
      /* do nothing */
      break;
    }

  /* Generate the actual compare.  */
  return gen_rtx_fmt_ee (code, VOIDmode, scratch0, scratch1);
}
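
/* Worked example (not from the original source): biasing both operands
   by MT_MIN_INT (0x80000000) turns an unsigned compare into a signed
   one, because adding 0x80000000 flips the sign bit:

     5 GTU 3		->  0x80000005 GT 0x80000003
			    (both negative as signed; still true)

     0xfffffff0 GTU 1	->  0x7ffffff0 GT 0x80000001
			    (positive GT negative; true, matching the
			     unsigned result)  */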

/* Emit a branch of kind CODE to location LOC.  */

void
mt_emit_cbranch (enum rtx_code code, rtx loc, rtx op0, rtx op1)
{
  rtx condition_rtx, loc_ref;

  if (! reg_or_0_operand (op0, SImode))
    op0 = copy_to_mode_reg (SImode, op0);

  if (! reg_or_0_operand (op1, SImode))
    op1 = copy_to_mode_reg (SImode, op1);

  condition_rtx = mt_generate_compare (code, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
						     loc_ref, pc_rtx)));
}

/* Subfunction of the following function.  Update the flags of any MEM
   found in part of X.  */

static void
mt_set_memflags_1 (rtx x, int in_struct_p, int volatile_p)
{
  int i;

  switch (GET_CODE (x))
    {
    case SEQUENCE:
    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	mt_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p);
      break;

    case INSN:
      mt_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p);
      break;

    case SET:
      mt_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p);
      mt_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p);
      break;

    case MEM:
      MEM_IN_STRUCT_P (x) = in_struct_p;
      MEM_VOLATILE_P (x) = volatile_p;
      /* Sadly, we cannot use alias sets because the extra aliasing
	 produced by the AND interferes.  Given that two-byte quantities
	 are the only thing we would be able to differentiate anyway,
	 there does not seem to be any point in convoluting the early
	 out of the alias check.  */
      /* set_mem_alias_set (x, alias_set); */
      break;

    default:
      break;
    }
}

/* Look for any MEMs in the current sequence of insns and set the
   in-struct, unchanging, and volatile flags from the flags in REF.
   If REF is not a MEM, don't do anything.  */

void
mt_set_memflags (rtx ref)
{
  rtx insn;
  int in_struct_p, volatile_p;

  if (GET_CODE (ref) != MEM)
    return;

  in_struct_p = MEM_IN_STRUCT_P (ref);
  volatile_p = MEM_VOLATILE_P (ref);

  /* This is only called from mt.md, after having had something
     generated from one of the insn patterns.  So if everything is
     zero, the pattern is already up-to-date.  */
  if (! in_struct_p && ! volatile_p)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    mt_set_memflags_1 (insn, in_struct_p, volatile_p);
}

/* Implement SECONDARY_RELOAD_CLASS.  */
enum reg_class
mt_secondary_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
			   enum machine_mode mode,
			   rtx x)
{
  if ((mode == QImode && (!TARGET_BYTE_ACCESS)) || mode == HImode)
    {
      if (GET_CODE (x) == MEM
	  || (GET_CODE (x) == REG && true_regnum (x) == -1)
	  || (GET_CODE (x) == SUBREG
	      && (GET_CODE (SUBREG_REG (x)) == MEM
		  || (GET_CODE (SUBREG_REG (x)) == REG
		      && true_regnum (SUBREG_REG (x)) == -1))))
	return GENERAL_REGS;
    }

  return NO_REGS;
}

/* Handle FUNCTION_VALUE, FUNCTION_OUTGOING_VALUE, and LIBCALL_VALUE
   macros.  */
rtx
mt_function_value (tree valtype, enum machine_mode mode, tree func_decl ATTRIBUTE_UNUSED)
{
  if ((mode) == DImode || (mode) == DFmode)
    return gen_rtx_MEM (mode, gen_rtx_REG (mode, RETURN_VALUE_REGNUM));

  if (valtype)
    mode = TYPE_MODE (valtype);

  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}

/* Split a move into two smaller pieces.
   MODE indicates the reduced mode.  OPERANDS[0] is the original
   destination, OPERANDS[1] is the original src.  The new destinations
   are OPERANDS[2] and OPERANDS[4], while the new sources are
   OPERANDS[3] and OPERANDS[5].  */

void
mt_split_words (enum machine_mode nmode,
		enum machine_mode omode,
		rtx *operands)
{
  rtx dl, dh;	/* src/dest pieces.  */
  rtx sl, sh;
  int move_high_first = 0;	/* Assume no overlap.  */

  switch (GET_CODE (operands[0])) /* Dest.  */
    {
    case SUBREG:
    case REG:
      if ((GET_CODE (operands[1]) == REG
	   || GET_CODE (operands[1]) == SUBREG)
	  && true_regnum (operands[0]) <= true_regnum (operands[1]))
	move_high_first = 1;

      if (GET_CODE (operands[0]) == SUBREG)
	{
	  dl = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]),
			       SUBREG_BYTE (operands[0]) + GET_MODE_SIZE (nmode));
	  dh = gen_rtx_SUBREG (nmode, SUBREG_REG (operands[0]),
			       SUBREG_BYTE (operands[0]));
	}
      else if (GET_CODE (operands[0]) == REG && ! IS_PSEUDO_P (operands[0]))
	{
	  int r = REGNO (operands[0]);
	  dh = gen_rtx_REG (nmode, r);
	  dl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
	}
      else
	{
	  dh = gen_rtx_SUBREG (nmode, operands[0], 0);
	  dl = gen_rtx_SUBREG (nmode, operands[0], GET_MODE_SIZE (nmode));
	}
      break;

    case MEM:
      switch (GET_CODE (XEXP (operands[0], 0)))
	{
	case POST_INC:
	case POST_DEC:
	  gcc_unreachable ();
	default:
	  dl = operand_subword (operands[0],
				GET_MODE_SIZE (nmode)/UNITS_PER_WORD,
				0, omode);
	  dh = operand_subword (operands[0], 0, 0, omode);
	}
      break;
    default:
      gcc_unreachable ();
    }

  switch (GET_CODE (operands[1]))
    {
    case REG:
      if (! IS_PSEUDO_P (operands[1]))
	{
	  int r = REGNO (operands[1]);

	  sh = gen_rtx_REG (nmode, r);
	  sl = gen_rtx_REG (nmode, r + HARD_REGNO_NREGS (r, nmode));
	}
      else
	{
	  sh = gen_rtx_SUBREG (nmode, operands[1], 0);
	  sl = gen_rtx_SUBREG (nmode, operands[1], GET_MODE_SIZE (nmode));
	}
      break;

    case CONST_DOUBLE:
      if (operands[1] == const0_rtx)
	sh = sl = const0_rtx;
      else
	split_double (operands[1], & sh, & sl);
      break;

    case CONST_INT:
      if (operands[1] == const0_rtx)
	sh = sl = const0_rtx;
      else
	{
	  int vl, vh;

	  switch (nmode)
	    {
	    default:
	      gcc_unreachable ();
	    }

	  sl = GEN_INT (vl);
	  sh = GEN_INT (vh);
	}
      break;

    case SUBREG:
      sl = gen_rtx_SUBREG (nmode,
			   SUBREG_REG (operands[1]),
			   SUBREG_BYTE (operands[1]) + GET_MODE_SIZE (nmode));
      sh = gen_rtx_SUBREG (nmode,
			   SUBREG_REG (operands[1]),
			   SUBREG_BYTE (operands[1]));
      break;

    case MEM:
      switch (GET_CODE (XEXP (operands[1], 0)))
	{
	case POST_DEC:
	case POST_INC:
	  gcc_unreachable ();
	  break;
	default:
	  sl = operand_subword (operands[1],
				GET_MODE_SIZE (nmode)/UNITS_PER_WORD,
				0, omode);
	  sh = operand_subword (operands[1], 0, 0, omode);

	  /* Check if the DF load is going to clobber the register
	     used for the address, and if so make sure that is going
	     to be the second move.  */
	  if (GET_CODE (dl) == REG
	      && true_regnum (dl)
		 == true_regnum (XEXP (XEXP (sl, 0), 0)))
	    move_high_first = 1;
	}
      break;
    default:
      gcc_unreachable ();
    }

  if (move_high_first)
    {
      operands[2] = dh;
      operands[3] = sh;
      operands[4] = dl;
      operands[5] = sl;
    }
  else
    {
      operands[2] = dl;
      operands[3] = sl;
      operands[4] = dh;
      operands[5] = sh;
    }
  return;
}
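
/* Illustrative sketch (not from the original source): splitting a
   DImode register-to-register move into two SImode moves.  After the
   call, operands[2]/[3] and operands[4]/[5] hold the two dest/src
   pairs in the order in which it is safe to emit them.  */
#if 0 /* Example only; not compiled.  */
static void
split_words_example (rtx dest_di, rtx src_di)
{
  rtx ops[6];

  ops[0] = dest_di;	/* Original DImode destination.  */
  ops[1] = src_di;	/* Original DImode source.  */
  mt_split_words (SImode, DImode, ops);

  emit_move_insn (ops[2], ops[3]);	/* First word move.  */
  emit_move_insn (ops[4], ops[5]);	/* Second word move.  */
}
#endif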

/* Implement TARGET_MUST_PASS_IN_STACK hook.  */
static bool
mt_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED, tree type)
{
  return (((type) != 0
	   && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	       || TREE_ADDRESSABLE (type))));
}

/* Increment the counter for the number of loop instructions in the
   current function.  */

void mt_add_loop (void)
{
  cfun->machine->has_loops++;
}

/* Maximum loop nesting depth.  */
#define MAX_LOOP_DEPTH 4
/* Maximum size of a loop (allows some headroom for delayed branch slot
   filling).  */
#define MAX_LOOP_LENGTH (200 * 4)

/* We need to keep a vector of loops.  */
typedef struct loop_info *loop_info;
DEF_VEC_P (loop_info);
DEF_VEC_ALLOC_P (loop_info,heap);

/* Information about a loop we have found (or are in the process of
   finding).  */
struct loop_info GTY (())
{
  /* loop number, for dumps */
  int loop_no;

  /* Predecessor block of the loop.  This is the one that falls into
     the loop and contains the initialization instruction.  */
  basic_block predecessor;

  /* First block in the loop.  This is the one branched to by the dbnz
     insn.  */
  basic_block head;

  /* Last block in the loop (the one with the dbnz insn).  */
  basic_block tail;

  /* The successor block of the loop.  This is the one the dbnz insn
     falls into.  */
  basic_block successor;

  /* The dbnz insn.  */
  rtx dbnz;

  /* The initialization insn.  */
  rtx init;

  /* The new initialization instruction.  */
  rtx loop_init;

  /* The new ending instruction.  */
  rtx loop_end;

  /* The new label placed at the end of the loop.  */
  rtx end_label;

  /* The nesting depth of the loop.  Set to -1 for a bad loop.  */
  int depth;

  /* The length of the loop.  */
  int length;

  /* Next loop in the graph.  */
  struct loop_info *next;

  /* Vector of blocks only within the loop (excluding those within
     inner loops).  */
  VEC (basic_block,heap) *blocks;

  /* Vector of inner loops within this loop.  */
  VEC (loop_info,heap) *loops;
};

/* Information used during loop detection.  */
typedef struct loop_work GTY(())
{
  /* Basic block to be scanned.  */
  basic_block block;

  /* Loop it will be within.  */
  loop_info loop;
} loop_work;

/* Work list.  */
DEF_VEC_O (loop_work);
DEF_VEC_ALLOC_O (loop_work,heap);

/* Determine the nesting and length of LOOP.  Return false if the loop
   is bad.  */

static bool
mt_loop_nesting (loop_info loop)
{
  loop_info inner;
  unsigned ix;
  int inner_depth = 0;

  if (!loop->depth)
    {
      /* Make sure we only have one entry point.  */
      if (EDGE_COUNT (loop->head->preds) == 2)
	{
	  loop->predecessor = EDGE_PRED (loop->head, 0)->src;
	  if (loop->predecessor == loop->tail)
	    /* We wanted the other predecessor.  */
	    loop->predecessor = EDGE_PRED (loop->head, 1)->src;

	  /* We can only place a loop insn on a fall through edge of a
	     single exit block.  */
	  if (EDGE_COUNT (loop->predecessor->succs) != 1
	      || !(EDGE_SUCC (loop->predecessor, 0)->flags & EDGE_FALLTHRU))
	    loop->predecessor = NULL;
	}

      /* Mark this loop as bad for now.  */
      loop->depth = -1;
      if (loop->predecessor)
	{
	  for (ix = 0; VEC_iterate (loop_info, loop->loops, ix++, inner);)
	    {
	      if (!inner->depth)
		mt_loop_nesting (inner);

	      if (inner->depth < 0)
		{
		  inner_depth = -1;
		  break;
		}

	      if (inner_depth < inner->depth)
		inner_depth = inner->depth;
	      loop->length += inner->length;
	    }

	  /* Set the proper loop depth, if it was good.  */
	  if (inner_depth >= 0)
	    loop->depth = inner_depth + 1;
	}
    }
  return (loop->depth > 0
	  && loop->predecessor
	  && loop->depth < MAX_LOOP_DEPTH
	  && loop->length < MAX_LOOP_LENGTH);
}

/* Determine the length of block BB.  */

static int
mt_block_length (basic_block bb)
{
  int length = 0;
  rtx insn;

  for (insn = BB_HEAD (bb);
       insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (!INSN_P (insn))
	continue;
      if (CALL_P (insn))
	{
	  /* Calls are not allowed in loops.  */
	  length = MAX_LOOP_LENGTH + 1;
	  break;
	}

      length += get_attr_length (insn);
    }
  return length;
}

/* Scan the blocks of LOOP (and its inferiors) looking for uses of
   REG.  Return true, if we find any.  Don't count the loop's dbnz
   insn if it matches DBNZ.  */

static bool
mt_scan_loop (loop_info loop, rtx reg, rtx dbnz)
{
  unsigned ix;
  loop_info inner;
  basic_block bb;

  for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
    {
      rtx insn;

      for (insn = BB_HEAD (bb);
	   insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  if (!INSN_P (insn))
	    continue;
	  if (insn == dbnz)
	    continue;
	  if (reg_mentioned_p (reg, PATTERN (insn)))
	    return true;
	}
    }
  for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
    if (mt_scan_loop (inner, reg, NULL_RTX))
      return true;

  return false;
}

/* MS2 has a loop instruction which needs to be placed just before the
   loop.  It indicates the end of the loop and specifies the number of
   loop iterations.  It can be nested with an automatically maintained
   stack of counter and end address registers.  It's an ideal
   candidate for doloop.  Unfortunately, gcc presumes that loops
   always end with an explicit instruction, and the doloop_begin
   instruction is not a flow control instruction so it can be
   scheduled earlier than just before the start of the loop.  To make
   matters worse, the optimization pipeline can duplicate loop exit
   and entrance blocks and fails to track abnormally exiting loops.
   Thus we cannot simply use doloop.

   What we do is emit a dbnz pattern for the doloop optimization, and
   let that be optimized as normal.  Then in machine dependent reorg
   we have to repeat the loop searching algorithm.  We use the
   flow graph to find closed loops ending in a dbnz insn.  We then try
   and convert it to use the loop instruction.  The conditions are,

   * the loop has no abnormal exits, duplicated end conditions or
     duplicated entrance blocks

   * the loop counter register is only used in the dbnz instruction
     within the loop

   * we can find the instruction setting the initial value of the loop
     counter

   * the loop is not executed more than 65535 times.  (This might be
     changed to 2^32-1, and would therefore allow variable initializers.)

   * the loop is not nested more than 4 deep

   * there are no subroutine calls in the loop.  */
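
/* Illustrative before/after sketch (not from the original source;
   mnemonics and register names are assumed).  A counted loop that was
   emitted as

	mov	rC, #9		; init insn; dbnz counts 0-based
     head:
	...			; loop body
	dbnz	rC, head	; decrement, branch until zero

   is rewritten so that a loop instruction carrying a 1-based count
   (hence the INTVAL + 1 below) in a dedicated loop register precedes
   the loop, and the dbnz is replaced by a loop_end marker:

	loop	rL, end		; 10 iterations held in rL
     head:
	...			; loop body
     end:
*/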

static void
mt_reorg_loops (FILE *dump_file)
{
  basic_block bb;
  loop_info loops = NULL;
  loop_info loop;
  int nloops = 0;
  unsigned dwork = 0;
  VEC (loop_work,heap) *works = VEC_alloc (loop_work,heap,20);
  loop_work *work;
  edge e;
  edge_iterator ei;
  bool replaced = false;

  /* Find all the possible loop tails.  This means searching for every
     dbnz instruction.  For each one found, create a loop_info
     structure and add the head block to the work list.  */
  FOR_EACH_BB (bb)
    {
      rtx tail = BB_END (bb);

      while (GET_CODE (tail) == NOTE)
	tail = PREV_INSN (tail);

      bb->aux = NULL;
      if (recog_memoized (tail) == CODE_FOR_decrement_and_branch_until_zero)
	{
	  /* A possible loop end.  */

	  loop = XNEW (struct loop_info);
	  loop->next = loops;
	  loops = loop;
	  loop->tail = bb;
	  loop->head = BRANCH_EDGE (bb)->dest;
	  loop->successor = FALLTHRU_EDGE (bb)->dest;
	  loop->predecessor = NULL;
	  loop->dbnz = tail;
	  loop->depth = 0;
	  loop->length = mt_block_length (bb);
	  loop->blocks = VEC_alloc (basic_block, heap, 20);
	  VEC_quick_push (basic_block, loop->blocks, bb);
	  loop->loops = NULL;
	  loop->loop_no = nloops++;

	  loop->init = loop->end_label = NULL_RTX;
	  loop->loop_init = loop->loop_end = NULL_RTX;

	  work = VEC_safe_push (loop_work, heap, works, NULL);
	  work->block = loop->head;
	  work->loop = loop;

	  bb->aux = loop;

	  if (dump_file)
	    {
	      fprintf (dump_file, ";; potential loop %d ending at\n",
		       loop->loop_no);
	      print_rtl_single (dump_file, tail);
	    }
	}
    }

  /* Now find all the closed loops.
     until work list empty,
      if block's auxptr is set
	if != loop slot
	  if block's loop's start != block
	    mark loop as bad
	  else
	    append block's loop's fallthrough block to worklist
	    increment this loop's depth
      else if block is exit block
	mark loop as bad
      else
	set auxptr
	for each target of block
	  add to worklist */
  while (VEC_iterate (loop_work, works, dwork++, work))
    {
      loop = work->loop;
      bb = work->block;
      if (bb == EXIT_BLOCK_PTR)
	/* We've reached the exit block.  The loop must be bad.  */
	loop->depth = -1;
      else if (!bb->aux)
	{
	  /* We've not seen this block before.  Add it to the loop's
	     list and then add each successor to the work list.  */
	  bb->aux = loop;
	  loop->length += mt_block_length (bb);
	  VEC_safe_push (basic_block, heap, loop->blocks, bb);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      if (!VEC_space (loop_work, works, 1))
		{
		  if (dwork)
		    {
		      VEC_block_remove (loop_work, works, 0, dwork);
		      dwork = 0;
		    }
		  else
		    VEC_reserve (loop_work, heap, works, 1);
		}
	      work = VEC_quick_push (loop_work, works, NULL);
	      work->block = EDGE_SUCC (bb, ei.index)->dest;
	      work->loop = loop;
	    }
	}
      else if (bb->aux != loop)
	{
	  /* We've seen this block in a different loop.  If it's not
	     the other loop's head, then this loop must be bad.
	     Otherwise, the other loop might be a nested loop, so
	     continue from that loop's successor.  */
	  loop_info other = bb->aux;

	  if (other->head != bb)
	    loop->depth = -1;
	  else
	    {
	      VEC_safe_push (loop_info, heap, loop->loops, other);
	      work = VEC_safe_push (loop_work, heap, works, NULL);
	      work->loop = loop;
	      work->block = other->successor;
	    }
	}
    }
  VEC_free (loop_work, heap, works);

  /* Now optimize the loops.  */
  for (loop = loops; loop; loop = loop->next)
    {
      rtx iter_reg, insn, init_insn;
      rtx init_val, loop_end, loop_init, end_label, head_label;

      if (!mt_loop_nesting (loop))
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);
	  continue;
	}

      /* Get the loop iteration register.  */
      iter_reg = SET_DEST (XVECEXP (PATTERN (loop->dbnz), 0, 1));

      if (!REG_P (iter_reg))
	{
	  /* Spilled.  */
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d has spilled iteration count\n",
		     loop->loop_no);
	  continue;
	}

      /* Look for the initializing insn.  */
      init_insn = NULL_RTX;
      for (insn = BB_END (loop->predecessor);
	   insn != PREV_INSN (BB_HEAD (loop->predecessor));
	   insn = PREV_INSN (insn))
	{
	  if (!INSN_P (insn))
	    continue;
	  if (reg_mentioned_p (iter_reg, PATTERN (insn)))
	    {
	      rtx set = single_set (insn);

	      if (set && rtx_equal_p (iter_reg, SET_DEST (set)))
		init_insn = insn;
	      break;
	    }
	}

      if (!init_insn)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d has no initializer\n",
		     loop->loop_no);
	  continue;
	}
      if (dump_file)
	{
	  fprintf (dump_file, ";; loop %d initialized by\n",
		   loop->loop_no);
	  print_rtl_single (dump_file, init_insn);
	}

      init_val = PATTERN (init_insn);
      if (GET_CODE (init_val) == SET)
	init_val = SET_SRC (init_val);
      if (GET_CODE (init_val) != CONST_INT || INTVAL (init_val) >= 65535)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d has complex initializer\n",
		     loop->loop_no);
	  continue;
	}

      /* Scan all the blocks to make sure they don't use iter_reg.  */
      if (mt_scan_loop (loop, iter_reg, loop->dbnz))
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d uses iterator\n",
		     loop->loop_no);
	  continue;
	}

      /* The loop is good for replacement.  */

      /* loop is 1 based, dbnz is zero based.  */
      init_val = GEN_INT (INTVAL (init_val) + 1);

      iter_reg = gen_rtx_REG (SImode, LOOP_FIRST + loop->depth - 1);
      end_label = gen_label_rtx ();
      head_label = XEXP (SET_SRC (XVECEXP (PATTERN (loop->dbnz), 0, 0)), 1);
      loop_end = gen_loop_end (iter_reg, head_label);
      loop_init = gen_loop_init (iter_reg, init_val, end_label);
      loop->init = init_insn;
      loop->end_label = end_label;
      loop->loop_init = loop_init;
      loop->loop_end = loop_end;
      replaced = true;

      if (dump_file)
	{
	  fprintf (dump_file, ";; replacing loop %d initializer with\n",
		   loop->loop_no);
	  print_rtl_single (dump_file, loop->loop_init);
	  fprintf (dump_file, ";; replacing loop %d terminator with\n",
		   loop->loop_no);
	  print_rtl_single (dump_file, loop->loop_end);
	}
    }

  /* Now apply the optimizations.  Do it this way so we don't mess up
     the flow graph half way through.  */
  for (loop = loops; loop; loop = loop->next)
    if (loop->loop_init)
      {
	emit_jump_insn_after (loop->loop_init, BB_END (loop->predecessor));
	delete_insn (loop->init);
	emit_label_before (loop->end_label, loop->dbnz);
	emit_jump_insn_before (loop->loop_end, loop->dbnz);
	delete_insn (loop->dbnz);
      }

  /* Free up the loop structures.  */
  while (loops)
    {
      loop = loops;
      loops = loop->next;
      VEC_free (loop_info, heap, loop->loops);
      VEC_free (basic_block, heap, loop->blocks);
      XDELETE (loop);
    }

  if (replaced && dump_file)
    {
      fprintf (dump_file, ";; Replaced loops\n");
      print_rtl (dump_file, get_insns ());
    }
}

/* Structures to hold branch information during reorg.  */
typedef struct branch_info
{
  rtx insn;  /* The branch insn.  */

  struct branch_info *next;
} branch_info;

typedef struct label_info
{
  rtx label;  /* The label.  */
  branch_info *branches;  /* branches to this label.  */
  struct label_info *next;
} label_info;

/* Chain of labels found in current function, used during reorg.  */
static label_info *mt_labels;

/* If *X is a label, add INSN to the list of branches for that
   label.  */

static int
mt_add_branches (rtx *x, void *insn)
{
  if (GET_CODE (*x) == LABEL_REF)
    {
      branch_info *branch = xmalloc (sizeof (*branch));
      rtx label = XEXP (*x, 0);
      label_info *info;

      for (info = mt_labels; info; info = info->next)
	if (info->label == label)
	  break;

      if (!info)
	{
	  info = xmalloc (sizeof (*info));
	  info->next = mt_labels;
	  mt_labels = info;

	  info->label = label;
	  info->branches = NULL;
	}

      branch->next = info->branches;
      info->branches = branch;
      branch->insn = insn;
    }
  return 0;
}

/* If BRANCH has a filled delay slot, check if INSN is dependent upon
   it.  If so, undo the delay slot fill.  Returns the next insn, if
   we patch out the branch.  Returns the branch insn, if we cannot
   patch out the branch (due to anti-dependency in the delay slot).
   In that case, the caller must insert nops at the branch target.  */

static rtx
mt_check_delay_slot (rtx branch, rtx insn)
{
  rtx slot;
  rtx tmp;
  rtx p;
  rtx jmp;

  gcc_assert (GET_CODE (PATTERN (branch)) == SEQUENCE);
  if (INSN_DELETED_P (branch))
    return NULL_RTX;
  slot = XVECEXP (PATTERN (branch), 0, 1);

  tmp = PATTERN (insn);
  note_stores (PATTERN (slot), insn_dependent_p_1, &tmp);
  if (tmp)
    /* Not dependent.  */
    return NULL_RTX;

  /* Undo the delay slot.  */
  jmp = XVECEXP (PATTERN (branch), 0, 0);

  tmp = PATTERN (jmp);
  note_stores (PATTERN (slot), insn_dependent_p_1, &tmp);
  if (!tmp)
    /* Anti dependent.  */
    return branch;

  p = PREV_INSN (branch);
  NEXT_INSN (p) = slot;
  PREV_INSN (slot) = p;
  NEXT_INSN (slot) = jmp;
  PREV_INSN (jmp) = slot;
  NEXT_INSN (jmp) = branch;
  PREV_INSN (branch) = jmp;
  XVECEXP (PATTERN (branch), 0, 0) = NULL_RTX;
  XVECEXP (PATTERN (branch), 0, 1) = NULL_RTX;
  delete_insn (branch);
  return jmp;
}
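
/* To summarize the pointer surgery above: the insn chain

       prev -> SEQUENCE [jmp, slot] -> next

   becomes

       prev -> slot -> jmp -> next

   so the former delay-slot insn executes before the (no longer
   delayed) jump, and the emptied SEQUENCE insn is deleted.  */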

/* Insert nops to satisfy pipeline constraints.  We only deal with ms2
   constraints here.  Earlier CPUs are dealt with by inserting nops with
   final_prescan (but that can lead to inferior code, and is
   impractical with ms2's JAL hazard).

   ms2 dynamic constraints:
   1) a load and a following use must be separated by one insn
   2) an insn and a following dependent call must be separated by two insns

   Only arith insns are placed in delay slots, so #1 cannot happen with
   a load in a delay slot.  #2 can happen with an arith insn in the
   delay slot.  */
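
/* Schematic illustration (mine; the mnemonics are not real ms2
   assembly) of constraint #1 being enforced below:

       load r1, [r2]                 load r1, [r2]
       add  r3, r1, r4      ==>      nop
                                     add  r3, r1, r4

   Constraint #2 is handled analogously, by emitting up to two nops
   immediately before a dependent call.  */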

static void
mt_reorg_hazard (void)
{
  rtx insn, next;

  /* Find all the branches.  */
  for (insn = get_insns ();
       insn;
       insn = NEXT_INSN (insn))
    {
      rtx jmp;

      if (!INSN_P (insn))
        continue;

      jmp = PATTERN (insn);

      if (GET_CODE (jmp) != SEQUENCE)
        /* If it's not got a filled delay slot, then it can't
           conflict.  */
        continue;

      jmp = XVECEXP (jmp, 0, 0);

      if (recog_memoized (jmp) == CODE_FOR_tablejump)
        for (jmp = XEXP (XEXP (XVECEXP (PATTERN (jmp), 0, 1), 0), 0);
             !JUMP_TABLE_DATA_P (jmp);
             jmp = NEXT_INSN (jmp))
          continue;

      for_each_rtx (&PATTERN (jmp), mt_add_branches, insn);
    }

  /* Now scan for dependencies.  */
  for (insn = get_insns ();
       insn && !INSN_P (insn);
       insn = NEXT_INSN (insn))
    continue;

  for (;
       insn;
       insn = next)
    {
      rtx jmp, tmp;
      enum attr_type attr;

      gcc_assert (INSN_P (insn) && !INSN_DELETED_P (insn));
      for (next = NEXT_INSN (insn);
           next;
           next = NEXT_INSN (next))
        {
          if (!INSN_P (next))
            continue;
          if (GET_CODE (PATTERN (next)) != USE)
            break;
        }

      jmp = insn;
      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
        jmp = XVECEXP (PATTERN (insn), 0, 0);

      attr = recog_memoized (jmp) >= 0 ? get_attr_type (jmp) : TYPE_UNKNOWN;

      if (next && attr == TYPE_LOAD)
        {
          /* A load.  See if NEXT is dependent, and if so insert a
             nop.  */

          tmp = PATTERN (next);
          if (GET_CODE (tmp) == SEQUENCE)
            tmp = PATTERN (XVECEXP (tmp, 0, 0));
          note_stores (PATTERN (insn), insn_dependent_p_1, &tmp);
          if (!tmp)
            emit_insn_after (gen_nop (), insn);
        }

      if (attr == TYPE_CALL)
        {
          /* A call.  Make sure we're not dependent on either of the
             previous two dynamic instructions.  */
          int nops = 0;
          int count;
          rtx prev = insn;
          rtx rescan = NULL_RTX;

          for (count = 2; count && !nops;)
            {
              int type;

              prev = PREV_INSN (prev);
              if (!prev)
                {
                  /* If we reach the start of the function, we must
                     presume the caller set the address in the delay
                     slot of the call instruction.  */
                  nops = count;
                  break;
                }

              if (BARRIER_P (prev))
                break;
              if (LABEL_P (prev))
                {
                  /* Look at branches to this label.  */
                  label_info *label;
                  branch_info *branch;

                  for (label = mt_labels;
                       label;
                       label = label->next)
                    if (label->label == prev)
                      {
                        for (branch = label->branches;
                             branch;
                             branch = branch->next)
                          {
                            tmp = mt_check_delay_slot (branch->insn, jmp);

                            if (tmp == branch->insn)
                              {
                                nops = count;
                                break;
                              }

                            if (tmp && branch->insn == next)
                              rescan = tmp;
                          }
                        break;
                      }
                  continue;
                }

              if (!INSN_P (prev) || GET_CODE (PATTERN (prev)) == USE)
                continue;

              if (GET_CODE (PATTERN (prev)) == SEQUENCE)
                {
                  /* Look at the delay slot.  */
                  tmp = mt_check_delay_slot (prev, jmp);
                  if (tmp == prev)
                    nops = count;
                  break;
                }

              type = (INSN_CODE (prev) >= 0 ? get_attr_type (prev)
                      : TYPE_COMPLEX);
              if (type == TYPE_CALL || type == TYPE_BRANCH)
                break;

              if (type == TYPE_LOAD
                  || type == TYPE_ARITH
                  || type == TYPE_COMPLEX)
                {
                  tmp = PATTERN (jmp);
                  note_stores (PATTERN (prev), insn_dependent_p_1, &tmp);
                  if (!tmp)
                    {
                      nops = count;
                      break;
                    }
                }

              if (INSN_CODE (prev) >= 0)
                count--;
            }

          if (rescan)
            for (next = NEXT_INSN (rescan);
                 next && !INSN_P (next);
                 next = NEXT_INSN (next))
              continue;
          while (nops--)
            emit_insn_before (gen_nop (), insn);
        }
    }
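
  /* At this point all needed nops have been emitted: one after each
     load whose result the next insn uses, and up to two before each
     call that depends on one of the two preceding dynamic
     instructions.  */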

  /* Free the data structures.  */
  while (mt_labels)
    {
      label_info *label = mt_labels;
      branch_info *branch, *next;

      mt_labels = label->next;
      for (branch = label->branches; branch; branch = next)
        {
          next = branch->next;
          free (branch);
        }
      free (label);
    }
}

/* Fix up the looping instructions, do delayed branch scheduling, and
   fix up scheduling hazards.  */

static void
mt_machine_reorg (void)
{
  if (cfun->machine->has_loops && TARGET_MS2)
    mt_reorg_loops (dump_file);

  if (mt_flag_delayed_branch)
    dbr_schedule (get_insns ());

  if (TARGET_MS2)
    {
      /* Force all instructions to be split into their final form.  */
      split_all_insns_noflow ();
      mt_reorg_hazard ();
    }
}
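
/* Ordering observation (mine): mt_machine_reorg rewrites loops first,
   then runs delayed-branch scheduling, and performs the ms2 hazard
   fixup last, after forcing every insn into its final split form, so
   the nop insertion sees the instruction sequence exactly as it will
   be emitted.  */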

/* Initialize the GCC target structure.  */
const struct attribute_spec mt_attribute_table[];
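
/* Each #undef/#define pair below overrides the default value of one
   target hook macro; TARGET_INITIALIZER then expands to an initializer
   that picks up these definitions when targetm is defined.  */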

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE mt_attribute_table
#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX mt_struct_value_rtx
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE mt_pass_by_reference
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK mt_pass_in_stack
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES mt_arg_partial_bytes
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS mt_setup_incoming_varargs
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG mt_machine_reorg

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-mt.h"