/* Definitions for Toshiba Media Processor
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
34 #include "insn-attr.h"
46 #include "diagnostic-core.h"
48 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
55 /* Structure of this file:
57 + Command Line Option Support
58 + Pattern support - constraints, predicates, expanders
61 + Functions to save and restore machine-specific function data.
62 + Frame/Epilog/Prolog Related
64 + Function args in registers
65 + Handle pipeline hazards
68 + Machine-dependent Reorg
73 Symbols are encoded as @ <char> . <name> where <char> is one of these:
81 c - cb (control bus) */
83 struct GTY(()) machine_function
85 int mep_frame_pointer_needed
;
93 /* Records __builtin_return address. */
97 int reg_save_slot
[FIRST_PSEUDO_REGISTER
];
98 unsigned char reg_saved
[FIRST_PSEUDO_REGISTER
];
100 /* 2 if the current function has an interrupt attribute, 1 if not, 0
101 if unknown. This is here because resource.c uses EPILOGUE_USES
103 int interrupt_handler
;
105 /* Likewise, for disinterrupt attribute. */
106 int disable_interrupts
;
108 /* Number of doloop tags used so far. */
111 /* True if the last tag was allocated to a doloop_end. */
112 bool doloop_tag_from_end
;
114 /* True if reload changes $TP. */
115 bool reload_changes_tp
;
117 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
118 We only set this if the function is an interrupt handler. */
119 int asms_without_operands
;
122 #define MEP_CONTROL_REG(x) \
123 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
125 static GTY(()) section
* based_section
;
126 static GTY(()) section
* tinybss_section
;
127 static GTY(()) section
* far_section
;
128 static GTY(()) section
* farbss_section
;
129 static GTY(()) section
* frodata_section
;
130 static GTY(()) section
* srodata_section
;
132 static GTY(()) section
* vtext_section
;
133 static GTY(()) section
* vftext_section
;
134 static GTY(()) section
* ftext_section
;
136 static void mep_set_leaf_registers (int);
137 static bool symbol_p (rtx
);
138 static bool symbolref_p (rtx
);
139 static void encode_pattern_1 (rtx
);
140 static void encode_pattern (rtx
);
141 static bool const_in_range (rtx
, int, int);
142 static void mep_rewrite_mult (rtx
, rtx
);
143 static void mep_rewrite_mulsi3 (rtx
, rtx
, rtx
, rtx
);
144 static void mep_rewrite_maddsi3 (rtx
, rtx
, rtx
, rtx
, rtx
);
145 static bool mep_reuse_lo_p_1 (rtx
, rtx
, rtx
, bool);
146 static bool move_needs_splitting (rtx
, rtx
, enum machine_mode
);
147 static bool mep_expand_setcc_1 (enum rtx_code
, rtx
, rtx
, rtx
);
148 static bool mep_nongeneral_reg (rtx
);
149 static bool mep_general_copro_reg (rtx
);
150 static bool mep_nonregister (rtx
);
151 static struct machine_function
* mep_init_machine_status (void);
152 static rtx
mep_tp_rtx (void);
153 static rtx
mep_gp_rtx (void);
154 static bool mep_interrupt_p (void);
155 static bool mep_disinterrupt_p (void);
156 static bool mep_reg_set_p (rtx
, rtx
);
157 static bool mep_reg_set_in_function (int);
158 static bool mep_interrupt_saved_reg (int);
159 static bool mep_call_saves_register (int);
161 static void add_constant (int, int, int, int);
162 static rtx
maybe_dead_move (rtx
, rtx
, bool);
163 static void mep_reload_pointer (int, const char *);
164 static void mep_start_function (FILE *, HOST_WIDE_INT
);
165 static bool mep_function_ok_for_sibcall (tree
, tree
);
166 static int unique_bit_in (HOST_WIDE_INT
);
167 static int bit_size_for_clip (HOST_WIDE_INT
);
168 static int bytesize (const_tree
, enum machine_mode
);
169 static tree
mep_validate_based_tiny (tree
*, tree
, tree
, int, bool *);
170 static tree
mep_validate_near_far (tree
*, tree
, tree
, int, bool *);
171 static tree
mep_validate_disinterrupt (tree
*, tree
, tree
, int, bool *);
172 static tree
mep_validate_interrupt (tree
*, tree
, tree
, int, bool *);
173 static tree
mep_validate_io_cb (tree
*, tree
, tree
, int, bool *);
174 static tree
mep_validate_vliw (tree
*, tree
, tree
, int, bool *);
175 static bool mep_function_attribute_inlinable_p (const_tree
);
176 static bool mep_can_inline_p (tree
, tree
);
177 static bool mep_lookup_pragma_disinterrupt (const char *);
178 static int mep_multiple_address_regions (tree
, bool);
179 static int mep_attrlist_to_encoding (tree
, tree
);
180 static void mep_insert_attributes (tree
, tree
*);
181 static void mep_encode_section_info (tree
, rtx
, int);
182 static section
* mep_select_section (tree
, int, unsigned HOST_WIDE_INT
);
183 static void mep_unique_section (tree
, int);
184 static unsigned int mep_section_type_flags (tree
, const char *, int);
185 static void mep_asm_named_section (const char *, unsigned int, tree
);
186 static bool mep_mentioned_p (rtx
, rtx
, int);
187 static void mep_reorg_regmove (rtx
);
188 static rtx
mep_insert_repeat_label_last (rtx
, rtx
, bool, bool);
189 static void mep_reorg_repeat (rtx
);
190 static bool mep_invertable_branch_p (rtx
);
191 static void mep_invert_branch (rtx
, rtx
);
192 static void mep_reorg_erepeat (rtx
);
193 static void mep_jmp_return_reorg (rtx
);
194 static void mep_reorg_addcombine (rtx
);
195 static void mep_reorg (void);
196 static void mep_init_intrinsics (void);
197 static void mep_init_builtins (void);
198 static void mep_intrinsic_unavailable (int);
199 static bool mep_get_intrinsic_insn (int, const struct cgen_insn
**);
200 static bool mep_get_move_insn (int, const struct cgen_insn
**);
201 static rtx
mep_convert_arg (enum machine_mode
, rtx
);
202 static rtx
mep_convert_regnum (const struct cgen_regnum_operand
*, rtx
);
203 static rtx
mep_legitimize_arg (const struct insn_operand_data
*, rtx
, int);
204 static void mep_incompatible_arg (const struct insn_operand_data
*, rtx
, int, tree
);
205 static rtx
mep_expand_builtin (tree
, rtx
, rtx
, enum machine_mode
, int);
206 static int mep_adjust_cost (rtx
, rtx
, rtx
, int);
207 static int mep_issue_rate (void);
208 static rtx
mep_find_ready_insn (rtx
*, int, enum attr_slot
, int);
209 static void mep_move_ready_insn (rtx
*, int, rtx
);
210 static int mep_sched_reorder (FILE *, int, rtx
*, int *, int);
211 static rtx
mep_make_bundle (rtx
, rtx
);
212 static void mep_bundle_insns (rtx
);
213 static bool mep_rtx_cost (rtx
, int, int, int *, bool);
214 static int mep_address_cost (rtx
, bool);
215 static void mep_setup_incoming_varargs (CUMULATIVE_ARGS
*, enum machine_mode
,
217 static bool mep_pass_by_reference (CUMULATIVE_ARGS
* cum
, enum machine_mode
,
219 static bool mep_vector_mode_supported_p (enum machine_mode
);
220 static bool mep_handle_option (size_t, const char *, int);
221 static rtx
mep_allocate_initial_value (rtx
);
222 static void mep_asm_init_sections (void);
223 static int mep_comp_type_attributes (const_tree
, const_tree
);
224 static bool mep_narrow_volatile_bitfield (void);
225 static rtx
mep_expand_builtin_saveregs (void);
226 static tree
mep_build_builtin_va_list (void);
227 static void mep_expand_va_start (tree
, rtx
);
228 static tree
mep_gimplify_va_arg_expr (tree
, tree
, gimple_seq
*, gimple_seq
*);
229 static bool mep_can_eliminate (const int, const int);
230 static void mep_trampoline_init (rtx
, tree
, rtx
);
232 #define WANT_GCC_DEFINITIONS
233 #include "mep-intrin.h"
234 #undef WANT_GCC_DEFINITIONS
237 /* Command Line Option Support. */
239 char mep_leaf_registers
[FIRST_PSEUDO_REGISTER
];
241 /* True if we can use cmov instructions to move values back and forth
242 between core and coprocessor registers. */
243 bool mep_have_core_copro_moves_p
;
245 /* True if we can use cmov instructions (or a work-alike) to move
246 values between coprocessor registers. */
247 bool mep_have_copro_copro_moves_p
;
249 /* A table of all coprocessor instructions that can act like
250 a coprocessor-to-coprocessor cmov. */
251 static const int mep_cmov_insns
[] = {
264 static int option_mtiny_specified
= 0;
268 mep_set_leaf_registers (int enable
)
272 if (mep_leaf_registers
[0] != enable
)
273 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
274 mep_leaf_registers
[i
] = enable
;
278 mep_conditional_register_usage (void)
282 if (!TARGET_OPT_MULT
&& !TARGET_OPT_DIV
)
284 fixed_regs
[HI_REGNO
] = 1;
285 fixed_regs
[LO_REGNO
] = 1;
286 call_used_regs
[HI_REGNO
] = 1;
287 call_used_regs
[LO_REGNO
] = 1;
290 for (i
= FIRST_SHADOW_REGISTER
; i
<= LAST_SHADOW_REGISTER
; i
++)
295 mep_optimization_options (void)
297 /* The first scheduling pass often increases register pressure and tends
298 to result in more spill code. Only run it when specifically asked. */
299 flag_schedule_insns
= 0;
301 /* Using $fp doesn't gain us much, even when debugging is important. */
302 flag_omit_frame_pointer
= 1;
306 mep_option_override (void)
309 warning (OPT_fpic
, "-fpic is not supported");
311 warning (OPT_fPIC
, "-fPIC is not supported");
312 if (TARGET_S
&& TARGET_M
)
313 error ("only one of -ms and -mm may be given");
314 if (TARGET_S
&& TARGET_L
)
315 error ("only one of -ms and -ml may be given");
316 if (TARGET_M
&& TARGET_L
)
317 error ("only one of -mm and -ml may be given");
318 if (TARGET_S
&& option_mtiny_specified
)
319 error ("only one of -ms and -mtiny= may be given");
320 if (TARGET_M
&& option_mtiny_specified
)
321 error ("only one of -mm and -mtiny= may be given");
322 if (TARGET_OPT_CLIP
&& ! TARGET_OPT_MINMAX
)
323 warning (0, "-mclip currently has no effect without -mminmax");
325 if (mep_const_section
)
327 if (strcmp (mep_const_section
, "tiny") != 0
328 && strcmp (mep_const_section
, "near") != 0
329 && strcmp (mep_const_section
, "far") != 0)
330 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
334 mep_tiny_cutoff
= 65536;
337 if (TARGET_L
&& ! option_mtiny_specified
)
340 if (TARGET_64BIT_CR_REGS
)
341 flag_split_wide_types
= 0;
343 init_machine_status
= mep_init_machine_status
;
344 mep_init_intrinsics ();
347 /* Pattern Support - constraints, predicates, expanders. */
349 /* MEP has very few instructions that can refer to the span of
350 addresses used by symbols, so it's common to check for them. */
355 int c
= GET_CODE (x
);
357 return (c
== CONST_INT
367 if (GET_CODE (x
) != MEM
)
370 c
= GET_CODE (XEXP (x
, 0));
371 return (c
== CONST_INT
376 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
378 #define GEN_REG(R, STRICT) \
381 && ((R) == ARG_POINTER_REGNUM \
382 || (R) >= FIRST_PSEUDO_REGISTER)))
384 static char pattern
[12], *patternp
;
385 static GTY(()) rtx patternr
[12];
386 #define RTX_IS(x) (strcmp (pattern, x) == 0)
389 encode_pattern_1 (rtx x
)
393 if (patternp
== pattern
+ sizeof (pattern
) - 2)
399 patternr
[patternp
-pattern
] = x
;
401 switch (GET_CODE (x
))
409 encode_pattern_1 (XEXP(x
, 0));
413 encode_pattern_1 (XEXP(x
, 0));
414 encode_pattern_1 (XEXP(x
, 1));
418 encode_pattern_1 (XEXP(x
, 0));
419 encode_pattern_1 (XEXP(x
, 1));
423 encode_pattern_1 (XEXP(x
, 0));
437 *patternp
++ = '0' + XCINT(x
, 1, UNSPEC
);
438 for (i
=0; i
<XVECLEN (x
, 0); i
++)
439 encode_pattern_1 (XVECEXP (x
, 0, i
));
447 fprintf (stderr
, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x
)));
456 encode_pattern (rtx x
)
459 encode_pattern_1 (x
);
464 mep_section_tag (rtx x
)
470 switch (GET_CODE (x
))
477 x
= XVECEXP (x
, 0, 0);
480 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
489 if (GET_CODE (x
) != SYMBOL_REF
)
492 if (name
[0] == '@' && name
[2] == '.')
494 if (name
[1] == 'i' || name
[1] == 'I')
497 return 'f'; /* near */
498 return 'n'; /* far */
506 mep_regno_reg_class (int regno
)
510 case SP_REGNO
: return SP_REGS
;
511 case TP_REGNO
: return TP_REGS
;
512 case GP_REGNO
: return GP_REGS
;
513 case 0: return R0_REGS
;
514 case HI_REGNO
: return HI_REGS
;
515 case LO_REGNO
: return LO_REGS
;
516 case ARG_POINTER_REGNUM
: return GENERAL_REGS
;
519 if (GR_REGNO_P (regno
))
520 return regno
< FIRST_GR_REGNO
+ 8 ? TPREL_REGS
: GENERAL_REGS
;
521 if (CONTROL_REGNO_P (regno
))
524 if (CR_REGNO_P (regno
))
528 /* Search for the register amongst user-defined subclasses of
529 the coprocessor registers. */
530 for (i
= USER0_REGS
; i
<= USER3_REGS
; ++i
)
532 if (! TEST_HARD_REG_BIT (reg_class_contents
[i
], regno
))
534 for (j
= 0; j
< N_REG_CLASSES
; ++j
)
536 enum reg_class sub
= reg_class_subclasses
[i
][j
];
538 if (sub
== LIM_REG_CLASSES
)
540 if (TEST_HARD_REG_BIT (reg_class_contents
[sub
], regno
))
545 return LOADABLE_CR_REGNO_P (regno
) ? LOADABLE_CR_REGS
: CR_REGS
;
548 if (CCR_REGNO_P (regno
))
551 gcc_assert (regno
>= FIRST_SHADOW_REGISTER
&& regno
<= LAST_SHADOW_REGISTER
);
557 mep_reg_class_from_constraint (int c
, const char *str
)
574 return LOADABLE_CR_REGS
;
576 return mep_have_copro_copro_moves_p
? CR_REGS
: NO_REGS
;
578 return mep_have_core_copro_moves_p
? CR_REGS
: NO_REGS
;
605 enum reg_class which
= c
- 'A' + USER0_REGS
;
606 return (reg_class_size
[which
] > 0 ? which
: NO_REGS
);
615 mep_const_ok_for_letter_p (HOST_WIDE_INT value
, int c
)
619 case 'I': return value
>= -32768 && value
< 32768;
620 case 'J': return value
>= 0 && value
< 65536;
621 case 'K': return value
>= 0 && value
< 0x01000000;
622 case 'L': return value
>= -32 && value
< 32;
623 case 'M': return value
>= 0 && value
< 32;
624 case 'N': return value
>= 0 && value
< 16;
628 return value
>= -2147483647-1 && value
<= 2147483647;
635 mep_extra_constraint (rtx value
, int c
)
637 encode_pattern (value
);
642 /* For near symbols, like what call uses. */
643 if (GET_CODE (value
) == REG
)
645 return mep_call_address_operand (value
, GET_MODE (value
));
648 /* For signed 8-bit immediates. */
649 return (GET_CODE (value
) == CONST_INT
650 && INTVAL (value
) >= -128
651 && INTVAL (value
) <= 127);
654 /* For tp/gp relative symbol values. */
655 return (RTX_IS ("u3s") || RTX_IS ("u2s")
656 || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
659 /* Non-absolute memories. */
660 return GET_CODE (value
) == MEM
&& ! CONSTANT_P (XEXP (value
, 0));
664 return RTX_IS ("Hs");
667 /* Register indirect. */
668 return RTX_IS ("mr");
671 return mep_section_tag (value
) == 'c' && RTX_IS ("ms");
682 const_in_range (rtx x
, int minv
, int maxv
)
684 return (GET_CODE (x
) == CONST_INT
685 && INTVAL (x
) >= minv
686 && INTVAL (x
) <= maxv
);
689 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
690 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
691 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
692 at the end of the insn stream. */
695 mep_mulr_source (rtx insn
, rtx dest
, rtx src1
, rtx src2
)
697 if (rtx_equal_p (dest
, src1
))
699 else if (rtx_equal_p (dest
, src2
))
704 emit_insn (gen_movsi (copy_rtx (dest
), src1
));
706 emit_insn_before (gen_movsi (copy_rtx (dest
), src1
), insn
);
711 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
712 Change the last element of PATTERN from (clobber (scratch:SI))
713 to (clobber (reg:SI HI_REGNO)). */
716 mep_rewrite_mult (rtx insn
, rtx pattern
)
720 hi_clobber
= XVECEXP (pattern
, 0, XVECLEN (pattern
, 0) - 1);
721 XEXP (hi_clobber
, 0) = gen_rtx_REG (SImode
, HI_REGNO
);
722 PATTERN (insn
) = pattern
;
723 INSN_CODE (insn
) = -1;
726 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
727 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
728 store the result in DEST if nonnull. */
731 mep_rewrite_mulsi3 (rtx insn
, rtx dest
, rtx src1
, rtx src2
)
735 lo
= gen_rtx_REG (SImode
, LO_REGNO
);
737 pattern
= gen_mulsi3r (lo
, dest
, copy_rtx (dest
),
738 mep_mulr_source (insn
, dest
, src1
, src2
));
740 pattern
= gen_mulsi3_lo (lo
, src1
, src2
);
741 mep_rewrite_mult (insn
, pattern
);
744 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
745 SRC3 into $lo, then use either madd or maddr. The move into $lo will
746 be deleted by a peephole2 if SRC3 is already in $lo. */
749 mep_rewrite_maddsi3 (rtx insn
, rtx dest
, rtx src1
, rtx src2
, rtx src3
)
753 lo
= gen_rtx_REG (SImode
, LO_REGNO
);
754 emit_insn_before (gen_movsi (copy_rtx (lo
), src3
), insn
);
756 pattern
= gen_maddsi3r (lo
, dest
, copy_rtx (dest
),
757 mep_mulr_source (insn
, dest
, src1
, src2
),
760 pattern
= gen_maddsi3_lo (lo
, src1
, src2
, copy_rtx (lo
));
761 mep_rewrite_mult (insn
, pattern
);
764 /* Return true if $lo has the same value as integer register GPR when
765 instruction INSN is reached. If necessary, rewrite the instruction
766 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
767 rtx for (reg:SI LO_REGNO).
769 This function is intended to be used by the peephole2 pass. Since
770 that pass goes from the end of a basic block to the beginning, and
771 propagates liveness information on the way, there is no need to
772 update register notes here.
774 If GPR_DEAD_P is true on entry, and this function returns true,
775 then the caller will replace _every_ use of GPR in and after INSN
776 with LO. This means that if the instruction that sets $lo is a
777 mulr- or maddr-type instruction, we can rewrite it to use mul or
778 madd instead. In combination with the copy progagation pass,
779 this allows us to replace sequences like:
788 if GPR is no longer used. */
791 mep_reuse_lo_p_1 (rtx lo
, rtx gpr
, rtx insn
, bool gpr_dead_p
)
795 insn
= PREV_INSN (insn
);
797 switch (recog_memoized (insn
))
799 case CODE_FOR_mulsi3_1
:
801 if (rtx_equal_p (recog_data
.operand
[0], gpr
))
803 mep_rewrite_mulsi3 (insn
,
804 gpr_dead_p
? NULL
: recog_data
.operand
[0],
805 recog_data
.operand
[1],
806 recog_data
.operand
[2]);
811 case CODE_FOR_maddsi3
:
813 if (rtx_equal_p (recog_data
.operand
[0], gpr
))
815 mep_rewrite_maddsi3 (insn
,
816 gpr_dead_p
? NULL
: recog_data
.operand
[0],
817 recog_data
.operand
[1],
818 recog_data
.operand
[2],
819 recog_data
.operand
[3]);
824 case CODE_FOR_mulsi3r
:
825 case CODE_FOR_maddsi3r
:
827 return rtx_equal_p (recog_data
.operand
[1], gpr
);
830 if (reg_set_p (lo
, insn
)
831 || reg_set_p (gpr
, insn
)
832 || volatile_insn_p (PATTERN (insn
)))
835 if (gpr_dead_p
&& reg_referenced_p (gpr
, PATTERN (insn
)))
840 while (!NOTE_INSN_BASIC_BLOCK_P (insn
));
844 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
847 mep_reuse_lo_p (rtx lo
, rtx gpr
, rtx insn
, bool gpr_dead_p
)
849 bool result
= mep_reuse_lo_p_1 (lo
, gpr
, insn
, gpr_dead_p
);
854 /* Return true if SET can be turned into a post-modify load or store
855 that adds OFFSET to GPR. In other words, return true if SET can be
858 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
860 It's OK to change SET to an equivalent operation in order to
864 mep_use_post_modify_for_set_p (rtx set
, rtx gpr
, rtx offset
)
867 unsigned int reg_bytes
, mem_bytes
;
868 enum machine_mode reg_mode
, mem_mode
;
870 /* Only simple SETs can be converted. */
871 if (GET_CODE (set
) != SET
)
874 /* Point REG to what we hope will be the register side of the set and
875 MEM to what we hope will be the memory side. */
876 if (GET_CODE (SET_DEST (set
)) == MEM
)
878 mem
= &SET_DEST (set
);
879 reg
= &SET_SRC (set
);
883 reg
= &SET_DEST (set
);
884 mem
= &SET_SRC (set
);
885 if (GET_CODE (*mem
) == SIGN_EXTEND
)
886 mem
= &XEXP (*mem
, 0);
889 /* Check that *REG is a suitable coprocessor register. */
890 if (GET_CODE (*reg
) != REG
|| !LOADABLE_CR_REGNO_P (REGNO (*reg
)))
893 /* Check that *MEM is a suitable memory reference. */
894 if (GET_CODE (*mem
) != MEM
|| !rtx_equal_p (XEXP (*mem
, 0), gpr
))
897 /* Get the number of bytes in each operand. */
898 mem_bytes
= GET_MODE_SIZE (GET_MODE (*mem
));
899 reg_bytes
= GET_MODE_SIZE (GET_MODE (*reg
));
901 /* Check that OFFSET is suitably aligned. */
902 if (INTVAL (offset
) & (mem_bytes
- 1))
905 /* Convert *MEM to a normal integer mode. */
906 mem_mode
= mode_for_size (mem_bytes
* BITS_PER_UNIT
, MODE_INT
, 0);
907 *mem
= change_address (*mem
, mem_mode
, NULL
);
909 /* Adjust *REG as well. */
910 *reg
= shallow_copy_rtx (*reg
);
911 if (reg
== &SET_DEST (set
) && reg_bytes
< UNITS_PER_WORD
)
913 /* SET is a subword load. Convert it to an explicit extension. */
914 PUT_MODE (*reg
, SImode
);
915 *mem
= gen_rtx_SIGN_EXTEND (SImode
, *mem
);
919 reg_mode
= mode_for_size (reg_bytes
* BITS_PER_UNIT
, MODE_INT
, 0);
920 PUT_MODE (*reg
, reg_mode
);
925 /* Return the effect of frame-related instruction INSN. */
928 mep_frame_expr (rtx insn
)
932 note
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, 0);
933 expr
= (note
!= 0 ? XEXP (note
, 0) : copy_rtx (PATTERN (insn
)));
934 RTX_FRAME_RELATED_P (expr
) = 1;
938 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
939 new pattern in INSN1; INSN2 will be deleted by the caller. */
942 mep_make_parallel (rtx insn1
, rtx insn2
)
946 if (RTX_FRAME_RELATED_P (insn2
))
948 expr
= mep_frame_expr (insn2
);
949 if (RTX_FRAME_RELATED_P (insn1
))
950 expr
= gen_rtx_SEQUENCE (VOIDmode
,
951 gen_rtvec (2, mep_frame_expr (insn1
), expr
));
952 set_unique_reg_note (insn1
, REG_FRAME_RELATED_EXPR
, expr
);
953 RTX_FRAME_RELATED_P (insn1
) = 1;
956 PATTERN (insn1
) = gen_rtx_PARALLEL (VOIDmode
,
957 gen_rtvec (2, PATTERN (insn1
),
959 INSN_CODE (insn1
) = -1;
962 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
963 the basic block to see if any previous load or store instruction can
964 be persuaded to do SET_INSN as a side-effect. Return true if so. */
967 mep_use_post_modify_p_1 (rtx set_insn
, rtx reg
, rtx offset
)
974 insn
= PREV_INSN (insn
);
977 if (mep_use_post_modify_for_set_p (PATTERN (insn
), reg
, offset
))
979 mep_make_parallel (insn
, set_insn
);
983 if (reg_set_p (reg
, insn
)
984 || reg_referenced_p (reg
, PATTERN (insn
))
985 || volatile_insn_p (PATTERN (insn
)))
989 while (!NOTE_INSN_BASIC_BLOCK_P (insn
));
993 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
996 mep_use_post_modify_p (rtx insn
, rtx reg
, rtx offset
)
998 bool result
= mep_use_post_modify_p_1 (insn
, reg
, offset
);
1004 mep_allow_clip (rtx ux
, rtx lx
, int s
)
1006 HOST_WIDE_INT u
= INTVAL (ux
);
1007 HOST_WIDE_INT l
= INTVAL (lx
);
1010 if (!TARGET_OPT_CLIP
)
1015 for (i
= 0; i
< 30; i
++)
1016 if ((u
== ((HOST_WIDE_INT
) 1 << i
) - 1)
1017 && (l
== - ((HOST_WIDE_INT
) 1 << i
)))
1025 for (i
= 0; i
< 30; i
++)
1026 if ((u
== ((HOST_WIDE_INT
) 1 << i
) - 1))
1033 mep_bit_position_p (rtx x
, bool looking_for
)
1035 if (GET_CODE (x
) != CONST_INT
)
1037 switch ((int) INTVAL(x
) & 0xff)
1039 case 0x01: case 0x02: case 0x04: case 0x08:
1040 case 0x10: case 0x20: case 0x40: case 0x80:
1042 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1043 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1044 return !looking_for
;
1050 move_needs_splitting (rtx dest
, rtx src
,
1051 enum machine_mode mode ATTRIBUTE_UNUSED
)
1053 int s
= mep_section_tag (src
);
1057 if (GET_CODE (src
) == CONST
1058 || GET_CODE (src
) == MEM
)
1059 src
= XEXP (src
, 0);
1060 else if (GET_CODE (src
) == SYMBOL_REF
1061 || GET_CODE (src
) == LABEL_REF
1062 || GET_CODE (src
) == PLUS
)
1068 || (GET_CODE (src
) == PLUS
1069 && GET_CODE (XEXP (src
, 1)) == CONST_INT
1070 && (INTVAL (XEXP (src
, 1)) < -65536
1071 || INTVAL (XEXP (src
, 1)) > 0xffffff))
1072 || (GET_CODE (dest
) == REG
1073 && REGNO (dest
) > 7 && REGNO (dest
) < FIRST_PSEUDO_REGISTER
))
1079 mep_split_mov (rtx
*operands
, int symbolic
)
1083 if (move_needs_splitting (operands
[0], operands
[1], SImode
))
1088 if (GET_CODE (operands
[1]) != CONST_INT
)
1091 if (constraint_satisfied_p (operands
[1], CONSTRAINT_I
)
1092 || constraint_satisfied_p (operands
[1], CONSTRAINT_J
)
1093 || constraint_satisfied_p (operands
[1], CONSTRAINT_O
))
1096 if (((!reload_completed
&& !reload_in_progress
)
1097 || (REG_P (operands
[0]) && REGNO (operands
[0]) < 8))
1098 && constraint_satisfied_p (operands
[1], CONSTRAINT_K
))
1104 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1105 it to one specific value. So the insn chosen depends on whether
1106 the source and destination modes match. */
1109 mep_vliw_mode_match (rtx tgt
)
1111 bool src_vliw
= mep_vliw_function_p (cfun
->decl
);
1112 bool tgt_vliw
= INTVAL (tgt
);
1114 return src_vliw
== tgt_vliw
;
1117 /* Like the above, but also test for near/far mismatches. */
1120 mep_vliw_jmp_match (rtx tgt
)
1122 bool src_vliw
= mep_vliw_function_p (cfun
->decl
);
1123 bool tgt_vliw
= INTVAL (tgt
);
1125 if (mep_section_tag (DECL_RTL (cfun
->decl
)) == 'f')
1128 return src_vliw
== tgt_vliw
;
1132 mep_multi_slot (rtx x
)
1134 return get_attr_slot (x
) == SLOT_MULTI
;
1139 mep_legitimate_constant_p (rtx x
)
1141 /* We can't convert symbol values to gp- or tp-rel values after
1142 reload, as reload might have used $gp or $tp for other
1144 if (GET_CODE (x
) == SYMBOL_REF
&& (reload_in_progress
|| reload_completed
))
1146 char e
= mep_section_tag (x
);
1147 return (e
!= 't' && e
!= 'b');
1152 /* Be careful not to use macros that need to be compiled one way for
1153 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1156 mep_legitimate_address (enum machine_mode mode
, rtx x
, int strict
)
1160 #define DEBUG_LEGIT 0
1162 fprintf (stderr
, "legit: mode %s strict %d ", mode_name
[mode
], strict
);
1166 if (GET_CODE (x
) == LO_SUM
1167 && GET_CODE (XEXP (x
, 0)) == REG
1168 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1169 && CONSTANT_P (XEXP (x
, 1)))
1171 if (GET_MODE_SIZE (mode
) > 4)
1173 /* We will end up splitting this, and lo_sums are not
1174 offsettable for us. */
1176 fprintf(stderr
, " - nope, %%lo(sym)[reg] not splittable\n");
1181 fprintf (stderr
, " - yup, %%lo(sym)[reg]\n");
1186 if (GET_CODE (x
) == REG
1187 && GEN_REG (REGNO (x
), strict
))
1190 fprintf (stderr
, " - yup, [reg]\n");
1195 if (GET_CODE (x
) == PLUS
1196 && GET_CODE (XEXP (x
, 0)) == REG
1197 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1198 && const_in_range (XEXP (x
, 1), -32768, 32767))
1201 fprintf (stderr
, " - yup, [reg+const]\n");
1206 if (GET_CODE (x
) == PLUS
1207 && GET_CODE (XEXP (x
, 0)) == REG
1208 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1209 && GET_CODE (XEXP (x
, 1)) == CONST
1210 && (GET_CODE (XEXP (XEXP (x
, 1), 0)) == UNSPEC
1211 || (GET_CODE (XEXP (XEXP (x
, 1), 0)) == PLUS
1212 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == UNSPEC
1213 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == CONST_INT
)))
1216 fprintf (stderr
, " - yup, [reg+unspec]\n");
1221 the_tag
= mep_section_tag (x
);
1226 fprintf (stderr
, " - nope, [far]\n");
1231 if (mode
== VOIDmode
1232 && GET_CODE (x
) == SYMBOL_REF
)
1235 fprintf (stderr
, " - yup, call [symbol]\n");
1240 if ((mode
== SImode
|| mode
== SFmode
)
1242 && LEGITIMATE_CONSTANT_P (x
)
1243 && the_tag
!= 't' && the_tag
!= 'b')
1245 if (GET_CODE (x
) != CONST_INT
1246 || (INTVAL (x
) <= 0xfffff
1248 && (INTVAL (x
) % 4) == 0))
1251 fprintf (stderr
, " - yup, [const]\n");
1258 fprintf (stderr
, " - nope.\n");
1264 mep_legitimize_reload_address (rtx
*x
, enum machine_mode mode
, int opnum
,
1265 enum reload_type type
,
1266 int ind_levels ATTRIBUTE_UNUSED
)
1268 if (GET_CODE (*x
) == PLUS
1269 && GET_CODE (XEXP (*x
, 0)) == MEM
1270 && GET_CODE (XEXP (*x
, 1)) == REG
)
1272 /* GCC will by default copy the MEM into a REG, which results in
1273 an invalid address. For us, the best thing to do is move the
1274 whole expression to a REG. */
1275 push_reload (*x
, NULL_RTX
, x
, NULL
,
1276 GENERAL_REGS
, mode
, VOIDmode
,
1281 if (GET_CODE (*x
) == PLUS
1282 && GET_CODE (XEXP (*x
, 0)) == SYMBOL_REF
1283 && GET_CODE (XEXP (*x
, 1)) == CONST_INT
)
1285 char e
= mep_section_tag (XEXP (*x
, 0));
1287 if (e
!= 't' && e
!= 'b')
1289 /* GCC thinks that (sym+const) is a valid address. Well,
1290 sometimes it is, this time it isn't. The best thing to
1291 do is reload the symbol to a register, since reg+int
1292 tends to work, and we can't just add the symbol and
1294 push_reload (XEXP (*x
, 0), NULL_RTX
, &(XEXP(*x
, 0)), NULL
,
1295 GENERAL_REGS
, mode
, VOIDmode
,
1304 mep_core_address_length (rtx insn
, int opn
)
1306 rtx set
= single_set (insn
);
1307 rtx mem
= XEXP (set
, opn
);
1308 rtx other
= XEXP (set
, 1-opn
);
1309 rtx addr
= XEXP (mem
, 0);
1311 if (register_operand (addr
, Pmode
))
1313 if (GET_CODE (addr
) == PLUS
)
1315 rtx addend
= XEXP (addr
, 1);
1317 gcc_assert (REG_P (XEXP (addr
, 0)));
1319 switch (REGNO (XEXP (addr
, 0)))
1321 case STACK_POINTER_REGNUM
:
1322 if (GET_MODE_SIZE (GET_MODE (mem
)) == 4
1323 && mep_imm7a4_operand (addend
, VOIDmode
))
1328 gcc_assert (REG_P (other
));
1330 if (REGNO (other
) >= 8)
1333 if (GET_CODE (addend
) == CONST
1334 && GET_CODE (XEXP (addend
, 0)) == UNSPEC
1335 && XINT (XEXP (addend
, 0), 1) == UNS_TPREL
)
1338 if (GET_CODE (addend
) == CONST_INT
1339 && INTVAL (addend
) >= 0
1340 && INTVAL (addend
) <= 127
1341 && INTVAL (addend
) % GET_MODE_SIZE (GET_MODE (mem
)) == 0)
1351 mep_cop_address_length (rtx insn
, int opn
)
1353 rtx set
= single_set (insn
);
1354 rtx mem
= XEXP (set
, opn
);
1355 rtx addr
= XEXP (mem
, 0);
1357 if (GET_CODE (mem
) != MEM
)
1359 if (register_operand (addr
, Pmode
))
1361 if (GET_CODE (addr
) == POST_INC
)
1367 #define DEBUG_EXPAND_MOV 0
1369 mep_expand_mov (rtx
*operands
, enum machine_mode mode
)
1374 int post_reload
= 0;
1376 tag
[0] = mep_section_tag (operands
[0]);
1377 tag
[1] = mep_section_tag (operands
[1]);
1379 if (!reload_in_progress
1380 && !reload_completed
1381 && GET_CODE (operands
[0]) != REG
1382 && GET_CODE (operands
[0]) != SUBREG
1383 && GET_CODE (operands
[1]) != REG
1384 && GET_CODE (operands
[1]) != SUBREG
)
1385 operands
[1] = copy_to_mode_reg (mode
, operands
[1]);
1387 #if DEBUG_EXPAND_MOV
1388 fprintf(stderr
, "expand move %s %d\n", mode_name
[mode
],
1389 reload_in_progress
|| reload_completed
);
1390 debug_rtx (operands
[0]);
1391 debug_rtx (operands
[1]);
1394 if (mode
== DImode
|| mode
== DFmode
)
1397 if (reload_in_progress
|| reload_completed
)
1401 if (GET_CODE (operands
[0]) == REG
&& REGNO (operands
[0]) == TP_REGNO
)
1402 cfun
->machine
->reload_changes_tp
= true;
1404 if (tag
[0] == 't' || tag
[1] == 't')
1406 r
= has_hard_reg_initial_val (Pmode
, GP_REGNO
);
1407 if (!r
|| GET_CODE (r
) != REG
|| REGNO (r
) != GP_REGNO
)
1410 if (tag
[0] == 'b' || tag
[1] == 'b')
1412 r
= has_hard_reg_initial_val (Pmode
, TP_REGNO
);
1413 if (!r
|| GET_CODE (r
) != REG
|| REGNO (r
) != TP_REGNO
)
1416 if (cfun
->machine
->reload_changes_tp
== true)
1423 if (symbol_p (operands
[1]))
1425 t
= mep_section_tag (operands
[1]);
1426 if (t
== 'b' || t
== 't')
1429 if (GET_CODE (operands
[1]) == SYMBOL_REF
)
1431 tpsym
= operands
[1];
1432 n
= gen_rtx_UNSPEC (mode
,
1433 gen_rtvec (1, operands
[1]),
1434 t
== 'b' ? UNS_TPREL
: UNS_GPREL
);
1435 n
= gen_rtx_CONST (mode
, n
);
1437 else if (GET_CODE (operands
[1]) == CONST
1438 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
1439 && GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
1440 && GET_CODE (XEXP (XEXP (operands
[1], 0), 1)) == CONST_INT
)
1442 tpsym
= XEXP (XEXP (operands
[1], 0), 0);
1443 tpoffs
= XEXP (XEXP (operands
[1], 0), 1);
1444 n
= gen_rtx_UNSPEC (mode
,
1445 gen_rtvec (1, tpsym
),
1446 t
== 'b' ? UNS_TPREL
: UNS_GPREL
);
1447 n
= gen_rtx_PLUS (mode
, n
, tpoffs
);
1448 n
= gen_rtx_CONST (mode
, n
);
1450 else if (GET_CODE (operands
[1]) == CONST
1451 && GET_CODE (XEXP (operands
[1], 0)) == UNSPEC
)
1455 error ("unusual TP-relative address");
1459 n
= gen_rtx_PLUS (mode
, (t
== 'b' ? mep_tp_rtx ()
1460 : mep_gp_rtx ()), n
);
1461 n
= emit_insn (gen_rtx_SET (mode
, operands
[0], n
));
1462 #if DEBUG_EXPAND_MOV
1463 fprintf(stderr
, "mep_expand_mov emitting ");
1470 for (i
=0; i
< 2; i
++)
1472 t
= mep_section_tag (operands
[i
]);
1473 if (GET_CODE (operands
[i
]) == MEM
&& (t
== 'b' || t
== 't'))
1478 sym
= XEXP (operands
[i
], 0);
1479 if (GET_CODE (sym
) == CONST
1480 && GET_CODE (XEXP (sym
, 0)) == UNSPEC
)
1481 sym
= XVECEXP (XEXP (sym
, 0), 0, 0);
1494 n
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, sym
), u
);
1495 n
= gen_rtx_CONST (Pmode
, n
);
1496 n
= gen_rtx_PLUS (Pmode
, r
, n
);
1497 operands
[i
] = replace_equiv_address (operands
[i
], n
);
1502 if ((GET_CODE (operands
[1]) != REG
1503 && MEP_CONTROL_REG (operands
[0]))
1504 || (GET_CODE (operands
[0]) != REG
1505 && MEP_CONTROL_REG (operands
[1])))
1508 #if DEBUG_EXPAND_MOV
1509 fprintf (stderr
, "cr-mem, forcing op1 to reg\n");
1511 temp
= gen_reg_rtx (mode
);
1512 emit_move_insn (temp
, operands
[1]);
1516 if (symbolref_p (operands
[0])
1517 && (mep_section_tag (XEXP (operands
[0], 0)) == 'f'
1518 || (GET_MODE_SIZE (mode
) != 4)))
1522 gcc_assert (!reload_in_progress
&& !reload_completed
);
1524 temp
= force_reg (Pmode
, XEXP (operands
[0], 0));
1525 operands
[0] = replace_equiv_address (operands
[0], temp
);
1526 emit_move_insn (operands
[0], operands
[1]);
1530 if (!post_reload
&& (tag
[1] == 't' || tag
[1] == 'b'))
1533 if (symbol_p (operands
[1])
1534 && (tag
[1] == 'f' || tag
[1] == 't' || tag
[1] == 'b'))
1536 emit_insn (gen_movsi_topsym_s (operands
[0], operands
[1]));
1537 emit_insn (gen_movsi_botsym_s (operands
[0], operands
[0], operands
[1]));
1541 if (symbolref_p (operands
[1])
1542 && (tag
[1] == 'f' || tag
[1] == 't' || tag
[1] == 'b'))
1546 if (reload_in_progress
|| reload_completed
)
1549 temp
= gen_reg_rtx (Pmode
);
1551 emit_insn (gen_movsi_topsym_s (temp
, operands
[1]));
1552 emit_insn (gen_movsi_botsym_s (temp
, temp
, operands
[1]));
1553 emit_move_insn (operands
[0], replace_equiv_address (operands
[1], temp
));
1560 /* Cases where the pattern can't be made to use at all. */
1563 mep_mov_ok (rtx
*operands
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1567 #define DEBUG_MOV_OK 0
1569 fprintf (stderr
, "mep_mov_ok %s %c=%c\n", mode_name
[mode
], mep_section_tag (operands
[0]),
1570 mep_section_tag (operands
[1]));
1571 debug_rtx (operands
[0]);
1572 debug_rtx (operands
[1]);
1575 /* We want the movh patterns to get these. */
1576 if (GET_CODE (operands
[1]) == HIGH
)
1579 /* We can't store a register to a far variable without using a
1580 scratch register to hold the address. Using far variables should
1581 be split by mep_emit_mov anyway. */
1582 if (mep_section_tag (operands
[0]) == 'f'
1583 || mep_section_tag (operands
[1]) == 'f')
1586 fprintf (stderr
, " - no, f\n");
1590 i
= mep_section_tag (operands
[1]);
1591 if ((i
== 'b' || i
== 't') && !reload_completed
&& !reload_in_progress
)
1592 /* These are supposed to be generated with adds of the appropriate
1593 register. During and after reload, however, we allow them to
1594 be accessed as normal symbols because adding a dependency on
1595 the base register now might cause problems. */
1598 fprintf (stderr
, " - no, bt\n");
1603 /* The only moves we can allow involve at least one general
1604 register, so require it. */
1605 for (i
= 0; i
< 2; i
++)
1607 /* Allow subregs too, before reload. */
1608 rtx x
= operands
[i
];
1610 if (GET_CODE (x
) == SUBREG
)
1612 if (GET_CODE (x
) == REG
1613 && ! MEP_CONTROL_REG (x
))
1616 fprintf (stderr
, " - ok\n");
1622 fprintf (stderr
, " - no, no gen reg\n");
1627 #define DEBUG_SPLIT_WIDE_MOVE 0
1629 mep_split_wide_move (rtx
*operands
, enum machine_mode mode
)
1633 #if DEBUG_SPLIT_WIDE_MOVE
1634 fprintf (stderr
, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name
[mode
]);
1635 debug_rtx (operands
[0]);
1636 debug_rtx (operands
[1]);
1639 for (i
= 0; i
<= 1; i
++)
1641 rtx op
= operands
[i
], hi
, lo
;
1643 switch (GET_CODE (op
))
1647 unsigned int regno
= REGNO (op
);
1649 if (TARGET_64BIT_CR_REGS
&& CR_REGNO_P (regno
))
1653 lo
= gen_rtx_REG (SImode
, regno
);
1655 hi
= gen_rtx_ZERO_EXTRACT (SImode
,
1656 gen_rtx_REG (DImode
, regno
),
1661 hi
= gen_rtx_REG (SImode
, regno
+ TARGET_LITTLE_ENDIAN
);
1662 lo
= gen_rtx_REG (SImode
, regno
+ TARGET_BIG_ENDIAN
);
1670 hi
= operand_subword (op
, TARGET_LITTLE_ENDIAN
, 0, mode
);
1671 lo
= operand_subword (op
, TARGET_BIG_ENDIAN
, 0, mode
);
1678 /* The high part of CR <- GPR moves must be done after the low part. */
1679 operands
[i
+ 4] = lo
;
1680 operands
[i
+ 2] = hi
;
1683 if (reg_mentioned_p (operands
[2], operands
[5])
1684 || GET_CODE (operands
[2]) == ZERO_EXTRACT
1685 || GET_CODE (operands
[4]) == ZERO_EXTRACT
)
1689 /* Overlapping register pairs -- make sure we don't
1690 early-clobber ourselves. */
1692 operands
[2] = operands
[4];
1695 operands
[3] = operands
[5];
1699 #if DEBUG_SPLIT_WIDE_MOVE
1700 fprintf(stderr
, "\033[34m");
1701 debug_rtx (operands
[2]);
1702 debug_rtx (operands
[3]);
1703 debug_rtx (operands
[4]);
1704 debug_rtx (operands
[5]);
1705 fprintf(stderr
, "\033[0m");
1709 /* Emit a setcc instruction in its entirity. */
1712 mep_expand_setcc_1 (enum rtx_code code
, rtx dest
, rtx op1
, rtx op2
)
1720 tmp
= op1
, op1
= op2
, op2
= tmp
;
1721 code
= swap_condition (code
);
1726 op1
= force_reg (SImode
, op1
);
1727 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
1728 gen_rtx_fmt_ee (code
, SImode
, op1
, op2
)));
1732 if (op2
!= const0_rtx
)
1733 op1
= expand_binop (SImode
, sub_optab
, op1
, op2
, NULL
, 1, OPTAB_WIDEN
);
1734 mep_expand_setcc_1 (LTU
, dest
, op1
, const1_rtx
);
1738 /* Branchful sequence:
1740 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1743 Branchless sequence:
1744 add3 tmp, op1, -op2 32-bit (or mov + sub)
1745 sltu3 tmp, tmp, 1 16-bit
1746 xor3 dest, tmp, 1 32-bit
1748 if (optimize_size
&& op2
!= const0_rtx
)
1751 if (op2
!= const0_rtx
)
1752 op1
= expand_binop (SImode
, sub_optab
, op1
, op2
, NULL
, 1, OPTAB_WIDEN
);
1754 op2
= gen_reg_rtx (SImode
);
1755 mep_expand_setcc_1 (LTU
, op2
, op1
, const1_rtx
);
1757 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
1758 gen_rtx_XOR (SImode
, op2
, const1_rtx
)));
1762 if (GET_CODE (op2
) != CONST_INT
1763 || INTVAL (op2
) == 0x7ffffff)
1765 op2
= GEN_INT (INTVAL (op2
) + 1);
1766 return mep_expand_setcc_1 (LT
, dest
, op1
, op2
);
1769 if (GET_CODE (op2
) != CONST_INT
1770 || INTVAL (op2
) == -1)
1772 op2
= GEN_INT (trunc_int_for_mode (INTVAL (op2
) + 1, SImode
));
1773 return mep_expand_setcc_1 (LTU
, dest
, op1
, op2
);
1776 if (GET_CODE (op2
) != CONST_INT
1777 || INTVAL (op2
) == trunc_int_for_mode (0x80000000, SImode
))
1779 op2
= GEN_INT (INTVAL (op2
) - 1);
1780 return mep_expand_setcc_1 (GT
, dest
, op1
, op2
);
1783 if (GET_CODE (op2
) != CONST_INT
1784 || op2
== const0_rtx
)
1786 op2
= GEN_INT (trunc_int_for_mode (INTVAL (op2
) - 1, SImode
));
1787 return mep_expand_setcc_1 (GTU
, dest
, op1
, op2
);
1795 mep_expand_setcc (rtx
*operands
)
1797 rtx dest
= operands
[0];
1798 enum rtx_code code
= GET_CODE (operands
[1]);
1799 rtx op0
= operands
[2];
1800 rtx op1
= operands
[3];
1802 return mep_expand_setcc_1 (code
, dest
, op0
, op1
);
1806 mep_expand_cbranch (rtx
*operands
)
1808 enum rtx_code code
= GET_CODE (operands
[0]);
1809 rtx op0
= operands
[1];
1810 rtx op1
= operands
[2];
1817 if (mep_imm4_operand (op1
, SImode
))
1820 tmp
= gen_reg_rtx (SImode
);
1821 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op0
, op1
));
1828 if (mep_imm4_operand (op1
, SImode
))
1831 tmp
= gen_reg_rtx (SImode
);
1832 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op0
, op1
));
1841 if (! mep_reg_or_imm4_operand (op1
, SImode
))
1842 op1
= force_reg (SImode
, op1
);
1847 if (GET_CODE (op1
) == CONST_INT
1848 && INTVAL (op1
) != 0x7fffffff)
1850 op1
= GEN_INT (INTVAL (op1
) + 1);
1851 code
= (code
== LE
? LT
: GE
);
1855 tmp
= gen_reg_rtx (SImode
);
1856 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op1
, op0
));
1858 code
= (code
== LE
? EQ
: NE
);
1864 if (op1
== const1_rtx
)
1871 tmp
= gen_reg_rtx (SImode
);
1872 gcc_assert (mep_expand_setcc_1 (LTU
, tmp
, op0
, op1
));
1879 tmp
= gen_reg_rtx (SImode
);
1880 if (mep_expand_setcc_1 (LEU
, tmp
, op0
, op1
))
1882 else if (mep_expand_setcc_1 (LTU
, tmp
, op1
, op0
))
1891 tmp
= gen_reg_rtx (SImode
);
1892 gcc_assert (mep_expand_setcc_1 (GTU
, tmp
, op0
, op1
)
1893 || mep_expand_setcc_1 (LTU
, tmp
, op1
, op0
));
1900 tmp
= gen_reg_rtx (SImode
);
1901 if (mep_expand_setcc_1 (GEU
, tmp
, op0
, op1
))
1903 else if (mep_expand_setcc_1 (LTU
, tmp
, op0
, op1
))
1915 return gen_rtx_fmt_ee (code
, VOIDmode
, op0
, op1
);
1919 mep_emit_cbranch (rtx
*operands
, int ne
)
1921 if (GET_CODE (operands
[1]) == REG
)
1922 return ne
? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1923 else if (INTVAL (operands
[1]) == 0 && !mep_vliw_function_p(cfun
->decl
))
1924 return ne
? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1926 return ne
? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1930 mep_expand_call (rtx
*operands
, int returns_value
)
1932 rtx addr
= operands
[returns_value
];
1933 rtx tp
= mep_tp_rtx ();
1934 rtx gp
= mep_gp_rtx ();
1936 gcc_assert (GET_CODE (addr
) == MEM
);
1938 addr
= XEXP (addr
, 0);
1940 if (! mep_call_address_operand (addr
, VOIDmode
))
1941 addr
= force_reg (SImode
, addr
);
1943 if (! operands
[returns_value
+2])
1944 operands
[returns_value
+2] = const0_rtx
;
1947 emit_call_insn (gen_call_value_internal (operands
[0], addr
, operands
[2],
1948 operands
[3], tp
, gp
));
1950 emit_call_insn (gen_call_internal (addr
, operands
[1],
1951 operands
[2], tp
, gp
));
1954 /* Aliasing Support. */
1956 /* If X is a machine specific address (i.e. a symbol or label being
1957 referenced as a displacement from the GOT implemented using an
1958 UNSPEC), then return the base term. Otherwise return X. */
1961 mep_find_base_term (rtx x
)
1966 if (GET_CODE (x
) != PLUS
)
1971 if (has_hard_reg_initial_val(Pmode
, TP_REGNO
)
1972 && base
== mep_tp_rtx ())
1974 else if (has_hard_reg_initial_val(Pmode
, GP_REGNO
)
1975 && base
== mep_gp_rtx ())
1980 if (GET_CODE (term
) != CONST
)
1982 term
= XEXP (term
, 0);
1984 if (GET_CODE (term
) != UNSPEC
1985 || XINT (term
, 1) != unspec
)
1988 return XVECEXP (term
, 0, 0);
1991 /* Reload Support. */
1993 /* Return true if the registers in CLASS cannot represent the change from
1994 modes FROM to TO. */
1997 mep_cannot_change_mode_class (enum machine_mode from
, enum machine_mode to
,
1998 enum reg_class regclass
)
2003 /* 64-bit COP regs must remain 64-bit COP regs. */
2004 if (TARGET_64BIT_CR_REGS
2005 && (regclass
== CR_REGS
2006 || regclass
== LOADABLE_CR_REGS
)
2007 && (GET_MODE_SIZE (to
) < 8
2008 || GET_MODE_SIZE (from
) < 8))
2014 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
2017 mep_general_reg (rtx x
)
2019 while (GET_CODE (x
) == SUBREG
)
2021 return GET_CODE (x
) == REG
&& GR_REGNO_P (REGNO (x
));
2025 mep_nongeneral_reg (rtx x
)
2027 while (GET_CODE (x
) == SUBREG
)
2029 return (GET_CODE (x
) == REG
2030 && !GR_REGNO_P (REGNO (x
)) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2034 mep_general_copro_reg (rtx x
)
2036 while (GET_CODE (x
) == SUBREG
)
2038 return (GET_CODE (x
) == REG
&& CR_REGNO_P (REGNO (x
)));
2042 mep_nonregister (rtx x
)
2044 while (GET_CODE (x
) == SUBREG
)
2046 return (GET_CODE (x
) != REG
|| REGNO (x
) >= FIRST_PSEUDO_REGISTER
);
2049 #define DEBUG_RELOAD 0
2051 /* Return the secondary reload class needed for moving value X to or
2052 from a register in coprocessor register class CLASS. */
2054 static enum reg_class
2055 mep_secondary_copro_reload_class (enum reg_class rclass
, rtx x
)
2057 if (mep_general_reg (x
))
2058 /* We can do the move directly if mep_have_core_copro_moves_p,
2059 otherwise we need to go through memory. Either way, no secondary
2060 register is needed. */
2063 if (mep_general_copro_reg (x
))
2065 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2066 if (mep_have_copro_copro_moves_p
)
2069 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2070 if (mep_have_core_copro_moves_p
)
2071 return GENERAL_REGS
;
2073 /* Otherwise we need to do it through memory. No secondary
2074 register is needed. */
2078 if (reg_class_subset_p (rclass
, LOADABLE_CR_REGS
)
2079 && constraint_satisfied_p (x
, CONSTRAINT_U
))
2080 /* X is a memory value that we can access directly. */
2083 /* We have to move X into a GPR first and then copy it to
2084 the coprocessor register. The move from the GPR to the
2085 coprocessor might be done directly or through memory,
2086 depending on mep_have_core_copro_moves_p. */
2087 return GENERAL_REGS
;
2090 /* Copying X to register in RCLASS. */
2093 mep_secondary_input_reload_class (enum reg_class rclass
,
2094 enum machine_mode mode ATTRIBUTE_UNUSED
,
2100 fprintf (stderr
, "secondary input reload copy to %s %s from ", reg_class_names
[rclass
], mode_name
[mode
]);
2104 if (reg_class_subset_p (rclass
, CR_REGS
))
2105 rv
= mep_secondary_copro_reload_class (rclass
, x
);
2106 else if (MEP_NONGENERAL_CLASS (rclass
)
2107 && (mep_nonregister (x
) || mep_nongeneral_reg (x
)))
2111 fprintf (stderr
, " - requires %s\n", reg_class_names
[rv
]);
2116 /* Copying register in RCLASS to X. */
2119 mep_secondary_output_reload_class (enum reg_class rclass
,
2120 enum machine_mode mode ATTRIBUTE_UNUSED
,
2126 fprintf (stderr
, "secondary output reload copy from %s %s to ", reg_class_names
[rclass
], mode_name
[mode
]);
2130 if (reg_class_subset_p (rclass
, CR_REGS
))
2131 rv
= mep_secondary_copro_reload_class (rclass
, x
);
2132 else if (MEP_NONGENERAL_CLASS (rclass
)
2133 && (mep_nonregister (x
) || mep_nongeneral_reg (x
)))
2137 fprintf (stderr
, " - requires %s\n", reg_class_names
[rv
]);
2143 /* Implement SECONDARY_MEMORY_NEEDED. */
2146 mep_secondary_memory_needed (enum reg_class rclass1
, enum reg_class rclass2
,
2147 enum machine_mode mode ATTRIBUTE_UNUSED
)
2149 if (!mep_have_core_copro_moves_p
)
2151 if (reg_classes_intersect_p (rclass1
, CR_REGS
)
2152 && reg_classes_intersect_p (rclass2
, GENERAL_REGS
))
2154 if (reg_classes_intersect_p (rclass2
, CR_REGS
)
2155 && reg_classes_intersect_p (rclass1
, GENERAL_REGS
))
2157 if (!mep_have_copro_copro_moves_p
2158 && reg_classes_intersect_p (rclass1
, CR_REGS
)
2159 && reg_classes_intersect_p (rclass2
, CR_REGS
))
2166 mep_expand_reload (rtx
*operands
, enum machine_mode mode
)
2168 /* There are three cases for each direction:
2173 int s0
= mep_section_tag (operands
[0]) == 'f';
2174 int s1
= mep_section_tag (operands
[1]) == 'f';
2175 int c0
= mep_nongeneral_reg (operands
[0]);
2176 int c1
= mep_nongeneral_reg (operands
[1]);
2177 int which
= (s0
? 20:0) + (c0
? 10:0) + (s1
? 2:0) + (c1
? 1:0);
2180 fprintf (stderr
, "expand_reload %s\n", mode_name
[mode
]);
2181 debug_rtx (operands
[0]);
2182 debug_rtx (operands
[1]);
2187 case 00: /* Don't know why this gets here. */
2188 case 02: /* general = far */
2189 emit_move_insn (operands
[0], operands
[1]);
2192 case 10: /* cr = mem */
2193 case 11: /* cr = cr */
2194 case 01: /* mem = cr */
2195 case 12: /* cr = far */
2196 emit_move_insn (operands
[2], operands
[1]);
2197 emit_move_insn (operands
[0], operands
[2]);
2200 case 20: /* far = general */
2201 emit_move_insn (operands
[2], XEXP (operands
[1], 0));
2202 emit_move_insn (operands
[0], gen_rtx_MEM (mode
, operands
[2]));
2205 case 21: /* far = cr */
2206 case 22: /* far = far */
2208 fprintf (stderr
, "unsupported expand reload case %02d for mode %s\n",
2209 which
, mode_name
[mode
]);
2210 debug_rtx (operands
[0]);
2211 debug_rtx (operands
[1]);
2216 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2217 can be moved directly into registers 0 to 7, but not into the rest.
2218 If so, and if the required class includes registers 0 to 7, restrict
2219 it to those registers. */
2222 mep_preferred_reload_class (rtx x
, enum reg_class rclass
)
2224 switch (GET_CODE (x
))
2227 if (INTVAL (x
) >= 0x10000
2228 && INTVAL (x
) < 0x01000000
2229 && (INTVAL (x
) & 0xffff) != 0
2230 && reg_class_subset_p (TPREL_REGS
, rclass
))
2231 rclass
= TPREL_REGS
;
2237 if (mep_section_tag (x
) != 'f'
2238 && reg_class_subset_p (TPREL_REGS
, rclass
))
2239 rclass
= TPREL_REGS
;
2248 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2249 moves, 4 for direct double-register moves, and 1000 for anything
2250 that requires a temporary register or temporary stack slot. */
2253 mep_register_move_cost (enum machine_mode mode
, enum reg_class from
, enum reg_class to
)
2255 if (mep_have_copro_copro_moves_p
2256 && reg_class_subset_p (from
, CR_REGS
)
2257 && reg_class_subset_p (to
, CR_REGS
))
2259 if (TARGET_32BIT_CR_REGS
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2263 if (reg_class_subset_p (from
, CR_REGS
)
2264 && reg_class_subset_p (to
, CR_REGS
))
2266 if (TARGET_32BIT_CR_REGS
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2270 if (reg_class_subset_p (from
, CR_REGS
)
2271 || reg_class_subset_p (to
, CR_REGS
))
2273 if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2277 if (mep_secondary_memory_needed (from
, to
, mode
))
2279 if (MEP_NONGENERAL_CLASS (from
) && MEP_NONGENERAL_CLASS (to
))
2282 if (GET_MODE_SIZE (mode
) > 4)
/* Functions to save and restore machine-specific function data.  */

/* Allocate a zero-initialized machine_function record for CFUN
   (installed via init_machine_status).  */
static struct machine_function *
mep_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
2298 mep_allocate_initial_value (rtx reg
)
2302 if (GET_CODE (reg
) != REG
)
2305 if (REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2308 /* In interrupt functions, the "initial" values of $gp and $tp are
2309 provided by the prologue. They are not necessarily the same as
2310 the values that the caller was using. */
2311 if (REGNO (reg
) == TP_REGNO
|| REGNO (reg
) == GP_REGNO
)
2312 if (mep_interrupt_p ())
2315 if (! cfun
->machine
->reg_save_slot
[REGNO(reg
)])
2317 cfun
->machine
->reg_save_size
+= 4;
2318 cfun
->machine
->reg_save_slot
[REGNO(reg
)] = cfun
->machine
->reg_save_size
;
2321 rss
= cfun
->machine
->reg_save_slot
[REGNO(reg
)];
2322 return gen_rtx_MEM (SImode
, plus_constant (arg_pointer_rtx
, -rss
));
2326 mep_return_addr_rtx (int count
)
2331 return get_hard_reg_initial_val (Pmode
, LP_REGNO
);
2337 return get_hard_reg_initial_val (Pmode
, TP_REGNO
);
2343 return get_hard_reg_initial_val (Pmode
, GP_REGNO
);
2347 mep_interrupt_p (void)
2349 if (cfun
->machine
->interrupt_handler
== 0)
2351 int interrupt_handler
2352 = (lookup_attribute ("interrupt",
2353 DECL_ATTRIBUTES (current_function_decl
))
2355 cfun
->machine
->interrupt_handler
= interrupt_handler
? 2 : 1;
2357 return cfun
->machine
->interrupt_handler
== 2;
2361 mep_disinterrupt_p (void)
2363 if (cfun
->machine
->disable_interrupts
== 0)
2365 int disable_interrupts
2366 = (lookup_attribute ("disinterrupt",
2367 DECL_ATTRIBUTES (current_function_decl
))
2369 cfun
->machine
->disable_interrupts
= disable_interrupts
? 2 : 1;
2371 return cfun
->machine
->disable_interrupts
== 2;
2375 /* Frame/Epilog/Prolog Related. */
2378 mep_reg_set_p (rtx reg
, rtx insn
)
2380 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2383 if (FIND_REG_INC_NOTE (insn
, reg
))
2385 insn
= PATTERN (insn
);
2388 if (GET_CODE (insn
) == SET
2389 && GET_CODE (XEXP (insn
, 0)) == REG
2390 && GET_CODE (XEXP (insn
, 1)) == REG
2391 && REGNO (XEXP (insn
, 0)) == REGNO (XEXP (insn
, 1)))
2394 return set_of (reg
, insn
) != NULL_RTX
;
2398 #define MEP_SAVES_UNKNOWN 0
2399 #define MEP_SAVES_YES 1
2400 #define MEP_SAVES_MAYBE 2
2401 #define MEP_SAVES_NO 3
2404 mep_reg_set_in_function (int regno
)
2408 if (mep_interrupt_p () && df_regs_ever_live_p(regno
))
2411 if (regno
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2414 push_topmost_sequence ();
2415 insn
= get_insns ();
2416 pop_topmost_sequence ();
2421 reg
= gen_rtx_REG (SImode
, regno
);
2423 for (insn
= NEXT_INSN (insn
); insn
; insn
= NEXT_INSN (insn
))
2424 if (INSN_P (insn
) && mep_reg_set_p (reg
, insn
))
2430 mep_asm_without_operands_p (void)
2432 if (cfun
->machine
->asms_without_operands
== 0)
2436 push_topmost_sequence ();
2437 insn
= get_insns ();
2438 pop_topmost_sequence ();
2440 cfun
->machine
->asms_without_operands
= 1;
2444 && GET_CODE (PATTERN (insn
)) == ASM_INPUT
)
2446 cfun
->machine
->asms_without_operands
= 2;
2449 insn
= NEXT_INSN (insn
);
2453 return cfun
->machine
->asms_without_operands
== 2;
2456 /* Interrupt functions save/restore every call-preserved register, and
2457 any call-used register it uses (or all if it calls any function,
2458 since they may get clobbered there too). Here we check to see
2459 which call-used registers need saving. */
2461 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2462 && (r == FIRST_CCR_REGNO + 1 \
2463 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2464 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2467 mep_interrupt_saved_reg (int r
)
2469 if (!mep_interrupt_p ())
2471 if (r
== REGSAVE_CONTROL_TEMP
2472 || (TARGET_64BIT_CR_REGS
&& TARGET_COP
&& r
== REGSAVE_CONTROL_TEMP
+1))
2474 if (mep_asm_without_operands_p ()
2476 || (r
== RPB_REGNO
|| r
== RPE_REGNO
|| r
== RPC_REGNO
|| r
== LP_REGNO
)
2477 || IVC2_ISAVED_REG (r
)))
2479 if (!current_function_is_leaf
)
2480 /* Function calls mean we need to save $lp. */
2481 if (r
== LP_REGNO
|| IVC2_ISAVED_REG (r
))
2483 if (!current_function_is_leaf
|| cfun
->machine
->doloop_tags
> 0)
2484 /* The interrupt handler might use these registers for repeat blocks,
2485 or it might call a function that does so. */
2486 if (r
== RPB_REGNO
|| r
== RPE_REGNO
|| r
== RPC_REGNO
)
2488 if (current_function_is_leaf
&& call_used_regs
[r
] && !df_regs_ever_live_p(r
))
2490 /* Functions we call might clobber these. */
2491 if (call_used_regs
[r
] && !fixed_regs
[r
])
2493 /* Additional registers that need to be saved for IVC2. */
2494 if (IVC2_ISAVED_REG (r
))
2501 mep_call_saves_register (int r
)
2503 if (! cfun
->machine
->frame_locked
)
2505 int rv
= MEP_SAVES_NO
;
2507 if (cfun
->machine
->reg_save_slot
[r
])
2509 else if (r
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2511 else if (r
== FRAME_POINTER_REGNUM
&& frame_pointer_needed
)
2513 else if ((!call_used_regs
[r
] || r
== LP_REGNO
) && df_regs_ever_live_p(r
))
2515 else if (crtl
->calls_eh_return
&& (r
== 10 || r
== 11))
2516 /* We need these to have stack slots so that they can be set during
2519 else if (mep_interrupt_saved_reg (r
))
2521 cfun
->machine
->reg_saved
[r
] = rv
;
2523 return cfun
->machine
->reg_saved
[r
] == MEP_SAVES_YES
;
2526 /* Return true if epilogue uses register REGNO. */
2529 mep_epilogue_uses (int regno
)
2531 /* Since $lp is a call-saved register, the generic code will normally
2532 mark it used in the epilogue if it needs to be saved and restored.
2533 However, when profiling is enabled, the profiling code will implicitly
2534 clobber $11. This case has to be handled specially both here and in
2535 mep_call_saves_register. */
2536 if (regno
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2538 /* Interrupt functions save/restore pretty much everything. */
2539 return (reload_completed
&& mep_interrupt_saved_reg (regno
));
2543 mep_reg_size (int regno
)
2545 if (CR_REGNO_P (regno
) && TARGET_64BIT_CR_REGS
)
2550 /* Worker function for TARGET_CAN_ELIMINATE. */
2553 mep_can_eliminate (const int from
, const int to
)
2555 return (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
2556 ? ! frame_pointer_needed
2561 mep_elimination_offset (int from
, int to
)
2565 int frame_size
= get_frame_size () + crtl
->outgoing_args_size
;
2568 if (!cfun
->machine
->frame_locked
)
2569 memset (cfun
->machine
->reg_saved
, 0, sizeof (cfun
->machine
->reg_saved
));
2571 /* We don't count arg_regs_to_save in the arg pointer offset, because
2572 gcc thinks the arg pointer has moved along with the saved regs.
2573 However, we do count it when we adjust $sp in the prologue. */
2575 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2576 if (mep_call_saves_register (i
))
2577 reg_save_size
+= mep_reg_size (i
);
2579 if (reg_save_size
% 8)
2580 cfun
->machine
->regsave_filler
= 8 - (reg_save_size
% 8);
2582 cfun
->machine
->regsave_filler
= 0;
2584 /* This is what our total stack adjustment looks like. */
2585 total_size
= (reg_save_size
+ frame_size
+ cfun
->machine
->regsave_filler
);
2588 cfun
->machine
->frame_filler
= 8 - (total_size
% 8);
2590 cfun
->machine
->frame_filler
= 0;
2593 if (from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
2594 return reg_save_size
+ cfun
->machine
->regsave_filler
;
2596 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
2597 return cfun
->machine
->frame_filler
+ frame_size
;
2599 if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
2600 return reg_save_size
+ cfun
->machine
->regsave_filler
+ cfun
->machine
->frame_filler
+ frame_size
;
2608 RTX_FRAME_RELATED_P (x
) = 1;
2612 /* Since the prologue/epilogue code is generated after optimization,
2613 we can't rely on gcc to split constants for us. So, this code
2614 captures all the ways to add a constant to a register in one logic
2615 chunk, including optimizing away insns we just don't need. This
2616 makes the prolog/epilog code easier to follow. */
2618 add_constant (int dest
, int src
, int value
, int mark_frame
)
2623 if (src
== dest
&& value
== 0)
2628 insn
= emit_move_insn (gen_rtx_REG (SImode
, dest
),
2629 gen_rtx_REG (SImode
, src
));
2631 RTX_FRAME_RELATED_P(insn
) = 1;
2635 if (value
>= -32768 && value
<= 32767)
2637 insn
= emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, dest
),
2638 gen_rtx_REG (SImode
, src
),
2641 RTX_FRAME_RELATED_P(insn
) = 1;
2645 /* Big constant, need to use a temp register. We use
2646 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2647 area is always small enough to directly add to). */
2649 hi
= trunc_int_for_mode (value
& 0xffff0000, SImode
);
2650 lo
= value
& 0xffff;
2652 insn
= emit_move_insn (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2657 insn
= emit_insn (gen_iorsi3 (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2658 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2662 insn
= emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, dest
),
2663 gen_rtx_REG (SImode
, src
),
2664 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
)));
2667 RTX_FRAME_RELATED_P(insn
) = 1;
2668 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2669 gen_rtx_SET (SImode
,
2670 gen_rtx_REG (SImode
, dest
),
2671 gen_rtx_PLUS (SImode
,
2672 gen_rtx_REG (SImode
, dest
),
2677 /* Move SRC to DEST. Mark the move as being potentially dead if
2681 maybe_dead_move (rtx dest
, rtx src
, bool ATTRIBUTE_UNUSED maybe_dead_p
)
2683 rtx insn
= emit_move_insn (dest
, src
);
2686 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
, NULL
);
2691 /* Used for interrupt functions, which can't assume that $tp and $gp
2692 contain the correct pointers. */
2695 mep_reload_pointer (int regno
, const char *symbol
)
2699 if (!df_regs_ever_live_p(regno
) && current_function_is_leaf
)
2702 reg
= gen_rtx_REG (SImode
, regno
);
2703 sym
= gen_rtx_SYMBOL_REF (SImode
, symbol
);
2704 emit_insn (gen_movsi_topsym_s (reg
, sym
));
2705 emit_insn (gen_movsi_botsym_s (reg
, reg
, sym
));
2708 /* Assign save slots for any register not already saved. DImode
2709 registers go at the end of the reg save area; the rest go at the
2710 beginning. This is for alignment purposes. Returns true if a frame
2711 is really needed. */
2713 mep_assign_save_slots (int reg_save_size
)
2715 bool really_need_stack_frame
= false;
2719 for (i
=0; i
<FIRST_PSEUDO_REGISTER
; i
++)
2720 if (mep_call_saves_register(i
))
2722 int regsize
= mep_reg_size (i
);
2724 if ((i
!= TP_REGNO
&& i
!= GP_REGNO
&& i
!= LP_REGNO
)
2725 || mep_reg_set_in_function (i
))
2726 really_need_stack_frame
= true;
2728 if (cfun
->machine
->reg_save_slot
[i
])
2733 cfun
->machine
->reg_save_size
+= regsize
;
2734 cfun
->machine
->reg_save_slot
[i
] = cfun
->machine
->reg_save_size
;
2738 cfun
->machine
->reg_save_slot
[i
] = reg_save_size
- di_ofs
;
2742 cfun
->machine
->frame_locked
= 1;
2743 return really_need_stack_frame
;
2747 mep_expand_prologue (void)
2749 int i
, rss
, sp_offset
= 0;
2752 int really_need_stack_frame
;
2754 /* We must not allow register renaming in interrupt functions,
2755 because that invalidates the correctness of the set of call-used
2756 registers we're going to save/restore. */
2757 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2759 if (mep_disinterrupt_p ())
2760 emit_insn (gen_mep_disable_int ());
2762 cfun
->machine
->mep_frame_pointer_needed
= frame_pointer_needed
;
2764 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2765 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2766 really_need_stack_frame
= frame_size
;
2768 really_need_stack_frame
|= mep_assign_save_slots (reg_save_size
);
2770 sp_offset
= reg_save_size
;
2771 if (sp_offset
+ frame_size
< 128)
2772 sp_offset
+= frame_size
;
2774 add_constant (SP_REGNO
, SP_REGNO
, -sp_offset
, 1);
2776 for (i
=0; i
<FIRST_PSEUDO_REGISTER
; i
++)
2777 if (mep_call_saves_register(i
))
2781 enum machine_mode rmode
;
2783 rss
= cfun
->machine
->reg_save_slot
[i
];
2785 if ((i
== TP_REGNO
|| i
== GP_REGNO
|| i
== LP_REGNO
)
2786 && (!mep_reg_set_in_function (i
)
2787 && !mep_interrupt_p ()))
2790 if (mep_reg_size (i
) == 8)
2795 /* If there is a pseudo associated with this register's initial value,
2796 reload might have already spilt it to the stack slot suggested by
2797 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2799 mem
= gen_rtx_MEM (rmode
,
2800 plus_constant (stack_pointer_rtx
, sp_offset
- rss
));
2801 maybe_dead_p
= rtx_equal_p (mem
, has_hard_reg_initial_val (rmode
, i
));
2803 if (GR_REGNO_P (i
) || LOADABLE_CR_REGNO_P (i
))
2804 F(maybe_dead_move (mem
, gen_rtx_REG (rmode
, i
), maybe_dead_p
));
2805 else if (rmode
== DImode
)
2808 int be
= TARGET_BIG_ENDIAN
? 4 : 0;
2810 mem
= gen_rtx_MEM (SImode
,
2811 plus_constant (stack_pointer_rtx
, sp_offset
- rss
+ be
));
2813 maybe_dead_move (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2814 gen_rtx_REG (SImode
, i
),
2816 maybe_dead_move (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
+1),
2817 gen_rtx_ZERO_EXTRACT (SImode
,
2818 gen_rtx_REG (DImode
, i
),
2822 insn
= maybe_dead_move (mem
,
2823 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2825 RTX_FRAME_RELATED_P (insn
) = 1;
2827 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2828 gen_rtx_SET (VOIDmode
,
2830 gen_rtx_REG (rmode
, i
)));
2831 mem
= gen_rtx_MEM (SImode
,
2832 plus_constant (stack_pointer_rtx
, sp_offset
- rss
+ (4-be
)));
2833 insn
= maybe_dead_move (mem
,
2834 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
+1),
2840 maybe_dead_move (gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2841 gen_rtx_REG (rmode
, i
),
2843 insn
= maybe_dead_move (mem
,
2844 gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2846 RTX_FRAME_RELATED_P (insn
) = 1;
2848 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2849 gen_rtx_SET (VOIDmode
,
2851 gen_rtx_REG (rmode
, i
)));
2855 if (frame_pointer_needed
)
2857 /* We've already adjusted down by sp_offset. Total $sp change
2858 is reg_save_size + frame_size. We want a net change here of
2859 just reg_save_size. */
2860 add_constant (FP_REGNO
, SP_REGNO
, sp_offset
- reg_save_size
, 1);
2863 add_constant (SP_REGNO
, SP_REGNO
, sp_offset
-(reg_save_size
+frame_size
), 1);
2865 if (mep_interrupt_p ())
2867 mep_reload_pointer(GP_REGNO
, "__sdabase");
2868 mep_reload_pointer(TP_REGNO
, "__tpbase");
2873 mep_start_function (FILE *file
, HOST_WIDE_INT hwi_local
)
2875 int local
= hwi_local
;
2876 int frame_size
= local
+ crtl
->outgoing_args_size
;
2881 int slot_map
[FIRST_PSEUDO_REGISTER
], si
, sj
;
2883 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2884 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2885 sp_offset
= reg_save_size
+ frame_size
;
2887 ffill
= cfun
->machine
->frame_filler
;
2889 if (cfun
->machine
->mep_frame_pointer_needed
)
2890 reg_names
[FP_REGNO
] = "$fp";
2892 reg_names
[FP_REGNO
] = "$8";
2897 if (debug_info_level
== DINFO_LEVEL_NONE
)
2899 fprintf (file
, "\t# frame: %d", sp_offset
);
2901 fprintf (file
, " %d regs", reg_save_size
);
2903 fprintf (file
, " %d locals", local
);
2904 if (crtl
->outgoing_args_size
)
2905 fprintf (file
, " %d args", crtl
->outgoing_args_size
);
2906 fprintf (file
, "\n");
2910 fprintf (file
, "\t#\n");
2911 fprintf (file
, "\t# Initial Frame Information:\n");
2912 if (sp_offset
|| !frame_pointer_needed
)
2913 fprintf (file
, "\t# Entry ---------- 0\n");
2915 /* Sort registers by save slots, so they're printed in the order
2916 they appear in memory, not the order they're saved in. */
2917 for (si
=0; si
<FIRST_PSEUDO_REGISTER
; si
++)
2919 for (si
=0; si
<FIRST_PSEUDO_REGISTER
-1; si
++)
2920 for (sj
=si
+1; sj
<FIRST_PSEUDO_REGISTER
; sj
++)
2921 if (cfun
->machine
->reg_save_slot
[slot_map
[si
]]
2922 > cfun
->machine
->reg_save_slot
[slot_map
[sj
]])
2924 int t
= slot_map
[si
];
2925 slot_map
[si
] = slot_map
[sj
];
2930 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2933 int r
= slot_map
[i
];
2934 int rss
= cfun
->machine
->reg_save_slot
[r
];
2936 if (!mep_call_saves_register (r
))
2939 if ((r
== TP_REGNO
|| r
== GP_REGNO
|| r
== LP_REGNO
)
2940 && (!mep_reg_set_in_function (r
)
2941 && !mep_interrupt_p ()))
2944 rsize
= mep_reg_size(r
);
2945 skip
= rss
- (sp
+rsize
);
2947 fprintf (file
, "\t# %3d bytes for alignment\n", skip
);
2948 fprintf (file
, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2949 rsize
, reg_names
[r
], sp_offset
- rss
);
2953 skip
= reg_save_size
- sp
;
2955 fprintf (file
, "\t# %3d bytes for alignment\n", skip
);
2957 if (frame_pointer_needed
)
2958 fprintf (file
, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size
, sp_offset
-reg_save_size
);
2960 fprintf (file
, "\t# %3d bytes for local vars\n", local
);
2962 fprintf (file
, "\t# %3d bytes for alignment\n", ffill
);
2963 if (crtl
->outgoing_args_size
)
2964 fprintf (file
, "\t# %3d bytes for outgoing args\n",
2965 crtl
->outgoing_args_size
);
2966 fprintf (file
, "\t# SP ---> ---------- %d\n", sp_offset
);
2967 fprintf (file
, "\t#\n");
2971 static int mep_prevent_lp_restore
= 0;
2972 static int mep_sibcall_epilogue
= 0;
2975 mep_expand_epilogue (void)
2977 int i
, sp_offset
= 0;
2978 int reg_save_size
= 0;
2980 int lp_temp
= LP_REGNO
, lp_slot
= -1;
2981 int really_need_stack_frame
= get_frame_size() + crtl
->outgoing_args_size
;
2982 int interrupt_handler
= mep_interrupt_p ();
2984 if (profile_arc_flag
== 2)
2985 emit_insn (gen_mep_bb_trace_ret ());
2987 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2988 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2990 really_need_stack_frame
|= mep_assign_save_slots (reg_save_size
);
2992 if (frame_pointer_needed
)
2994 /* If we have a frame pointer, we won't have a reliable stack
2995 pointer (alloca, you know), so rebase SP from FP */
2996 emit_move_insn (gen_rtx_REG (SImode
, SP_REGNO
),
2997 gen_rtx_REG (SImode
, FP_REGNO
));
2998 sp_offset
= reg_save_size
;
3002 /* SP is right under our local variable space. Adjust it if
3004 sp_offset
= reg_save_size
+ frame_size
;
3005 if (sp_offset
>= 128)
3007 add_constant (SP_REGNO
, SP_REGNO
, frame_size
, 0);
3008 sp_offset
-= frame_size
;
3012 /* This is backwards so that we restore the control and coprocessor
3013 registers before the temporary registers we use to restore
3015 for (i
=FIRST_PSEUDO_REGISTER
-1; i
>=1; i
--)
3016 if (mep_call_saves_register (i
))
3018 enum machine_mode rmode
;
3019 int rss
= cfun
->machine
->reg_save_slot
[i
];
3021 if (mep_reg_size (i
) == 8)
3026 if ((i
== TP_REGNO
|| i
== GP_REGNO
|| i
== LP_REGNO
)
3027 && !(mep_reg_set_in_function (i
) || interrupt_handler
))
3029 if (mep_prevent_lp_restore
&& i
== LP_REGNO
)
3031 if (!mep_prevent_lp_restore
3032 && !interrupt_handler
3033 && (i
== 10 || i
== 11))
3036 if (GR_REGNO_P (i
) || LOADABLE_CR_REGNO_P (i
))
3037 emit_move_insn (gen_rtx_REG (rmode
, i
),
3039 plus_constant (stack_pointer_rtx
,
3043 if (i
== LP_REGNO
&& !mep_sibcall_epilogue
&& !interrupt_handler
)
3044 /* Defer this one so we can jump indirect rather than
3045 copying the RA to $lp and "ret". EH epilogues
3046 automatically skip this anyway. */
3047 lp_slot
= sp_offset
-rss
;
3050 emit_move_insn (gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
3052 plus_constant (stack_pointer_rtx
,
3054 emit_move_insn (gen_rtx_REG (rmode
, i
),
3055 gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
));
3061 /* Restore this one last so we know it will be in the temp
3062 register when we return by jumping indirectly via the temp. */
3063 emit_move_insn (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
3064 gen_rtx_MEM (SImode
,
3065 plus_constant (stack_pointer_rtx
,
3067 lp_temp
= REGSAVE_CONTROL_TEMP
;
3071 add_constant (SP_REGNO
, SP_REGNO
, sp_offset
, 0);
3073 if (crtl
->calls_eh_return
&& mep_prevent_lp_restore
)
3074 emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, SP_REGNO
),
3075 gen_rtx_REG (SImode
, SP_REGNO
),
3076 cfun
->machine
->eh_stack_adjust
));
3078 if (mep_sibcall_epilogue
)
3081 if (mep_disinterrupt_p ())
3082 emit_insn (gen_mep_enable_int ());
3084 if (mep_prevent_lp_restore
)
3086 emit_jump_insn (gen_eh_return_internal ());
3089 else if (interrupt_handler
)
3090 emit_jump_insn (gen_mep_reti ());
3092 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode
, lp_temp
)));
3096 mep_expand_eh_return (rtx
*operands
)
3098 if (GET_CODE (operands
[0]) != REG
|| REGNO (operands
[0]) != LP_REGNO
)
3100 rtx ra
= gen_rtx_REG (Pmode
, LP_REGNO
);
3101 emit_move_insn (ra
, operands
[0]);
3105 emit_insn (gen_eh_epilogue (operands
[0]));
3109 mep_emit_eh_epilogue (rtx
*operands ATTRIBUTE_UNUSED
)
3111 cfun
->machine
->eh_stack_adjust
= gen_rtx_REG (Pmode
, 0);
3112 mep_prevent_lp_restore
= 1;
3113 mep_expand_epilogue ();
3114 mep_prevent_lp_restore
= 0;
3118 mep_expand_sibcall_epilogue (void)
3120 mep_sibcall_epilogue
= 1;
3121 mep_expand_epilogue ();
3122 mep_sibcall_epilogue
= 0;
3126 mep_function_ok_for_sibcall (tree decl
, tree exp ATTRIBUTE_UNUSED
)
3131 if (mep_section_tag (DECL_RTL (decl
)) == 'f')
3134 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3135 if (mep_interrupt_p () || mep_disinterrupt_p ())
3142 mep_return_stackadj_rtx (void)
3144 return gen_rtx_REG (SImode
, 10);
3148 mep_return_handler_rtx (void)
3150 return gen_rtx_REG (SImode
, LP_REGNO
);
3154 mep_function_profiler (FILE *file
)
3156 /* Always right at the beginning of the function. */
3157 fprintf (file
, "\t# mep function profiler\n");
3158 fprintf (file
, "\tadd\t$sp, -8\n");
3159 fprintf (file
, "\tsw\t$0, ($sp)\n");
3160 fprintf (file
, "\tldc\t$0, $lp\n");
3161 fprintf (file
, "\tsw\t$0, 4($sp)\n");
3162 fprintf (file
, "\tbsr\t__mep_mcount\n");
3163 fprintf (file
, "\tlw\t$0, 4($sp)\n");
3164 fprintf (file
, "\tstc\t$0, $lp\n");
3165 fprintf (file
, "\tlw\t$0, ($sp)\n");
3166 fprintf (file
, "\tadd\t$sp, 8\n\n");
3170 mep_emit_bb_trace_ret (void)
3172 fprintf (asm_out_file
, "\t# end of block profiling\n");
3173 fprintf (asm_out_file
, "\tadd\t$sp, -8\n");
3174 fprintf (asm_out_file
, "\tsw\t$0, ($sp)\n");
3175 fprintf (asm_out_file
, "\tldc\t$0, $lp\n");
3176 fprintf (asm_out_file
, "\tsw\t$0, 4($sp)\n");
3177 fprintf (asm_out_file
, "\tbsr\t__bb_trace_ret\n");
3178 fprintf (asm_out_file
, "\tlw\t$0, 4($sp)\n");
3179 fprintf (asm_out_file
, "\tstc\t$0, $lp\n");
3180 fprintf (asm_out_file
, "\tlw\t$0, ($sp)\n");
3181 fprintf (asm_out_file
, "\tadd\t$sp, 8\n\n");
3188 /* Operand Printing. */
3191 mep_print_operand_address (FILE *stream
, rtx address
)
3193 if (GET_CODE (address
) == MEM
)
3194 address
= XEXP (address
, 0);
3196 /* cf: gcc.dg/asm-4.c. */
3197 gcc_assert (GET_CODE (address
) == REG
);
3199 mep_print_operand (stream
, address
, 0);
3205 const char *pattern
;
3208 const conversions
[] =
3211 { 0, "m+ri", "3(2)" },
3215 { 0, "mLrs", "%lo(3)(2)" },
3216 { 0, "mLr+si", "%lo(4+5)(2)" },
3217 { 0, "m+ru2s", "%tpoff(5)(2)" },
3218 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3219 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3220 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3221 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3222 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3224 { 0, "m+si", "(2+3)" },
3225 { 0, "m+li", "(2+3)" },
3228 { 0, "+si", "1+2" },
3229 { 0, "+u2si", "%tpoff(3+4)" },
3230 { 0, "+u3si", "%sdaoff(3+4)" },
3236 { 'h', "Hs", "%hi(1)" },
3238 { 'I', "u2s", "%tpoff(2)" },
3239 { 'I', "u3s", "%sdaoff(2)" },
3240 { 'I', "+u2si", "%tpoff(3+4)" },
3241 { 'I', "+u3si", "%sdaoff(3+4)" },
3243 { 'P', "mr", "(1\\+),\\0" },
3249 unique_bit_in (HOST_WIDE_INT i
)
3253 case 0x01: case 0xfe: return 0;
3254 case 0x02: case 0xfd: return 1;
3255 case 0x04: case 0xfb: return 2;
3256 case 0x08: case 0xf7: return 3;
3257 case 0x10: case 0x7f: return 4;
3258 case 0x20: case 0xbf: return 5;
3259 case 0x40: case 0xdf: return 6;
3260 case 0x80: case 0xef: return 7;
3267 bit_size_for_clip (HOST_WIDE_INT i
)
3271 for (rv
= 0; rv
< 31; rv
++)
3272 if (((HOST_WIDE_INT
) 1 << rv
) > i
)
3277 /* Print an operand to a assembler instruction. */
3280 mep_print_operand (FILE *file
, rtx x
, int code
)
3283 const char *real_name
;
3287 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3288 we're using, then skip over the "mep_" part of its name. */
3289 const struct cgen_insn
*insn
;
3291 if (mep_get_move_insn (mep_cmov
, &insn
))
3292 fputs (cgen_intrinsics
[insn
->intrinsic
] + 4, file
);
3294 mep_intrinsic_unavailable (mep_cmov
);
3299 switch (GET_CODE (x
))
3302 fputs ("clr", file
);
3305 fputs ("set", file
);
3308 fputs ("not", file
);
3311 output_operand_lossage ("invalid %%L code");
3316 /* Print the second operand of a CR <- CR move. If we're using
3317 a two-operand instruction (i.e., a real cmov), then just print
3318 the operand normally. If we're using a "reg, reg, immediate"
3319 instruction such as caddi3, print the operand followed by a
3320 zero field. If we're using a three-register instruction,
3321 print the operand twice. */
3322 const struct cgen_insn
*insn
;
3324 mep_print_operand (file
, x
, 0);
3325 if (mep_get_move_insn (mep_cmov
, &insn
)
3326 && insn_data
[insn
->icode
].n_operands
== 3)
3329 if (insn_data
[insn
->icode
].operand
[2].predicate (x
, VOIDmode
))
3330 mep_print_operand (file
, x
, 0);
3332 mep_print_operand (file
, const0_rtx
, 0);
3338 for (i
= 0; conversions
[i
].pattern
; i
++)
3339 if (conversions
[i
].code
== code
3340 && strcmp(conversions
[i
].pattern
, pattern
) == 0)
3342 for (j
= 0; conversions
[i
].format
[j
]; j
++)
3343 if (conversions
[i
].format
[j
] == '\\')
3345 fputc (conversions
[i
].format
[j
+1], file
);
3348 else if (ISDIGIT(conversions
[i
].format
[j
]))
3350 rtx r
= patternr
[conversions
[i
].format
[j
] - '0'];
3351 switch (GET_CODE (r
))
3354 fprintf (file
, "%s", reg_names
[REGNO (r
)]);
3360 fprintf (file
, "%d", unique_bit_in (INTVAL (r
)));
3363 fprintf (file
, "%d", bit_size_for_clip (INTVAL (r
)));
3366 fprintf (file
, "0x%x", ((int) INTVAL (r
) >> 16) & 0xffff);
3369 fprintf (file
, "%d", bit_size_for_clip (INTVAL (r
)) - 1);
3372 fprintf (file
, "0x%x", (int) INTVAL (r
) & 0xffff);
3375 if (INTVAL (r
) & ~(HOST_WIDE_INT
)0xff
3376 && !(INTVAL (r
) & 0xff))
3377 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, INTVAL(r
));
3379 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3382 if (INTVAL (r
) & ~(HOST_WIDE_INT
)0xff
3383 && conversions
[i
].format
[j
+1] == 0)
3385 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (r
));
3386 fprintf (file
, " # 0x%x", (int) INTVAL(r
) & 0xffff);
3389 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3392 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3397 fprintf(file
, "[const_double 0x%lx]",
3398 (unsigned long) CONST_DOUBLE_HIGH(r
));
3401 real_name
= TARGET_STRIP_NAME_ENCODING (XSTR (r
, 0));
3402 assemble_name (file
, real_name
);
3405 output_asm_label (r
);
3408 fprintf (stderr
, "don't know how to print this operand:");
3415 if (conversions
[i
].format
[j
] == '+'
3416 && (!code
|| code
== 'I')
3417 && ISDIGIT (conversions
[i
].format
[j
+1])
3418 && GET_CODE (patternr
[conversions
[i
].format
[j
+1] - '0']) == CONST_INT
3419 && INTVAL (patternr
[conversions
[i
].format
[j
+1] - '0']) < 0)
3421 fputc(conversions
[i
].format
[j
], file
);
3425 if (!conversions
[i
].pattern
)
3427 error ("unconvertible operand %c %qs", code
?code
:'-', pattern
);
3435 mep_final_prescan_insn (rtx insn
, rtx
*operands ATTRIBUTE_UNUSED
,
3436 int noperands ATTRIBUTE_UNUSED
)
3438 /* Despite the fact that MeP is perfectly capable of branching and
3439 doing something else in the same bundle, gcc does jump
3440 optimization *after* scheduling, so we cannot trust the bundling
3441 flags on jump instructions. */
3442 if (GET_MODE (insn
) == BImode
3443 && get_attr_slots (insn
) != SLOTS_CORE
)
3444 fputc ('+', asm_out_file
);
3447 /* Function args in registers. */
3450 mep_setup_incoming_varargs (CUMULATIVE_ARGS
*cum
,
3451 enum machine_mode mode ATTRIBUTE_UNUSED
,
3452 tree type ATTRIBUTE_UNUSED
, int *pretend_size
,
3453 int second_time ATTRIBUTE_UNUSED
)
3455 int nsave
= 4 - (cum
->nregs
+ 1);
3458 cfun
->machine
->arg_regs_to_save
= nsave
;
3459 *pretend_size
= nsave
* 4;
3463 bytesize (const_tree type
, enum machine_mode mode
)
3465 if (mode
== BLKmode
)
3466 return int_size_in_bytes (type
);
3467 return GET_MODE_SIZE (mode
);
3471 mep_expand_builtin_saveregs (void)
3476 ns
= cfun
->machine
->arg_regs_to_save
;
3479 bufsize
= 8 * ((ns
+ 1) / 2) + 8 * ns
;
3480 regbuf
= assign_stack_local (SImode
, bufsize
, 64);
3485 regbuf
= assign_stack_local (SImode
, bufsize
, 32);
3488 move_block_from_reg (5-ns
, regbuf
, ns
);
3492 rtx tmp
= gen_rtx_MEM (DImode
, XEXP (regbuf
, 0));
3493 int ofs
= 8 * ((ns
+1)/2);
3495 for (i
=0; i
<ns
; i
++)
3497 int rn
= (4-ns
) + i
+ 49;
3500 ptr
= offset_address (tmp
, GEN_INT (ofs
), 2);
3501 emit_move_insn (ptr
, gen_rtx_REG (DImode
, rn
));
3505 return XEXP (regbuf
, 0);
3508 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
3511 mep_build_builtin_va_list (void)
3513 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3517 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3519 f_next_gp
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
3520 get_identifier ("__va_next_gp"), ptr_type_node
);
3521 f_next_gp_limit
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
3522 get_identifier ("__va_next_gp_limit"),
3524 f_next_cop
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
, get_identifier ("__va_next_cop"),
3526 f_next_stack
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
, get_identifier ("__va_next_stack"),
3529 DECL_FIELD_CONTEXT (f_next_gp
) = record
;
3530 DECL_FIELD_CONTEXT (f_next_gp_limit
) = record
;
3531 DECL_FIELD_CONTEXT (f_next_cop
) = record
;
3532 DECL_FIELD_CONTEXT (f_next_stack
) = record
;
3534 TYPE_FIELDS (record
) = f_next_gp
;
3535 DECL_CHAIN (f_next_gp
) = f_next_gp_limit
;
3536 DECL_CHAIN (f_next_gp_limit
) = f_next_cop
;
3537 DECL_CHAIN (f_next_cop
) = f_next_stack
;
3539 layout_type (record
);
3545 mep_expand_va_start (tree valist
, rtx nextarg
)
3547 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3548 tree next_gp
, next_gp_limit
, next_cop
, next_stack
;
3552 ns
= cfun
->machine
->arg_regs_to_save
;
3554 f_next_gp
= TYPE_FIELDS (va_list_type_node
);
3555 f_next_gp_limit
= DECL_CHAIN (f_next_gp
);
3556 f_next_cop
= DECL_CHAIN (f_next_gp_limit
);
3557 f_next_stack
= DECL_CHAIN (f_next_cop
);
3559 next_gp
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp
), valist
, f_next_gp
,
3561 next_gp_limit
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp_limit
),
3562 valist
, f_next_gp_limit
, NULL_TREE
);
3563 next_cop
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_cop
), valist
, f_next_cop
,
3565 next_stack
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_stack
),
3566 valist
, f_next_stack
, NULL_TREE
);
3568 /* va_list.next_gp = expand_builtin_saveregs (); */
3569 u
= make_tree (sizetype
, expand_builtin_saveregs ());
3570 u
= fold_convert (ptr_type_node
, u
);
3571 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_gp
, u
);
3572 TREE_SIDE_EFFECTS (t
) = 1;
3573 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3575 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3576 u
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
, u
,
3578 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_gp_limit
, u
);
3579 TREE_SIDE_EFFECTS (t
) = 1;
3580 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3582 u
= fold_build2 (POINTER_PLUS_EXPR
, ptr_type_node
, u
,
3583 size_int (8 * ((ns
+1)/2)));
3584 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3585 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_cop
, u
);
3586 TREE_SIDE_EFFECTS (t
) = 1;
3587 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3589 /* va_list.next_stack = nextarg; */
3590 u
= make_tree (ptr_type_node
, nextarg
);
3591 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_stack
, u
);
3592 TREE_SIDE_EFFECTS (t
) = 1;
3593 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3597 mep_gimplify_va_arg_expr (tree valist
, tree type
,
3599 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
3601 HOST_WIDE_INT size
, rsize
;
3602 bool by_reference
, ivc2_vec
;
3603 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3604 tree next_gp
, next_gp_limit
, next_cop
, next_stack
;
3605 tree label_sover
, label_selse
;
3608 ivc2_vec
= TARGET_IVC2
&& VECTOR_TYPE_P (type
);
3610 size
= int_size_in_bytes (type
);
3611 by_reference
= (size
> (ivc2_vec
? 8 : 4)) || (size
<= 0);
3615 type
= build_pointer_type (type
);
3618 rsize
= (size
+ UNITS_PER_WORD
- 1) & -UNITS_PER_WORD
;
3620 f_next_gp
= TYPE_FIELDS (va_list_type_node
);
3621 f_next_gp_limit
= DECL_CHAIN (f_next_gp
);
3622 f_next_cop
= DECL_CHAIN (f_next_gp_limit
);
3623 f_next_stack
= DECL_CHAIN (f_next_cop
);
3625 next_gp
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp
), valist
, f_next_gp
,
3627 next_gp_limit
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp_limit
),
3628 valist
, f_next_gp_limit
, NULL_TREE
);
3629 next_cop
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_cop
), valist
, f_next_cop
,
3631 next_stack
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_stack
),
3632 valist
, f_next_stack
, NULL_TREE
);
3634 /* if f_next_gp < f_next_gp_limit
3635 IF (VECTOR_P && IVC2)
3643 val = *f_next_stack;
3644 f_next_stack += rsize;
3648 label_sover
= create_artificial_label (UNKNOWN_LOCATION
);
3649 label_selse
= create_artificial_label (UNKNOWN_LOCATION
);
3650 res_addr
= create_tmp_var (ptr_type_node
, NULL
);
3652 tmp
= build2 (GE_EXPR
, boolean_type_node
, next_gp
,
3653 unshare_expr (next_gp_limit
));
3654 tmp
= build3 (COND_EXPR
, void_type_node
, tmp
,
3655 build1 (GOTO_EXPR
, void_type_node
,
3656 unshare_expr (label_selse
)),
3658 gimplify_and_add (tmp
, pre_p
);
3662 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, next_cop
);
3663 gimplify_and_add (tmp
, pre_p
);
3667 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, next_gp
);
3668 gimplify_and_add (tmp
, pre_p
);
3671 tmp
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
3672 unshare_expr (next_gp
), size_int (4));
3673 gimplify_assign (unshare_expr (next_gp
), tmp
, pre_p
);
3675 tmp
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
3676 unshare_expr (next_cop
), size_int (8));
3677 gimplify_assign (unshare_expr (next_cop
), tmp
, pre_p
);
3679 tmp
= build1 (GOTO_EXPR
, void_type_node
, unshare_expr (label_sover
));
3680 gimplify_and_add (tmp
, pre_p
);
3684 tmp
= build1 (LABEL_EXPR
, void_type_node
, unshare_expr (label_selse
));
3685 gimplify_and_add (tmp
, pre_p
);
3687 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, unshare_expr (next_stack
));
3688 gimplify_and_add (tmp
, pre_p
);
3690 tmp
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
3691 unshare_expr (next_stack
), size_int (rsize
));
3692 gimplify_assign (unshare_expr (next_stack
), tmp
, pre_p
);
3696 tmp
= build1 (LABEL_EXPR
, void_type_node
, unshare_expr (label_sover
));
3697 gimplify_and_add (tmp
, pre_p
);
3699 res_addr
= fold_convert (build_pointer_type (type
), res_addr
);
3702 res_addr
= build_va_arg_indirect_ref (res_addr
);
3704 return build_va_arg_indirect_ref (res_addr
);
3708 mep_init_cumulative_args (CUMULATIVE_ARGS
*pcum
, tree fntype
,
3709 rtx libname ATTRIBUTE_UNUSED
,
3710 tree fndecl ATTRIBUTE_UNUSED
)
3714 if (fntype
&& lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype
)))
3721 mep_function_arg (CUMULATIVE_ARGS cum
, enum machine_mode mode
,
3722 tree type ATTRIBUTE_UNUSED
, int named ATTRIBUTE_UNUSED
)
3724 /* VOIDmode is a signal for the backend to pass data to the call
3725 expander via the second operand to the call pattern. We use
3726 this to determine whether to use "jsr" or "jsrv". */
3727 if (mode
== VOIDmode
)
3728 return GEN_INT (cum
.vliw
);
3730 /* If we havn't run out of argument registers, return the next. */
3733 if (type
&& TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3734 return gen_rtx_REG (mode
, cum
.nregs
+ 49);
3736 return gen_rtx_REG (mode
, cum
.nregs
+ 1);
3739 /* Otherwise the argument goes on the stack. */
3744 mep_pass_by_reference (CUMULATIVE_ARGS
* cum ATTRIBUTE_UNUSED
,
3745 enum machine_mode mode
,
3747 bool named ATTRIBUTE_UNUSED
)
3749 int size
= bytesize (type
, mode
);
3751 /* This is non-obvious, but yes, large values passed after we've run
3752 out of registers are *still* passed by reference - we put the
3753 address of the parameter on the stack, as well as putting the
3754 parameter itself elsewhere on the stack. */
3756 if (size
<= 0 || size
> 8)
3760 if (TARGET_IVC2
&& cum
->nregs
< 4 && type
!= NULL_TREE
&& VECTOR_TYPE_P (type
))
3766 mep_arg_advance (CUMULATIVE_ARGS
*pcum
,
3767 enum machine_mode mode ATTRIBUTE_UNUSED
,
3768 tree type ATTRIBUTE_UNUSED
, int named ATTRIBUTE_UNUSED
)
3774 mep_return_in_memory (const_tree type
, const_tree decl ATTRIBUTE_UNUSED
)
3776 int size
= bytesize (type
, BLKmode
);
3777 if (TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3778 return size
> 0 && size
<= 8 ? 0 : 1;
3779 return size
> 0 && size
<= 4 ? 0 : 1;
3783 mep_narrow_volatile_bitfield (void)
3789 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3792 mep_function_value (tree type
, tree func ATTRIBUTE_UNUSED
)
3794 if (TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3795 return gen_rtx_REG (TYPE_MODE (type
), 48);
3796 return gen_rtx_REG (TYPE_MODE (type
), RETURN_VALUE_REGNUM
);
3799 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3802 mep_libcall_value (enum machine_mode mode
)
3804 return gen_rtx_REG (mode
, RETURN_VALUE_REGNUM
);
3807 /* Handle pipeline hazards. */
3809 typedef enum { op_none
, op_stc
, op_fsft
, op_ret
} op_num
;
3810 static const char *opnames
[] = { "", "stc", "fsft", "ret" };
3812 static int prev_opcode
= 0;
3814 /* This isn't as optimal as it could be, because we don't know what
3815 control register the STC opcode is storing in. We only need to add
3816 the nop if it's the relevent register, but we add it for irrelevent
3820 mep_asm_output_opcode (FILE *file
, const char *ptr
)
3822 int this_opcode
= op_none
;
3823 const char *hazard
= 0;
3828 if (strncmp (ptr
, "fsft", 4) == 0 && !ISGRAPH (ptr
[4]))
3829 this_opcode
= op_fsft
;
3832 if (strncmp (ptr
, "ret", 3) == 0 && !ISGRAPH (ptr
[3]))
3833 this_opcode
= op_ret
;
3836 if (strncmp (ptr
, "stc", 3) == 0 && !ISGRAPH (ptr
[3]))
3837 this_opcode
= op_stc
;
3841 if (prev_opcode
== op_stc
&& this_opcode
== op_fsft
)
3843 if (prev_opcode
== op_stc
&& this_opcode
== op_ret
)
3847 fprintf(file
, "%s\t# %s-%s hazard\n\t",
3848 hazard
, opnames
[prev_opcode
], opnames
[this_opcode
]);
3850 prev_opcode
= this_opcode
;
3853 /* Handle attributes. */
3856 mep_validate_based_tiny (tree
*node
, tree name
, tree args
,
3857 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3859 if (TREE_CODE (*node
) != VAR_DECL
3860 && TREE_CODE (*node
) != POINTER_TYPE
3861 && TREE_CODE (*node
) != TYPE_DECL
)
3863 warning (0, "%qE attribute only applies to variables", name
);
3866 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
3868 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
3870 warning (0, "address region attributes not allowed with auto storage class");
3873 /* Ignore storage attribute of pointed to variable: char __far * x; */
3874 if (TREE_TYPE (*node
) && TREE_CODE (TREE_TYPE (*node
)) == POINTER_TYPE
)
3876 warning (0, "address region attributes on pointed-to types ignored");
3885 mep_multiple_address_regions (tree list
, bool check_section_attr
)
3888 int count_sections
= 0;
3889 int section_attr_count
= 0;
3891 for (a
= list
; a
; a
= TREE_CHAIN (a
))
3893 if (is_attribute_p ("based", TREE_PURPOSE (a
))
3894 || is_attribute_p ("tiny", TREE_PURPOSE (a
))
3895 || is_attribute_p ("near", TREE_PURPOSE (a
))
3896 || is_attribute_p ("far", TREE_PURPOSE (a
))
3897 || is_attribute_p ("io", TREE_PURPOSE (a
)))
3899 if (check_section_attr
)
3900 section_attr_count
+= is_attribute_p ("section", TREE_PURPOSE (a
));
3903 if (check_section_attr
)
3904 return section_attr_count
;
3906 return count_sections
;
3909 #define MEP_ATTRIBUTES(decl) \
3910 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3911 : DECL_ATTRIBUTES (decl) \
3912 ? (DECL_ATTRIBUTES (decl)) \
3913 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3916 mep_validate_near_far (tree
*node
, tree name
, tree args
,
3917 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3919 if (TREE_CODE (*node
) != VAR_DECL
3920 && TREE_CODE (*node
) != FUNCTION_DECL
3921 && TREE_CODE (*node
) != METHOD_TYPE
3922 && TREE_CODE (*node
) != POINTER_TYPE
3923 && TREE_CODE (*node
) != TYPE_DECL
)
3925 warning (0, "%qE attribute only applies to variables and functions",
3929 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
3931 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
3933 warning (0, "address region attributes not allowed with auto storage class");
3936 /* Ignore storage attribute of pointed to variable: char __far * x; */
3937 if (TREE_TYPE (*node
) && TREE_CODE (TREE_TYPE (*node
)) == POINTER_TYPE
)
3939 warning (0, "address region attributes on pointed-to types ignored");
3943 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node
), false) > 0)
3945 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3946 name
, DECL_NAME (*node
), DECL_SOURCE_LINE (*node
));
3947 DECL_ATTRIBUTES (*node
) = NULL_TREE
;
3953 mep_validate_disinterrupt (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3954 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3956 if (TREE_CODE (*node
) != FUNCTION_DECL
3957 && TREE_CODE (*node
) != METHOD_TYPE
)
3959 warning (0, "%qE attribute only applies to functions", name
);
3966 mep_validate_interrupt (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3967 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3971 if (TREE_CODE (*node
) != FUNCTION_DECL
)
3973 warning (0, "%qE attribute only applies to functions", name
);
3978 if (DECL_DECLARED_INLINE_P (*node
))
3979 error ("cannot inline interrupt function %qE", DECL_NAME (*node
));
3980 DECL_UNINLINABLE (*node
) = 1;
3982 function_type
= TREE_TYPE (*node
);
3984 if (TREE_TYPE (function_type
) != void_type_node
)
3985 error ("interrupt function must have return type of void");
3987 if (TYPE_ARG_TYPES (function_type
)
3988 && (TREE_VALUE (TYPE_ARG_TYPES (function_type
)) != void_type_node
3989 || TREE_CHAIN (TYPE_ARG_TYPES (function_type
)) != NULL_TREE
))
3990 error ("interrupt function must have no arguments");
3996 mep_validate_io_cb (tree
*node
, tree name
, tree args
,
3997 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3999 if (TREE_CODE (*node
) != VAR_DECL
)
4001 warning (0, "%qE attribute only applies to variables", name
);
4005 if (args
!= NULL_TREE
)
4007 if (TREE_CODE (TREE_VALUE (args
)) == NON_LVALUE_EXPR
)
4008 TREE_VALUE (args
) = TREE_OPERAND (TREE_VALUE (args
), 0);
4009 if (TREE_CODE (TREE_VALUE (args
)) != INTEGER_CST
)
4011 warning (0, "%qE attribute allows only an integer constant argument",
4017 if (*no_add
== false && !TARGET_IO_NO_VOLATILE
)
4018 TREE_THIS_VOLATILE (*node
) = 1;
4024 mep_validate_vliw (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
4025 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
4027 if (TREE_CODE (*node
) != FUNCTION_TYPE
4028 && TREE_CODE (*node
) != FUNCTION_DECL
4029 && TREE_CODE (*node
) != METHOD_TYPE
4030 && TREE_CODE (*node
) != FIELD_DECL
4031 && TREE_CODE (*node
) != TYPE_DECL
)
4033 static int gave_pointer_note
= 0;
4034 static int gave_array_note
= 0;
4035 static const char * given_type
= NULL
;
4037 given_type
= tree_code_name
[TREE_CODE (*node
)];
4038 if (TREE_CODE (*node
) == POINTER_TYPE
)
4039 given_type
= "pointers";
4040 if (TREE_CODE (*node
) == ARRAY_TYPE
)
4041 given_type
= "arrays";
4044 warning (0, "%qE attribute only applies to functions, not %s",
4047 warning (0, "%qE attribute only applies to functions",
4051 if (TREE_CODE (*node
) == POINTER_TYPE
4052 && !gave_pointer_note
)
4054 inform (input_location
, "To describe a pointer to a VLIW function, use syntax like this:");
4055 inform (input_location
, " typedef int (__vliw *vfuncptr) ();");
4056 gave_pointer_note
= 1;
4059 if (TREE_CODE (*node
) == ARRAY_TYPE
4060 && !gave_array_note
)
4062 inform (input_location
, "To describe an array of VLIW function pointers, use syntax like this:");
4063 inform (input_location
, " typedef int (__vliw *vfuncptr[]) ();");
4064 gave_array_note
= 1;
4068 error ("VLIW functions are not allowed without a VLIW configuration");
4072 static const struct attribute_spec mep_attribute_table
[11] =
4074 /* name min max decl type func handler */
4075 { "based", 0, 0, false, false, false, mep_validate_based_tiny
},
4076 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny
},
4077 { "near", 0, 0, false, false, false, mep_validate_near_far
},
4078 { "far", 0, 0, false, false, false, mep_validate_near_far
},
4079 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt
},
4080 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt
},
4081 { "io", 0, 1, false, false, false, mep_validate_io_cb
},
4082 { "cb", 0, 1, false, false, false, mep_validate_io_cb
},
4083 { "vliw", 0, 0, false, true, false, mep_validate_vliw
},
4084 { NULL
, 0, 0, false, false, false, NULL
}
4088 mep_function_attribute_inlinable_p (const_tree callee
)
4090 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (callee
));
4091 if (!attrs
) attrs
= DECL_ATTRIBUTES (callee
);
4092 return (lookup_attribute ("disinterrupt", attrs
) == 0
4093 && lookup_attribute ("interrupt", attrs
) == 0);
4097 mep_can_inline_p (tree caller
, tree callee
)
4099 if (TREE_CODE (callee
) == ADDR_EXPR
)
4100 callee
= TREE_OPERAND (callee
, 0);
4102 if (!mep_vliw_function_p (caller
)
4103 && mep_vliw_function_p (callee
))
/* NOTE(review): partial extraction — lines missing; comments cover visible
   code only.

   Bookkeeping for "#pragma call" / "#pragma disinterrupt" on named
   functions: entries are kept in a GC-managed hash table keyed by
   function name, with bit flags recording which pragmas were seen
   (FUNC_DISINTERRUPT below; FUNC_CALL is defined outside this view)
   and which were actually consumed.  */
4111 #define FUNC_DISINTERRUPT 2

/* One record per function name mentioned in a pragma.  Only FUNCNAME is
   visible here; FLAG and USED fields are referenced later but their
   declarations are missing from this extraction.  */
4114 struct GTY(()) pragma_entry
{
4117 const char *funcname
;
4119 typedef struct pragma_entry pragma_entry
;
4121 /* Hash table of farcall-tagged sections. */
4122 static GTY((param_is (pragma_entry
))) htab_t pragma_htab
;

/* htab equality callback: P1 is a stored pragma_entry, P2 a raw name.  */
4125 pragma_entry_eq (const void *p1
, const void *p2
)
4127 const pragma_entry
*old
= (const pragma_entry
*) p1
;
4128 const char *new_name
= (const char *) p2
;
4130 return strcmp (old
->funcname
, new_name
) == 0;

/* htab hash callback: hash the stored entry's function name.  */
4134 pragma_entry_hash (const void *p
)
4136 const pragma_entry
*old
= (const pragma_entry
*) p
;
4137 return htab_hash_string (old
->funcname
);

/* Record that FUNCNAME was named in a pragma of kind FLAG, creating the
   table and/or the entry on first use (entry is GC-allocated and the
   name is ggc_strdup'd so it survives the caller's buffer).  */
4141 mep_note_pragma_flag (const char *funcname
, int flag
)
4143 pragma_entry
**slot
;
4146 pragma_htab
= htab_create_ggc (31, pragma_entry_hash
,
4147 pragma_entry_eq
, NULL
);
4149 slot
= (pragma_entry
**)
4150 htab_find_slot_with_hash (pragma_htab
, funcname
,
4151 htab_hash_string (funcname
), INSERT
);
4155 *slot
= ggc_alloc_pragma_entry ();
4158 (*slot
)->funcname
= ggc_strdup (funcname
);
4160 (*slot
)->flag
|= flag
;

/* Query whether FUNCNAME was tagged with FLAG.  Skips a leading
   "@<char>." encoding prefix (see the file-head encoding table); when
   the flag is present it is also marked as used so the file-end pass
   can warn about pragmas that never matched a declaration.  */
4164 mep_lookup_pragma_flag (const char *funcname
, int flag
)
4166 pragma_entry
**slot
;
4171 if (funcname
[0] == '@' && funcname
[2] == '.')
4174 slot
= (pragma_entry
**)
4175 htab_find_slot_with_hash (pragma_htab
, funcname
,
4176 htab_hash_string (funcname
), NO_INSERT
);
4177 if (slot
&& *slot
&& ((*slot
)->flag
& flag
))
4179 (*slot
)->used
|= flag
;

/* Thin wrappers over the flag machinery for the two pragma kinds.  */
4186 mep_lookup_pragma_call (const char *funcname
)
4188 return mep_lookup_pragma_flag (funcname
, FUNC_CALL
);
4192 mep_note_pragma_call (const char *funcname
)
4194 mep_note_pragma_flag (funcname
, FUNC_CALL
);
4198 mep_lookup_pragma_disinterrupt (const char *funcname
)
4200 return mep_lookup_pragma_flag (funcname
, FUNC_DISINTERRUPT
);
4204 mep_note_pragma_disinterrupt (const char *funcname
)
4206 mep_note_pragma_flag (funcname
, FUNC_DISINTERRUPT
);

/* htab_traverse callback: warn about a "#pragma disinterrupt" that was
   recorded but never matched any function declaration.  */
4210 note_unused_pragma_disinterrupt (void **slot
, void *data ATTRIBUTE_UNUSED
)
4212 const pragma_entry
*d
= (const pragma_entry
*)(*slot
);
4214 if ((d
->flag
& FUNC_DISINTERRUPT
)
4215 && !(d
->used
& FUNC_DISINTERRUPT
))
4216 warning (0, "\"#pragma disinterrupt %s\" not used", d
->funcname
);

/* End-of-file hook: emit the unused-pragma warnings above.  */
4221 mep_file_cleanups (void)
4224 htab_traverse (pragma_htab
, note_unused_pragma_disinterrupt
, NULL
);
4227 /* These three functions provide a bridge between the pragmas that
4228 affect register classes, and the functions that maintain them. We
4229 can't call those functions directly as pragma handling is part of
4230 the front end and doesn't have direct access to them. */
/* NOTE(review): only the save_register_info call survives this
   extraction; the reinit/init bodies are missing.  */
4233 mep_save_register_info (void)
4235 save_register_info ();
4239 mep_reinit_regs (void)
4245 mep_init_regs (void)
/* NOTE(review): partial extraction — lines missing; comments cover
   visible code only.

   Map an attribute LIST on DECL to the single-character section
   encoding used in "@<char>.<name>" symbol prefixes (see the table at
   the top of the file: b/t/n/f/i/I/c).  Warns when more than one
   address-region attribute is present and keeps only the first; the
   per-attribute return statements are missing from this view.  */
4253 mep_attrlist_to_encoding (tree list
, tree decl
)
4255 if (mep_multiple_address_regions (list
, false) > 1)
4257 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4258 TREE_PURPOSE (TREE_CHAIN (list
)),
4260 DECL_SOURCE_LINE (decl
));
4261 TREE_CHAIN (list
) = NULL_TREE
;
4266 if (is_attribute_p ("based", TREE_PURPOSE (list
)))
4268 if (is_attribute_p ("tiny", TREE_PURPOSE (list
)))
4270 if (is_attribute_p ("near", TREE_PURPOSE (list
)))
4272 if (is_attribute_p ("far", TREE_PURPOSE (list
)))
4274 if (is_attribute_p ("io", TREE_PURPOSE (list
)))
/* An "io" attribute may carry an INTEGER_CST address argument; the
   visible range test bounds it at 0x1000000 (lower bound missing).  */
4276 if (TREE_VALUE (list
)
4277 && TREE_VALUE (TREE_VALUE (list
))
4278 && TREE_CODE (TREE_VALUE (TREE_VALUE (list
))) == INTEGER_CST
)
4280 int location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list
)));
4282 && location
<= 0x1000000)
4287 if (is_attribute_p ("cb", TREE_PURPOSE (list
)))
4289 list
= TREE_CHAIN (list
);
/* Trailing test on FUNCTION_DECLs without a section name — the
   surrounding condition and result are missing from this view.  */
4292 && TREE_CODE (decl
) == FUNCTION_DECL
4293 && DECL_SECTION_NAME (decl
) == 0)
/* Type-compatibility hook: compares the presence of the "vliw"
   attribute on T1 and T2 (the comparison/return lines are missing from
   this extraction — presumably types differing in vliw-ness are
   incompatible; confirm against the full source).  */
4299 mep_comp_type_attributes (const_tree t1
, const_tree t2
)
4303 vliw1
= (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1
)) != 0);
4304 vliw2
= (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2
)) != 0);
/* NOTE(review): partial extraction — lines missing; comments cover
   visible code only.

   TARGET_INSERT_ATTRIBUTES hook.  Visible behavior: attach
   "disinterrupt" to functions named in a matching pragma; for public/
   static/external variables, pick a default section attribute (via
   SECNAME) from size cutoffs and -mdc / -mrand-tpgp style options,
   warn about duplicate __io addresses, and chain the chosen attribute
   onto DECL_ATTRIBUTES.  */
4313 mep_insert_attributes (tree decl
, tree
*attributes
)
4316 const char *secname
= 0;
4317 tree attrib
, attrlist
;
/* Functions: honor "#pragma disinterrupt <name>" by adding the
   attribute explicitly.  */
4320 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4322 const char *funcname
= IDENTIFIER_POINTER (DECL_NAME (decl
));
4324 if (mep_lookup_pragma_disinterrupt (funcname
))
4326 attrib
= build_tree_list (get_identifier ("disinterrupt"), NULL_TREE
);
4327 *attributes
= chainon (*attributes
, attrib
);
/* Only variables with linkage get default section placement.  */
4331 if (TREE_CODE (decl
) != VAR_DECL
4332 || ! (TREE_PUBLIC (decl
) || TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
4335 if (TREE_READONLY (decl
) && TARGET_DC
)
4336 /* -mdc means that const variables default to the near section,
4337 regardless of the size cutoff. */
4340 /* User specified an attribute, so override the default.
4341 Ignore storage attribute of pointed to variable. char __far * x; */
4342 if (! (TREE_TYPE (decl
) && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
))
4344 if (TYPE_P (decl
) && TYPE_ATTRIBUTES (decl
) && *attributes
)
4345 TYPE_ATTRIBUTES (decl
) = NULL_TREE
;
4346 else if (DECL_ATTRIBUTES (decl
) && *attributes
)
4347 DECL_ATTRIBUTES (decl
) = NULL_TREE
;
/* Derive the encoding from explicit attributes, falling back to the
   attributes of the variable's type.  */
4350 attrlist
= *attributes
? *attributes
: DECL_ATTRIBUTES (decl
);
4351 encoding
= mep_attrlist_to_encoding (attrlist
, decl
);
4352 if (!encoding
&& TYPE_P (TREE_TYPE (decl
)))
4354 attrlist
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
4355 encoding
= mep_attrlist_to_encoding (attrlist
, decl
);
4359 /* This means that the declaration has a specific section
4360 attribute, so we should not apply the default rules. */
/* __io variables ('i'/'I' encodings): detect two decls given the same
   explicit I/O address and warn.  */
4362 if (encoding
== 'i' || encoding
== 'I')
4364 tree attr
= lookup_attribute ("io", attrlist
);
4366 && TREE_VALUE (attr
)
4367 && TREE_VALUE (TREE_VALUE(attr
)))
4369 int location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr
)));
4370 static tree previous_value
= 0;
4371 static int previous_location
= 0;
4372 static tree previous_name
= 0;
4374 /* We take advantage of the fact that gcc will reuse the
4375 same tree pointer when applying an attribute to a
4376 list of decls, but produce a new tree for attributes
4377 on separate source lines, even when they're textually
4378 identical. This is the behavior we want. */
4379 if (TREE_VALUE (attr
) == previous_value
4380 && location
== previous_location
)
4382 warning(0, "__io address 0x%x is the same for %qE and %qE",
4383 location
, previous_name
, DECL_NAME (decl
));
4385 previous_name
= DECL_NAME (decl
);
4386 previous_location
= location
;
4387 previous_value
= TREE_VALUE (attr
);
4394 /* Declarations of arrays can change size. Don't trust them. */
4395 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
4398 size
= int_size_in_bytes (TREE_TYPE (decl
));
/* -mrand-tpgp: small objects get randomized placement (section choice
   lines missing from this view).  */
4400 if (TARGET_RAND_TPGP
&& size
<= 4 && size
> 0)
4402 if (TREE_PUBLIC (decl
)
4403 || DECL_EXTERNAL (decl
)
4404 || TREE_STATIC (decl
))
4406 const char *name
= IDENTIFIER_POINTER (DECL_NAME (decl
));
/* Size-based defaults: based section up to mep_based_cutoff, tiny
   section up to mep_tiny_cutoff.  */
4430 if (size
<= mep_based_cutoff
&& size
> 0)
4432 else if (size
<= mep_tiny_cutoff
&& size
> 0)
/* -mconst-section=tiny|near|far overrides placement of const data.  */
4438 if (mep_const_section
&& TREE_READONLY (decl
))
4440 if (strcmp (mep_const_section
, "tiny") == 0)
4442 else if (strcmp (mep_const_section
, "near") == 0)
4444 else if (strcmp (mep_const_section
, "far") == 0)
/* Finally, attach the computed section attribute unless the user
   already gave an address-region attribute.  */
4451 if (!mep_multiple_address_regions (*attributes
, true)
4452 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl
), false))
4454 attrib
= build_tree_list (get_identifier (secname
), NULL_TREE
);
4456 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4457 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4458 and mep_validate_based_tiny. */
4459 DECL_ATTRIBUTES (decl
) = chainon (DECL_ATTRIBUTES (decl
), attrib
);
/* NOTE(review): partial extraction — lines missing; comments cover
   visible code only.

   TARGET_ENCODE_SECTION_INFO hook: for VAR_DECLs and FUNCTION_DECLs,
   compute the section encoding character and rewrite the symbol name
   to "@<char>.<oldname>", preserving weakness and the decl link.
   Also warns when a variable exceeds its section's size limit (only
   the 0x1000000 limit assignment is visible here).  */
4464 mep_encode_section_info (tree decl
, rtx rtl
, int first
)
4467 const char *oldname
;
4468 const char *secname
;
4474 tree mep_attributes
;
4479 if (TREE_CODE (decl
) != VAR_DECL
4480 && TREE_CODE (decl
) != FUNCTION_DECL
)
/* Pull the current assembler name out of the decl's RTL; it may be a
   bare SYMBOL_REF or a MEM wrapping one.  */
4483 rtlname
= XEXP (rtl
, 0);
4484 if (GET_CODE (rtlname
) == SYMBOL_REF
)
4485 oldname
= XSTR (rtlname
, 0);
4486 else if (GET_CODE (rtlname
) == MEM
4487 && GET_CODE (XEXP (rtlname
, 0)) == SYMBOL_REF
)
4488 oldname
= XSTR (XEXP (rtlname
, 0), 0);
4492 type
= TREE_TYPE (decl
);
4493 if (type
== error_mark_node
)
4495 mep_attributes
= MEP_ATTRIBUTES (decl
);
4497 encoding
= mep_attrlist_to_encoding (mep_attributes
, decl
);
/* Build the "@<char>." prefixed name (prefix is 3 chars + NUL).  */
4501 newname
= (char *) alloca (strlen (oldname
) + 4);
4502 sprintf (newname
, "@%c.%s", encoding
, oldname
);
4503 idp
= get_identifier (newname
);
4505 gen_rtx_SYMBOL_REF (Pmode
, IDENTIFIER_POINTER (idp
));
4506 SYMBOL_REF_WEAK (XEXP (rtl
, 0)) = DECL_WEAK (decl
);
4507 SET_SYMBOL_REF_DECL (XEXP (rtl
, 0), decl
);
4520 maxsize
= 0x1000000;
4528 if (maxsize
&& int_size_in_bytes (TREE_TYPE (decl
)) > maxsize
)
4530 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4532 (long) int_size_in_bytes (TREE_TYPE (decl
)),

/* TARGET_STRIP_NAME_ENCODING: drop the "@<char>." prefix added above
   (and, per the missing first branch, presumably any leading '*').  */
4540 mep_strip_name_encoding (const char *sym
)
4546 else if (*sym
== '@' && sym
[2] == '.')
/* NOTE(review): partial extraction — lines missing (several case labels
   and encoding switches are absent); comments cover visible code only.

   TARGET_ASM_SELECT_SECTION hook: choose the output section for DECL
   based on its "@<char>." name encoding — vliw/ftext variants for
   functions, based/tiny/far/io/cb sections for variables, with
   readonly_data/data as fallbacks.  */
4554 mep_select_section (tree decl
, int reloc ATTRIBUTE_UNUSED
,
4555 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
4560 switch (TREE_CODE (decl
))
/* A VAR_DECL is only constant-poolable if it is a truly constant,
   side-effect-free, initialized object.  */
4563 if (!TREE_READONLY (decl
)
4564 || TREE_SIDE_EFFECTS (decl
)
4565 || !DECL_INITIAL (decl
)
4566 || (DECL_INITIAL (decl
) != error_mark_node
4567 && !TREE_CONSTANT (DECL_INITIAL (decl
))))
4571 if (! TREE_CONSTANT (decl
))
/* Functions: pick among text/ftext/vtext/vftext from the encoding
   character and the "vliw" type attribute.  */
4579 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4581 const char *name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4583 if (name
[0] == '@' && name
[2] == '.')
4588 if (flag_function_sections
|| DECL_ONE_ONLY (decl
))
4589 mep_unique_section (decl
, 0);
4590 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4592 if (encoding
== 'f')
4593 return vftext_section
;
4595 return vtext_section
;
4597 else if (encoding
== 'f')
4598 return ftext_section
;
4600 return text_section
;
/* Variables: dispatch on the encoding character (switch cases missing
   from this view; the returns below show the section per encoding).  */
4603 if (TREE_CODE (decl
) == VAR_DECL
)
4605 const char *name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4607 if (name
[0] == '@' && name
[2] == '.')
4611 return based_section
;
4615 return srodata_section
;
4616 if (DECL_INITIAL (decl
))
4617 return sdata_section
;
4618 return tinybss_section
;
4622 return frodata_section
;
/* __io / __cb variables live at fixed addresses, so an initializer is
   meaningless — diagnose and fall back to .data.  */
4627 error_at (DECL_SOURCE_LOCATION (decl
),
4628 "variable %D of type %<io%> must be uninitialized", decl
);
4629 return data_section
;
4632 error_at (DECL_SOURCE_LOCATION (decl
),
4633 "variable %D of type %<cb%> must be uninitialized", decl
);
4634 return data_section
;
4639 return readonly_data_section
;
4641 return data_section
;
/* NOTE(review): partial extraction — lines missing (the encoding switch
   labels are absent); comments cover visible code only.

   TARGET_ASM_UNIQUE_SECTION hook: build a per-decl section name by
   prefixing the (possibly "@<char>."-encoded) symbol name with the
   section family chosen below; column [1] of PREFIXES holds the
   .gnu.linkonce variants used for DECL_ONE_ONLY decls.  */
4645 mep_unique_section (tree decl
, int reloc
)
4647 static const char *prefixes
[][2] =
4649 { ".text.", ".gnu.linkonce.t." },
4650 { ".rodata.", ".gnu.linkonce.r." },
4651 { ".data.", ".gnu.linkonce.d." },
4652 { ".based.", ".gnu.linkonce.based." },
4653 { ".sdata.", ".gnu.linkonce.s." },
4654 { ".far.", ".gnu.linkonce.far." },
4655 { ".ftext.", ".gnu.linkonce.ft." },
4656 { ".frodata.", ".gnu.linkonce.frd." },
4657 { ".srodata.", ".gnu.linkonce.srd." },
4658 { ".vtext.", ".gnu.linkonce.v." },
4659 { ".vftext.", ".gnu.linkonce.vf." }
4661 int sec
= 2; /* .data */
4663 const char *name
, *prefix
;
4666 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
4667 if (DECL_RTL (decl
))
4668 name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
/* Pick the base family: vtext/text for functions (by vliw attribute),
   rodata for read-only data, otherwise keep the .data default.  */
4670 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4672 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4673 sec
= 9; /* .vtext */
4675 sec
= 0; /* .text */
4677 else if (decl_readonly_section (decl
, reloc
))
4678 sec
= 1; /* .rodata */
/* Refine by the "@<char>." encoding (the switch itself is missing).  */
4680 if (name
[0] == '@' && name
[2] == '.')
4685 sec
= 3; /* .based */
4689 sec
= 8; /* .srodata */
4691 sec
= 4; /* .sdata */
4695 sec
= 6; /* .ftext */
4697 sec
= 10; /* .vftext */
4699 sec
= 7; /* .frodata */
4701 sec
= 5; /* .far. */
4707 prefix
= prefixes
[sec
][DECL_ONE_ONLY(decl
)];
4708 len
= strlen (name
) + strlen (prefix
);
4709 string
= (char *) alloca (len
+ 1);
4711 sprintf (string
, "%s%s", prefix
, name
);
4713 DECL_SECTION_NAME (decl
) = build_string (len
, string
);

4716 /* Given a decl, a section name, and whether the decl initializer
4717 has relocs, choose attributes for the section. */
4719 #define SECTION_MEP_VLIW SECTION_MACH_DEP
/* TARGET_SECTION_TYPE_FLAGS hook: default flags plus the MeP-specific
   VLIW bit for functions with the "vliw" attribute.  */
4722 mep_section_type_flags (tree decl
, const char *name
, int reloc
)
4724 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
4726 if (decl
&& TREE_CODE (decl
) == FUNCTION_DECL
4727 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4728 flags
|= SECTION_MEP_VLIW
;
4733 /* Switch to an arbitrary section NAME with attributes as specified
4734 by FLAGS. ALIGN specifies any known alignment requirements for
4735 the section; 0 if the default should be used.
4737 Differs from the standard ELF version only in support of VLIW mode. */
/* NOTE(review): the statements that append each flag character are
   missing from this extraction; only the flag tests survive.  Emits a
   .section directive, then ".vliw"/".core" for code sections.  */
4740 mep_asm_named_section (const char *name
, unsigned int flags
, tree decl ATTRIBUTE_UNUSED
)
4742 char flagchars
[8], *f
= flagchars
;
4745 if (!(flags
& SECTION_DEBUG
))
4747 if (flags
& SECTION_WRITE
)
4749 if (flags
& SECTION_CODE
)
4751 if (flags
& SECTION_SMALL
)
4753 if (flags
& SECTION_MEP_VLIW
)
4757 if (flags
& SECTION_BSS
)
4762 fprintf (asm_out_file
, "\t.section\t%s,\"%s\",@%s\n",
4763 name
, flagchars
, type
);
4765 if (flags
& SECTION_CODE
)
4766 fputs ((flags
& SECTION_MEP_VLIW
? "\t.vliw\n" : "\t.core\n"),
/* NOTE(review): partial extraction — lines missing; comments cover
   visible code only.

   Emit a common/BSS-style definition for NAME.  Visible behavior:
   __io/__cb symbols ('i'/'I'/'c' encodings) with an explicit address
   become absolute assignments ("name = <addr>"); encoded symbols are
   placed in based/tinybss/farbss sections and emitted with explicit
   .globl/.p2align/.type/.size/.zero directives; everything else falls
   back to .local/.comm.  */
4771 mep_output_aligned_common (FILE *stream
, tree decl
, const char *name
,
4772 int size
, int align
, int global
)
4774 /* We intentionally don't use mep_section_tag() here. */
4776 && (name
[1] == 'i' || name
[1] == 'I' || name
[1] == 'c')
4780 tree attr
= lookup_attribute ((name
[1] == 'c' ? "cb" : "io"),
4781 DECL_ATTRIBUTES (decl
));
4783 && TREE_VALUE (attr
)
4784 && TREE_VALUE (TREE_VALUE(attr
)))
4785 location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr
)));
/* Fixed-address symbol: export if global, then define by assignment.  */
4790 fprintf (stream
, "\t.globl\t");
4791 assemble_name (stream
, name
);
4792 fprintf (stream
, "\n");
4794 assemble_name (stream
, name
);
4795 fprintf (stream
, " = %d\n", location
);
/* Encoded symbol: choose the matching BSS-like section.  */
4798 if (name
[0] == '@' && name
[2] == '.')
4800 const char *sec
= 0;
4804 switch_to_section (based_section
);
4808 switch_to_section (tinybss_section
);
4812 switch_to_section (farbss_section
);
/* Convert ALIGN (bits) into a .p2align exponent.  */
4821 while (align
> BITS_PER_UNIT
)
4826 name2
= TARGET_STRIP_NAME_ENCODING (name
);
4828 fprintf (stream
, "\t.globl\t%s\n", name2
);
4829 fprintf (stream
, "\t.p2align %d\n", p2align
);
4830 fprintf (stream
, "\t.type\t%s,@object\n", name2
);
4831 fprintf (stream
, "\t.size\t%s,%d\n", name2
, size
);
4832 fprintf (stream
, "%s:\n\t.zero\t%d\n", name2
, size
);
/* Default path: .local for non-global symbols, then .comm.  */
4839 fprintf (stream
, "\t.local\t");
4840 assemble_name (stream
, name
);
4841 fprintf (stream
, "\n");
4843 fprintf (stream
, "\t.comm\t");
4844 assemble_name (stream
, name
);
4845 fprintf (stream
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
/* TARGET_TRAMPOLINE_INIT hook: delegate trampoline construction to the
   runtime helper __mep_trampoline_helper, passing the trampoline
   address, the target function address, and the static chain (the
   middle argument lines are missing from this extraction).  */
4851 mep_trampoline_init (rtx m_tramp
, tree fndecl
, rtx static_chain
)
4853 rtx addr
= XEXP (m_tramp
, 0);
4854 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
4856 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__mep_trampoline_helper"),
4857 LCT_NORMAL
, VOIDmode
, 3,
4860 static_chain
, Pmode
);
4863 /* Experimental Reorg. */

/* NOTE(review): partial extraction — lines missing; comments cover
   visible code only.

   Recursively test whether RTX IN mentions REG (or, when REG is NULL,
   a MEM — per the parameter comment).  When MODES_TOO is nonzero a
   register match additionally requires equal machine modes.  */
4866 mep_mentioned_p (rtx in
,
4867 rtx reg
, /* NULL for mem */
4868 int modes_too
) /* if nonzero, modes must match also. */
4876 if (reg
&& GET_CODE (reg
) != REG
)
4879 if (GET_CODE (in
) == LABEL_REF
)
4882 code
= GET_CODE (in
);
4888 return mep_mentioned_p (XEXP (in
, 0), reg
, modes_too
);
/* REG case: compare register numbers (and modes when requested).  */
4894 if (modes_too
&& (GET_MODE (in
) != GET_MODE (reg
)))
4896 return (REGNO (in
) == REGNO (reg
));
4909 /* Set's source should be read-only. */
4910 if (code
== SET
&& !reg
)
4911 return mep_mentioned_p (SET_DEST (in
), reg
, modes_too
);
/* Generic walk over the rtx format string: recurse into vector ('E')
   and expression ('e') operands.  */
4913 fmt
= GET_RTX_FORMAT (code
);
4915 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4920 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
4921 if (mep_mentioned_p (XVECEXP (in
, i
, j
), reg
, modes_too
))
4924 else if (fmt
[i
] == 'e'
4925 && mep_mentioned_p (XEXP (in
, i
), reg
, modes_too
))
4931 #define EXPERIMENTAL_REGMOVE_REORG 1
4933 #if EXPERIMENTAL_REGMOVE_REORG

/* Return whether registers R1 and R2 belong to the same replaceable
   class (both general-purpose or both coprocessor registers); the
   result lines are missing from this extraction.  */
4936 mep_compatible_reg_class (int r1
, int r2
)
4938 if (GR_REGNO_P (r1
) && GR_REGNO_P (r2
))
4940 if (CR_REGNO_P (r1
) && CR_REGNO_P (r2
))

/* NOTE(review): partial extraction — lines missing; comments cover
   visible code only.

   Machine-dependent reorg sub-pass that deletes superfluous
   register-to-register moves (see the in-body comment below for the
   pattern being matched).  */
4946 mep_reorg_regmove (rtx insns
)
4948 rtx insn
, next
, pat
, follow
, *where
;
4949 int count
= 0, done
= 0, replace
, before
= 0;
/* First scan: count candidate INSNs (body of the count is missing).  */
4952 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
4953 if (GET_CODE (insn
) == INSN
)
4956 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4957 set that uses the r2 and r2 dies there. We replace r2 with r1
4958 and see if it's still a valid insn. If so, delete the first set.
4959 Copied from reorg.c. */
4964 for (insn
= insns
; insn
; insn
= next
)
4966 next
= NEXT_INSN (insn
);
4967 if (GET_CODE (insn
) != INSN
)
4969 pat
= PATTERN (insn
);
/* Candidate: reg-to-reg copy whose source dies here, with both regs in
   a compatible class.  */
4973 if (GET_CODE (pat
) == SET
4974 && GET_CODE (SET_SRC (pat
)) == REG
4975 && GET_CODE (SET_DEST (pat
)) == REG
4976 && find_regno_note (insn
, REG_DEAD
, REGNO (SET_SRC (pat
)))
4977 && mep_compatible_reg_class (REGNO (SET_SRC (pat
)), REGNO (SET_DEST (pat
))))
4979 follow
= next_nonnote_insn (insn
);
4981 fprintf (dump_file
, "superfluous moves: considering %d\n", INSN_UID (insn
));
/* Skip forward past insns that touch neither source nor dest.  */
4983 while (follow
&& GET_CODE (follow
) == INSN
4984 && GET_CODE (PATTERN (follow
)) == SET
4985 && !dead_or_set_p (follow
, SET_SRC (pat
))
4986 && !mep_mentioned_p (PATTERN (follow
), SET_SRC (pat
), 0)
4987 && !mep_mentioned_p (PATTERN (follow
), SET_DEST (pat
), 0))
4990 fprintf (dump_file
, "\tskipping %d\n", INSN_UID (follow
));
4991 follow
= next_nonnote_insn (follow
);
4995 fprintf (dump_file
, "\tfollow is %d\n", INSN_UID (follow
));
/* FOLLOW must be a SET where the copied register dies; locate where
   the replacement should be attempted (src only for reg dests, whole
   pattern for mem dests).  */
4996 if (follow
&& GET_CODE (follow
) == INSN
4997 && GET_CODE (PATTERN (follow
)) == SET
4998 && find_regno_note (follow
, REG_DEAD
, REGNO (SET_DEST (pat
))))
5000 if (GET_CODE (SET_DEST (PATTERN (follow
))) == REG
)
5002 if (mep_mentioned_p (SET_SRC (PATTERN (follow
)), SET_DEST (pat
), 1))
5005 where
= & SET_SRC (PATTERN (follow
));
5008 else if (GET_CODE (SET_DEST (PATTERN (follow
))) == MEM
)
5010 if (mep_mentioned_p (PATTERN (follow
), SET_DEST (pat
), 1))
5013 where
= & PATTERN (follow
);
5019 /* If so, follow is the corresponding insn */
5026 fprintf (dump_file
, "----- Candidate for superfluous move deletion:\n\n");
5027 for (x
= insn
; x
;x
= NEXT_INSN (x
))
5029 print_rtl_single (dump_file
, x
);
5032 fprintf (dump_file
, "\n");
/* Try the substitution; on success delete the original move.  */
5036 if (validate_replace_rtx_subexp (SET_DEST (pat
), SET_SRC (pat
),
5040 next
= delete_insn (insn
);
5043 fprintf (dump_file
, "\n----- Success! new insn:\n\n");
5044 print_rtl_single (dump_file
, follow
);
5054 fprintf (dump_file
, "\n%d insn%s deleted out of %d.\n\n", count
, count
== 1 ? "" : "s", before
);
5055 fprintf (dump_file
, "=====\n");
5061 /* Figure out where to put LABEL, which is the label for a repeat loop.
5062 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5063 the loop ends just before LAST_INSN. If SHARED, insns other than the
5064 "repeat" might use LABEL to jump to the loop's continuation point.
5066 Return the last instruction in the adjusted loop. */
/* NOTE(review): partial extraction — lines missing; comments cover
   visible code only.  The MeP repeat instruction requires the last two
   opcode slots of the loop to be simple; this routine walks backwards
   from LAST_INSN collecting up to two eligible insns and pads with
   nops as needed.  */
5069 mep_insert_repeat_label_last (rtx last_insn
, rtx label
, bool including
,
5073 int count
= 0, code
, icode
;
5076 fprintf (dump_file
, "considering end of repeat loop at insn %d\n",
5077 INSN_UID (last_insn
));
5079 /* Set PREV to the last insn in the loop. */
5082 prev
= PREV_INSN (prev
);
5084 /* Set NEXT to the next insn after the repeat label. */
/* Calls, labels and barriers can never occupy the repeat epilogue.  */
5089 code
= GET_CODE (prev
);
5090 if (code
== CALL_INSN
|| code
== CODE_LABEL
|| code
== BARRIER
)
/* For a delay-slot SEQUENCE, inspect the slotted insn.  */
5095 if (GET_CODE (PATTERN (prev
)) == SEQUENCE
)
5096 prev
= XVECEXP (PATTERN (prev
), 0, 1);
5098 /* Other insns that should not be in the last two opcodes. */
5099 icode
= recog_memoized (prev
);
5101 || icode
== CODE_FOR_repeat
5102 || icode
== CODE_FOR_erepeat
5103 || get_attr_may_trap (prev
) == MAY_TRAP_YES
)
5106 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5107 is the second instruction in a VLIW bundle. In that case,
5108 loop again: if the first instruction also satisfies the
5109 conditions above then we will reach here again and put
5110 both of them into the repeat epilogue. Otherwise both
5111 should remain outside. */
5112 if (GET_MODE (prev
) != BImode
)
5117 print_rtl_single (dump_file
, next
);
5122 prev
= PREV_INSN (prev
);
5125 /* See if we're adding the label immediately after the repeat insn.
5126 If so, we need to separate them with a nop. */
5127 prev
= prev_real_insn (next
);
5129 switch (recog_memoized (prev
))
5131 case CODE_FOR_repeat
:
5132 case CODE_FOR_erepeat
:
5134 fprintf (dump_file
, "Adding nop inside loop\n");
5135 emit_insn_before (gen_nop (), next
);
5142 /* Insert the label. */
5143 emit_label_before (label
, next
);
5145 /* Insert the nops. */
5146 if (dump_file
&& count
< 2)
5147 fprintf (dump_file
, "Adding %d nop%s\n\n",
5148 2 - count
, count
== 1 ? "" : "s");
/* Pad the epilogue out to exactly two opcodes.  */
5150 for (; count
< 2; count
++)
5152 last_insn
= emit_insn_after (gen_nop (), last_insn
);
5154 emit_insn_before (gen_nop (), last_insn
);
/* Emit a doloop_begin (IS_END == 0) or doloop_end (IS_END != 0) for
   OPERANDS, tagging it with the current per-function doloop tag so the
   reorg pass can match begins with ends.  A new tag is allocated when
   none exists yet or when two insns of the same kind would otherwise
   share a tag (doloop_tag_from_end tracks which kind used it last).  */
5161 mep_emit_doloop (rtx
*operands
, int is_end
)
5165 if (cfun
->machine
->doloop_tags
== 0
5166 || cfun
->machine
->doloop_tag_from_end
== is_end
)
5168 cfun
->machine
->doloop_tags
++;
5169 cfun
->machine
->doloop_tag_from_end
= is_end
;
5172 tag
= GEN_INT (cfun
->machine
->doloop_tags
- 1);
5174 emit_jump_insn (gen_doloop_end_internal (operands
[0], operands
[4], tag
));
5176 emit_insn (gen_doloop_begin_internal (operands
[0], operands
[0], tag
));
5180 /* Code for converting doloop_begins and doloop_ends into valid
5181 MeP instructions. A doloop_begin is just a placeholder:
5183 $count = unspec ($count)
5185 where $count is initially the number of iterations - 1.
5186 doloop_end has the form:
5188 if ($count-- == 0) goto label
5190 The counter variable is private to the doloop insns, nothing else
5191 relies on its value.
5193 There are three cases, in decreasing order of preference:
5195 1. A loop has exactly one doloop_begin and one doloop_end.
5196 The doloop_end branches to the first instruction after
5199 In this case we can replace the doloop_begin with a repeat
5200 instruction and remove the doloop_end. I.e.:
5202 $count1 = unspec ($count1)
5207 if ($count2-- == 0) goto label
5211 repeat $count1,repeat_label
5219 2. As for (1), except there are several doloop_ends. One of them
5220 (call it X) falls through to a label L. All the others fall
5221 through to branches to L.
5223 In this case, we remove X and replace the other doloop_ends
5224 with branches to the repeat label. For example:
5226 $count1 = unspec ($count1)
5229 if ($count2-- == 0) goto label
5232 if ($count3-- == 0) goto label
5237 repeat $count1,repeat_label
5248 3. The fallback case. Replace doloop_begins with:
5252 Replace doloop_ends with the equivalent of:
5255 if ($count == 0) goto label
5257 Note that this might need a scratch register if $count
5258 is stored in memory. */
5260 /* A structure describing one doloop_begin. */
5261 struct mep_doloop_begin
{
5262 /* The next doloop_begin with the same tag. */
5263 struct mep_doloop_begin
*next
;
5265 /* The instruction itself. */
5268 /* The initial counter value. This is known to be a general register. */
5272 /* A structure describing a doloop_end. */
5273 struct mep_doloop_end
{
5274 /* The next doloop_end with the same loop tag. */
5275 struct mep_doloop_end
*next
;
5277 /* The instruction itself. */
5280 /* The first instruction after INSN when the branch isn't taken. */
5283 /* The location of the counter value. Since doloop_end_internal is a
5284 jump instruction, it has to allow the counter to be stored anywhere
5285 (any non-fixed register or memory location). */
5288 /* The target label (the place where the insn branches when the counter
5292 /* A scratch register. Only available when COUNTER isn't stored
5293 in a general register. */
5298 /* One do-while loop. */
5300 /* All the doloop_begins for this loop (in no particular order). */
5301 struct mep_doloop_begin
*begin
;
5303 /* All the doloop_ends. When there is more than one, arrange things
5304 so that the first one is the most likely to be X in case (2) above. */
5305 struct mep_doloop_end
*end
;
5309 /* Return true if LOOP can be converted into repeat/repeat_end form
5310 (that is, if it matches cases (1) or (2) above). */
5313 mep_repeat_loop_p (struct mep_doloop
*loop
)
5315 struct mep_doloop_end
*end
;
5318 /* There must be exactly one doloop_begin and at least one doloop_end. */
5319 if (loop
->begin
== 0 || loop
->end
== 0 || loop
->begin
->next
!= 0)
5322 /* The first doloop_end (X) must branch back to the insn after
5323 the doloop_begin. */
5324 if (prev_real_insn (loop
->end
->label
) != loop
->begin
->insn
)
5327 /* All the other doloop_ends must branch to the same place as X.
5328 When the branch isn't taken, they must jump to the instruction
5330 fallthrough
= loop
->end
->fallthrough
;
5331 for (end
= loop
->end
->next
; end
!= 0; end
= end
->next
)
5332 if (end
->label
!= loop
->end
->label
5333 || !simplejump_p (end
->fallthrough
)
5334 || next_real_insn (JUMP_LABEL (end
->fallthrough
)) != fallthrough
)
5341 /* The main repeat reorg function. See comment above for details. */
/* NOTE(review): partial extraction — lines missing; comments cover
   visible code only.  */
5344 mep_reorg_repeat (rtx insns
)
5347 struct mep_doloop
*loops
, *loop
;
5348 struct mep_doloop_begin
*begin
;
5349 struct mep_doloop_end
*end
;
5351 /* Quick exit if we haven't created any loops. */
5352 if (cfun
->machine
->doloop_tags
== 0)
5355 /* Create an array of mep_doloop structures. */
5356 loops
= (struct mep_doloop
*) alloca (sizeof (loops
[0]) * cfun
->machine
->doloop_tags
);
5357 memset (loops
, 0, sizeof (loops
[0]) * cfun
->machine
->doloop_tags
);
5359 /* Search the function for do-while insns and group them by loop tag. */
5360 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5362 switch (recog_memoized (insn
))
5364 case CODE_FOR_doloop_begin_internal
:
5365 insn_extract (insn
);
/* operand[2] is the loop tag; index into the per-tag array.  */
5366 loop
= &loops
[INTVAL (recog_data
.operand
[2])];
5368 begin
= (struct mep_doloop_begin
*) alloca (sizeof (struct mep_doloop_begin
));
5369 begin
->next
= loop
->begin
;
5371 begin
->counter
= recog_data
.operand
[0];
5373 loop
->begin
= begin
;
5376 case CODE_FOR_doloop_end_internal
:
5377 insn_extract (insn
);
5378 loop
= &loops
[INTVAL (recog_data
.operand
[2])];
5380 end
= (struct mep_doloop_end
*) alloca (sizeof (struct mep_doloop_end
));
5382 end
->fallthrough
= next_real_insn (insn
);
5383 end
->counter
= recog_data
.operand
[0];
5384 end
->label
= recog_data
.operand
[1];
5385 end
->scratch
= recog_data
.operand
[3];
5387 /* If this insn falls through to an unconditional jump,
5388 give it a lower priority than the others. */
5389 if (loop
->end
!= 0 && simplejump_p (end
->fallthrough
))
5391 end
->next
= loop
->end
->next
;
5392 loop
->end
->next
= end
;
5396 end
->next
= loop
->end
;
5402 /* Convert the insns for each loop in turn. */
5403 for (loop
= loops
; loop
< loops
+ cfun
->machine
->doloop_tags
; loop
++)
5404 if (mep_repeat_loop_p (loop
))
5406 /* Case (1) or (2). */
5407 rtx repeat_label
, label_ref
;
5409 /* Create a new label for the repeat insn. */
5410 repeat_label
= gen_label_rtx ();
5412 /* Replace the doloop_begin with a repeat. */
5413 label_ref
= gen_rtx_LABEL_REF (VOIDmode
, repeat_label
);
5414 emit_insn_before (gen_repeat (loop
->begin
->counter
, label_ref
),
5416 delete_insn (loop
->begin
->insn
);
5418 /* Insert the repeat label before the first doloop_end.
5419 Fill the gap with nops if there are other doloop_ends. */
5420 mep_insert_repeat_label_last (loop
->end
->insn
, repeat_label
,
5421 false, loop
->end
->next
!= 0);
5423 /* Emit a repeat_end (to improve the readability of the output). */
5424 emit_insn_before (gen_repeat_end (), loop
->end
->insn
);
5426 /* Delete the first doloop_end. */
5427 delete_insn (loop
->end
->insn
);
5429 /* Replace the others with branches to REPEAT_LABEL. */
5430 for (end
= loop
->end
->next
; end
!= 0; end
= end
->next
)
5432 emit_jump_insn_before (gen_jump (repeat_label
), end
->insn
);
5433 delete_insn (end
->insn
);
5434 delete_insn (end
->fallthrough
);
5439 /* Case (3). First replace all the doloop_begins with increment
5441 for (begin
= loop
->begin
; begin
!= 0; begin
= begin
->next
)
5443 emit_insn_before (gen_add3_insn (copy_rtx (begin
->counter
),
5444 begin
->counter
, const1_rtx
),
5446 delete_insn (begin
->insn
);
5449 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5450 for (end
= loop
->end
; end
!= 0; end
= end
->next
)
5456 /* Load the counter value into a general register. */
5458 if (!REG_P (reg
) || REGNO (reg
) > 15)
5461 emit_move_insn (copy_rtx (reg
), copy_rtx (end
->counter
));
5464 /* Decrement the counter. */
5465 emit_insn (gen_add3_insn (copy_rtx (reg
), copy_rtx (reg
),
5468 /* Copy it back to its original location. */
5469 if (reg
!= end
->counter
)
5470 emit_move_insn (copy_rtx (end
->counter
), copy_rtx (reg
));
5472 /* Jump back to the start label. */
5473 insn
= emit_jump_insn (gen_mep_bne_true (reg
, const0_rtx
,
5475 JUMP_LABEL (insn
) = end
->label
;
5476 LABEL_NUSES (end
->label
)++;
5478 /* Emit the whole sequence before the doloop_end. */
5479 insn
= get_insns ();
5481 emit_insn_before (insn
, end
->insn
);
5483 /* Delete the doloop_end. */
5484 delete_insn (end
->insn
);
/* NOTE(review): partial extraction — lines missing (the switch labels
   naming which codes map to NE/EQ/GE/LT are absent); comments cover
   visible code only.

   Test whether conditional-branch INSN can be inverted: temporarily
   flip the condition code, re-recognize the insn, then restore the
   original code; the recog result determines the answer (return lines
   missing).  */
5491 mep_invertable_branch_p (rtx insn
)
5494 enum rtx_code old_code
;
5497 set
= PATTERN (insn
);
5498 if (GET_CODE (set
) != SET
)
5500 if (GET_CODE (XEXP (set
, 1)) != IF_THEN_ELSE
)
5502 cond
= XEXP (XEXP (set
, 1), 0);
5503 old_code
= GET_CODE (cond
);
5507 PUT_CODE (cond
, NE
);
5510 PUT_CODE (cond
, EQ
);
5513 PUT_CODE (cond
, GE
);
5516 PUT_CODE (cond
, LT
);
/* Force re-recognition with the flipped code, then restore.  */
5521 INSN_CODE (insn
) = -1;
5522 i
= recog_memoized (insn
);
5523 PUT_CODE (cond
, old_code
);
5524 INSN_CODE (insn
) = -1;

/* Invert conditional-branch INSN in place and retarget it to a fresh
   label emitted after AFTER.  The old target label is deleted when
   this branch was its only user.  Asserts the result re-recognizes.  */
5529 mep_invert_branch (rtx insn
, rtx after
)
5531 rtx cond
, set
, label
;
5534 set
= PATTERN (insn
);
5536 gcc_assert (GET_CODE (set
) == SET
);
5537 gcc_assert (GET_CODE (XEXP (set
, 1)) == IF_THEN_ELSE
);
5539 cond
= XEXP (XEXP (set
, 1), 0);
5540 switch (GET_CODE (cond
))
5543 PUT_CODE (cond
, NE
);
5546 PUT_CODE (cond
, EQ
);
5549 PUT_CODE (cond
, GE
);
5552 PUT_CODE (cond
, LT
);
5557 label
= gen_label_rtx ();
5558 emit_label_after (label
, after
);
/* Find the LABEL_REF arm of the IF_THEN_ELSE and swap in LABEL.  */
5559 for (i
=1; i
<=2; i
++)
5560 if (GET_CODE (XEXP (XEXP (set
, 1), i
)) == LABEL_REF
)
5562 rtx ref
= XEXP (XEXP (set
, 1), i
);
5563 if (LABEL_NUSES (XEXP (ref
, 0)) == 1)
5564 delete_insn (XEXP (ref
, 0));
5565 XEXP (ref
, 0) = label
;
5566 LABEL_NUSES (label
) ++;
5567 JUMP_LABEL (insn
) = label
;
5569 INSN_CODE (insn
) = -1;
5570 i
= recog_memoized (insn
);
5571 gcc_assert (i
>= 0);
/* NOTE(review): partial extraction — lines missing; comments cover
   visible code only.

   Reorg sub-pass that converts small backward-branch loops into the
   MeP "erepeat" (endless-repeat) form: for each invertable backward
   branch whose loop top can be proven safe, insert an erepeat after
   the loop-top label, place the erepeat label/epilogue via
   mep_insert_repeat_label_last, and invert the branch.  */
5575 mep_reorg_erepeat (rtx insns
)
5577 rtx insn
, prev
, l
, x
;
5580 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5582 && ! JUMP_TABLE_DATA_P (insn
)
5583 && mep_invertable_branch_p (insn
))
5587 fprintf (dump_file
, "\n------------------------------\n");
5588 fprintf (dump_file
, "erepeat: considering this jump:\n");
5589 print_rtl_single (dump_file
, insn
);
/* Walk backwards from the branch looking for its own target label;
   COUNT tracks the loop body size.  */
5591 count
= simplejump_p (insn
) ? 0 : 1;
5592 for (prev
= PREV_INSN (insn
); prev
; prev
= PREV_INSN (prev
))
5594 if (GET_CODE (prev
) == CALL_INSN
5595 || BARRIER_P (prev
))
5598 if (prev
== JUMP_LABEL (insn
))
5602 fprintf (dump_file
, "found loop top, %d insns\n", count
);
/* Safety analysis on the loop-top label's other users.  */
5604 if (LABEL_NUSES (prev
) == 1)
5605 /* We're the only user, always safe */ ;
5606 else if (LABEL_NUSES (prev
) == 2)
5608 /* See if there's a barrier before this label. If
5609 so, we know nobody inside the loop uses it.
5610 But we must be careful to put the erepeat
5611 *after* the label. */
5613 for (barrier
= PREV_INSN (prev
);
5614 barrier
&& GET_CODE (barrier
) == NOTE
;
5615 barrier
= PREV_INSN (barrier
))
5617 if (barrier
&& GET_CODE (barrier
) != BARRIER
)
5622 /* We don't know who else, within or without our loop, uses this */
5624 fprintf (dump_file
, "... but there are multiple users, too risky.\n");
5628 /* Generate a label to be used by the erepeat insn. */
5629 l
= gen_label_rtx ();
5631 /* Insert the erepeat after INSN's target label. */
5632 x
= gen_erepeat (gen_rtx_LABEL_REF (VOIDmode
, l
));
5634 emit_insn_after (x
, prev
);
5636 /* Insert the erepeat label. */
5637 newlast
= (mep_insert_repeat_label_last
5638 (insn
, l
, !simplejump_p (insn
), false));
5639 if (simplejump_p (insn
))
5641 emit_insn_before (gen_erepeat_end (), insn
);
5646 mep_invert_branch (insn
, newlast
);
5647 emit_insn_after (gen_erepeat_end (), newlast
);
5654 /* A label is OK if there is exactly one user, and we
5655 can find that user before the next label. */
5658 if (LABEL_NUSES (prev
) == 1)
5660 for (user
= PREV_INSN (prev
);
5661 user
&& (INSN_P (user
) || GET_CODE (user
) == NOTE
);
5662 user
= PREV_INSN (user
))
5663 if (GET_CODE (user
) == JUMP_INSN
5664 && JUMP_LABEL (user
) == prev
)
5666 safe
= INSN_UID (user
);
5673 fprintf (dump_file
, "... ignoring jump from insn %d to %d\n",
5674 safe
, INSN_UID (prev
));
5684 fprintf (dump_file
, "\n==============================\n");
5687 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5688 always do this on its own. */
/* NOTE(review): partial extraction — lines missing; comments cover
   visible code only.  */
5691 mep_jmp_return_reorg (rtx insns
)
5693 rtx insn
, label
, ret
;
5696 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5697 if (simplejump_p (insn
))
5699 /* Find the first real insn the jump jumps to. */
5700 label
= ret
= JUMP_LABEL (insn
);
5702 && (GET_CODE (ret
) == NOTE
5703 || GET_CODE (ret
) == CODE_LABEL
5704 || GET_CODE (PATTERN (ret
)) == USE
))
5705 ret
= NEXT_INSN (ret
);
5709 /* Is it a return? */
5710 ret_code
= recog_memoized (ret
);
5711 if (ret_code
== CODE_FOR_return_internal
5712 || ret_code
== CODE_FOR_eh_return_internal
)
5714 /* It is. Replace the jump with a return. */
5715 LABEL_NUSES (label
) --;
5716 if (LABEL_NUSES (label
) == 0)
5717 delete_insn (label
);
5718 PATTERN (insn
) = copy_rtx (PATTERN (ret
));
5719 INSN_CODE (insn
) = -1;

/* Peephole-style pass that merges two consecutive addsi3 insns of the
   form "r += c1; r += c2" into a single "r += c1+c2", provided the
   combined constant stays in range (upper bound check is missing from
   this extraction; the visible lower bound is > -32768).  Note the
   second insn is unlinked by direct NEXT_INSN/PREV_INSN surgery.  */
5727 mep_reorg_addcombine (rtx insns
)
5731 for (i
= insns
; i
; i
= NEXT_INSN (i
))
5733 && INSN_CODE (i
) == CODE_FOR_addsi3
5734 && GET_CODE (SET_DEST (PATTERN (i
))) == REG
5735 && GET_CODE (XEXP (SET_SRC (PATTERN (i
)), 0)) == REG
5736 && REGNO (SET_DEST (PATTERN (i
))) == REGNO (XEXP (SET_SRC (PATTERN (i
)), 0))
5737 && GET_CODE (XEXP (SET_SRC (PATTERN (i
)), 1)) == CONST_INT
)
5741 && INSN_CODE (n
) == CODE_FOR_addsi3
5742 && GET_CODE (SET_DEST (PATTERN (n
))) == REG
5743 && GET_CODE (XEXP (SET_SRC (PATTERN (n
)), 0)) == REG
5744 && REGNO (SET_DEST (PATTERN (n
))) == REGNO (XEXP (SET_SRC (PATTERN (n
)), 0))
5745 && GET_CODE (XEXP (SET_SRC (PATTERN (n
)), 1)) == CONST_INT
)
5747 int ic
= INTVAL (XEXP (SET_SRC (PATTERN (i
)), 1));
5748 int nc
= INTVAL (XEXP (SET_SRC (PATTERN (n
)), 1));
5749 if (REGNO (SET_DEST (PATTERN (i
))) == REGNO (SET_DEST (PATTERN (n
)))
5751 && ic
+ nc
> -32768)
5753 XEXP (SET_SRC (PATTERN (i
)), 1) = GEN_INT (ic
+ nc
);
5754 NEXT_INSN (i
) = NEXT_INSN (n
);
5756 PREV_INSN (NEXT_INSN (i
)) = i
;
5762 /* If this insn adjusts the stack, return the adjustment, else return
5765 add_sp_insn_p (rtx insn
)
5769 if (! single_set (insn
))
5771 pat
= PATTERN (insn
);
5772 if (GET_CODE (SET_DEST (pat
)) != REG
)
5774 if (REGNO (SET_DEST (pat
)) != SP_REGNO
)
5776 if (GET_CODE (SET_SRC (pat
)) != PLUS
)
5778 if (GET_CODE (XEXP (SET_SRC (pat
), 0)) != REG
)
5780 if (REGNO (XEXP (SET_SRC (pat
), 0)) != SP_REGNO
)
5782 if (GET_CODE (XEXP (SET_SRC (pat
), 1)) != CONST_INT
)
5784 return INTVAL (XEXP (SET_SRC (pat
), 1));
5787 /* Check for trivial functions that set up an unneeded stack
5790 mep_reorg_noframe (rtx insns
)
5792 rtx start_frame_insn
;
5793 rtx end_frame_insn
= 0;
5797 /* The first insn should be $sp = $sp + N */
5798 while (insns
&& ! INSN_P (insns
))
5799 insns
= NEXT_INSN (insns
);
5803 sp_adjust
= add_sp_insn_p (insns
);
5807 start_frame_insn
= insns
;
5808 sp
= SET_DEST (PATTERN (start_frame_insn
));
5810 insns
= next_real_insn (insns
);
5814 rtx next
= next_real_insn (insns
);
5818 sp2
= add_sp_insn_p (insns
);
5823 end_frame_insn
= insns
;
5824 if (sp2
!= -sp_adjust
)
5827 else if (mep_mentioned_p (insns
, sp
, 0))
5829 else if (CALL_P (insns
))
5837 delete_insn (start_frame_insn
);
5838 delete_insn (end_frame_insn
);
5845 rtx insns
= get_insns ();
5847 /* We require accurate REG_DEAD notes. */
5848 compute_bb_for_insn ();
5849 df_note_add_problem ();
5852 mep_reorg_addcombine (insns
);
5853 #if EXPERIMENTAL_REGMOVE_REORG
5854 /* VLIW packing has been done already, so we can't just delete things. */
5855 if (!mep_vliw_function_p (cfun
->decl
))
5856 mep_reorg_regmove (insns
);
5858 mep_jmp_return_reorg (insns
);
5859 mep_bundle_insns (insns
);
5860 mep_reorg_repeat (insns
);
5863 && !profile_arc_flag
5864 && TARGET_OPT_REPEAT
5865 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO
)))
5866 mep_reorg_erepeat (insns
);
5868 /* This may delete *insns so make sure it's last. */
5869 mep_reorg_noframe (insns
);
5871 df_finish_pass (false);
5876 /*----------------------------------------------------------------------*/
5878 /*----------------------------------------------------------------------*/
5880 /* Element X gives the index into cgen_insns[] of the most general
5881 implementation of intrinsic X. Unimplemented intrinsics are
5883 int mep_intrinsic_insn
[ARRAY_SIZE (cgen_intrinsics
)];
5885 /* Element X gives the index of another instruction that is mapped to
5886 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5889 Things are set up so that mep_intrinsic_chain[X] < X. */
5890 static int mep_intrinsic_chain
[ARRAY_SIZE (cgen_insns
)];
5892 /* The bitmask for the current ISA. The ISA masks are declared
5894 unsigned int mep_selected_isa
;
5897 const char *config_name
;
5901 static struct mep_config mep_configs
[] = {
5902 #ifdef COPROC_SELECTION_TABLE
5903 COPROC_SELECTION_TABLE
,
5908 /* Initialize the global intrinsics variables above. */
5911 mep_init_intrinsics (void)
5915 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5916 mep_selected_isa
= mep_configs
[0].isa
;
5917 if (mep_config_string
!= 0)
5918 for (i
= 0; mep_configs
[i
].config_name
; i
++)
5919 if (strcmp (mep_config_string
, mep_configs
[i
].config_name
) == 0)
5921 mep_selected_isa
= mep_configs
[i
].isa
;
5925 /* Assume all intrinsics are unavailable. */
5926 for (i
= 0; i
< ARRAY_SIZE (mep_intrinsic_insn
); i
++)
5927 mep_intrinsic_insn
[i
] = -1;
5929 /* Build up the global intrinsic tables. */
5930 for (i
= 0; i
< ARRAY_SIZE (cgen_insns
); i
++)
5931 if ((cgen_insns
[i
].isas
& mep_selected_isa
) != 0)
5933 mep_intrinsic_chain
[i
] = mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
];
5934 mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
] = i
;
5936 /* See whether we can directly move values between one coprocessor
5937 register and another. */
5938 for (i
= 0; i
< ARRAY_SIZE (mep_cmov_insns
); i
++)
5939 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns
[i
]))
5940 mep_have_copro_copro_moves_p
= true;
5942 /* See whether we can directly move values between core and
5943 coprocessor registers. */
5944 mep_have_core_copro_moves_p
= (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1
)
5945 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2
));
5947 mep_have_core_copro_moves_p
= 1;
5950 /* Declare all available intrinsic functions. Called once only. */
5952 static tree cp_data_bus_int_type_node
;
5953 static tree opaque_vector_type_node
;
5954 static tree v8qi_type_node
;
5955 static tree v4hi_type_node
;
5956 static tree v2si_type_node
;
5957 static tree v8uqi_type_node
;
5958 static tree v4uhi_type_node
;
5959 static tree v2usi_type_node
;
5962 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr
)
5966 case cgen_regnum_operand_type_POINTER
: return ptr_type_node
;
5967 case cgen_regnum_operand_type_LONG
: return long_integer_type_node
;
5968 case cgen_regnum_operand_type_ULONG
: return long_unsigned_type_node
;
5969 case cgen_regnum_operand_type_SHORT
: return short_integer_type_node
;
5970 case cgen_regnum_operand_type_USHORT
: return short_unsigned_type_node
;
5971 case cgen_regnum_operand_type_CHAR
: return char_type_node
;
5972 case cgen_regnum_operand_type_UCHAR
: return unsigned_char_type_node
;
5973 case cgen_regnum_operand_type_SI
: return intSI_type_node
;
5974 case cgen_regnum_operand_type_DI
: return intDI_type_node
;
5975 case cgen_regnum_operand_type_VECTOR
: return opaque_vector_type_node
;
5976 case cgen_regnum_operand_type_V8QI
: return v8qi_type_node
;
5977 case cgen_regnum_operand_type_V4HI
: return v4hi_type_node
;
5978 case cgen_regnum_operand_type_V2SI
: return v2si_type_node
;
5979 case cgen_regnum_operand_type_V8UQI
: return v8uqi_type_node
;
5980 case cgen_regnum_operand_type_V4UHI
: return v4uhi_type_node
;
5981 case cgen_regnum_operand_type_V2USI
: return v2usi_type_node
;
5982 case cgen_regnum_operand_type_CP_DATA_BUS_INT
: return cp_data_bus_int_type_node
;
5984 return void_type_node
;
5989 mep_init_builtins (void)
5993 if (TARGET_64BIT_CR_REGS
)
5994 cp_data_bus_int_type_node
= long_long_integer_type_node
;
5996 cp_data_bus_int_type_node
= long_integer_type_node
;
5998 opaque_vector_type_node
= build_opaque_vector_type (intQI_type_node
, 8);
5999 v8qi_type_node
= build_vector_type (intQI_type_node
, 8);
6000 v4hi_type_node
= build_vector_type (intHI_type_node
, 4);
6001 v2si_type_node
= build_vector_type (intSI_type_node
, 2);
6002 v8uqi_type_node
= build_vector_type (unsigned_intQI_type_node
, 8);
6003 v4uhi_type_node
= build_vector_type (unsigned_intHI_type_node
, 4);
6004 v2usi_type_node
= build_vector_type (unsigned_intSI_type_node
, 2);
6006 (*lang_hooks
.decls
.pushdecl
)
6007 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_data_bus_int"),
6008 cp_data_bus_int_type_node
));
6010 (*lang_hooks
.decls
.pushdecl
)
6011 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_vector"),
6012 opaque_vector_type_node
));
6014 (*lang_hooks
.decls
.pushdecl
)
6015 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_v8qi"),
6017 (*lang_hooks
.decls
.pushdecl
)
6018 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_v4hi"),
6020 (*lang_hooks
.decls
.pushdecl
)
6021 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_v2si"),
6024 (*lang_hooks
.decls
.pushdecl
)
6025 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_v8uqi"),
6027 (*lang_hooks
.decls
.pushdecl
)
6028 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_v4uhi"),
6030 (*lang_hooks
.decls
.pushdecl
)
6031 (build_decl (BUILTINS_LOCATION
, TYPE_DECL
, get_identifier ("cp_v2usi"),
6034 /* Intrinsics like mep_cadd3 are implemented with two groups of
6035 instructions, one which uses UNSPECs and one which uses a specific
6036 rtl code such as PLUS. Instructions in the latter group belong
6037 to GROUP_KNOWN_CODE.
6039 In such cases, the intrinsic will have two entries in the global
6040 tables above. The unspec form is accessed using builtin functions
6041 while the specific form is accessed using the mep_* enum in
6044 The idea is that __cop arithmetic and builtin functions have
6045 different optimization requirements. If mep_cadd3() appears in
6046 the source code, the user will surely except gcc to use cadd3
6047 rather than a work-alike such as add3. However, if the user
6048 just writes "a + b", where a or b are __cop variables, it is
6049 reasonable for gcc to choose a core instruction rather than
6050 cadd3 if it believes that is more optimal. */
6051 for (i
= 0; i
< ARRAY_SIZE (cgen_insns
); i
++)
6052 if ((cgen_insns
[i
].groups
& GROUP_KNOWN_CODE
) == 0
6053 && mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
] >= 0)
6055 tree ret_type
= void_type_node
;
6058 if (i
> 0 && cgen_insns
[i
].intrinsic
== cgen_insns
[i
-1].intrinsic
)
6061 if (cgen_insns
[i
].cret_p
)
6062 ret_type
= mep_cgen_regnum_to_type (cgen_insns
[i
].regnums
[0].type
);
6064 bi_type
= build_function_type (ret_type
, 0);
6065 add_builtin_function (cgen_intrinsics
[cgen_insns
[i
].intrinsic
],
6067 cgen_insns
[i
].intrinsic
, BUILT_IN_MD
, NULL
, NULL
);
6071 /* Report the unavailablity of the given intrinsic. */
6075 mep_intrinsic_unavailable (int intrinsic
)
6077 static int already_reported_p
[ARRAY_SIZE (cgen_intrinsics
)];
6079 if (already_reported_p
[intrinsic
])
6082 if (mep_intrinsic_insn
[intrinsic
] < 0)
6083 error ("coprocessor intrinsic %qs is not available in this configuration",
6084 cgen_intrinsics
[intrinsic
]);
6085 else if (CGEN_CURRENT_GROUP
== GROUP_VLIW
)
6086 error ("%qs is not available in VLIW functions",
6087 cgen_intrinsics
[intrinsic
]);
6089 error ("%qs is not available in non-VLIW functions",
6090 cgen_intrinsics
[intrinsic
]);
6092 already_reported_p
[intrinsic
] = 1;
6097 /* See if any implementation of INTRINSIC is available to the
6098 current function. If so, store the most general implementation
6099 in *INSN_PTR and return true. Return false otherwise. */
6102 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED
, const struct cgen_insn
**insn_ptr ATTRIBUTE_UNUSED
)
6106 i
= mep_intrinsic_insn
[intrinsic
];
6107 while (i
>= 0 && !CGEN_ENABLE_INSN_P (i
))
6108 i
= mep_intrinsic_chain
[i
];
6112 *insn_ptr
= &cgen_insns
[i
];
6119 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6120 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6121 try using a work-alike instead. In this case, the returned insn
6122 may have three operands rather than two. */
6125 mep_get_move_insn (int intrinsic
, const struct cgen_insn
**cgen_insn
)
6129 if (intrinsic
== mep_cmov
)
6131 for (i
= 0; i
< ARRAY_SIZE (mep_cmov_insns
); i
++)
6132 if (mep_get_intrinsic_insn (mep_cmov_insns
[i
], cgen_insn
))
6136 return mep_get_intrinsic_insn (intrinsic
, cgen_insn
);
6140 /* If ARG is a register operand that is the same size as MODE, convert it
6141 to MODE using a subreg. Otherwise return ARG as-is. */
6144 mep_convert_arg (enum machine_mode mode
, rtx arg
)
6146 if (GET_MODE (arg
) != mode
6147 && register_operand (arg
, VOIDmode
)
6148 && GET_MODE_SIZE (GET_MODE (arg
)) == GET_MODE_SIZE (mode
))
6149 return simplify_gen_subreg (mode
, arg
, GET_MODE (arg
), 0);
6154 /* Apply regnum conversions to ARG using the description given by REGNUM.
6155 Return the new argument on success and null on failure. */
6158 mep_convert_regnum (const struct cgen_regnum_operand
*regnum
, rtx arg
)
6160 if (regnum
->count
== 0)
6163 if (GET_CODE (arg
) != CONST_INT
6165 || INTVAL (arg
) >= regnum
->count
)
6168 return gen_rtx_REG (SImode
, INTVAL (arg
) + regnum
->base
);
6172 /* Try to make intrinsic argument ARG match the given operand.
6173 UNSIGNED_P is true if the argument has an unsigned type. */
6176 mep_legitimize_arg (const struct insn_operand_data
*operand
, rtx arg
,
6179 if (GET_CODE (arg
) == CONST_INT
)
6181 /* CONST_INTs can only be bound to integer operands. */
6182 if (GET_MODE_CLASS (operand
->mode
) != MODE_INT
)
6185 else if (GET_CODE (arg
) == CONST_DOUBLE
)
6186 /* These hold vector constants. */;
6187 else if (GET_MODE_SIZE (GET_MODE (arg
)) != GET_MODE_SIZE (operand
->mode
))
6189 /* If the argument is a different size from what's expected, we must
6190 have a value in the right mode class in order to convert it. */
6191 if (GET_MODE_CLASS (operand
->mode
) != GET_MODE_CLASS (GET_MODE (arg
)))
6194 /* If the operand is an rvalue, promote or demote it to match the
6195 operand's size. This might not need extra instructions when
6196 ARG is a register value. */
6197 if (operand
->constraint
[0] != '=')
6198 arg
= convert_to_mode (operand
->mode
, arg
, unsigned_p
);
6201 /* If the operand is an lvalue, bind the operand to a new register.
6202 The caller will copy this value into ARG after the main
6203 instruction. By doing this always, we produce slightly more
6205 /* But not for control registers. */
6206 if (operand
->constraint
[0] == '='
6208 || ! (CONTROL_REGNO_P (REGNO (arg
))
6209 || CCR_REGNO_P (REGNO (arg
))
6210 || CR_REGNO_P (REGNO (arg
)))
6212 return gen_reg_rtx (operand
->mode
);
6214 /* Try simple mode punning. */
6215 arg
= mep_convert_arg (operand
->mode
, arg
);
6216 if (operand
->predicate (arg
, operand
->mode
))
6219 /* See if forcing the argument into a register will make it match. */
6220 if (GET_CODE (arg
) == CONST_INT
|| GET_CODE (arg
) == CONST_DOUBLE
)
6221 arg
= force_reg (operand
->mode
, arg
);
6223 arg
= mep_convert_arg (operand
->mode
, force_reg (GET_MODE (arg
), arg
));
6224 if (operand
->predicate (arg
, operand
->mode
))
6231 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6232 function FNNAME. OPERAND describes the operand to which ARGNUM
6236 mep_incompatible_arg (const struct insn_operand_data
*operand
, rtx arg
,
6237 int argnum
, tree fnname
)
6241 if (GET_CODE (arg
) == CONST_INT
)
6242 for (i
= 0; i
< ARRAY_SIZE (cgen_immediate_predicates
); i
++)
6243 if (operand
->predicate
== cgen_immediate_predicates
[i
].predicate
)
6245 const struct cgen_immediate_predicate
*predicate
;
6246 HOST_WIDE_INT argval
;
6248 predicate
= &cgen_immediate_predicates
[i
];
6249 argval
= INTVAL (arg
);
6250 if (argval
< predicate
->lower
|| argval
>= predicate
->upper
)
6251 error ("argument %d of %qE must be in the range %d...%d",
6252 argnum
, fnname
, predicate
->lower
, predicate
->upper
- 1);
6254 error ("argument %d of %qE must be a multiple of %d",
6255 argnum
, fnname
, predicate
->align
);
6259 error ("incompatible type for argument %d of %qE", argnum
, fnname
);
6263 mep_expand_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
6264 rtx subtarget ATTRIBUTE_UNUSED
,
6265 enum machine_mode mode ATTRIBUTE_UNUSED
,
6266 int ignore ATTRIBUTE_UNUSED
)
6268 rtx pat
, op
[10], arg
[10];
6270 int opindex
, unsigned_p
[10];
6272 unsigned int n_args
;
6274 const struct cgen_insn
*cgen_insn
;
6275 const struct insn_data_d
*idata
;
6276 unsigned int first_arg
= 0;
6277 unsigned int builtin_n_args
;
6279 fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
6280 fnname
= DECL_NAME (fndecl
);
6282 /* Find out which instruction we should emit. Note that some coprocessor
6283 intrinsics may only be available in VLIW mode, or only in normal mode. */
6284 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl
), &cgen_insn
))
6286 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl
));
6289 idata
= &insn_data
[cgen_insn
->icode
];
6291 builtin_n_args
= cgen_insn
->num_args
;
6293 if (cgen_insn
->cret_p
)
6295 if (cgen_insn
->cret_p
> 1)
6298 mep_cgen_regnum_to_type (cgen_insn
->regnums
[0].type
);
6302 /* Evaluate each argument. */
6303 n_args
= call_expr_nargs (exp
);
6305 if (n_args
< builtin_n_args
)
6307 error ("too few arguments to %qE", fnname
);
6310 if (n_args
> builtin_n_args
)
6312 error ("too many arguments to %qE", fnname
);
6316 for (a
= first_arg
; a
< builtin_n_args
+ first_arg
; a
++)
6320 args
= CALL_EXPR_ARG (exp
, a
- first_arg
);
6325 if (cgen_insn
->regnums
[a
].reference_p
)
6327 if (TREE_CODE (value
) != ADDR_EXPR
)
6330 error ("argument %d of %qE must be an address", a
+1, fnname
);
6333 value
= TREE_OPERAND (value
, 0);
6337 /* If the argument has been promoted to int, get the unpromoted
6338 value. This is necessary when sub-int memory values are bound
6339 to reference parameters. */
6340 if (TREE_CODE (value
) == NOP_EXPR
6341 && TREE_TYPE (value
) == integer_type_node
6342 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value
, 0)))
6343 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value
, 0)))
6344 < TYPE_PRECISION (TREE_TYPE (value
))))
6345 value
= TREE_OPERAND (value
, 0);
6347 /* If the argument has been promoted to double, get the unpromoted
6348 SFmode value. This is necessary for FMAX support, for example. */
6349 if (TREE_CODE (value
) == NOP_EXPR
6350 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value
))
6351 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value
, 0)))
6352 && TYPE_MODE (TREE_TYPE (value
)) == DFmode
6353 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value
, 0))) == SFmode
)
6354 value
= TREE_OPERAND (value
, 0);
6356 unsigned_p
[a
] = TYPE_UNSIGNED (TREE_TYPE (value
));
6357 arg
[a
] = expand_expr (value
, NULL
, VOIDmode
, EXPAND_NORMAL
);
6358 arg
[a
] = mep_convert_regnum (&cgen_insn
->regnums
[a
], arg
[a
]);
6359 if (cgen_insn
->regnums
[a
].reference_p
)
6361 tree pointed_to
= TREE_TYPE (TREE_TYPE (value
));
6362 enum machine_mode pointed_mode
= TYPE_MODE (pointed_to
);
6364 arg
[a
] = gen_rtx_MEM (pointed_mode
, arg
[a
]);
6368 error ("argument %d of %qE must be in the range %d...%d",
6369 a
+ 1, fnname
, 0, cgen_insn
->regnums
[a
].count
- 1);
6374 for (a
= 0; a
< first_arg
; a
++)
6376 if (a
== 0 && target
&& GET_MODE (target
) == idata
->operand
[0].mode
)
6379 arg
[a
] = gen_reg_rtx (idata
->operand
[0].mode
);
6382 /* Convert the arguments into a form suitable for the intrinsic.
6383 Report an error if this isn't possible. */
6384 for (opindex
= 0; opindex
< idata
->n_operands
; opindex
++)
6386 a
= cgen_insn
->op_mapping
[opindex
];
6387 op
[opindex
] = mep_legitimize_arg (&idata
->operand
[opindex
],
6388 arg
[a
], unsigned_p
[a
]);
6389 if (op
[opindex
] == 0)
6391 mep_incompatible_arg (&idata
->operand
[opindex
],
6392 arg
[a
], a
+ 1 - first_arg
, fnname
);
6397 /* Emit the instruction. */
6398 pat
= idata
->genfun (op
[0], op
[1], op
[2], op
[3], op
[4],
6399 op
[5], op
[6], op
[7], op
[8], op
[9]);
6401 if (GET_CODE (pat
) == SET
6402 && GET_CODE (SET_DEST (pat
)) == PC
6403 && GET_CODE (SET_SRC (pat
)) == IF_THEN_ELSE
)
6404 emit_jump_insn (pat
);
6408 /* Copy lvalues back to their final locations. */
6409 for (opindex
= 0; opindex
< idata
->n_operands
; opindex
++)
6410 if (idata
->operand
[opindex
].constraint
[0] == '=')
6412 a
= cgen_insn
->op_mapping
[opindex
];
6415 if (GET_MODE_CLASS (GET_MODE (arg
[a
]))
6416 != GET_MODE_CLASS (GET_MODE (op
[opindex
])))
6417 emit_move_insn (arg
[a
], gen_lowpart (GET_MODE (arg
[a
]),
6421 /* First convert the operand to the right mode, then copy it
6422 into the destination. Doing the conversion as a separate
6423 step (rather than using convert_move) means that we can
6424 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6425 refer to the same register. */
6426 op
[opindex
] = convert_to_mode (GET_MODE (arg
[a
]),
6427 op
[opindex
], unsigned_p
[a
]);
6428 if (!rtx_equal_p (arg
[a
], op
[opindex
]))
6429 emit_move_insn (arg
[a
], op
[opindex
]);
6434 if (first_arg
> 0 && target
&& target
!= op
[0])
6436 emit_move_insn (target
, op
[0]);
6443 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED
)
6448 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6449 a global register. */
6452 global_reg_mentioned_p_1 (rtx
*loc
, void *data ATTRIBUTE_UNUSED
)
6460 switch (GET_CODE (x
))
6463 if (REG_P (SUBREG_REG (x
)))
6465 if (REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
6466 && global_regs
[subreg_regno (x
)])
6474 if (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
6488 /* A non-constant call might use a global register. */
6498 /* Returns nonzero if X mentions a global register. */
6501 global_reg_mentioned_p (rtx x
)
6507 if (! RTL_CONST_OR_PURE_CALL_P (x
))
6509 x
= CALL_INSN_FUNCTION_USAGE (x
);
6517 return for_each_rtx (&x
, global_reg_mentioned_p_1
, NULL
);
6519 /* Scheduling hooks for VLIW mode.
6521 Conceptually this is very simple: we have a two-pack architecture
6522 that takes one core insn and one coprocessor insn to make up either
6523 a 32- or 64-bit instruction word (depending on the option bit set in
6524 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6525 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6526 and one 48-bit cop insn or two 32-bit core/cop insns.
6528 In practice, instruction selection will be a bear. Consider in
6529 VL64 mode the following insns
6534 these cannot pack, since the add is a 16-bit core insn and cmov
6535 is a 32-bit cop insn. However,
6540 packs just fine. For good VLIW code generation in VL64 mode, we
6541 will have to have 32-bit alternatives for many of the common core
6542 insns. Not implemented. */
6545 mep_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
6549 if (REG_NOTE_KIND (link
) != 0)
6551 /* See whether INSN and DEP_INSN are intrinsics that set the same
6552 hard register. If so, it is more important to free up DEP_INSN
6553 than it is to free up INSN.
6555 Note that intrinsics like mep_mulr are handled differently from
6556 the equivalent mep.md patterns. In mep.md, if we don't care
6557 about the value of $lo and $hi, the pattern will just clobber
6558 the registers, not set them. Since clobbers don't count as
6559 output dependencies, it is often possible to reorder two mulrs,
6562 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6563 so any pair of mep_mulr()s will be inter-dependent. We should
6564 therefore give the first mep_mulr() a higher priority. */
6565 if (REG_NOTE_KIND (link
) == REG_DEP_OUTPUT
6566 && global_reg_mentioned_p (PATTERN (insn
))
6567 && global_reg_mentioned_p (PATTERN (dep_insn
)))
6570 /* If the dependence is an anti or output dependence, assume it
6575 /* If we can't recognize the insns, we can't really do anything. */
6576 if (recog_memoized (dep_insn
) < 0)
6579 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6580 attribute instead. */
6583 cost_specified
= get_attr_latency (dep_insn
);
6584 if (cost_specified
!= 0)
6585 return cost_specified
;
6591 /* ??? We don't properly compute the length of a load/store insn,
6592 taking into account the addressing mode. */
6595 mep_issue_rate (void)
6597 return TARGET_IVC2
? 3 : 2;
6600 /* Return true if function DECL was declared with the vliw attribute. */
6603 mep_vliw_function_p (tree decl
)
6605 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))) != 0;
6609 mep_find_ready_insn (rtx
*ready
, int nready
, enum attr_slot slot
, int length
)
6613 for (i
= nready
- 1; i
>= 0; --i
)
6615 rtx insn
= ready
[i
];
6616 if (recog_memoized (insn
) >= 0
6617 && get_attr_slot (insn
) == slot
6618 && get_attr_length (insn
) == length
)
6626 mep_move_ready_insn (rtx
*ready
, int nready
, rtx insn
)
6630 for (i
= 0; i
< nready
; ++i
)
6631 if (ready
[i
] == insn
)
6633 for (; i
< nready
- 1; ++i
)
6634 ready
[i
] = ready
[i
+ 1];
6643 mep_print_sched_insn (FILE *dump
, rtx insn
)
6645 const char *slots
= "none";
6646 const char *name
= NULL
;
6650 if (GET_CODE (PATTERN (insn
)) == SET
6651 || GET_CODE (PATTERN (insn
)) == PARALLEL
)
6653 switch (get_attr_slots (insn
))
6655 case SLOTS_CORE
: slots
= "core"; break;
6656 case SLOTS_C3
: slots
= "c3"; break;
6657 case SLOTS_P0
: slots
= "p0"; break;
6658 case SLOTS_P0_P0S
: slots
= "p0,p0s"; break;
6659 case SLOTS_P0_P1
: slots
= "p0,p1"; break;
6660 case SLOTS_P0S
: slots
= "p0s"; break;
6661 case SLOTS_P0S_P1
: slots
= "p0s,p1"; break;
6662 case SLOTS_P1
: slots
= "p1"; break;
6664 sprintf(buf
, "%d", get_attr_slots (insn
));
6669 if (GET_CODE (PATTERN (insn
)) == USE
)
6672 code
= INSN_CODE (insn
);
6674 name
= get_insn_name (code
);
6679 "insn %4d %4d %8s %s\n",
6687 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED
,
6688 int sched_verbose ATTRIBUTE_UNUSED
, rtx
*ready
,
6689 int *pnready
, int clock ATTRIBUTE_UNUSED
)
6691 int nready
= *pnready
;
6692 rtx core_insn
, cop_insn
;
6695 if (dump
&& sched_verbose
> 1)
6697 fprintf (dump
, "\nsched_reorder: clock %d nready %d\n", clock
, nready
);
6698 for (i
=0; i
<nready
; i
++)
6699 mep_print_sched_insn (dump
, ready
[i
]);
6700 fprintf (dump
, "\n");
6703 if (!mep_vliw_function_p (cfun
->decl
))
6708 /* IVC2 uses a DFA to determine what's ready and what's not. */
6712 /* We can issue either a core or coprocessor instruction.
6713 Look for a matched pair of insns to reorder. If we don't
6714 find any, don't second-guess the scheduler's priorities. */
6716 if ((core_insn
= mep_find_ready_insn (ready
, nready
, SLOT_CORE
, 2))
6717 && (cop_insn
= mep_find_ready_insn (ready
, nready
, SLOT_COP
,
6718 TARGET_OPT_VL64
? 6 : 2)))
6720 else if (TARGET_OPT_VL64
6721 && (core_insn
= mep_find_ready_insn (ready
, nready
, SLOT_CORE
, 4))
6722 && (cop_insn
= mep_find_ready_insn (ready
, nready
, SLOT_COP
, 4)))
6725 /* We didn't find a pair. Issue the single insn at the head
6726 of the ready list. */
6729 /* Reorder the two insns first. */
6730 mep_move_ready_insn (ready
, nready
, core_insn
);
6731 mep_move_ready_insn (ready
, nready
- 1, cop_insn
);
6735 /* A for_each_rtx callback. Return true if *X is a register that is
6736 set by insn PREV. */
6739 mep_store_find_set (rtx
*x
, void *prev
)
6741 return REG_P (*x
) && reg_set_p (*x
, (const_rtx
) prev
);
6744 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6745 not the containing insn. */
6748 mep_store_data_bypass_1 (rtx prev
, rtx pat
)
6750 /* Cope with intrinsics like swcpa. */
6751 if (GET_CODE (pat
) == PARALLEL
)
6755 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
6756 if (mep_store_data_bypass_p (prev
, XVECEXP (pat
, 0, i
)))
6762 /* Check for some sort of store. */
6763 if (GET_CODE (pat
) != SET
6764 || GET_CODE (SET_DEST (pat
)) != MEM
)
6767 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6768 The first operand to the unspec is the store data and the other operands
6769 are used to calculate the address. */
6770 if (GET_CODE (SET_SRC (pat
)) == UNSPEC
)
6775 src
= SET_SRC (pat
);
6776 for (i
= 1; i
< XVECLEN (src
, 0); i
++)
6777 if (for_each_rtx (&XVECEXP (src
, 0, i
), mep_store_find_set
, prev
))
6783 /* Otherwise just check that PREV doesn't modify any register mentioned
6784 in the memory destination. */
6785 return !for_each_rtx (&SET_DEST (pat
), mep_store_find_set
, prev
);
6788 /* Return true if INSN is a store instruction and if the store address
6789 has no true dependence on PREV. */
6792 mep_store_data_bypass_p (rtx prev
, rtx insn
)
6794 return INSN_P (insn
) ? mep_store_data_bypass_1 (prev
, PATTERN (insn
)) : false;
6797 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6798 is a register other than LO or HI and if PREV sets *X. */
6801 mep_mul_hilo_bypass_1 (rtx
*x
, void *prev
)
6804 && REGNO (*x
) != LO_REGNO
6805 && REGNO (*x
) != HI_REGNO
6806 && reg_set_p (*x
, (const_rtx
) prev
));
6809 /* Return true if, apart from HI/LO, there are no true dependencies
6810 between multiplication instructions PREV and INSN. */
6813 mep_mul_hilo_bypass_p (rtx prev
, rtx insn
)
6817 pat
= PATTERN (insn
);
6818 if (GET_CODE (pat
) == PARALLEL
)
6819 pat
= XVECEXP (pat
, 0, 0);
6820 return (GET_CODE (pat
) == SET
6821 && !for_each_rtx (&SET_SRC (pat
), mep_mul_hilo_bypass_1
, prev
));
6824 /* Return true if INSN is an ldc instruction that issues to the
6825 MeP-h1 integer pipeline. This is true for instructions that
6826 read from PSW, LP, SAR, HI and LO. */
6829 mep_ipipe_ldc_p (rtx insn
)
6833 pat
= PATTERN (insn
);
6835 /* Cope with instrinsics that set both a hard register and its shadow.
6836 The set of the hard register comes first. */
6837 if (GET_CODE (pat
) == PARALLEL
)
6838 pat
= XVECEXP (pat
, 0, 0);
6840 if (GET_CODE (pat
) == SET
)
6842 src
= SET_SRC (pat
);
6844 /* Cope with intrinsics. The first operand to the unspec is
6845 the source register. */
6846 if (GET_CODE (src
) == UNSPEC
|| GET_CODE (src
) == UNSPEC_VOLATILE
)
6847 src
= XVECEXP (src
, 0, 0);
6850 switch (REGNO (src
))
6863 /* Create a VLIW bundle from core instruction CORE and coprocessor
6864 instruction COP. COP always satisfies INSN_P, but CORE can be
6865 either a new pattern or an existing instruction.
6867 Emit the bundle in place of COP and return it. */
6870 mep_make_bundle (rtx core
, rtx cop
)
6874 /* If CORE is an existing instruction, remove it, otherwise put
6875 the new pattern in an INSN harness. */
6879 core
= make_insn_raw (core
);
6881 /* Generate the bundle sequence and replace COP with it. */
6882 insn
= gen_rtx_SEQUENCE (VOIDmode
, gen_rtvec (2, core
, cop
));
6883 insn
= emit_insn_after (insn
, cop
);
6886 /* Set up the links of the insns inside the SEQUENCE. */
6887 PREV_INSN (core
) = PREV_INSN (insn
);
6888 NEXT_INSN (core
) = cop
;
6889 PREV_INSN (cop
) = core
;
6890 NEXT_INSN (cop
) = NEXT_INSN (insn
);
6892 /* Set the VLIW flag for the coprocessor instruction. */
6893 PUT_MODE (core
, VOIDmode
);
6894 PUT_MODE (cop
, BImode
);
6896 /* Derive a location for the bundle. Individual instructions cannot
6897 have their own location because there can be no assembler labels
6898 between CORE and COP. */
6899 INSN_LOCATOR (insn
) = INSN_LOCATOR (INSN_LOCATOR (core
) ? core
: cop
);
6900 INSN_LOCATOR (core
) = 0;
6901 INSN_LOCATOR (cop
) = 0;
6906 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6909 mep_insn_dependent_p_1 (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
, void *data
)
6911 rtx
* pinsn
= (rtx
*) data
;
6913 if (*pinsn
&& reg_mentioned_p (x
, *pinsn
))
6917 /* Return true if anything in insn X is (anti,output,true) dependent on
6918 anything in insn Y. */
6921 mep_insn_dependent_p (rtx x
, rtx y
)
6925 gcc_assert (INSN_P (x
));
6926 gcc_assert (INSN_P (y
));
6929 note_stores (PATTERN (x
), mep_insn_dependent_p_1
, &tmp
);
6930 if (tmp
== NULL_RTX
)
6934 note_stores (PATTERN (y
), mep_insn_dependent_p_1
, &tmp
);
6935 if (tmp
== NULL_RTX
)
6942 core_insn_p (rtx insn
)
6944 if (GET_CODE (PATTERN (insn
)) == USE
)
6946 if (get_attr_slot (insn
) == SLOT_CORE
)
6951 /* Mark coprocessor instructions that can be bundled together with
6952 the immediately preceding core instruction. This is later used
6953 to emit the "+" that tells the assembler to create a VLIW insn.
6955 For unbundled insns, the assembler will automatically add coprocessor
6956 nops, and 16-bit core nops. Due to an apparent oversight in the
6957 spec, the assembler will _not_ automatically add 32-bit core nops,
6958 so we have to emit those here.
6960 Called from mep_insn_reorg. */
/* NOTE(review): interior lines (braces, returns, several conditions) are
   missing from this extraction; the visible skeleton is:
   pass 1 — hoist NOTEs out of scheduler-formed bundles (TImode starts a
   bundle, VOIDmode continues it) and unify INSN_LOCATORs;
   pass 2 — for IVC2, rotate the core insn to the bundle head; otherwise
   pair each SLOT_COP insn with the preceding core insn via
   mep_make_bundle, padding with nop/nop32 when lengths or dependencies
   forbid a real pairing.  */
6963 mep_bundle_insns (rtx insns
)
6965 rtx insn
, last
= NULL_RTX
, first
= NULL_RTX
;
6966 int saw_scheduling
= 0;
6968 /* Only do bundling if we're in vliw mode. */
6969 if (!mep_vliw_function_p (cfun
->decl
))
6972 /* The first insn in a bundle are TImode, the remainder are
6973 VOIDmode. After this function, the first has VOIDmode and the
6974 rest have BImode. */
6976 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6978 /* First, move any NOTEs that are within a bundle, to the beginning
6980 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
6982 if (NOTE_P (insn
) && first
)
6983 /* Don't clear FIRST. */;
6985 else if (NONJUMP_INSN_P (insn
) && GET_MODE (insn
) == TImode
)
6988 else if (NONJUMP_INSN_P (insn
) && GET_MODE (insn
) == VOIDmode
&& first
)
6992 /* INSN is part of a bundle; FIRST is the first insn in that
6993 bundle. Move all intervening notes out of the bundle.
6994 In addition, since the debug pass may insert a label
6995 whenever the current line changes, set the location info
6996 for INSN to match FIRST. */
6998 INSN_LOCATOR (insn
) = INSN_LOCATOR (first
);
7000 note
= PREV_INSN (insn
);
7001 while (note
&& note
!= first
)
7003 prev
= PREV_INSN (note
);
7007 /* Remove NOTE from here... */
7008 PREV_INSN (NEXT_INSN (note
)) = PREV_INSN (note
);
7009 NEXT_INSN (PREV_INSN (note
)) = NEXT_INSN (note
);
7010 /* ...and put it in here. */
7011 NEXT_INSN (note
) = first
;
7012 PREV_INSN (note
) = PREV_INSN (first
);
7013 NEXT_INSN (PREV_INSN (note
)) = note
;
7014 PREV_INSN (NEXT_INSN (note
)) = note
;
7021 else if (!NONJUMP_INSN_P (insn
))
7025 /* Now fix up the bundles. */
7026 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
7031 if (!NONJUMP_INSN_P (insn
))
7037 /* If we're not optimizing enough, there won't be scheduling
7038 info. We detect that here. */
7039 if (GET_MODE (insn
) == TImode
)
7041 if (!saw_scheduling
)
7046 rtx core_insn
= NULL_RTX
;
7048 /* IVC2 slots are scheduled by DFA, so we just accept
7049 whatever the scheduler gives us. However, we must make
7050 sure the core insn (if any) is the first in the bundle.
7051 The IVC2 assembler can insert whatever NOPs are needed,
7052 and allows a COP insn to be first. */
7054 if (NONJUMP_INSN_P (insn
)
7055 && GET_CODE (PATTERN (insn
)) != USE
7056 && GET_MODE (insn
) == TImode
)
7060 && GET_MODE (NEXT_INSN (last
)) == VOIDmode
7061 && NONJUMP_INSN_P (NEXT_INSN (last
));
7062 last
= NEXT_INSN (last
))
7064 if (core_insn_p (last
))
7067 if (core_insn_p (last
))
7070 if (core_insn
&& core_insn
!= insn
)
7072 /* Swap core insn to first in the bundle. */
7074 /* Remove core insn. */
7075 if (PREV_INSN (core_insn
))
7076 NEXT_INSN (PREV_INSN (core_insn
)) = NEXT_INSN (core_insn
);
7077 if (NEXT_INSN (core_insn
))
7078 PREV_INSN (NEXT_INSN (core_insn
)) = PREV_INSN (core_insn
);
7080 /* Re-insert core insn. */
7081 PREV_INSN (core_insn
) = PREV_INSN (insn
);
7082 NEXT_INSN (core_insn
) = insn
;
7084 if (PREV_INSN (core_insn
))
7085 NEXT_INSN (PREV_INSN (core_insn
)) = core_insn
;
7086 PREV_INSN (insn
) = core_insn
;
7088 PUT_MODE (core_insn
, TImode
);
7089 PUT_MODE (insn
, VOIDmode
);
7093 /* The first insn has TImode, the rest have VOIDmode */
7094 if (GET_MODE (insn
) == TImode
)
7095 PUT_MODE (insn
, VOIDmode
);
7097 PUT_MODE (insn
, BImode
);
7101 PUT_MODE (insn
, VOIDmode
);
/* A COP insn can only pair with the previous core insn when the two
   lengths sum exactly to the bundle width (8 bytes for VL64, else 4)
   and there is no data dependency between them.  */
7102 if (recog_memoized (insn
) >= 0
7103 && get_attr_slot (insn
) == SLOT_COP
)
7105 if (GET_CODE (insn
) == JUMP_INSN
7107 || recog_memoized (last
) < 0
7108 || get_attr_slot (last
) != SLOT_CORE
7109 || (get_attr_length (insn
)
7110 != (TARGET_OPT_VL64
? 8 : 4) - get_attr_length (last
))
7111 || mep_insn_dependent_p (insn
, last
))
7113 switch (get_attr_length (insn
))
7118 insn
= mep_make_bundle (gen_nop (), insn
);
7121 if (TARGET_OPT_VL64
)
7122 insn
= mep_make_bundle (gen_nop32 (), insn
);
7125 if (TARGET_OPT_VL64
)
7126 error ("2 byte cop instructions are"
7127 " not allowed in 64-bit VLIW mode");
7129 insn
= mep_make_bundle (gen_nop (), insn
);
7132 error ("unexpected %d byte cop instruction",
7133 get_attr_length (insn
));
7138 insn
= mep_make_bundle (last
, insn
);
7146 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7147 Return true on success. This function can fail if the intrinsic
7148 is unavailable or if the operands don't satisfy their predicates. */
/* Each operand is coerced to the insn pattern's mode via mep_convert_arg
   and then re-checked against the pattern's predicate; genfun is called
   with a fixed nine-slot argument list (unused slots ignored by the
   generator).  The failure returns sit on lines not visible here.  */
7151 mep_emit_intrinsic (int intrinsic
, const rtx
*operands
)
7153 const struct cgen_insn
*cgen_insn
;
7154 const struct insn_data_d
*idata
;
7158 if (!mep_get_intrinsic_insn (intrinsic
, &cgen_insn
))
7161 idata
= &insn_data
[cgen_insn
->icode
];
7162 for (i
= 0; i
< idata
->n_operands
; i
++)
7164 newop
[i
] = mep_convert_arg (idata
->operand
[i
].mode
, operands
[i
]);
7165 if (!idata
->operand
[i
].predicate (newop
[i
], idata
->operand
[i
].mode
))
7169 emit_insn (idata
->genfun (newop
[0], newop
[1], newop
[2],
7170 newop
[3], newop
[4], newop
[5],
7171 newop
[6], newop
[7], newop
[8]));
7177 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7178 OPERANDS[0]. Report an error if the instruction could not
7179 be synthesized. OPERANDS[1] is a register_operand. For sign
7180 and zero extensions, it may be smaller than SImode. */
/* Body not visible in this extraction; both parameters are marked
   ATTRIBUTE_UNUSED, suggesting a stub or conditionally-compiled body.  */
7183 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic
,
7184 rtx
* operands ATTRIBUTE_UNUSED
)
7190 /* Likewise, but apply a binary operation to OPERANDS[1] and
7191 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7192 can be a general_operand.
7194 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7195 third operand. REG and REG3 take register operands only. */
/* Body not visible in this extraction; all parameters are marked
   ATTRIBUTE_UNUSED, suggesting a stub or conditionally-compiled body.  */
7198 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate
,
7199 int ATTRIBUTE_UNUSED immediate3
,
7200 int ATTRIBUTE_UNUSED reg
,
7201 int ATTRIBUTE_UNUSED reg3
,
7202 rtx
* operands ATTRIBUTE_UNUSED
)
/* TARGET_RTX_COSTS hook: estimate the cost of X in COSTS_N_INSNS units
   through *TOTAL.  Small CONST_INTs are cheapest; MULT cost depends on
   whether the second operand is a constant.  Case labels and returns
   fall on lines not visible in this extraction.  */
7208 mep_rtx_cost (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
, int *total
, bool ATTRIBUTE_UNUSED speed_t
)
/* NOTE(review): the bounds are asymmetric — `< 127' excludes 127 and
   `< 65536' admits 65535 but the lower bound is -32768; presumably these
   mirror the signed-8/16-bit immediate encodings, but confirm against
   the MeP instruction set before "fixing".  */
7213 if (INTVAL (x
) >= -128 && INTVAL (x
) < 127)
7215 else if (INTVAL (x
) >= -32768 && INTVAL (x
) < 65536)
7222 *total
= optimize_size
? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7226 *total
= (GET_CODE (XEXP (x
, 1)) == CONST_INT
7228 : COSTS_N_INSNS (2));
/* TARGET_ADDRESS_COST hook; both parameters are unused, so every address
   is costed identically (the constant return is on a line not visible
   in this extraction).  */
7235 mep_address_cost (rtx addr ATTRIBUTE_UNUSED
, bool ATTRIBUTE_UNUSED speed_p
)
/* TARGET_HANDLE_OPTION hook: dispatch on the option CODE.  The case
   labels are on lines missing from this extraction; the visible arms
   set/clear target_flags masks (all-options on/off, coprocessor +
   64-bit CR regs, -mtiny bookkeeping, and the IVC2 configuration which
   also enables VLIW/VL64 and un-fixes the 32 coprocessor registers
   48..79, marking them call-used except where cleared at 7277).  */
7241 mep_handle_option (size_t code
,
7242 const char *arg ATTRIBUTE_UNUSED
,
7243 int value ATTRIBUTE_UNUSED
)
7250 target_flags
|= MEP_ALL_OPTS
;
7254 target_flags
&= ~ MEP_ALL_OPTS
;
7258 target_flags
|= MASK_COP
;
7259 target_flags
|= MASK_64BIT_CR_REGS
;
7263 option_mtiny_specified
= 1;
7266 target_flags
|= MASK_COP
;
7267 target_flags
|= MASK_64BIT_CR_REGS
;
7268 target_flags
|= MASK_VLIW
;
7269 target_flags
|= MASK_OPT_VL64
;
7270 target_flags
|= MASK_IVC2
;
7272 for (i
=0; i
<32; i
++)
7273 fixed_regs
[i
+48] = 0;
7274 for (i
=0; i
<32; i
++)
7275 call_used_regs
[i
+48] = 1;
7277 call_used_regs
[i
+48] = 0;
/* RN: shorthand for installing IVC2 coprocessor-register names; its
   expansions (original lines 7280..7314) are missing from this view.  */
7279 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
/* TARGET_ASM_INIT_SECTIONS hook: create the MeP-specific output
   sections.  The assignment targets (based_section, sbss_section, ...)
   are on lines missing from this extraction; each call pairs section
   flags with the literal directive string the assembler needs.  VLIW
   code sections carry SECTION_MEP_VLIW and emit ".vliw"; plain code
   sections emit ".core".  */
7316 mep_asm_init_sections (void)
7319 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7320 "\t.section .based,\"aw\"");
7323 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
, output_section_asm_op
,
7324 "\t.section .sbss,\"aw\"");
7327 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7328 "\t.section .sdata,\"aw\",@progbits");
7331 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7332 "\t.section .far,\"aw\"");
7335 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
, output_section_asm_op
,
7336 "\t.section .farbss,\"aw\"");
7339 = get_unnamed_section (0, output_section_asm_op
,
7340 "\t.section .frodata,\"a\"");
7343 = get_unnamed_section (0, output_section_asm_op
,
7344 "\t.section .srodata,\"a\"");
7347 = get_unnamed_section (SECTION_CODE
| SECTION_MEP_VLIW
, output_section_asm_op
,
7348 "\t.section .vtext,\"axv\"\n\t.vliw");
7351 = get_unnamed_section (SECTION_CODE
| SECTION_MEP_VLIW
, output_section_asm_op
,
7352 "\t.section .vftext,\"axv\"\n\t.vliw");
7355 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
7356 "\t.section .ftext,\"ax\"\n\t.core");
7360 /* Initialize the GCC target structure. */
/* Standard #undef/#define pairs overriding the default target hooks
   with this port's implementations, followed by the targetm vtable
   definition that TARGET_INITIALIZER assembles from these macros.  */
7362 #undef TARGET_ASM_FUNCTION_PROLOGUE
7363 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
7364 #undef TARGET_ATTRIBUTE_TABLE
7365 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
7366 #undef TARGET_COMP_TYPE_ATTRIBUTES
7367 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
7368 #undef TARGET_INSERT_ATTRIBUTES
7369 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
7370 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
7371 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
7372 #undef TARGET_CAN_INLINE_P
7373 #define TARGET_CAN_INLINE_P mep_can_inline_p
7374 #undef TARGET_SECTION_TYPE_FLAGS
7375 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
7376 #undef TARGET_ASM_NAMED_SECTION
7377 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
7378 #undef TARGET_INIT_BUILTINS
7379 #define TARGET_INIT_BUILTINS mep_init_builtins
7380 #undef TARGET_EXPAND_BUILTIN
7381 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
7382 #undef TARGET_SCHED_ADJUST_COST
7383 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
7384 #undef TARGET_SCHED_ISSUE_RATE
7385 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
7386 #undef TARGET_SCHED_REORDER
7387 #define TARGET_SCHED_REORDER mep_sched_reorder
7388 #undef TARGET_STRIP_NAME_ENCODING
7389 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
7390 #undef TARGET_ASM_SELECT_SECTION
7391 #define TARGET_ASM_SELECT_SECTION mep_select_section
7392 #undef TARGET_ASM_UNIQUE_SECTION
7393 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
7394 #undef TARGET_ENCODE_SECTION_INFO
7395 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
7396 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
7397 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
7398 #undef TARGET_RTX_COSTS
7399 #define TARGET_RTX_COSTS mep_rtx_cost
7400 #undef TARGET_ADDRESS_COST
7401 #define TARGET_ADDRESS_COST mep_address_cost
7402 #undef TARGET_MACHINE_DEPENDENT_REORG
7403 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
7404 #undef TARGET_SETUP_INCOMING_VARARGS
7405 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
7406 #undef TARGET_PASS_BY_REFERENCE
7407 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
7408 #undef TARGET_VECTOR_MODE_SUPPORTED_P
7409 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
7410 #undef TARGET_HANDLE_OPTION
7411 #define TARGET_HANDLE_OPTION mep_handle_option
7412 #undef TARGET_OPTION_OVERRIDE
7413 #define TARGET_OPTION_OVERRIDE mep_option_override
7414 #undef TARGET_DEFAULT_TARGET_FLAGS
7415 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
7416 #undef TARGET_ALLOCATE_INITIAL_VALUE
7417 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
7418 #undef TARGET_ASM_INIT_SECTIONS
7419 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
7420 #undef TARGET_RETURN_IN_MEMORY
7421 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
7422 #undef TARGET_NARROW_VOLATILE_BITFIELD
7423 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
7424 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
7425 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
7426 #undef TARGET_BUILD_BUILTIN_VA_LIST
7427 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
7428 #undef TARGET_EXPAND_BUILTIN_VA_START
7429 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
7430 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
7431 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
7432 #undef TARGET_CAN_ELIMINATE
7433 #define TARGET_CAN_ELIMINATE mep_can_eliminate
7434 #undef TARGET_TRAMPOLINE_INIT
7435 #define TARGET_TRAMPOLINE_INIT mep_trampoline_init
7437 struct gcc_target targetm
= TARGET_INITIALIZER
;