1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
29 #include "stringpool.h"
30 #include "stor-layout.h"
32 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "insn-flags.h"
37 #include "insn-attr.h"
50 #include "insn-codes.h"
55 #include "diagnostic-core.h"
57 #include "target-def.h"
58 #include "langhooks.h"
59 #include "dominance.h"
65 #include "cfgcleanup.h"
67 #include "basic-block.h"
69 #include "hash-table.h"
70 #include "tree-ssa-alias.h"
71 #include "internal-fn.h"
72 #include "gimple-fold.h"
74 #include "gimple-expr.h"
83 /* Structure of this file:
85 + Command Line Option Support
86 + Pattern support - constraints, predicates, expanders
89 + Functions to save and restore machine-specific function data.
90 + Frame/Epilog/Prolog Related
92 + Function args in registers
93 + Handle pipeline hazards
96 + Machine-dependent Reorg
101 Symbols are encoded as @ <char> . <name> where <char> is one of these:
109 c - cb (control bus) */
111 struct GTY(()) machine_function
113 int mep_frame_pointer_needed
;
116 int arg_regs_to_save
;
121 /* Records __builtin_return address. */
125 int reg_save_slot
[FIRST_PSEUDO_REGISTER
];
126 unsigned char reg_saved
[FIRST_PSEUDO_REGISTER
];
128 /* 2 if the current function has an interrupt attribute, 1 if not, 0
129 if unknown. This is here because resource.c uses EPILOGUE_USES
131 int interrupt_handler
;
133 /* Likewise, for disinterrupt attribute. */
134 int disable_interrupts
;
136 /* Number of doloop tags used so far. */
139 /* True if the last tag was allocated to a doloop_end. */
140 bool doloop_tag_from_end
;
142 /* True if reload changes $TP. */
143 bool reload_changes_tp
;
145 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
146 We only set this if the function is an interrupt handler. */
147 int asms_without_operands
;
150 #define MEP_CONTROL_REG(x) \
151 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
153 static GTY(()) section
* based_section
;
154 static GTY(()) section
* tinybss_section
;
155 static GTY(()) section
* far_section
;
156 static GTY(()) section
* farbss_section
;
157 static GTY(()) section
* frodata_section
;
158 static GTY(()) section
* srodata_section
;
160 static GTY(()) section
* vtext_section
;
161 static GTY(()) section
* vftext_section
;
162 static GTY(()) section
* ftext_section
;
164 static void mep_set_leaf_registers (int);
165 static bool symbol_p (rtx
);
166 static bool symbolref_p (rtx
);
167 static void encode_pattern_1 (rtx
);
168 static void encode_pattern (rtx
);
169 static bool const_in_range (rtx
, int, int);
170 static void mep_rewrite_mult (rtx_insn
*, rtx
);
171 static void mep_rewrite_mulsi3 (rtx_insn
*, rtx
, rtx
, rtx
);
172 static void mep_rewrite_maddsi3 (rtx_insn
*, rtx
, rtx
, rtx
, rtx
);
173 static bool mep_reuse_lo_p_1 (rtx
, rtx
, rtx_insn
*, bool);
174 static bool move_needs_splitting (rtx
, rtx
, machine_mode
);
175 static bool mep_expand_setcc_1 (enum rtx_code
, rtx
, rtx
, rtx
);
176 static bool mep_nongeneral_reg (rtx
);
177 static bool mep_general_copro_reg (rtx
);
178 static bool mep_nonregister (rtx
);
179 static struct machine_function
* mep_init_machine_status (void);
180 static rtx
mep_tp_rtx (void);
181 static rtx
mep_gp_rtx (void);
182 static bool mep_interrupt_p (void);
183 static bool mep_disinterrupt_p (void);
184 static bool mep_reg_set_p (rtx
, rtx
);
185 static bool mep_reg_set_in_function (int);
186 static bool mep_interrupt_saved_reg (int);
187 static bool mep_call_saves_register (int);
188 static rtx_insn
*F (rtx_insn
*);
189 static void add_constant (int, int, int, int);
190 static rtx_insn
*maybe_dead_move (rtx
, rtx
, bool);
191 static void mep_reload_pointer (int, const char *);
192 static void mep_start_function (FILE *, HOST_WIDE_INT
);
193 static bool mep_function_ok_for_sibcall (tree
, tree
);
194 static int unique_bit_in (HOST_WIDE_INT
);
195 static int bit_size_for_clip (HOST_WIDE_INT
);
196 static int bytesize (const_tree
, machine_mode
);
197 static tree
mep_validate_type_based_tiny (tree
*, tree
, tree
, int, bool *);
198 static tree
mep_validate_decl_based_tiny (tree
*, tree
, tree
, int, bool *);
199 static tree
mep_validate_type_near_far (tree
*, tree
, tree
, int, bool *);
200 static tree
mep_validate_decl_near_far (tree
*, tree
, tree
, int, bool *);
201 static tree
mep_validate_type_disinterrupt (tree
*, tree
, tree
, int, bool *);
202 static tree
mep_validate_decl_disinterrupt (tree
*, tree
, tree
, int, bool *);
203 static tree
mep_validate_type_warning(tree
*, tree
, tree
, int, bool *);
204 static tree
mep_validate_interrupt (tree
*, tree
, tree
, int, bool *);
205 static tree
mep_validate_io_cb (tree
*, tree
, tree
, int, bool *);
206 static tree
mep_validate_vliw (tree
*, tree
, tree
, int, bool *);
207 static bool mep_function_attribute_inlinable_p (const_tree
);
208 static bool mep_can_inline_p (tree
, tree
);
209 static bool mep_lookup_pragma_disinterrupt (const char *);
210 static int mep_multiple_address_regions (tree
, bool);
211 static int mep_attrlist_to_encoding (tree
, tree
);
212 static void mep_insert_attributes (tree
, tree
*);
213 static void mep_encode_section_info (tree
, rtx
, int);
214 static section
* mep_select_section (tree
, int, unsigned HOST_WIDE_INT
);
215 static void mep_unique_section (tree
, int);
216 static unsigned int mep_section_type_flags (tree
, const char *, int);
217 static void mep_asm_named_section (const char *, unsigned int, tree
);
218 static bool mep_mentioned_p (rtx
, rtx
, int);
219 static void mep_reorg_regmove (rtx_insn
*);
220 static rtx_insn
*mep_insert_repeat_label_last (rtx_insn
*, rtx_code_label
*,
222 static void mep_reorg_repeat (rtx_insn
*);
223 static bool mep_invertable_branch_p (rtx_insn
*);
224 static void mep_invert_branch (rtx_insn
*, rtx_insn
*);
225 static void mep_reorg_erepeat (rtx_insn
*);
226 static void mep_jmp_return_reorg (rtx_insn
*);
227 static void mep_reorg_addcombine (rtx_insn
*);
228 static void mep_reorg (void);
229 static void mep_init_intrinsics (void);
230 static void mep_init_builtins (void);
231 static void mep_intrinsic_unavailable (int);
232 static bool mep_get_intrinsic_insn (int, const struct cgen_insn
**);
233 static bool mep_get_move_insn (int, const struct cgen_insn
**);
234 static rtx
mep_convert_arg (machine_mode
, rtx
);
235 static rtx
mep_convert_regnum (const struct cgen_regnum_operand
*, rtx
);
236 static rtx
mep_legitimize_arg (const struct insn_operand_data
*, rtx
, int);
237 static void mep_incompatible_arg (const struct insn_operand_data
*, rtx
, int, tree
);
238 static rtx
mep_expand_builtin (tree
, rtx
, rtx
, machine_mode
, int);
239 static int mep_adjust_cost (rtx_insn
*, rtx
, rtx_insn
*, int);
240 static int mep_issue_rate (void);
241 static rtx_insn
*mep_find_ready_insn (rtx_insn
**, int, enum attr_slot
, int);
242 static void mep_move_ready_insn (rtx_insn
**, int, rtx_insn
*);
243 static int mep_sched_reorder (FILE *, int, rtx_insn
**, int *, int);
244 static rtx_insn
*mep_make_bundle (rtx
, rtx_insn
*);
245 static void mep_bundle_insns (rtx_insn
*);
246 static bool mep_rtx_cost (rtx
, int, int, int, int *, bool);
247 static int mep_address_cost (rtx
, machine_mode
, addr_space_t
, bool);
248 static void mep_setup_incoming_varargs (cumulative_args_t
, machine_mode
,
250 static bool mep_pass_by_reference (cumulative_args_t cum
, machine_mode
,
252 static rtx
mep_function_arg (cumulative_args_t
, machine_mode
,
254 static void mep_function_arg_advance (cumulative_args_t
, machine_mode
,
256 static bool mep_vector_mode_supported_p (machine_mode
);
257 static rtx
mep_allocate_initial_value (rtx
);
258 static void mep_asm_init_sections (void);
259 static int mep_comp_type_attributes (const_tree
, const_tree
);
260 static bool mep_narrow_volatile_bitfield (void);
261 static rtx
mep_expand_builtin_saveregs (void);
262 static tree
mep_build_builtin_va_list (void);
263 static void mep_expand_va_start (tree
, rtx
);
264 static tree
mep_gimplify_va_arg_expr (tree
, tree
, gimple_seq
*, gimple_seq
*);
265 static bool mep_can_eliminate (const int, const int);
266 static void mep_conditional_register_usage (void);
267 static void mep_trampoline_init (rtx
, tree
, rtx
);
269 #define WANT_GCC_DEFINITIONS
270 #include "mep-intrin.h"
271 #undef WANT_GCC_DEFINITIONS
274 /* Command Line Option Support. */
276 char mep_leaf_registers
[FIRST_PSEUDO_REGISTER
];
278 /* True if we can use cmov instructions to move values back and forth
279 between core and coprocessor registers. */
280 bool mep_have_core_copro_moves_p
;
282 /* True if we can use cmov instructions (or a work-alike) to move
283 values between coprocessor registers. */
284 bool mep_have_copro_copro_moves_p
;
286 /* A table of all coprocessor instructions that can act like
287 a coprocessor-to-coprocessor cmov. */
288 static const int mep_cmov_insns
[] = {
303 mep_set_leaf_registers (int enable
)
307 if (mep_leaf_registers
[0] != enable
)
308 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
309 mep_leaf_registers
[i
] = enable
;
313 mep_conditional_register_usage (void)
317 if (!TARGET_OPT_MULT
&& !TARGET_OPT_DIV
)
319 fixed_regs
[HI_REGNO
] = 1;
320 fixed_regs
[LO_REGNO
] = 1;
321 call_used_regs
[HI_REGNO
] = 1;
322 call_used_regs
[LO_REGNO
] = 1;
325 for (i
= FIRST_SHADOW_REGISTER
; i
<= LAST_SHADOW_REGISTER
; i
++)
330 mep_option_override (void)
334 cl_deferred_option
*opt
;
335 vec
<cl_deferred_option
> *v
= (vec
<cl_deferred_option
> *) mep_deferred_options
;
338 FOR_EACH_VEC_ELT (*v
, i
, opt
)
340 switch (opt
->opt_index
)
343 for (j
= 0; j
< 32; j
++)
344 fixed_regs
[j
+ 48] = 0;
345 for (j
= 0; j
< 32; j
++)
346 call_used_regs
[j
+ 48] = 1;
347 for (j
= 6; j
< 8; j
++)
348 call_used_regs
[j
+ 48] = 0;
350 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
385 warning (OPT_fpic
, "-fpic is not supported");
387 warning (OPT_fPIC
, "-fPIC is not supported");
388 if (TARGET_S
&& TARGET_M
)
389 error ("only one of -ms and -mm may be given");
390 if (TARGET_S
&& TARGET_L
)
391 error ("only one of -ms and -ml may be given");
392 if (TARGET_M
&& TARGET_L
)
393 error ("only one of -mm and -ml may be given");
394 if (TARGET_S
&& global_options_set
.x_mep_tiny_cutoff
)
395 error ("only one of -ms and -mtiny= may be given");
396 if (TARGET_M
&& global_options_set
.x_mep_tiny_cutoff
)
397 error ("only one of -mm and -mtiny= may be given");
398 if (TARGET_OPT_CLIP
&& ! TARGET_OPT_MINMAX
)
399 warning (0, "-mclip currently has no effect without -mminmax");
401 if (mep_const_section
)
403 if (strcmp (mep_const_section
, "tiny") != 0
404 && strcmp (mep_const_section
, "near") != 0
405 && strcmp (mep_const_section
, "far") != 0)
406 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
410 mep_tiny_cutoff
= 65536;
413 if (TARGET_L
&& ! global_options_set
.x_mep_tiny_cutoff
)
416 if (TARGET_64BIT_CR_REGS
)
417 flag_split_wide_types
= 0;
419 init_machine_status
= mep_init_machine_status
;
420 mep_init_intrinsics ();
423 /* Pattern Support - constraints, predicates, expanders. */
425 /* MEP has very few instructions that can refer to the span of
426 addresses used by symbols, so it's common to check for them. */
431 int c
= GET_CODE (x
);
433 return (c
== CONST_INT
443 if (GET_CODE (x
) != MEM
)
446 c
= GET_CODE (XEXP (x
, 0));
447 return (c
== CONST_INT
452 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
454 #define GEN_REG(R, STRICT) \
457 && ((R) == ARG_POINTER_REGNUM \
458 || (R) >= FIRST_PSEUDO_REGISTER)))
460 static char pattern
[12], *patternp
;
461 static GTY(()) rtx patternr
[12];
462 #define RTX_IS(x) (strcmp (pattern, x) == 0)
465 encode_pattern_1 (rtx x
)
469 if (patternp
== pattern
+ sizeof (pattern
) - 2)
475 patternr
[patternp
-pattern
] = x
;
477 switch (GET_CODE (x
))
485 encode_pattern_1 (XEXP(x
, 0));
489 encode_pattern_1 (XEXP(x
, 0));
490 encode_pattern_1 (XEXP(x
, 1));
494 encode_pattern_1 (XEXP(x
, 0));
495 encode_pattern_1 (XEXP(x
, 1));
499 encode_pattern_1 (XEXP(x
, 0));
513 *patternp
++ = '0' + XCINT(x
, 1, UNSPEC
);
514 for (i
=0; i
<XVECLEN (x
, 0); i
++)
515 encode_pattern_1 (XVECEXP (x
, 0, i
));
523 fprintf (stderr
, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x
)));
532 encode_pattern (rtx x
)
535 encode_pattern_1 (x
);
540 mep_section_tag (rtx x
)
546 switch (GET_CODE (x
))
553 x
= XVECEXP (x
, 0, 0);
556 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
565 if (GET_CODE (x
) != SYMBOL_REF
)
568 if (name
[0] == '@' && name
[2] == '.')
570 if (name
[1] == 'i' || name
[1] == 'I')
573 return 'f'; /* near */
574 return 'n'; /* far */
582 mep_regno_reg_class (int regno
)
586 case SP_REGNO
: return SP_REGS
;
587 case TP_REGNO
: return TP_REGS
;
588 case GP_REGNO
: return GP_REGS
;
589 case 0: return R0_REGS
;
590 case HI_REGNO
: return HI_REGS
;
591 case LO_REGNO
: return LO_REGS
;
592 case ARG_POINTER_REGNUM
: return GENERAL_REGS
;
595 if (GR_REGNO_P (regno
))
596 return regno
< FIRST_GR_REGNO
+ 8 ? TPREL_REGS
: GENERAL_REGS
;
597 if (CONTROL_REGNO_P (regno
))
600 if (CR_REGNO_P (regno
))
604 /* Search for the register amongst user-defined subclasses of
605 the coprocessor registers. */
606 for (i
= USER0_REGS
; i
<= USER3_REGS
; ++i
)
608 if (! TEST_HARD_REG_BIT (reg_class_contents
[i
], regno
))
610 for (j
= 0; j
< N_REG_CLASSES
; ++j
)
612 enum reg_class sub
= reg_class_subclasses
[i
][j
];
614 if (sub
== LIM_REG_CLASSES
)
616 if (TEST_HARD_REG_BIT (reg_class_contents
[sub
], regno
))
621 return LOADABLE_CR_REGNO_P (regno
) ? LOADABLE_CR_REGS
: CR_REGS
;
624 if (CCR_REGNO_P (regno
))
627 gcc_assert (regno
>= FIRST_SHADOW_REGISTER
&& regno
<= LAST_SHADOW_REGISTER
);
632 const_in_range (rtx x
, int minv
, int maxv
)
634 return (GET_CODE (x
) == CONST_INT
635 && INTVAL (x
) >= minv
636 && INTVAL (x
) <= maxv
);
639 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
640 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
641 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
642 at the end of the insn stream. */
645 mep_mulr_source (rtx_insn
*insn
, rtx dest
, rtx src1
, rtx src2
)
647 if (rtx_equal_p (dest
, src1
))
649 else if (rtx_equal_p (dest
, src2
))
654 emit_insn (gen_movsi (copy_rtx (dest
), src1
));
656 emit_insn_before (gen_movsi (copy_rtx (dest
), src1
), insn
);
661 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
662 Change the last element of PATTERN from (clobber (scratch:SI))
663 to (clobber (reg:SI HI_REGNO)). */
666 mep_rewrite_mult (rtx_insn
*insn
, rtx pattern
)
670 hi_clobber
= XVECEXP (pattern
, 0, XVECLEN (pattern
, 0) - 1);
671 XEXP (hi_clobber
, 0) = gen_rtx_REG (SImode
, HI_REGNO
);
672 PATTERN (insn
) = pattern
;
673 INSN_CODE (insn
) = -1;
676 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
677 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
678 store the result in DEST if nonnull. */
681 mep_rewrite_mulsi3 (rtx_insn
*insn
, rtx dest
, rtx src1
, rtx src2
)
685 lo
= gen_rtx_REG (SImode
, LO_REGNO
);
687 pattern
= gen_mulsi3r (lo
, dest
, copy_rtx (dest
),
688 mep_mulr_source (insn
, dest
, src1
, src2
));
690 pattern
= gen_mulsi3_lo (lo
, src1
, src2
);
691 mep_rewrite_mult (insn
, pattern
);
694 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
695 SRC3 into $lo, then use either madd or maddr. The move into $lo will
696 be deleted by a peephole2 if SRC3 is already in $lo. */
699 mep_rewrite_maddsi3 (rtx_insn
*insn
, rtx dest
, rtx src1
, rtx src2
, rtx src3
)
703 lo
= gen_rtx_REG (SImode
, LO_REGNO
);
704 emit_insn_before (gen_movsi (copy_rtx (lo
), src3
), insn
);
706 pattern
= gen_maddsi3r (lo
, dest
, copy_rtx (dest
),
707 mep_mulr_source (insn
, dest
, src1
, src2
),
710 pattern
= gen_maddsi3_lo (lo
, src1
, src2
, copy_rtx (lo
));
711 mep_rewrite_mult (insn
, pattern
);
714 /* Return true if $lo has the same value as integer register GPR when
715 instruction INSN is reached. If necessary, rewrite the instruction
716 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
717 rtx for (reg:SI LO_REGNO).
719 This function is intended to be used by the peephole2 pass. Since
720 that pass goes from the end of a basic block to the beginning, and
721 propagates liveness information on the way, there is no need to
722 update register notes here.
724 If GPR_DEAD_P is true on entry, and this function returns true,
725 then the caller will replace _every_ use of GPR in and after INSN
726 with LO. This means that if the instruction that sets $lo is a
727 mulr- or maddr-type instruction, we can rewrite it to use mul or
728 madd instead. In combination with the copy progagation pass,
729 this allows us to replace sequences like:
738 if GPR is no longer used. */
741 mep_reuse_lo_p_1 (rtx lo
, rtx gpr
, rtx_insn
*insn
, bool gpr_dead_p
)
745 insn
= PREV_INSN (insn
);
747 switch (recog_memoized (insn
))
749 case CODE_FOR_mulsi3_1
:
751 if (rtx_equal_p (recog_data
.operand
[0], gpr
))
753 mep_rewrite_mulsi3 (insn
,
754 gpr_dead_p
? NULL
: recog_data
.operand
[0],
755 recog_data
.operand
[1],
756 recog_data
.operand
[2]);
761 case CODE_FOR_maddsi3
:
763 if (rtx_equal_p (recog_data
.operand
[0], gpr
))
765 mep_rewrite_maddsi3 (insn
,
766 gpr_dead_p
? NULL
: recog_data
.operand
[0],
767 recog_data
.operand
[1],
768 recog_data
.operand
[2],
769 recog_data
.operand
[3]);
774 case CODE_FOR_mulsi3r
:
775 case CODE_FOR_maddsi3r
:
777 return rtx_equal_p (recog_data
.operand
[1], gpr
);
780 if (reg_set_p (lo
, insn
)
781 || reg_set_p (gpr
, insn
)
782 || volatile_insn_p (PATTERN (insn
)))
785 if (gpr_dead_p
&& reg_referenced_p (gpr
, PATTERN (insn
)))
790 while (!NOTE_INSN_BASIC_BLOCK_P (insn
));
794 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
797 mep_reuse_lo_p (rtx lo
, rtx gpr
, rtx_insn
*insn
, bool gpr_dead_p
)
799 bool result
= mep_reuse_lo_p_1 (lo
, gpr
, insn
, gpr_dead_p
);
804 /* Return true if SET can be turned into a post-modify load or store
805 that adds OFFSET to GPR. In other words, return true if SET can be
808 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
810 It's OK to change SET to an equivalent operation in order to
814 mep_use_post_modify_for_set_p (rtx set
, rtx gpr
, rtx offset
)
817 unsigned int reg_bytes
, mem_bytes
;
818 machine_mode reg_mode
, mem_mode
;
820 /* Only simple SETs can be converted. */
821 if (GET_CODE (set
) != SET
)
824 /* Point REG to what we hope will be the register side of the set and
825 MEM to what we hope will be the memory side. */
826 if (GET_CODE (SET_DEST (set
)) == MEM
)
828 mem
= &SET_DEST (set
);
829 reg
= &SET_SRC (set
);
833 reg
= &SET_DEST (set
);
834 mem
= &SET_SRC (set
);
835 if (GET_CODE (*mem
) == SIGN_EXTEND
)
836 mem
= &XEXP (*mem
, 0);
839 /* Check that *REG is a suitable coprocessor register. */
840 if (GET_CODE (*reg
) != REG
|| !LOADABLE_CR_REGNO_P (REGNO (*reg
)))
843 /* Check that *MEM is a suitable memory reference. */
844 if (GET_CODE (*mem
) != MEM
|| !rtx_equal_p (XEXP (*mem
, 0), gpr
))
847 /* Get the number of bytes in each operand. */
848 mem_bytes
= GET_MODE_SIZE (GET_MODE (*mem
));
849 reg_bytes
= GET_MODE_SIZE (GET_MODE (*reg
));
851 /* Check that OFFSET is suitably aligned. */
852 if (INTVAL (offset
) & (mem_bytes
- 1))
855 /* Convert *MEM to a normal integer mode. */
856 mem_mode
= mode_for_size (mem_bytes
* BITS_PER_UNIT
, MODE_INT
, 0);
857 *mem
= change_address (*mem
, mem_mode
, NULL
);
859 /* Adjust *REG as well. */
860 *reg
= shallow_copy_rtx (*reg
);
861 if (reg
== &SET_DEST (set
) && reg_bytes
< UNITS_PER_WORD
)
863 /* SET is a subword load. Convert it to an explicit extension. */
864 PUT_MODE (*reg
, SImode
);
865 *mem
= gen_rtx_SIGN_EXTEND (SImode
, *mem
);
869 reg_mode
= mode_for_size (reg_bytes
* BITS_PER_UNIT
, MODE_INT
, 0);
870 PUT_MODE (*reg
, reg_mode
);
875 /* Return the effect of frame-related instruction INSN. */
878 mep_frame_expr (rtx_insn
*insn
)
882 note
= find_reg_note (insn
, REG_FRAME_RELATED_EXPR
, 0);
883 expr
= (note
!= 0 ? XEXP (note
, 0) : copy_rtx (PATTERN (insn
)));
884 RTX_FRAME_RELATED_P (expr
) = 1;
888 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
889 new pattern in INSN1; INSN2 will be deleted by the caller. */
892 mep_make_parallel (rtx_insn
*insn1
, rtx_insn
*insn2
)
896 if (RTX_FRAME_RELATED_P (insn2
))
898 expr
= mep_frame_expr (insn2
);
899 if (RTX_FRAME_RELATED_P (insn1
))
900 expr
= gen_rtx_SEQUENCE (VOIDmode
,
901 gen_rtvec (2, mep_frame_expr (insn1
), expr
));
902 set_unique_reg_note (insn1
, REG_FRAME_RELATED_EXPR
, expr
);
903 RTX_FRAME_RELATED_P (insn1
) = 1;
906 PATTERN (insn1
) = gen_rtx_PARALLEL (VOIDmode
,
907 gen_rtvec (2, PATTERN (insn1
),
909 INSN_CODE (insn1
) = -1;
912 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
913 the basic block to see if any previous load or store instruction can
914 be persuaded to do SET_INSN as a side-effect. Return true if so. */
917 mep_use_post_modify_p_1 (rtx_insn
*set_insn
, rtx reg
, rtx offset
)
924 insn
= PREV_INSN (insn
);
927 if (mep_use_post_modify_for_set_p (PATTERN (insn
), reg
, offset
))
929 mep_make_parallel (insn
, set_insn
);
933 if (reg_set_p (reg
, insn
)
934 || reg_referenced_p (reg
, PATTERN (insn
))
935 || volatile_insn_p (PATTERN (insn
)))
939 while (!NOTE_INSN_BASIC_BLOCK_P (insn
));
943 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
946 mep_use_post_modify_p (rtx_insn
*insn
, rtx reg
, rtx offset
)
948 bool result
= mep_use_post_modify_p_1 (insn
, reg
, offset
);
954 mep_allow_clip (rtx ux
, rtx lx
, int s
)
956 HOST_WIDE_INT u
= INTVAL (ux
);
957 HOST_WIDE_INT l
= INTVAL (lx
);
960 if (!TARGET_OPT_CLIP
)
965 for (i
= 0; i
< 30; i
++)
966 if ((u
== ((HOST_WIDE_INT
) 1 << i
) - 1)
967 && (l
== - ((HOST_WIDE_INT
) 1 << i
)))
975 for (i
= 0; i
< 30; i
++)
976 if ((u
== ((HOST_WIDE_INT
) 1 << i
) - 1))
983 mep_bit_position_p (rtx x
, bool looking_for
)
985 if (GET_CODE (x
) != CONST_INT
)
987 switch ((int) INTVAL(x
) & 0xff)
989 case 0x01: case 0x02: case 0x04: case 0x08:
990 case 0x10: case 0x20: case 0x40: case 0x80:
992 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
993 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1000 move_needs_splitting (rtx dest
, rtx src
,
1001 machine_mode mode ATTRIBUTE_UNUSED
)
1003 int s
= mep_section_tag (src
);
1007 if (GET_CODE (src
) == CONST
1008 || GET_CODE (src
) == MEM
)
1009 src
= XEXP (src
, 0);
1010 else if (GET_CODE (src
) == SYMBOL_REF
1011 || GET_CODE (src
) == LABEL_REF
1012 || GET_CODE (src
) == PLUS
)
1018 || (GET_CODE (src
) == PLUS
1019 && GET_CODE (XEXP (src
, 1)) == CONST_INT
1020 && (INTVAL (XEXP (src
, 1)) < -65536
1021 || INTVAL (XEXP (src
, 1)) > 0xffffff))
1022 || (GET_CODE (dest
) == REG
1023 && REGNO (dest
) > 7 && REGNO (dest
) < FIRST_PSEUDO_REGISTER
))
1029 mep_split_mov (rtx
*operands
, int symbolic
)
1033 if (move_needs_splitting (operands
[0], operands
[1], SImode
))
1038 if (GET_CODE (operands
[1]) != CONST_INT
)
1041 if (constraint_satisfied_p (operands
[1], CONSTRAINT_I
)
1042 || constraint_satisfied_p (operands
[1], CONSTRAINT_J
)
1043 || constraint_satisfied_p (operands
[1], CONSTRAINT_O
))
1046 if (((!reload_completed
&& !reload_in_progress
)
1047 || (REG_P (operands
[0]) && REGNO (operands
[0]) < 8))
1048 && constraint_satisfied_p (operands
[1], CONSTRAINT_K
))
1054 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1055 it to one specific value. So the insn chosen depends on whether
1056 the source and destination modes match. */
1059 mep_vliw_mode_match (rtx tgt
)
1061 bool src_vliw
= mep_vliw_function_p (cfun
->decl
);
1062 bool tgt_vliw
= INTVAL (tgt
);
1064 return src_vliw
== tgt_vliw
;
1067 /* Like the above, but also test for near/far mismatches. */
1070 mep_vliw_jmp_match (rtx tgt
)
1072 bool src_vliw
= mep_vliw_function_p (cfun
->decl
);
1073 bool tgt_vliw
= INTVAL (tgt
);
1075 if (mep_section_tag (DECL_RTL (cfun
->decl
)) == 'f')
1078 return src_vliw
== tgt_vliw
;
1082 mep_multi_slot (rtx_insn
*x
)
1084 return get_attr_slot (x
) == SLOT_MULTI
;
1087 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
1090 mep_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
1092 /* We can't convert symbol values to gp- or tp-rel values after
1093 reload, as reload might have used $gp or $tp for other
1095 if (GET_CODE (x
) == SYMBOL_REF
&& (reload_in_progress
|| reload_completed
))
1097 char e
= mep_section_tag (x
);
1098 return (e
!= 't' && e
!= 'b');
1103 /* Be careful not to use macros that need to be compiled one way for
1104 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1107 mep_legitimate_address (machine_mode mode
, rtx x
, int strict
)
1111 #define DEBUG_LEGIT 0
1113 fprintf (stderr
, "legit: mode %s strict %d ", mode_name
[mode
], strict
);
1117 if (GET_CODE (x
) == LO_SUM
1118 && GET_CODE (XEXP (x
, 0)) == REG
1119 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1120 && CONSTANT_P (XEXP (x
, 1)))
1122 if (GET_MODE_SIZE (mode
) > 4)
1124 /* We will end up splitting this, and lo_sums are not
1125 offsettable for us. */
1127 fprintf(stderr
, " - nope, %%lo(sym)[reg] not splittable\n");
1132 fprintf (stderr
, " - yup, %%lo(sym)[reg]\n");
1137 if (GET_CODE (x
) == REG
1138 && GEN_REG (REGNO (x
), strict
))
1141 fprintf (stderr
, " - yup, [reg]\n");
1146 if (GET_CODE (x
) == PLUS
1147 && GET_CODE (XEXP (x
, 0)) == REG
1148 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1149 && const_in_range (XEXP (x
, 1), -32768, 32767))
1152 fprintf (stderr
, " - yup, [reg+const]\n");
1157 if (GET_CODE (x
) == PLUS
1158 && GET_CODE (XEXP (x
, 0)) == REG
1159 && GEN_REG (REGNO (XEXP (x
, 0)), strict
)
1160 && GET_CODE (XEXP (x
, 1)) == CONST
1161 && (GET_CODE (XEXP (XEXP (x
, 1), 0)) == UNSPEC
1162 || (GET_CODE (XEXP (XEXP (x
, 1), 0)) == PLUS
1163 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == UNSPEC
1164 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == CONST_INT
)))
1167 fprintf (stderr
, " - yup, [reg+unspec]\n");
1172 the_tag
= mep_section_tag (x
);
1177 fprintf (stderr
, " - nope, [far]\n");
1182 if (mode
== VOIDmode
1183 && GET_CODE (x
) == SYMBOL_REF
)
1186 fprintf (stderr
, " - yup, call [symbol]\n");
1191 if ((mode
== SImode
|| mode
== SFmode
)
1193 && mep_legitimate_constant_p (mode
, x
)
1194 && the_tag
!= 't' && the_tag
!= 'b')
1196 if (GET_CODE (x
) != CONST_INT
1197 || (INTVAL (x
) <= 0xfffff
1199 && (INTVAL (x
) % 4) == 0))
1202 fprintf (stderr
, " - yup, [const]\n");
1209 fprintf (stderr
, " - nope.\n");
1215 mep_legitimize_reload_address (rtx
*x
, machine_mode mode
, int opnum
,
1217 int ind_levels ATTRIBUTE_UNUSED
)
1219 enum reload_type type
= (enum reload_type
) type_i
;
1221 if (GET_CODE (*x
) == PLUS
1222 && GET_CODE (XEXP (*x
, 0)) == MEM
1223 && GET_CODE (XEXP (*x
, 1)) == REG
)
1225 /* GCC will by default copy the MEM into a REG, which results in
1226 an invalid address. For us, the best thing to do is move the
1227 whole expression to a REG. */
1228 push_reload (*x
, NULL_RTX
, x
, NULL
,
1229 GENERAL_REGS
, mode
, VOIDmode
,
1234 if (GET_CODE (*x
) == PLUS
1235 && GET_CODE (XEXP (*x
, 0)) == SYMBOL_REF
1236 && GET_CODE (XEXP (*x
, 1)) == CONST_INT
)
1238 char e
= mep_section_tag (XEXP (*x
, 0));
1240 if (e
!= 't' && e
!= 'b')
1242 /* GCC thinks that (sym+const) is a valid address. Well,
1243 sometimes it is, this time it isn't. The best thing to
1244 do is reload the symbol to a register, since reg+int
1245 tends to work, and we can't just add the symbol and
1247 push_reload (XEXP (*x
, 0), NULL_RTX
, &(XEXP(*x
, 0)), NULL
,
1248 GENERAL_REGS
, mode
, VOIDmode
,
1257 mep_core_address_length (rtx_insn
*insn
, int opn
)
1259 rtx set
= single_set (insn
);
1260 rtx mem
= XEXP (set
, opn
);
1261 rtx other
= XEXP (set
, 1-opn
);
1262 rtx addr
= XEXP (mem
, 0);
1264 if (register_operand (addr
, Pmode
))
1266 if (GET_CODE (addr
) == PLUS
)
1268 rtx addend
= XEXP (addr
, 1);
1270 gcc_assert (REG_P (XEXP (addr
, 0)));
1272 switch (REGNO (XEXP (addr
, 0)))
1274 case STACK_POINTER_REGNUM
:
1275 if (GET_MODE_SIZE (GET_MODE (mem
)) == 4
1276 && mep_imm7a4_operand (addend
, VOIDmode
))
1281 gcc_assert (REG_P (other
));
1283 if (REGNO (other
) >= 8)
1286 if (GET_CODE (addend
) == CONST
1287 && GET_CODE (XEXP (addend
, 0)) == UNSPEC
1288 && XINT (XEXP (addend
, 0), 1) == UNS_TPREL
)
1291 if (GET_CODE (addend
) == CONST_INT
1292 && INTVAL (addend
) >= 0
1293 && INTVAL (addend
) <= 127
1294 && INTVAL (addend
) % GET_MODE_SIZE (GET_MODE (mem
)) == 0)
1304 mep_cop_address_length (rtx_insn
*insn
, int opn
)
1306 rtx set
= single_set (insn
);
1307 rtx mem
= XEXP (set
, opn
);
1308 rtx addr
= XEXP (mem
, 0);
1310 if (GET_CODE (mem
) != MEM
)
1312 if (register_operand (addr
, Pmode
))
1314 if (GET_CODE (addr
) == POST_INC
)
1320 #define DEBUG_EXPAND_MOV 0
1322 mep_expand_mov (rtx
*operands
, machine_mode mode
)
1327 int post_reload
= 0;
1329 tag
[0] = mep_section_tag (operands
[0]);
1330 tag
[1] = mep_section_tag (operands
[1]);
1332 if (!reload_in_progress
1333 && !reload_completed
1334 && GET_CODE (operands
[0]) != REG
1335 && GET_CODE (operands
[0]) != SUBREG
1336 && GET_CODE (operands
[1]) != REG
1337 && GET_CODE (operands
[1]) != SUBREG
)
1338 operands
[1] = copy_to_mode_reg (mode
, operands
[1]);
1340 #if DEBUG_EXPAND_MOV
1341 fprintf(stderr
, "expand move %s %d\n", mode_name
[mode
],
1342 reload_in_progress
|| reload_completed
);
1343 debug_rtx (operands
[0]);
1344 debug_rtx (operands
[1]);
1347 if (mode
== DImode
|| mode
== DFmode
)
1350 if (reload_in_progress
|| reload_completed
)
1354 if (GET_CODE (operands
[0]) == REG
&& REGNO (operands
[0]) == TP_REGNO
)
1355 cfun
->machine
->reload_changes_tp
= true;
1357 if (tag
[0] == 't' || tag
[1] == 't')
1359 r
= has_hard_reg_initial_val (Pmode
, GP_REGNO
);
1360 if (!r
|| GET_CODE (r
) != REG
|| REGNO (r
) != GP_REGNO
)
1363 if (tag
[0] == 'b' || tag
[1] == 'b')
1365 r
= has_hard_reg_initial_val (Pmode
, TP_REGNO
);
1366 if (!r
|| GET_CODE (r
) != REG
|| REGNO (r
) != TP_REGNO
)
1369 if (cfun
->machine
->reload_changes_tp
== true)
1376 if (symbol_p (operands
[1]))
1378 t
= mep_section_tag (operands
[1]);
1379 if (t
== 'b' || t
== 't')
1382 if (GET_CODE (operands
[1]) == SYMBOL_REF
)
1384 tpsym
= operands
[1];
1385 n
= gen_rtx_UNSPEC (mode
,
1386 gen_rtvec (1, operands
[1]),
1387 t
== 'b' ? UNS_TPREL
: UNS_GPREL
);
1388 n
= gen_rtx_CONST (mode
, n
);
1390 else if (GET_CODE (operands
[1]) == CONST
1391 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
1392 && GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
1393 && GET_CODE (XEXP (XEXP (operands
[1], 0), 1)) == CONST_INT
)
1395 tpsym
= XEXP (XEXP (operands
[1], 0), 0);
1396 tpoffs
= XEXP (XEXP (operands
[1], 0), 1);
1397 n
= gen_rtx_UNSPEC (mode
,
1398 gen_rtvec (1, tpsym
),
1399 t
== 'b' ? UNS_TPREL
: UNS_GPREL
);
1400 n
= gen_rtx_PLUS (mode
, n
, tpoffs
);
1401 n
= gen_rtx_CONST (mode
, n
);
1403 else if (GET_CODE (operands
[1]) == CONST
1404 && GET_CODE (XEXP (operands
[1], 0)) == UNSPEC
)
1408 error ("unusual TP-relative address");
1412 n
= gen_rtx_PLUS (mode
, (t
== 'b' ? mep_tp_rtx ()
1413 : mep_gp_rtx ()), n
);
1414 n
= emit_insn (gen_rtx_SET (mode
, operands
[0], n
));
1415 #if DEBUG_EXPAND_MOV
1416 fprintf(stderr
, "mep_expand_mov emitting ");
1423 for (i
=0; i
< 2; i
++)
1425 t
= mep_section_tag (operands
[i
]);
1426 if (GET_CODE (operands
[i
]) == MEM
&& (t
== 'b' || t
== 't'))
1431 sym
= XEXP (operands
[i
], 0);
1432 if (GET_CODE (sym
) == CONST
1433 && GET_CODE (XEXP (sym
, 0)) == UNSPEC
)
1434 sym
= XVECEXP (XEXP (sym
, 0), 0, 0);
1447 n
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, sym
), u
);
1448 n
= gen_rtx_CONST (Pmode
, n
);
1449 n
= gen_rtx_PLUS (Pmode
, r
, n
);
1450 operands
[i
] = replace_equiv_address (operands
[i
], n
);
1455 if ((GET_CODE (operands
[1]) != REG
1456 && MEP_CONTROL_REG (operands
[0]))
1457 || (GET_CODE (operands
[0]) != REG
1458 && MEP_CONTROL_REG (operands
[1])))
1461 #if DEBUG_EXPAND_MOV
1462 fprintf (stderr
, "cr-mem, forcing op1 to reg\n");
1464 temp
= gen_reg_rtx (mode
);
1465 emit_move_insn (temp
, operands
[1]);
1469 if (symbolref_p (operands
[0])
1470 && (mep_section_tag (XEXP (operands
[0], 0)) == 'f'
1471 || (GET_MODE_SIZE (mode
) != 4)))
1475 gcc_assert (!reload_in_progress
&& !reload_completed
);
1477 temp
= force_reg (Pmode
, XEXP (operands
[0], 0));
1478 operands
[0] = replace_equiv_address (operands
[0], temp
);
1479 emit_move_insn (operands
[0], operands
[1]);
1483 if (!post_reload
&& (tag
[1] == 't' || tag
[1] == 'b'))
1486 if (symbol_p (operands
[1])
1487 && (tag
[1] == 'f' || tag
[1] == 't' || tag
[1] == 'b'))
1489 emit_insn (gen_movsi_topsym_s (operands
[0], operands
[1]));
1490 emit_insn (gen_movsi_botsym_s (operands
[0], operands
[0], operands
[1]));
1494 if (symbolref_p (operands
[1])
1495 && (tag
[1] == 'f' || tag
[1] == 't' || tag
[1] == 'b'))
1499 if (reload_in_progress
|| reload_completed
)
1502 temp
= gen_reg_rtx (Pmode
);
1504 emit_insn (gen_movsi_topsym_s (temp
, operands
[1]));
1505 emit_insn (gen_movsi_botsym_s (temp
, temp
, operands
[1]));
1506 emit_move_insn (operands
[0], replace_equiv_address (operands
[1], temp
));
1513 /* Cases where the pattern can't be made to use at all. */
1516 mep_mov_ok (rtx
*operands
, machine_mode mode ATTRIBUTE_UNUSED
)
1520 #define DEBUG_MOV_OK 0
1522 fprintf (stderr
, "mep_mov_ok %s %c=%c\n", mode_name
[mode
], mep_section_tag (operands
[0]),
1523 mep_section_tag (operands
[1]));
1524 debug_rtx (operands
[0]);
1525 debug_rtx (operands
[1]);
1528 /* We want the movh patterns to get these. */
1529 if (GET_CODE (operands
[1]) == HIGH
)
1532 /* We can't store a register to a far variable without using a
1533 scratch register to hold the address. Using far variables should
1534 be split by mep_emit_mov anyway. */
1535 if (mep_section_tag (operands
[0]) == 'f'
1536 || mep_section_tag (operands
[1]) == 'f')
1539 fprintf (stderr
, " - no, f\n");
1543 i
= mep_section_tag (operands
[1]);
1544 if ((i
== 'b' || i
== 't') && !reload_completed
&& !reload_in_progress
)
1545 /* These are supposed to be generated with adds of the appropriate
1546 register. During and after reload, however, we allow them to
1547 be accessed as normal symbols because adding a dependency on
1548 the base register now might cause problems. */
1551 fprintf (stderr
, " - no, bt\n");
1556 /* The only moves we can allow involve at least one general
1557 register, so require it. */
1558 for (i
= 0; i
< 2; i
++)
1560 /* Allow subregs too, before reload. */
1561 rtx x
= operands
[i
];
1563 if (GET_CODE (x
) == SUBREG
)
1565 if (GET_CODE (x
) == REG
1566 && ! MEP_CONTROL_REG (x
))
1569 fprintf (stderr
, " - ok\n");
1575 fprintf (stderr
, " - no, no gen reg\n");
1580 #define DEBUG_SPLIT_WIDE_MOVE 0
1582 mep_split_wide_move (rtx
*operands
, machine_mode mode
)
1586 #if DEBUG_SPLIT_WIDE_MOVE
1587 fprintf (stderr
, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name
[mode
]);
1588 debug_rtx (operands
[0]);
1589 debug_rtx (operands
[1]);
1592 for (i
= 0; i
<= 1; i
++)
1594 rtx op
= operands
[i
], hi
, lo
;
1596 switch (GET_CODE (op
))
1600 unsigned int regno
= REGNO (op
);
1602 if (TARGET_64BIT_CR_REGS
&& CR_REGNO_P (regno
))
1606 lo
= gen_rtx_REG (SImode
, regno
);
1608 hi
= gen_rtx_ZERO_EXTRACT (SImode
,
1609 gen_rtx_REG (DImode
, regno
),
1614 hi
= gen_rtx_REG (SImode
, regno
+ TARGET_LITTLE_ENDIAN
);
1615 lo
= gen_rtx_REG (SImode
, regno
+ TARGET_BIG_ENDIAN
);
1623 hi
= operand_subword (op
, TARGET_LITTLE_ENDIAN
, 0, mode
);
1624 lo
= operand_subword (op
, TARGET_BIG_ENDIAN
, 0, mode
);
1631 /* The high part of CR <- GPR moves must be done after the low part. */
1632 operands
[i
+ 4] = lo
;
1633 operands
[i
+ 2] = hi
;
1636 if (reg_mentioned_p (operands
[2], operands
[5])
1637 || GET_CODE (operands
[2]) == ZERO_EXTRACT
1638 || GET_CODE (operands
[4]) == ZERO_EXTRACT
)
1642 /* Overlapping register pairs -- make sure we don't
1643 early-clobber ourselves. */
1645 operands
[2] = operands
[4];
1648 operands
[3] = operands
[5];
1652 #if DEBUG_SPLIT_WIDE_MOVE
1653 fprintf(stderr
, "\033[34m");
1654 debug_rtx (operands
[2]);
1655 debug_rtx (operands
[3]);
1656 debug_rtx (operands
[4]);
1657 debug_rtx (operands
[5]);
1658 fprintf(stderr
, "\033[0m");
1662 /* Emit a setcc instruction in its entirity. */
1665 mep_expand_setcc_1 (enum rtx_code code
, rtx dest
, rtx op1
, rtx op2
)
1673 tmp
= op1
, op1
= op2
, op2
= tmp
;
1674 code
= swap_condition (code
);
1679 op1
= force_reg (SImode
, op1
);
1680 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
1681 gen_rtx_fmt_ee (code
, SImode
, op1
, op2
)));
1685 if (op2
!= const0_rtx
)
1686 op1
= expand_binop (SImode
, sub_optab
, op1
, op2
, NULL
, 1, OPTAB_WIDEN
);
1687 mep_expand_setcc_1 (LTU
, dest
, op1
, const1_rtx
);
1691 /* Branchful sequence:
1693 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1696 Branchless sequence:
1697 add3 tmp, op1, -op2 32-bit (or mov + sub)
1698 sltu3 tmp, tmp, 1 16-bit
1699 xor3 dest, tmp, 1 32-bit
1701 if (optimize_size
&& op2
!= const0_rtx
)
1704 if (op2
!= const0_rtx
)
1705 op1
= expand_binop (SImode
, sub_optab
, op1
, op2
, NULL
, 1, OPTAB_WIDEN
);
1707 op2
= gen_reg_rtx (SImode
);
1708 mep_expand_setcc_1 (LTU
, op2
, op1
, const1_rtx
);
1710 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
1711 gen_rtx_XOR (SImode
, op2
, const1_rtx
)));
1715 if (GET_CODE (op2
) != CONST_INT
1716 || INTVAL (op2
) == 0x7ffffff)
1718 op2
= GEN_INT (INTVAL (op2
) + 1);
1719 return mep_expand_setcc_1 (LT
, dest
, op1
, op2
);
1722 if (GET_CODE (op2
) != CONST_INT
1723 || INTVAL (op2
) == -1)
1725 op2
= GEN_INT (trunc_int_for_mode (INTVAL (op2
) + 1, SImode
));
1726 return mep_expand_setcc_1 (LTU
, dest
, op1
, op2
);
1729 if (GET_CODE (op2
) != CONST_INT
1730 || INTVAL (op2
) == trunc_int_for_mode (0x80000000, SImode
))
1732 op2
= GEN_INT (INTVAL (op2
) - 1);
1733 return mep_expand_setcc_1 (GT
, dest
, op1
, op2
);
1736 if (GET_CODE (op2
) != CONST_INT
1737 || op2
== const0_rtx
)
1739 op2
= GEN_INT (trunc_int_for_mode (INTVAL (op2
) - 1, SImode
));
1740 return mep_expand_setcc_1 (GTU
, dest
, op1
, op2
);
1748 mep_expand_setcc (rtx
*operands
)
1750 rtx dest
= operands
[0];
1751 enum rtx_code code
= GET_CODE (operands
[1]);
1752 rtx op0
= operands
[2];
1753 rtx op1
= operands
[3];
1755 return mep_expand_setcc_1 (code
, dest
, op0
, op1
);
1759 mep_expand_cbranch (rtx
*operands
)
1761 enum rtx_code code
= GET_CODE (operands
[0]);
1762 rtx op0
= operands
[1];
1763 rtx op1
= operands
[2];
1770 if (mep_imm4_operand (op1
, SImode
))
1773 tmp
= gen_reg_rtx (SImode
);
1774 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op0
, op1
));
1781 if (mep_imm4_operand (op1
, SImode
))
1784 tmp
= gen_reg_rtx (SImode
);
1785 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op0
, op1
));
1794 if (! mep_reg_or_imm4_operand (op1
, SImode
))
1795 op1
= force_reg (SImode
, op1
);
1800 if (GET_CODE (op1
) == CONST_INT
1801 && INTVAL (op1
) != 0x7fffffff)
1803 op1
= GEN_INT (INTVAL (op1
) + 1);
1804 code
= (code
== LE
? LT
: GE
);
1808 tmp
= gen_reg_rtx (SImode
);
1809 gcc_assert (mep_expand_setcc_1 (LT
, tmp
, op1
, op0
));
1811 code
= (code
== LE
? EQ
: NE
);
1817 if (op1
== const1_rtx
)
1824 tmp
= gen_reg_rtx (SImode
);
1825 gcc_assert (mep_expand_setcc_1 (LTU
, tmp
, op0
, op1
));
1832 tmp
= gen_reg_rtx (SImode
);
1833 if (mep_expand_setcc_1 (LEU
, tmp
, op0
, op1
))
1835 else if (mep_expand_setcc_1 (LTU
, tmp
, op1
, op0
))
1844 tmp
= gen_reg_rtx (SImode
);
1845 gcc_assert (mep_expand_setcc_1 (GTU
, tmp
, op0
, op1
)
1846 || mep_expand_setcc_1 (LTU
, tmp
, op1
, op0
));
1853 tmp
= gen_reg_rtx (SImode
);
1854 if (mep_expand_setcc_1 (GEU
, tmp
, op0
, op1
))
1856 else if (mep_expand_setcc_1 (LTU
, tmp
, op0
, op1
))
1868 return gen_rtx_fmt_ee (code
, VOIDmode
, op0
, op1
);
1872 mep_emit_cbranch (rtx
*operands
, int ne
)
1874 if (GET_CODE (operands
[1]) == REG
)
1875 return ne
? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1876 else if (INTVAL (operands
[1]) == 0 && !mep_vliw_function_p(cfun
->decl
))
1877 return ne
? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1879 return ne
? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1883 mep_expand_call (rtx
*operands
, int returns_value
)
1885 rtx addr
= operands
[returns_value
];
1886 rtx tp
= mep_tp_rtx ();
1887 rtx gp
= mep_gp_rtx ();
1889 gcc_assert (GET_CODE (addr
) == MEM
);
1891 addr
= XEXP (addr
, 0);
1893 if (! mep_call_address_operand (addr
, VOIDmode
))
1894 addr
= force_reg (SImode
, addr
);
1896 if (! operands
[returns_value
+2])
1897 operands
[returns_value
+2] = const0_rtx
;
1900 emit_call_insn (gen_call_value_internal (operands
[0], addr
, operands
[2],
1901 operands
[3], tp
, gp
));
1903 emit_call_insn (gen_call_internal (addr
, operands
[1],
1904 operands
[2], tp
, gp
));
1907 /* Aliasing Support. */
1909 /* If X is a machine specific address (i.e. a symbol or label being
1910 referenced as a displacement from the GOT implemented using an
1911 UNSPEC), then return the base term. Otherwise return X. */
1914 mep_find_base_term (rtx x
)
1919 if (GET_CODE (x
) != PLUS
)
1924 if (has_hard_reg_initial_val(Pmode
, TP_REGNO
)
1925 && base
== mep_tp_rtx ())
1927 else if (has_hard_reg_initial_val(Pmode
, GP_REGNO
)
1928 && base
== mep_gp_rtx ())
1933 if (GET_CODE (term
) != CONST
)
1935 term
= XEXP (term
, 0);
1937 if (GET_CODE (term
) != UNSPEC
1938 || XINT (term
, 1) != unspec
)
1941 return XVECEXP (term
, 0, 0);
1944 /* Reload Support. */
1946 /* Return true if the registers in CLASS cannot represent the change from
1947 modes FROM to TO. */
1950 mep_cannot_change_mode_class (machine_mode from
, machine_mode to
,
1951 enum reg_class regclass
)
1956 /* 64-bit COP regs must remain 64-bit COP regs. */
1957 if (TARGET_64BIT_CR_REGS
1958 && (regclass
== CR_REGS
1959 || regclass
== LOADABLE_CR_REGS
)
1960 && (GET_MODE_SIZE (to
) < 8
1961 || GET_MODE_SIZE (from
) < 8))
1967 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
1970 mep_general_reg (rtx x
)
1972 while (GET_CODE (x
) == SUBREG
)
1974 return GET_CODE (x
) == REG
&& GR_REGNO_P (REGNO (x
));
1978 mep_nongeneral_reg (rtx x
)
1980 while (GET_CODE (x
) == SUBREG
)
1982 return (GET_CODE (x
) == REG
1983 && !GR_REGNO_P (REGNO (x
)) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
1987 mep_general_copro_reg (rtx x
)
1989 while (GET_CODE (x
) == SUBREG
)
1991 return (GET_CODE (x
) == REG
&& CR_REGNO_P (REGNO (x
)));
1995 mep_nonregister (rtx x
)
1997 while (GET_CODE (x
) == SUBREG
)
1999 return (GET_CODE (x
) != REG
|| REGNO (x
) >= FIRST_PSEUDO_REGISTER
);
2002 #define DEBUG_RELOAD 0
2004 /* Return the secondary reload class needed for moving value X to or
2005 from a register in coprocessor register class CLASS. */
2007 static enum reg_class
2008 mep_secondary_copro_reload_class (enum reg_class rclass
, rtx x
)
2010 if (mep_general_reg (x
))
2011 /* We can do the move directly if mep_have_core_copro_moves_p,
2012 otherwise we need to go through memory. Either way, no secondary
2013 register is needed. */
2016 if (mep_general_copro_reg (x
))
2018 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2019 if (mep_have_copro_copro_moves_p
)
2022 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2023 if (mep_have_core_copro_moves_p
)
2024 return GENERAL_REGS
;
2026 /* Otherwise we need to do it through memory. No secondary
2027 register is needed. */
2031 if (reg_class_subset_p (rclass
, LOADABLE_CR_REGS
)
2032 && constraint_satisfied_p (x
, CONSTRAINT_U
))
2033 /* X is a memory value that we can access directly. */
2036 /* We have to move X into a GPR first and then copy it to
2037 the coprocessor register. The move from the GPR to the
2038 coprocessor might be done directly or through memory,
2039 depending on mep_have_core_copro_moves_p. */
2040 return GENERAL_REGS
;
2043 /* Copying X to register in RCLASS. */
2046 mep_secondary_input_reload_class (enum reg_class rclass
,
2047 machine_mode mode ATTRIBUTE_UNUSED
,
2053 fprintf (stderr
, "secondary input reload copy to %s %s from ", reg_class_names
[rclass
], mode_name
[mode
]);
2057 if (reg_class_subset_p (rclass
, CR_REGS
))
2058 rv
= mep_secondary_copro_reload_class (rclass
, x
);
2059 else if (MEP_NONGENERAL_CLASS (rclass
)
2060 && (mep_nonregister (x
) || mep_nongeneral_reg (x
)))
2064 fprintf (stderr
, " - requires %s\n", reg_class_names
[rv
]);
2066 return (enum reg_class
) rv
;
2069 /* Copying register in RCLASS to X. */
2072 mep_secondary_output_reload_class (enum reg_class rclass
,
2073 machine_mode mode ATTRIBUTE_UNUSED
,
2079 fprintf (stderr
, "secondary output reload copy from %s %s to ", reg_class_names
[rclass
], mode_name
[mode
]);
2083 if (reg_class_subset_p (rclass
, CR_REGS
))
2084 rv
= mep_secondary_copro_reload_class (rclass
, x
);
2085 else if (MEP_NONGENERAL_CLASS (rclass
)
2086 && (mep_nonregister (x
) || mep_nongeneral_reg (x
)))
2090 fprintf (stderr
, " - requires %s\n", reg_class_names
[rv
]);
2093 return (enum reg_class
) rv
;
2096 /* Implement SECONDARY_MEMORY_NEEDED. */
2099 mep_secondary_memory_needed (enum reg_class rclass1
, enum reg_class rclass2
,
2100 machine_mode mode ATTRIBUTE_UNUSED
)
2102 if (!mep_have_core_copro_moves_p
)
2104 if (reg_classes_intersect_p (rclass1
, CR_REGS
)
2105 && reg_classes_intersect_p (rclass2
, GENERAL_REGS
))
2107 if (reg_classes_intersect_p (rclass2
, CR_REGS
)
2108 && reg_classes_intersect_p (rclass1
, GENERAL_REGS
))
2110 if (!mep_have_copro_copro_moves_p
2111 && reg_classes_intersect_p (rclass1
, CR_REGS
)
2112 && reg_classes_intersect_p (rclass2
, CR_REGS
))
2119 mep_expand_reload (rtx
*operands
, machine_mode mode
)
2121 /* There are three cases for each direction:
2126 int s0
= mep_section_tag (operands
[0]) == 'f';
2127 int s1
= mep_section_tag (operands
[1]) == 'f';
2128 int c0
= mep_nongeneral_reg (operands
[0]);
2129 int c1
= mep_nongeneral_reg (operands
[1]);
2130 int which
= (s0
? 20:0) + (c0
? 10:0) + (s1
? 2:0) + (c1
? 1:0);
2133 fprintf (stderr
, "expand_reload %s\n", mode_name
[mode
]);
2134 debug_rtx (operands
[0]);
2135 debug_rtx (operands
[1]);
2140 case 00: /* Don't know why this gets here. */
2141 case 02: /* general = far */
2142 emit_move_insn (operands
[0], operands
[1]);
2145 case 10: /* cr = mem */
2146 case 11: /* cr = cr */
2147 case 01: /* mem = cr */
2148 case 12: /* cr = far */
2149 emit_move_insn (operands
[2], operands
[1]);
2150 emit_move_insn (operands
[0], operands
[2]);
2153 case 20: /* far = general */
2154 emit_move_insn (operands
[2], XEXP (operands
[1], 0));
2155 emit_move_insn (operands
[0], gen_rtx_MEM (mode
, operands
[2]));
2158 case 21: /* far = cr */
2159 case 22: /* far = far */
2161 fprintf (stderr
, "unsupported expand reload case %02d for mode %s\n",
2162 which
, mode_name
[mode
]);
2163 debug_rtx (operands
[0]);
2164 debug_rtx (operands
[1]);
2169 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2170 can be moved directly into registers 0 to 7, but not into the rest.
2171 If so, and if the required class includes registers 0 to 7, restrict
2172 it to those registers. */
2175 mep_preferred_reload_class (rtx x
, enum reg_class rclass
)
2177 switch (GET_CODE (x
))
2180 if (INTVAL (x
) >= 0x10000
2181 && INTVAL (x
) < 0x01000000
2182 && (INTVAL (x
) & 0xffff) != 0
2183 && reg_class_subset_p (TPREL_REGS
, rclass
))
2184 rclass
= TPREL_REGS
;
2190 if (mep_section_tag (x
) != 'f'
2191 && reg_class_subset_p (TPREL_REGS
, rclass
))
2192 rclass
= TPREL_REGS
;
2201 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2202 moves, 4 for direct double-register moves, and 1000 for anything
2203 that requires a temporary register or temporary stack slot. */
2206 mep_register_move_cost (machine_mode mode
, enum reg_class from
, enum reg_class to
)
2208 if (mep_have_copro_copro_moves_p
2209 && reg_class_subset_p (from
, CR_REGS
)
2210 && reg_class_subset_p (to
, CR_REGS
))
2212 if (TARGET_32BIT_CR_REGS
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2216 if (reg_class_subset_p (from
, CR_REGS
)
2217 && reg_class_subset_p (to
, CR_REGS
))
2219 if (TARGET_32BIT_CR_REGS
&& GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2223 if (reg_class_subset_p (from
, CR_REGS
)
2224 || reg_class_subset_p (to
, CR_REGS
))
2226 if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2230 if (mep_secondary_memory_needed (from
, to
, mode
))
2232 if (MEP_NONGENERAL_CLASS (from
) && MEP_NONGENERAL_CLASS (to
))
2235 if (GET_MODE_SIZE (mode
) > 4)
2242 /* Functions to save and restore machine-specific function data. */
2244 static struct machine_function
*
2245 mep_init_machine_status (void)
2247 return ggc_cleared_alloc
<machine_function
> ();
2251 mep_allocate_initial_value (rtx reg
)
2255 if (GET_CODE (reg
) != REG
)
2258 if (REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2261 /* In interrupt functions, the "initial" values of $gp and $tp are
2262 provided by the prologue. They are not necessarily the same as
2263 the values that the caller was using. */
2264 if (REGNO (reg
) == TP_REGNO
|| REGNO (reg
) == GP_REGNO
)
2265 if (mep_interrupt_p ())
2268 if (! cfun
->machine
->reg_save_slot
[REGNO(reg
)])
2270 cfun
->machine
->reg_save_size
+= 4;
2271 cfun
->machine
->reg_save_slot
[REGNO(reg
)] = cfun
->machine
->reg_save_size
;
2274 rss
= cfun
->machine
->reg_save_slot
[REGNO(reg
)];
2275 return gen_rtx_MEM (SImode
, plus_constant (Pmode
, arg_pointer_rtx
, -rss
));
2279 mep_return_addr_rtx (int count
)
2284 return get_hard_reg_initial_val (Pmode
, LP_REGNO
);
2290 return get_hard_reg_initial_val (Pmode
, TP_REGNO
);
2296 return get_hard_reg_initial_val (Pmode
, GP_REGNO
);
2300 mep_interrupt_p (void)
2302 if (cfun
->machine
->interrupt_handler
== 0)
2304 int interrupt_handler
2305 = (lookup_attribute ("interrupt",
2306 DECL_ATTRIBUTES (current_function_decl
))
2308 cfun
->machine
->interrupt_handler
= interrupt_handler
? 2 : 1;
2310 return cfun
->machine
->interrupt_handler
== 2;
2314 mep_disinterrupt_p (void)
2316 if (cfun
->machine
->disable_interrupts
== 0)
2318 int disable_interrupts
2319 = (lookup_attribute ("disinterrupt",
2320 DECL_ATTRIBUTES (current_function_decl
))
2322 cfun
->machine
->disable_interrupts
= disable_interrupts
? 2 : 1;
2324 return cfun
->machine
->disable_interrupts
== 2;
2328 /* Frame/Epilog/Prolog Related. */
2331 mep_reg_set_p (rtx reg
, rtx insn
)
2333 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2336 if (FIND_REG_INC_NOTE (insn
, reg
))
2338 insn
= PATTERN (insn
);
2341 if (GET_CODE (insn
) == SET
2342 && GET_CODE (XEXP (insn
, 0)) == REG
2343 && GET_CODE (XEXP (insn
, 1)) == REG
2344 && REGNO (XEXP (insn
, 0)) == REGNO (XEXP (insn
, 1)))
2347 return set_of (reg
, insn
) != NULL_RTX
;
2351 #define MEP_SAVES_UNKNOWN 0
2352 #define MEP_SAVES_YES 1
2353 #define MEP_SAVES_MAYBE 2
2354 #define MEP_SAVES_NO 3
2357 mep_reg_set_in_function (int regno
)
2362 if (mep_interrupt_p () && df_regs_ever_live_p(regno
))
2365 if (regno
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2368 push_topmost_sequence ();
2369 insn
= get_insns ();
2370 pop_topmost_sequence ();
2375 reg
= gen_rtx_REG (SImode
, regno
);
2377 for (insn
= NEXT_INSN (insn
); insn
; insn
= NEXT_INSN (insn
))
2378 if (INSN_P (insn
) && mep_reg_set_p (reg
, insn
))
2384 mep_asm_without_operands_p (void)
2386 if (cfun
->machine
->asms_without_operands
== 0)
2390 push_topmost_sequence ();
2391 insn
= get_insns ();
2392 pop_topmost_sequence ();
2394 cfun
->machine
->asms_without_operands
= 1;
2398 && GET_CODE (PATTERN (insn
)) == ASM_INPUT
)
2400 cfun
->machine
->asms_without_operands
= 2;
2403 insn
= NEXT_INSN (insn
);
2407 return cfun
->machine
->asms_without_operands
== 2;
2410 /* Interrupt functions save/restore every call-preserved register, and
2411 any call-used register it uses (or all if it calls any function,
2412 since they may get clobbered there too). Here we check to see
2413 which call-used registers need saving. */
2415 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2416 && (r == FIRST_CCR_REGNO + 1 \
2417 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2418 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2421 mep_interrupt_saved_reg (int r
)
2423 if (!mep_interrupt_p ())
2425 if (r
== REGSAVE_CONTROL_TEMP
2426 || (TARGET_64BIT_CR_REGS
&& TARGET_COP
&& r
== REGSAVE_CONTROL_TEMP
+1))
2428 if (mep_asm_without_operands_p ()
2430 || (r
== RPB_REGNO
|| r
== RPE_REGNO
|| r
== RPC_REGNO
|| r
== LP_REGNO
)
2431 || IVC2_ISAVED_REG (r
)))
2434 /* Function calls mean we need to save $lp. */
2435 if (r
== LP_REGNO
|| IVC2_ISAVED_REG (r
))
2437 if (!crtl
->is_leaf
|| cfun
->machine
->doloop_tags
> 0)
2438 /* The interrupt handler might use these registers for repeat blocks,
2439 or it might call a function that does so. */
2440 if (r
== RPB_REGNO
|| r
== RPE_REGNO
|| r
== RPC_REGNO
)
2442 if (crtl
->is_leaf
&& call_used_regs
[r
] && !df_regs_ever_live_p(r
))
2444 /* Functions we call might clobber these. */
2445 if (call_used_regs
[r
] && !fixed_regs
[r
])
2447 /* Additional registers that need to be saved for IVC2. */
2448 if (IVC2_ISAVED_REG (r
))
2455 mep_call_saves_register (int r
)
2457 if (! cfun
->machine
->frame_locked
)
2459 int rv
= MEP_SAVES_NO
;
2461 if (cfun
->machine
->reg_save_slot
[r
])
2463 else if (r
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2465 else if (r
== FRAME_POINTER_REGNUM
&& frame_pointer_needed
)
2467 else if ((!call_used_regs
[r
] || r
== LP_REGNO
) && df_regs_ever_live_p(r
))
2469 else if (crtl
->calls_eh_return
&& (r
== 10 || r
== 11))
2470 /* We need these to have stack slots so that they can be set during
2473 else if (mep_interrupt_saved_reg (r
))
2475 cfun
->machine
->reg_saved
[r
] = rv
;
2477 return cfun
->machine
->reg_saved
[r
] == MEP_SAVES_YES
;
2480 /* Return true if epilogue uses register REGNO. */
2483 mep_epilogue_uses (int regno
)
2485 /* Since $lp is a call-saved register, the generic code will normally
2486 mark it used in the epilogue if it needs to be saved and restored.
2487 However, when profiling is enabled, the profiling code will implicitly
2488 clobber $11. This case has to be handled specially both here and in
2489 mep_call_saves_register. */
2490 if (regno
== LP_REGNO
&& (profile_arc_flag
> 0 || profile_flag
> 0))
2492 /* Interrupt functions save/restore pretty much everything. */
2493 return (reload_completed
&& mep_interrupt_saved_reg (regno
));
2497 mep_reg_size (int regno
)
2499 if (CR_REGNO_P (regno
) && TARGET_64BIT_CR_REGS
)
2504 /* Worker function for TARGET_CAN_ELIMINATE. */
2507 mep_can_eliminate (const int from
, const int to
)
2509 return (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
2510 ? ! frame_pointer_needed
2515 mep_elimination_offset (int from
, int to
)
2519 int frame_size
= get_frame_size () + crtl
->outgoing_args_size
;
2522 if (!cfun
->machine
->frame_locked
)
2523 memset (cfun
->machine
->reg_saved
, 0, sizeof (cfun
->machine
->reg_saved
));
2525 /* We don't count arg_regs_to_save in the arg pointer offset, because
2526 gcc thinks the arg pointer has moved along with the saved regs.
2527 However, we do count it when we adjust $sp in the prologue. */
2529 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2530 if (mep_call_saves_register (i
))
2531 reg_save_size
+= mep_reg_size (i
);
2533 if (reg_save_size
% 8)
2534 cfun
->machine
->regsave_filler
= 8 - (reg_save_size
% 8);
2536 cfun
->machine
->regsave_filler
= 0;
2538 /* This is what our total stack adjustment looks like. */
2539 total_size
= (reg_save_size
+ frame_size
+ cfun
->machine
->regsave_filler
);
2542 cfun
->machine
->frame_filler
= 8 - (total_size
% 8);
2544 cfun
->machine
->frame_filler
= 0;
2547 if (from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
2548 return reg_save_size
+ cfun
->machine
->regsave_filler
;
2550 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
2551 return cfun
->machine
->frame_filler
+ frame_size
;
2553 if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
2554 return reg_save_size
+ cfun
->machine
->regsave_filler
+ cfun
->machine
->frame_filler
+ frame_size
;
2562 RTX_FRAME_RELATED_P (x
) = 1;
2566 /* Since the prologue/epilogue code is generated after optimization,
2567 we can't rely on gcc to split constants for us. So, this code
2568 captures all the ways to add a constant to a register in one logic
2569 chunk, including optimizing away insns we just don't need. This
2570 makes the prolog/epilog code easier to follow. */
2572 add_constant (int dest
, int src
, int value
, int mark_frame
)
2577 if (src
== dest
&& value
== 0)
2582 insn
= emit_move_insn (gen_rtx_REG (SImode
, dest
),
2583 gen_rtx_REG (SImode
, src
));
2585 RTX_FRAME_RELATED_P(insn
) = 1;
2589 if (value
>= -32768 && value
<= 32767)
2591 insn
= emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, dest
),
2592 gen_rtx_REG (SImode
, src
),
2595 RTX_FRAME_RELATED_P(insn
) = 1;
2599 /* Big constant, need to use a temp register. We use
2600 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2601 area is always small enough to directly add to). */
2603 hi
= trunc_int_for_mode (value
& 0xffff0000, SImode
);
2604 lo
= value
& 0xffff;
2606 insn
= emit_move_insn (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2611 insn
= emit_insn (gen_iorsi3 (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2612 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2616 insn
= emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, dest
),
2617 gen_rtx_REG (SImode
, src
),
2618 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
)));
2621 RTX_FRAME_RELATED_P(insn
) = 1;
2622 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2623 gen_rtx_SET (SImode
,
2624 gen_rtx_REG (SImode
, dest
),
2625 gen_rtx_PLUS (SImode
,
2626 gen_rtx_REG (SImode
, dest
),
2631 /* Move SRC to DEST. Mark the move as being potentially dead if
2635 maybe_dead_move (rtx dest
, rtx src
, bool ATTRIBUTE_UNUSED maybe_dead_p
)
2637 rtx_insn
*insn
= emit_move_insn (dest
, src
);
2640 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
, NULL
);
2645 /* Used for interrupt functions, which can't assume that $tp and $gp
2646 contain the correct pointers. */
2649 mep_reload_pointer (int regno
, const char *symbol
)
2653 if (!df_regs_ever_live_p(regno
) && crtl
->is_leaf
)
2656 reg
= gen_rtx_REG (SImode
, regno
);
2657 sym
= gen_rtx_SYMBOL_REF (SImode
, symbol
);
2658 emit_insn (gen_movsi_topsym_s (reg
, sym
));
2659 emit_insn (gen_movsi_botsym_s (reg
, reg
, sym
));
2662 /* Assign save slots for any register not already saved. DImode
2663 registers go at the end of the reg save area; the rest go at the
2664 beginning. This is for alignment purposes. Returns true if a frame
2665 is really needed. */
2667 mep_assign_save_slots (int reg_save_size
)
2669 bool really_need_stack_frame
= false;
2673 for (i
=0; i
<FIRST_PSEUDO_REGISTER
; i
++)
2674 if (mep_call_saves_register(i
))
2676 int regsize
= mep_reg_size (i
);
2678 if ((i
!= TP_REGNO
&& i
!= GP_REGNO
&& i
!= LP_REGNO
)
2679 || mep_reg_set_in_function (i
))
2680 really_need_stack_frame
= true;
2682 if (cfun
->machine
->reg_save_slot
[i
])
2687 cfun
->machine
->reg_save_size
+= regsize
;
2688 cfun
->machine
->reg_save_slot
[i
] = cfun
->machine
->reg_save_size
;
2692 cfun
->machine
->reg_save_slot
[i
] = reg_save_size
- di_ofs
;
2696 cfun
->machine
->frame_locked
= 1;
2697 return really_need_stack_frame
;
2701 mep_expand_prologue (void)
2703 int i
, rss
, sp_offset
= 0;
2706 int really_need_stack_frame
;
2708 /* We must not allow register renaming in interrupt functions,
2709 because that invalidates the correctness of the set of call-used
2710 registers we're going to save/restore. */
2711 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2713 if (mep_disinterrupt_p ())
2714 emit_insn (gen_mep_disable_int ());
2716 cfun
->machine
->mep_frame_pointer_needed
= frame_pointer_needed
;
2718 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2719 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2720 really_need_stack_frame
= frame_size
;
2722 really_need_stack_frame
|= mep_assign_save_slots (reg_save_size
);
2724 sp_offset
= reg_save_size
;
2725 if (sp_offset
+ frame_size
< 128)
2726 sp_offset
+= frame_size
;
2728 add_constant (SP_REGNO
, SP_REGNO
, -sp_offset
, 1);
2730 for (i
=0; i
<FIRST_PSEUDO_REGISTER
; i
++)
2731 if (mep_call_saves_register(i
))
2737 rss
= cfun
->machine
->reg_save_slot
[i
];
2739 if ((i
== TP_REGNO
|| i
== GP_REGNO
|| i
== LP_REGNO
)
2740 && (!mep_reg_set_in_function (i
)
2741 && !mep_interrupt_p ()))
2744 if (mep_reg_size (i
) == 8)
2749 /* If there is a pseudo associated with this register's initial value,
2750 reload might have already spilt it to the stack slot suggested by
2751 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2753 mem
= gen_rtx_MEM (rmode
,
2754 plus_constant (Pmode
, stack_pointer_rtx
,
2756 maybe_dead_p
= rtx_equal_p (mem
, has_hard_reg_initial_val (rmode
, i
));
2758 if (GR_REGNO_P (i
) || LOADABLE_CR_REGNO_P (i
))
2759 F(maybe_dead_move (mem
, gen_rtx_REG (rmode
, i
), maybe_dead_p
));
2760 else if (rmode
== DImode
)
2763 int be
= TARGET_BIG_ENDIAN
? 4 : 0;
2765 mem
= gen_rtx_MEM (SImode
,
2766 plus_constant (Pmode
, stack_pointer_rtx
,
2767 sp_offset
- rss
+ be
));
2769 maybe_dead_move (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2770 gen_rtx_REG (SImode
, i
),
2772 maybe_dead_move (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
+1),
2773 gen_rtx_ZERO_EXTRACT (SImode
,
2774 gen_rtx_REG (DImode
, i
),
2778 insn
= maybe_dead_move (mem
,
2779 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
2781 RTX_FRAME_RELATED_P (insn
) = 1;
2783 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2784 gen_rtx_SET (VOIDmode
,
2786 gen_rtx_REG (rmode
, i
)));
2787 mem
= gen_rtx_MEM (SImode
,
2788 plus_constant (Pmode
, stack_pointer_rtx
,
2789 sp_offset
- rss
+ (4-be
)));
2790 insn
= maybe_dead_move (mem
,
2791 gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
+1),
2797 maybe_dead_move (gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2798 gen_rtx_REG (rmode
, i
),
2800 insn
= maybe_dead_move (mem
,
2801 gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
2803 RTX_FRAME_RELATED_P (insn
) = 1;
2805 add_reg_note (insn
, REG_FRAME_RELATED_EXPR
,
2806 gen_rtx_SET (VOIDmode
,
2808 gen_rtx_REG (rmode
, i
)));
2812 if (frame_pointer_needed
)
2814 /* We've already adjusted down by sp_offset. Total $sp change
2815 is reg_save_size + frame_size. We want a net change here of
2816 just reg_save_size. */
2817 add_constant (FP_REGNO
, SP_REGNO
, sp_offset
- reg_save_size
, 1);
2820 add_constant (SP_REGNO
, SP_REGNO
, sp_offset
-(reg_save_size
+frame_size
), 1);
2822 if (mep_interrupt_p ())
2824 mep_reload_pointer(GP_REGNO
, "__sdabase");
2825 mep_reload_pointer(TP_REGNO
, "__tpbase");
2830 mep_start_function (FILE *file
, HOST_WIDE_INT hwi_local
)
2832 int local
= hwi_local
;
2833 int frame_size
= local
+ crtl
->outgoing_args_size
;
2838 int slot_map
[FIRST_PSEUDO_REGISTER
], si
, sj
;
2840 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2841 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2842 sp_offset
= reg_save_size
+ frame_size
;
2844 ffill
= cfun
->machine
->frame_filler
;
2846 if (cfun
->machine
->mep_frame_pointer_needed
)
2847 reg_names
[FP_REGNO
] = "$fp";
2849 reg_names
[FP_REGNO
] = "$8";
2854 if (debug_info_level
== DINFO_LEVEL_NONE
)
2856 fprintf (file
, "\t# frame: %d", sp_offset
);
2858 fprintf (file
, " %d regs", reg_save_size
);
2860 fprintf (file
, " %d locals", local
);
2861 if (crtl
->outgoing_args_size
)
2862 fprintf (file
, " %d args", crtl
->outgoing_args_size
);
2863 fprintf (file
, "\n");
2867 fprintf (file
, "\t#\n");
2868 fprintf (file
, "\t# Initial Frame Information:\n");
2869 if (sp_offset
|| !frame_pointer_needed
)
2870 fprintf (file
, "\t# Entry ---------- 0\n");
2872 /* Sort registers by save slots, so they're printed in the order
2873 they appear in memory, not the order they're saved in. */
2874 for (si
=0; si
<FIRST_PSEUDO_REGISTER
; si
++)
2876 for (si
=0; si
<FIRST_PSEUDO_REGISTER
-1; si
++)
2877 for (sj
=si
+1; sj
<FIRST_PSEUDO_REGISTER
; sj
++)
2878 if (cfun
->machine
->reg_save_slot
[slot_map
[si
]]
2879 > cfun
->machine
->reg_save_slot
[slot_map
[sj
]])
2881 int t
= slot_map
[si
];
2882 slot_map
[si
] = slot_map
[sj
];
2887 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2890 int r
= slot_map
[i
];
2891 int rss
= cfun
->machine
->reg_save_slot
[r
];
2893 if (!mep_call_saves_register (r
))
2896 if ((r
== TP_REGNO
|| r
== GP_REGNO
|| r
== LP_REGNO
)
2897 && (!mep_reg_set_in_function (r
)
2898 && !mep_interrupt_p ()))
2901 rsize
= mep_reg_size(r
);
2902 skip
= rss
- (sp
+rsize
);
2904 fprintf (file
, "\t# %3d bytes for alignment\n", skip
);
2905 fprintf (file
, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2906 rsize
, reg_names
[r
], sp_offset
- rss
);
2910 skip
= reg_save_size
- sp
;
2912 fprintf (file
, "\t# %3d bytes for alignment\n", skip
);
2914 if (frame_pointer_needed
)
2915 fprintf (file
, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size
, sp_offset
-reg_save_size
);
2917 fprintf (file
, "\t# %3d bytes for local vars\n", local
);
2919 fprintf (file
, "\t# %3d bytes for alignment\n", ffill
);
2920 if (crtl
->outgoing_args_size
)
2921 fprintf (file
, "\t# %3d bytes for outgoing args\n",
2922 crtl
->outgoing_args_size
);
2923 fprintf (file
, "\t# SP ---> ---------- %d\n", sp_offset
);
2924 fprintf (file
, "\t#\n");
/* Nonzero while expanding an epilogue that must not restore $lp from its
   stack slot (set around mep_expand_epilogue by mep_emit_eh_epilogue; the
   eh_return path returns via a different mechanism).  */
static int mep_prevent_lp_restore = 0;
/* Nonzero while expanding a sibcall epilogue (set by
   mep_expand_sibcall_epilogue so the epilogue takes its sibcall path).  */
static int mep_sibcall_epilogue = 0;
2932 mep_expand_epilogue (void)
2934 int i
, sp_offset
= 0;
2935 int reg_save_size
= 0;
2937 int lp_temp
= LP_REGNO
, lp_slot
= -1;
2938 int really_need_stack_frame
= get_frame_size() + crtl
->outgoing_args_size
;
2939 int interrupt_handler
= mep_interrupt_p ();
2941 if (profile_arc_flag
== 2)
2942 emit_insn (gen_mep_bb_trace_ret ());
2944 reg_save_size
= mep_elimination_offset (ARG_POINTER_REGNUM
, FRAME_POINTER_REGNUM
);
2945 frame_size
= mep_elimination_offset (FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
);
2947 really_need_stack_frame
|= mep_assign_save_slots (reg_save_size
);
2949 if (frame_pointer_needed
)
2951 /* If we have a frame pointer, we won't have a reliable stack
2952 pointer (alloca, you know), so rebase SP from FP */
2953 emit_move_insn (gen_rtx_REG (SImode
, SP_REGNO
),
2954 gen_rtx_REG (SImode
, FP_REGNO
));
2955 sp_offset
= reg_save_size
;
2959 /* SP is right under our local variable space. Adjust it if
2961 sp_offset
= reg_save_size
+ frame_size
;
2962 if (sp_offset
>= 128)
2964 add_constant (SP_REGNO
, SP_REGNO
, frame_size
, 0);
2965 sp_offset
-= frame_size
;
2969 /* This is backwards so that we restore the control and coprocessor
2970 registers before the temporary registers we use to restore
2972 for (i
=FIRST_PSEUDO_REGISTER
-1; i
>=1; i
--)
2973 if (mep_call_saves_register (i
))
2976 int rss
= cfun
->machine
->reg_save_slot
[i
];
2978 if (mep_reg_size (i
) == 8)
2983 if ((i
== TP_REGNO
|| i
== GP_REGNO
|| i
== LP_REGNO
)
2984 && !(mep_reg_set_in_function (i
) || interrupt_handler
))
2986 if (mep_prevent_lp_restore
&& i
== LP_REGNO
)
2988 if (!mep_prevent_lp_restore
2989 && !interrupt_handler
2990 && (i
== 10 || i
== 11))
2993 if (GR_REGNO_P (i
) || LOADABLE_CR_REGNO_P (i
))
2994 emit_move_insn (gen_rtx_REG (rmode
, i
),
2996 plus_constant (Pmode
, stack_pointer_rtx
,
3000 if (i
== LP_REGNO
&& !mep_sibcall_epilogue
&& !interrupt_handler
)
3001 /* Defer this one so we can jump indirect rather than
3002 copying the RA to $lp and "ret". EH epilogues
3003 automatically skip this anyway. */
3004 lp_slot
= sp_offset
-rss
;
3007 emit_move_insn (gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
),
3009 plus_constant (Pmode
,
3012 emit_move_insn (gen_rtx_REG (rmode
, i
),
3013 gen_rtx_REG (rmode
, REGSAVE_CONTROL_TEMP
));
3019 /* Restore this one last so we know it will be in the temp
3020 register when we return by jumping indirectly via the temp. */
3021 emit_move_insn (gen_rtx_REG (SImode
, REGSAVE_CONTROL_TEMP
),
3022 gen_rtx_MEM (SImode
,
3023 plus_constant (Pmode
, stack_pointer_rtx
,
3025 lp_temp
= REGSAVE_CONTROL_TEMP
;
3029 add_constant (SP_REGNO
, SP_REGNO
, sp_offset
, 0);
3031 if (crtl
->calls_eh_return
&& mep_prevent_lp_restore
)
3032 emit_insn (gen_addsi3 (gen_rtx_REG (SImode
, SP_REGNO
),
3033 gen_rtx_REG (SImode
, SP_REGNO
),
3034 cfun
->machine
->eh_stack_adjust
));
3036 if (mep_sibcall_epilogue
)
3039 if (mep_disinterrupt_p ())
3040 emit_insn (gen_mep_enable_int ());
3042 if (mep_prevent_lp_restore
)
3044 emit_jump_insn (gen_eh_return_internal ());
3047 else if (interrupt_handler
)
3048 emit_jump_insn (gen_mep_reti ());
3050 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode
, lp_temp
)));
3054 mep_expand_eh_return (rtx
*operands
)
3056 if (GET_CODE (operands
[0]) != REG
|| REGNO (operands
[0]) != LP_REGNO
)
3058 rtx ra
= gen_rtx_REG (Pmode
, LP_REGNO
);
3059 emit_move_insn (ra
, operands
[0]);
3063 emit_insn (gen_eh_epilogue (operands
[0]));
/* Expand the epilogue for an eh_return: record the extra stack adjustment
   (passed in register 0) for the epilogue to apply, and suppress the
   normal restore of $lp while expanding it.  */
void
mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
{
  /* The EH stack adjustment arrives in $0; mep_expand_epilogue adds it
     to $sp when mep_prevent_lp_restore is set.  */
  cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
  mep_prevent_lp_restore = 1;
  mep_expand_epilogue ();
  mep_prevent_lp_restore = 0;
}
/* Expand the epilogue for a sibling call.  Setting mep_sibcall_epilogue
   makes mep_expand_epilogue take its sibcall early-exit path instead of
   emitting a return.  */
void
mep_expand_sibcall_epilogue (void)
{
  mep_sibcall_epilogue = 1;
  mep_expand_epilogue ();
  mep_sibcall_epilogue = 0;
}
3084 mep_function_ok_for_sibcall (tree decl
, tree exp ATTRIBUTE_UNUSED
)
3089 if (mep_section_tag (DECL_RTL (decl
)) == 'f')
3092 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3093 if (mep_interrupt_p () || mep_disinterrupt_p ())
/* Return the register used to pass the EH stack adjustment: register 10.
   NOTE(review): presumably implements EH_RETURN_STACKADJ_RTX -- confirm
   against mep.h.  */
rtx
mep_return_stackadj_rtx (void)
{
  return gen_rtx_REG (SImode, 10);
}
/* Return an rtx for the link register $lp (LP_REGNO), which holds the
   return address on MeP.  */
rtx
mep_return_handler_rtx (void)
{
  return gen_rtx_REG (SImode, LP_REGNO);
}
/* Emit the profiling call sequence at the very beginning of the function:
   push $0 and $lp, call __mep_mcount, then restore both and pop the
   8-byte save area.  */
void
mep_function_profiler (FILE *file)
{
  static const char *const mcount_seq[] = {
    "\t# mep function profiler\n",
    "\tadd\t$sp, -8\n",
    "\tsw\t$0, ($sp)\n",
    "\tldc\t$0, $lp\n",
    "\tsw\t$0, 4($sp)\n",
    "\tbsr\t__mep_mcount\n",
    "\tlw\t$0, 4($sp)\n",
    "\tstc\t$0, $lp\n",
    "\tlw\t$0, ($sp)\n",
    "\tadd\t$sp, 8\n\n",
  };
  size_t k;

  for (k = 0; k < sizeof (mcount_seq) / sizeof (mcount_seq[0]); k++)
    fputs (mcount_seq[k], file);
}
3128 mep_emit_bb_trace_ret (void)
3130 fprintf (asm_out_file
, "\t# end of block profiling\n");
3131 fprintf (asm_out_file
, "\tadd\t$sp, -8\n");
3132 fprintf (asm_out_file
, "\tsw\t$0, ($sp)\n");
3133 fprintf (asm_out_file
, "\tldc\t$0, $lp\n");
3134 fprintf (asm_out_file
, "\tsw\t$0, 4($sp)\n");
3135 fprintf (asm_out_file
, "\tbsr\t__bb_trace_ret\n");
3136 fprintf (asm_out_file
, "\tlw\t$0, 4($sp)\n");
3137 fprintf (asm_out_file
, "\tstc\t$0, $lp\n");
3138 fprintf (asm_out_file
, "\tlw\t$0, ($sp)\n");
3139 fprintf (asm_out_file
, "\tadd\t$sp, 8\n\n");
3146 /* Operand Printing. */
3149 mep_print_operand_address (FILE *stream
, rtx address
)
3151 if (GET_CODE (address
) == MEM
)
3152 address
= XEXP (address
, 0);
3154 /* cf: gcc.dg/asm-4.c. */
3155 gcc_assert (GET_CODE (address
) == REG
);
3157 mep_print_operand (stream
, address
, 0);
3163 const char *pattern
;
3166 const conversions
[] =
3169 { 0, "m+ri", "3(2)" },
3173 { 0, "mLrs", "%lo(3)(2)" },
3174 { 0, "mLr+si", "%lo(4+5)(2)" },
3175 { 0, "m+ru2s", "%tpoff(5)(2)" },
3176 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3177 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3178 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3179 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3180 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3182 { 0, "m+si", "(2+3)" },
3183 { 0, "m+li", "(2+3)" },
3186 { 0, "+si", "1+2" },
3187 { 0, "+u2si", "%tpoff(3+4)" },
3188 { 0, "+u3si", "%sdaoff(3+4)" },
3194 { 'h', "Hs", "%hi(1)" },
3196 { 'I', "u2s", "%tpoff(2)" },
3197 { 'I', "u3s", "%sdaoff(2)" },
3198 { 'I', "+u2si", "%tpoff(3+4)" },
3199 { 'I', "+u3si", "%sdaoff(3+4)" },
3201 { 'P', "mr", "(1\\+),\\0" },
3207 unique_bit_in (HOST_WIDE_INT i
)
3211 case 0x01: case 0xfe: return 0;
3212 case 0x02: case 0xfd: return 1;
3213 case 0x04: case 0xfb: return 2;
3214 case 0x08: case 0xf7: return 3;
3215 case 0x10: case 0x7f: return 4;
3216 case 0x20: case 0xbf: return 5;
3217 case 0x40: case 0xdf: return 6;
3218 case 0x80: case 0xef: return 7;
3225 bit_size_for_clip (HOST_WIDE_INT i
)
3229 for (rv
= 0; rv
< 31; rv
++)
3230 if (((HOST_WIDE_INT
) 1 << rv
) > i
)
3235 /* Print an operand to an assembler instruction. */
3238 mep_print_operand (FILE *file
, rtx x
, int code
)
3241 const char *real_name
;
3245 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3246 we're using, then skip over the "mep_" part of its name. */
3247 const struct cgen_insn
*insn
;
3249 if (mep_get_move_insn (mep_cmov
, &insn
))
3250 fputs (cgen_intrinsics
[insn
->intrinsic
] + 4, file
);
3252 mep_intrinsic_unavailable (mep_cmov
);
3257 switch (GET_CODE (x
))
3260 fputs ("clr", file
);
3263 fputs ("set", file
);
3266 fputs ("not", file
);
3269 output_operand_lossage ("invalid %%L code");
3274 /* Print the second operand of a CR <- CR move. If we're using
3275 a two-operand instruction (i.e., a real cmov), then just print
3276 the operand normally. If we're using a "reg, reg, immediate"
3277 instruction such as caddi3, print the operand followed by a
3278 zero field. If we're using a three-register instruction,
3279 print the operand twice. */
3280 const struct cgen_insn
*insn
;
3282 mep_print_operand (file
, x
, 0);
3283 if (mep_get_move_insn (mep_cmov
, &insn
)
3284 && insn_data
[insn
->icode
].n_operands
== 3)
3287 if (insn_data
[insn
->icode
].operand
[2].predicate (x
, VOIDmode
))
3288 mep_print_operand (file
, x
, 0);
3290 mep_print_operand (file
, const0_rtx
, 0);
3296 for (i
= 0; conversions
[i
].pattern
; i
++)
3297 if (conversions
[i
].code
== code
3298 && strcmp(conversions
[i
].pattern
, pattern
) == 0)
3300 for (j
= 0; conversions
[i
].format
[j
]; j
++)
3301 if (conversions
[i
].format
[j
] == '\\')
3303 fputc (conversions
[i
].format
[j
+1], file
);
3306 else if (ISDIGIT(conversions
[i
].format
[j
]))
3308 rtx r
= patternr
[conversions
[i
].format
[j
] - '0'];
3309 switch (GET_CODE (r
))
3312 fprintf (file
, "%s", reg_names
[REGNO (r
)]);
3318 fprintf (file
, "%d", unique_bit_in (INTVAL (r
)));
3321 fprintf (file
, "%d", bit_size_for_clip (INTVAL (r
)));
3324 fprintf (file
, "0x%x", ((int) INTVAL (r
) >> 16) & 0xffff);
3327 fprintf (file
, "%d", bit_size_for_clip (INTVAL (r
)) - 1);
3330 fprintf (file
, "0x%x", (int) INTVAL (r
) & 0xffff);
3333 if (INTVAL (r
) & ~(HOST_WIDE_INT
)0xff
3334 && !(INTVAL (r
) & 0xff))
3335 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
, INTVAL(r
));
3337 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3340 if (INTVAL (r
) & ~(HOST_WIDE_INT
)0xff
3341 && conversions
[i
].format
[j
+1] == 0)
3343 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (r
));
3344 fprintf (file
, " # 0x%x", (int) INTVAL(r
) & 0xffff);
3347 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3350 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL(r
));
3355 fprintf(file
, "[const_double 0x%lx]",
3356 (unsigned long) CONST_DOUBLE_HIGH(r
));
3359 real_name
= targetm
.strip_name_encoding (XSTR (r
, 0));
3360 assemble_name (file
, real_name
);
3363 output_asm_label (r
);
3366 fprintf (stderr
, "don't know how to print this operand:");
3373 if (conversions
[i
].format
[j
] == '+'
3374 && (!code
|| code
== 'I')
3375 && ISDIGIT (conversions
[i
].format
[j
+1])
3376 && GET_CODE (patternr
[conversions
[i
].format
[j
+1] - '0']) == CONST_INT
3377 && INTVAL (patternr
[conversions
[i
].format
[j
+1] - '0']) < 0)
3379 fputc(conversions
[i
].format
[j
], file
);
3383 if (!conversions
[i
].pattern
)
3385 error ("unconvertible operand %c %qs", code
?code
:'-', pattern
);
/* Final-pass prescan hook: emit a '+' before a jump insn that was
   scheduled into a non-core slot, re-marking it as bundled.  */
void
mep_final_prescan_insn (rtx_insn *insn, rtx *operands ATTRIBUTE_UNUSED,
			int noperands ATTRIBUTE_UNUSED)
{
  /* Despite the fact that MeP is perfectly capable of branching and
     doing something else in the same bundle, gcc does jump
     optimization *after* scheduling, so we cannot trust the bundling
     flags on jump instructions.  */
  if (GET_MODE (insn) == BImode
      && get_attr_slots (insn) != SLOTS_CORE)
    fputc ('+', asm_out_file);
}
3405 /* Function args in registers. */
3408 mep_setup_incoming_varargs (cumulative_args_t cum
,
3409 machine_mode mode ATTRIBUTE_UNUSED
,
3410 tree type ATTRIBUTE_UNUSED
, int *pretend_size
,
3411 int second_time ATTRIBUTE_UNUSED
)
3413 int nsave
= 4 - (get_cumulative_args (cum
)->nregs
+ 1);
3416 cfun
->machine
->arg_regs_to_save
= nsave
;
3417 *pretend_size
= nsave
* 4;
3421 bytesize (const_tree type
, machine_mode mode
)
3423 if (mode
== BLKmode
)
3424 return int_size_in_bytes (type
);
3425 return GET_MODE_SIZE (mode
);
3429 mep_expand_builtin_saveregs (void)
3434 ns
= cfun
->machine
->arg_regs_to_save
;
3437 bufsize
= 8 * ((ns
+ 1) / 2) + 8 * ns
;
3438 regbuf
= assign_stack_local (SImode
, bufsize
, 64);
3443 regbuf
= assign_stack_local (SImode
, bufsize
, 32);
3446 move_block_from_reg (5-ns
, regbuf
, ns
);
3450 rtx tmp
= gen_rtx_MEM (DImode
, XEXP (regbuf
, 0));
3451 int ofs
= 8 * ((ns
+1)/2);
3453 for (i
=0; i
<ns
; i
++)
3455 int rn
= (4-ns
) + i
+ 49;
3458 ptr
= offset_address (tmp
, GEN_INT (ofs
), 2);
3459 emit_move_insn (ptr
, gen_rtx_REG (DImode
, rn
));
3463 return XEXP (regbuf
, 0);
3467 mep_build_builtin_va_list (void)
3469 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3473 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3475 f_next_gp
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
3476 get_identifier ("__va_next_gp"), ptr_type_node
);
3477 f_next_gp_limit
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
3478 get_identifier ("__va_next_gp_limit"),
3480 f_next_cop
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
, get_identifier ("__va_next_cop"),
3482 f_next_stack
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
, get_identifier ("__va_next_stack"),
3485 DECL_FIELD_CONTEXT (f_next_gp
) = record
;
3486 DECL_FIELD_CONTEXT (f_next_gp_limit
) = record
;
3487 DECL_FIELD_CONTEXT (f_next_cop
) = record
;
3488 DECL_FIELD_CONTEXT (f_next_stack
) = record
;
3490 TYPE_FIELDS (record
) = f_next_gp
;
3491 DECL_CHAIN (f_next_gp
) = f_next_gp_limit
;
3492 DECL_CHAIN (f_next_gp_limit
) = f_next_cop
;
3493 DECL_CHAIN (f_next_cop
) = f_next_stack
;
3495 layout_type (record
);
3501 mep_expand_va_start (tree valist
, rtx nextarg
)
3503 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3504 tree next_gp
, next_gp_limit
, next_cop
, next_stack
;
3508 ns
= cfun
->machine
->arg_regs_to_save
;
3510 f_next_gp
= TYPE_FIELDS (va_list_type_node
);
3511 f_next_gp_limit
= DECL_CHAIN (f_next_gp
);
3512 f_next_cop
= DECL_CHAIN (f_next_gp_limit
);
3513 f_next_stack
= DECL_CHAIN (f_next_cop
);
3515 next_gp
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp
), valist
, f_next_gp
,
3517 next_gp_limit
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp_limit
),
3518 valist
, f_next_gp_limit
, NULL_TREE
);
3519 next_cop
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_cop
), valist
, f_next_cop
,
3521 next_stack
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_stack
),
3522 valist
, f_next_stack
, NULL_TREE
);
3524 /* va_list.next_gp = expand_builtin_saveregs (); */
3525 u
= make_tree (sizetype
, expand_builtin_saveregs ());
3526 u
= fold_convert (ptr_type_node
, u
);
3527 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_gp
, u
);
3528 TREE_SIDE_EFFECTS (t
) = 1;
3529 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3531 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3532 u
= fold_build_pointer_plus_hwi (u
, 4 * ns
);
3533 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_gp_limit
, u
);
3534 TREE_SIDE_EFFECTS (t
) = 1;
3535 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3537 u
= fold_build_pointer_plus_hwi (u
, 8 * ((ns
+1)/2));
3538 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3539 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_cop
, u
);
3540 TREE_SIDE_EFFECTS (t
) = 1;
3541 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3543 /* va_list.next_stack = nextarg; */
3544 u
= make_tree (ptr_type_node
, nextarg
);
3545 t
= build2 (MODIFY_EXPR
, ptr_type_node
, next_stack
, u
);
3546 TREE_SIDE_EFFECTS (t
) = 1;
3547 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3551 mep_gimplify_va_arg_expr (tree valist
, tree type
,
3553 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
3555 HOST_WIDE_INT size
, rsize
;
3556 bool by_reference
, ivc2_vec
;
3557 tree f_next_gp
, f_next_gp_limit
, f_next_cop
, f_next_stack
;
3558 tree next_gp
, next_gp_limit
, next_cop
, next_stack
;
3559 tree label_sover
, label_selse
;
3562 ivc2_vec
= TARGET_IVC2
&& VECTOR_TYPE_P (type
);
3564 size
= int_size_in_bytes (type
);
3565 by_reference
= (size
> (ivc2_vec
? 8 : 4)) || (size
<= 0);
3569 type
= build_pointer_type (type
);
3572 rsize
= (size
+ UNITS_PER_WORD
- 1) & -UNITS_PER_WORD
;
3574 f_next_gp
= TYPE_FIELDS (va_list_type_node
);
3575 f_next_gp_limit
= DECL_CHAIN (f_next_gp
);
3576 f_next_cop
= DECL_CHAIN (f_next_gp_limit
);
3577 f_next_stack
= DECL_CHAIN (f_next_cop
);
3579 next_gp
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp
), valist
, f_next_gp
,
3581 next_gp_limit
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_gp_limit
),
3582 valist
, f_next_gp_limit
, NULL_TREE
);
3583 next_cop
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_cop
), valist
, f_next_cop
,
3585 next_stack
= build3 (COMPONENT_REF
, TREE_TYPE (f_next_stack
),
3586 valist
, f_next_stack
, NULL_TREE
);
3588 /* if f_next_gp < f_next_gp_limit
3589 IF (VECTOR_P && IVC2)
3597 val = *f_next_stack;
3598 f_next_stack += rsize;
3602 label_sover
= create_artificial_label (UNKNOWN_LOCATION
);
3603 label_selse
= create_artificial_label (UNKNOWN_LOCATION
);
3604 res_addr
= create_tmp_var (ptr_type_node
);
3606 tmp
= build2 (GE_EXPR
, boolean_type_node
, next_gp
,
3607 unshare_expr (next_gp_limit
));
3608 tmp
= build3 (COND_EXPR
, void_type_node
, tmp
,
3609 build1 (GOTO_EXPR
, void_type_node
,
3610 unshare_expr (label_selse
)),
3612 gimplify_and_add (tmp
, pre_p
);
3616 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, next_cop
);
3617 gimplify_and_add (tmp
, pre_p
);
3621 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, next_gp
);
3622 gimplify_and_add (tmp
, pre_p
);
3625 tmp
= fold_build_pointer_plus_hwi (unshare_expr (next_gp
), 4);
3626 gimplify_assign (unshare_expr (next_gp
), tmp
, pre_p
);
3628 tmp
= fold_build_pointer_plus_hwi (unshare_expr (next_cop
), 8);
3629 gimplify_assign (unshare_expr (next_cop
), tmp
, pre_p
);
3631 tmp
= build1 (GOTO_EXPR
, void_type_node
, unshare_expr (label_sover
));
3632 gimplify_and_add (tmp
, pre_p
);
3636 tmp
= build1 (LABEL_EXPR
, void_type_node
, unshare_expr (label_selse
));
3637 gimplify_and_add (tmp
, pre_p
);
3639 tmp
= build2 (MODIFY_EXPR
, void_type_node
, res_addr
, unshare_expr (next_stack
));
3640 gimplify_and_add (tmp
, pre_p
);
3642 tmp
= fold_build_pointer_plus_hwi (unshare_expr (next_stack
), rsize
);
3643 gimplify_assign (unshare_expr (next_stack
), tmp
, pre_p
);
3647 tmp
= build1 (LABEL_EXPR
, void_type_node
, unshare_expr (label_sover
));
3648 gimplify_and_add (tmp
, pre_p
);
3650 res_addr
= fold_convert (build_pointer_type (type
), res_addr
);
3653 res_addr
= build_va_arg_indirect_ref (res_addr
);
3655 return build_va_arg_indirect_ref (res_addr
);
3659 mep_init_cumulative_args (CUMULATIVE_ARGS
*pcum
, tree fntype
,
3660 rtx libname ATTRIBUTE_UNUSED
,
3661 tree fndecl ATTRIBUTE_UNUSED
)
3665 if (fntype
&& lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype
)))
3671 /* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3672 larger than 4 bytes are passed indirectly. Return value in 0,
3673 unless bigger than 4 bytes, then the caller passes a pointer as the
3674 first arg. For varargs, we copy $1..$4 to the stack. */
3677 mep_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
3678 const_tree type ATTRIBUTE_UNUSED
,
3679 bool named ATTRIBUTE_UNUSED
)
3681 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
3683 /* VOIDmode is a signal for the backend to pass data to the call
3684 expander via the second operand to the call pattern. We use
3685 this to determine whether to use "jsr" or "jsrv". */
3686 if (mode
== VOIDmode
)
3687 return GEN_INT (cum
->vliw
);
3689 /* If we haven't run out of argument registers, return the next. */
3692 if (type
&& TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3693 return gen_rtx_REG (mode
, cum
->nregs
+ 49);
3695 return gen_rtx_REG (mode
, cum
->nregs
+ 1);
3698 /* Otherwise the argument goes on the stack. */
3703 mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
3706 bool named ATTRIBUTE_UNUSED
)
3708 int size
= bytesize (type
, mode
);
3710 /* This is non-obvious, but yes, large values passed after we've run
3711 out of registers are *still* passed by reference - we put the
3712 address of the parameter on the stack, as well as putting the
3713 parameter itself elsewhere on the stack. */
3715 if (size
<= 0 || size
> 8)
3719 if (TARGET_IVC2
&& get_cumulative_args (cum
)->nregs
< 4
3720 && type
!= NULL_TREE
&& VECTOR_TYPE_P (type
))
/* Advance the argument cursor: every argument, regardless of mode or
   type, consumes exactly one argument register slot.  */
static void
mep_function_arg_advance (cumulative_args_t pcum,
			  machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type ATTRIBUTE_UNUSED,
			  bool named ATTRIBUTE_UNUSED)
{
  get_cumulative_args (pcum)->nregs += 1;
}
3735 mep_return_in_memory (const_tree type
, const_tree decl ATTRIBUTE_UNUSED
)
3737 int size
= bytesize (type
, BLKmode
);
3738 if (TARGET_IVC2
&& VECTOR_TYPE_P (type
))
3739 return size
> 0 && size
<= 8 ? 0 : 1;
3740 return size
> 0 && size
<= 4 ? 0 : 1;
3744 mep_narrow_volatile_bitfield (void)
/* Implement FUNCTION_VALUE.  All values are returned in $0
   (RETURN_VALUE_REGNUM), except IVC2 vector types, which come back in
   register 48.  */
rtx
mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
{
  if (TARGET_IVC2 && VECTOR_TYPE_P (type))
    return gen_rtx_REG (TYPE_MODE (type), 48);
  return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
}
/* Implement LIBCALL_VALUE, using the same rules as mep_function_value.
   Libcalls are identified only by mode, so the result always lands in
   RETURN_VALUE_REGNUM.  */
rtx
mep_libcall_value (machine_mode mode)
{
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}
/* Handle pipeline hazards.  */

/* The opcodes whose back-to-back emission constitutes a hazard.  */
typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
/* Printable names indexed by op_num, used in the hazard comment
   emitted by mep_asm_output_opcode.  */
static const char *opnames[] = { "", "stc", "fsft", "ret" };
/* op_num of the most recently emitted opcode (op_none initially).  */
static int prev_opcode = 0;
3775 /* This isn't as optimal as it could be, because we don't know what
3776 control register the STC opcode is storing in. We only need to add
3777 the nop if it's the relevant register, but we add it for irrelevant
3781 mep_asm_output_opcode (FILE *file
, const char *ptr
)
3783 int this_opcode
= op_none
;
3784 const char *hazard
= 0;
3789 if (strncmp (ptr
, "fsft", 4) == 0 && !ISGRAPH (ptr
[4]))
3790 this_opcode
= op_fsft
;
3793 if (strncmp (ptr
, "ret", 3) == 0 && !ISGRAPH (ptr
[3]))
3794 this_opcode
= op_ret
;
3797 if (strncmp (ptr
, "stc", 3) == 0 && !ISGRAPH (ptr
[3]))
3798 this_opcode
= op_stc
;
3802 if (prev_opcode
== op_stc
&& this_opcode
== op_fsft
)
3804 if (prev_opcode
== op_stc
&& this_opcode
== op_ret
)
3808 fprintf(file
, "%s\t# %s-%s hazard\n\t",
3809 hazard
, opnames
[prev_opcode
], opnames
[this_opcode
]);
3811 prev_opcode
= this_opcode
;
3814 /* Handle attributes. */
3817 mep_validate_type_based_tiny (tree
*node
, tree name
, tree args
,
3818 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3820 if (TREE_CODE (*node
) != POINTER_TYPE
)
3822 warning (0, "%qE attribute only applies to variables", name
);
3830 mep_validate_decl_based_tiny (tree
*node
, tree name
, tree args
,
3831 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3833 if (TREE_CODE (*node
) != VAR_DECL
3834 && TREE_CODE (*node
) != TYPE_DECL
)
3836 warning (0, "%qE attribute only applies to variables", name
);
3839 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
3841 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
3843 warning (0, "address region attributes not allowed with auto storage class");
3846 /* Ignore storage attribute of pointed to variable: char __far * x; */
3847 if (TREE_TYPE (*node
) && TREE_CODE (TREE_TYPE (*node
)) == POINTER_TYPE
)
3849 warning (0, "address region attributes on pointed-to types ignored");
3858 mep_multiple_address_regions (tree list
, bool check_section_attr
)
3861 int count_sections
= 0;
3862 int section_attr_count
= 0;
3864 for (a
= list
; a
; a
= TREE_CHAIN (a
))
3866 if (is_attribute_p ("based", TREE_PURPOSE (a
))
3867 || is_attribute_p ("tiny", TREE_PURPOSE (a
))
3868 || is_attribute_p ("near", TREE_PURPOSE (a
))
3869 || is_attribute_p ("far", TREE_PURPOSE (a
))
3870 || is_attribute_p ("io", TREE_PURPOSE (a
)))
3872 if (check_section_attr
)
3873 section_attr_count
+= is_attribute_p ("section", TREE_PURPOSE (a
));
3876 if (check_section_attr
)
3877 return section_attr_count
;
3879 return count_sections
;
3882 #define MEP_ATTRIBUTES(decl) \
3883 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3884 : DECL_ATTRIBUTES (decl) \
3885 ? (DECL_ATTRIBUTES (decl)) \
3886 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3889 mep_validate_type_near_far (tree
*node
, tree name
, tree args
,
3890 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3892 if (TREE_CODE (*node
) != METHOD_TYPE
&& TREE_CODE (*node
) != POINTER_TYPE
)
3894 warning (0, "%qE attribute only applies to variables and functions",
3898 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node
), false) > 0)
3900 warning (0, "duplicate address region attribute %qE", name
);
3901 TYPE_ATTRIBUTES (*node
) = NULL_TREE
;
3907 mep_validate_decl_near_far (tree
*node
, tree name
, tree args
,
3908 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3910 if (TREE_CODE (*node
) != VAR_DECL
3911 && TREE_CODE (*node
) != FUNCTION_DECL
3912 && TREE_CODE (*node
) != TYPE_DECL
)
3914 warning (0, "%qE attribute only applies to variables and functions",
3918 else if (args
== NULL_TREE
&& TREE_CODE (*node
) == VAR_DECL
)
3920 if (! (TREE_PUBLIC (*node
) || TREE_STATIC (*node
)))
3922 warning (0, "address region attributes not allowed with auto storage class");
3925 /* Ignore storage attribute of pointed to variable: char __far * x; */
3926 if (TREE_TYPE (*node
) && TREE_CODE (TREE_TYPE (*node
)) == POINTER_TYPE
)
3928 warning (0, "address region attributes on pointed-to types ignored");
3932 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node
), false) > 0)
3934 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3935 name
, DECL_NAME (*node
), DECL_SOURCE_LINE (*node
));
3936 DECL_ATTRIBUTES (*node
) = NULL_TREE
;
3942 mep_validate_type_disinterrupt (tree
*node
, tree name
,
3943 tree args ATTRIBUTE_UNUSED
,
3944 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3946 if (TREE_CODE (*node
) != METHOD_TYPE
)
3948 warning (0, "%qE attribute only applies to functions", name
);
3955 mep_validate_decl_disinterrupt (tree
*node
, tree name
,
3956 tree args ATTRIBUTE_UNUSED
,
3957 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3959 if (TREE_CODE (*node
) != FUNCTION_DECL
)
3961 warning (0, "%qE attribute only applies to functions", name
);
3968 mep_validate_type_warning (tree
*node ATTRIBUTE_UNUSED
, tree name
,
3969 tree args ATTRIBUTE_UNUSED
,
3970 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3972 warning (0, "%qE attribute only applies to functions", name
);
3979 mep_validate_interrupt (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
3980 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
3984 if (TREE_CODE (*node
) != FUNCTION_DECL
)
3986 warning (0, "%qE attribute only applies to functions", name
);
3991 if (DECL_DECLARED_INLINE_P (*node
))
3992 error ("cannot inline interrupt function %qE", DECL_NAME (*node
));
3993 DECL_UNINLINABLE (*node
) = 1;
3995 function_type
= TREE_TYPE (*node
);
3997 if (TREE_TYPE (function_type
) != void_type_node
)
3998 error ("interrupt function must have return type of void");
4000 if (prototype_p (function_type
)
4001 && (TREE_VALUE (TYPE_ARG_TYPES (function_type
)) != void_type_node
4002 || TREE_CHAIN (TYPE_ARG_TYPES (function_type
)) != NULL_TREE
))
4003 error ("interrupt function must have no arguments");
4009 mep_validate_io_cb (tree
*node
, tree name
, tree args
,
4010 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
4012 if (TREE_CODE (*node
) != VAR_DECL
)
4014 warning (0, "%qE attribute only applies to variables", name
);
4018 if (args
!= NULL_TREE
)
4020 if (TREE_CODE (TREE_VALUE (args
)) == NON_LVALUE_EXPR
)
4021 TREE_VALUE (args
) = TREE_OPERAND (TREE_VALUE (args
), 0);
4022 if (TREE_CODE (TREE_VALUE (args
)) != INTEGER_CST
)
4024 warning (0, "%qE attribute allows only an integer constant argument",
4030 if (*no_add
== false && !TARGET_IO_NO_VOLATILE
)
4031 TREE_THIS_VOLATILE (*node
) = 1;
4037 mep_validate_vliw (tree
*node
, tree name
, tree args ATTRIBUTE_UNUSED
,
4038 int flags ATTRIBUTE_UNUSED
, bool *no_add
)
4040 if (TREE_CODE (*node
) != FUNCTION_TYPE
4041 && TREE_CODE (*node
) != FUNCTION_DECL
4042 && TREE_CODE (*node
) != METHOD_TYPE
4043 && TREE_CODE (*node
) != FIELD_DECL
4044 && TREE_CODE (*node
) != TYPE_DECL
)
4046 static int gave_pointer_note
= 0;
4047 static int gave_array_note
= 0;
4048 static const char * given_type
= NULL
;
4050 given_type
= get_tree_code_name (TREE_CODE (*node
));
4051 if (TREE_CODE (*node
) == POINTER_TYPE
)
4052 given_type
= "pointers";
4053 if (TREE_CODE (*node
) == ARRAY_TYPE
)
4054 given_type
= "arrays";
4057 warning (0, "%qE attribute only applies to functions, not %s",
4060 warning (0, "%qE attribute only applies to functions",
4064 if (TREE_CODE (*node
) == POINTER_TYPE
4065 && !gave_pointer_note
)
4067 inform (input_location
,
4068 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
4069 " typedef int (__vliw *vfuncptr) ();");
4070 gave_pointer_note
= 1;
4073 if (TREE_CODE (*node
) == ARRAY_TYPE
4074 && !gave_array_note
)
4076 inform (input_location
,
4077 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
4078 " typedef int (__vliw *vfuncptr[]) ();");
4079 gave_array_note
= 1;
4083 error ("VLIW functions are not allowed without a VLIW configuration");
4087 static const struct attribute_spec mep_attribute_table
[11] =
4089 /* name min max decl type func handler
4090 affects_type_identity */
4091 { "based", 0, 0, false, false, false, mep_validate_decl_based_tiny
,
4092 mep_validate_type_based_tiny
, false },
4093 { "tiny", 0, 0, false, false, false, mep_validate_decl_based_tiny
,
4094 mep_validate_type_based_tiny
, false },
4095 { "near", 0, 0, false, false, false, mep_validate_decl_near_far
,
4096 mep_validate_type_near_far
, false },
4097 { "far", 0, 0, false, false, false, mep_validate_decl_near_far
,
4098 mep_validate_type_near_far
, false },
4099 { "disinterrupt", 0, 0, false, false, false, mep_validate_decl_disinterrupt
,
4100 mep_validate_type_disinterrupt
, false },
4101 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt
,
4102 mep_validate_type_warning
, false },
4103 { "io", 0, 1, false, false, false, mep_validate_io_cb
,
4104 mep_validate_type_warning
, false },
4105 { "cb", 0, 1, false, false, false, mep_validate_io_cb
,
4106 mep_validate_type_warning
, false },
4107 { "vliw", 0, 0, false, true, false, NULL
, mep_validate_vliw
, false },
4108 { NULL
, 0, 0, false, false, false, NULL
, NULL
, false }
4112 mep_function_attribute_inlinable_p (const_tree callee
)
4114 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (callee
));
4115 if (!attrs
) attrs
= DECL_ATTRIBUTES (callee
);
4116 return (lookup_attribute ("disinterrupt", attrs
) == 0
4117 && lookup_attribute ("interrupt", attrs
) == 0);
4121 mep_can_inline_p (tree caller
, tree callee
)
4123 if (TREE_CODE (callee
) == ADDR_EXPR
)
4124 callee
= TREE_OPERAND (callee
, 0);
4126 if (!mep_vliw_function_p (caller
)
4127 && mep_vliw_function_p (callee
))
4135 #define FUNC_DISINTERRUPT 2
4138 struct GTY(()) pragma_entry
{
4143 struct pragma_traits
: default_hashmap_traits
4145 static hashval_t
hash (const char *s
) { return htab_hash_string (s
); }
4147 equal_keys (const char *a
, const char *b
)
4149 return strcmp (a
, b
) == 0;
4153 /* Hash table of farcall-tagged sections. */
4154 static GTY(()) hash_map
<const char *, pragma_entry
, pragma_traits
> *
4158 mep_note_pragma_flag (const char *funcname
, int flag
)
4162 = hash_map
<const char *, pragma_entry
, pragma_traits
>::create_ggc (31);
4165 const char *name
= ggc_strdup (funcname
);
4166 pragma_entry
*slot
= &pragma_htab
->get_or_insert (name
, &existed
);
4176 mep_lookup_pragma_flag (const char *funcname
, int flag
)
4181 if (funcname
[0] == '@' && funcname
[2] == '.')
4184 pragma_entry
*slot
= pragma_htab
->get (funcname
);
4185 if (slot
&& (slot
->flag
& flag
))
4194 mep_lookup_pragma_call (const char *funcname
)
4196 return mep_lookup_pragma_flag (funcname
, FUNC_CALL
);
4200 mep_note_pragma_call (const char *funcname
)
4202 mep_note_pragma_flag (funcname
, FUNC_CALL
);
4206 mep_lookup_pragma_disinterrupt (const char *funcname
)
4208 return mep_lookup_pragma_flag (funcname
, FUNC_DISINTERRUPT
);
4212 mep_note_pragma_disinterrupt (const char *funcname
)
4214 mep_note_pragma_flag (funcname
, FUNC_DISINTERRUPT
);
4218 note_unused_pragma_disinterrupt (const char *const &s
, const pragma_entry
&e
,
4221 if ((e
.flag
& FUNC_DISINTERRUPT
)
4222 && !(e
.used
& FUNC_DISINTERRUPT
))
4223 warning (0, "\"#pragma disinterrupt %s\" not used", s
);
4228 mep_file_cleanups (void)
4231 pragma_htab
->traverse
<void *, note_unused_pragma_disinterrupt
> (NULL
);
4234 /* These three functions provide a bridge between the pramgas that
4235 affect register classes, and the functions that maintain them. We
4236 can't call those functions directly as pragma handling is part of
4237 the front end and doesn't have direct access to them. */
4240 mep_save_register_info (void)
4242 save_register_info ();
4246 mep_reinit_regs (void)
4252 mep_init_regs (void)
4260 mep_attrlist_to_encoding (tree list
, tree decl
)
4262 if (mep_multiple_address_regions (list
, false) > 1)
4264 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4265 TREE_PURPOSE (TREE_CHAIN (list
)),
4267 DECL_SOURCE_LINE (decl
));
4268 TREE_CHAIN (list
) = NULL_TREE
;
4273 if (is_attribute_p ("based", TREE_PURPOSE (list
)))
4275 if (is_attribute_p ("tiny", TREE_PURPOSE (list
)))
4277 if (is_attribute_p ("near", TREE_PURPOSE (list
)))
4279 if (is_attribute_p ("far", TREE_PURPOSE (list
)))
4281 if (is_attribute_p ("io", TREE_PURPOSE (list
)))
4283 if (TREE_VALUE (list
)
4284 && TREE_VALUE (TREE_VALUE (list
))
4285 && TREE_CODE (TREE_VALUE (TREE_VALUE (list
))) == INTEGER_CST
)
4287 int location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list
)));
4289 && location
<= 0x1000000)
4294 if (is_attribute_p ("cb", TREE_PURPOSE (list
)))
4296 list
= TREE_CHAIN (list
);
4299 && TREE_CODE (decl
) == FUNCTION_DECL
4300 && DECL_SECTION_NAME (decl
) == 0)
4306 mep_comp_type_attributes (const_tree t1
, const_tree t2
)
4310 vliw1
= (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1
)) != 0);
4311 vliw2
= (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2
)) != 0);
4320 mep_insert_attributes (tree decl
, tree
*attributes
)
4323 const char *secname
= 0;
4324 tree attrib
, attrlist
;
4327 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4329 const char *funcname
= IDENTIFIER_POINTER (DECL_NAME (decl
));
4331 if (mep_lookup_pragma_disinterrupt (funcname
))
4333 attrib
= build_tree_list (get_identifier ("disinterrupt"), NULL_TREE
);
4334 *attributes
= chainon (*attributes
, attrib
);
4338 if (TREE_CODE (decl
) != VAR_DECL
4339 || ! (TREE_PUBLIC (decl
) || TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
4342 if (TREE_READONLY (decl
) && TARGET_DC
)
4343 /* -mdc means that const variables default to the near section,
4344 regardless of the size cutoff. */
4347 /* User specified an attribute, so override the default.
4348 Ignore storage attribute of pointed to variable. char __far * x; */
4349 if (! (TREE_TYPE (decl
) && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
))
4351 if (TYPE_P (decl
) && TYPE_ATTRIBUTES (decl
) && *attributes
)
4352 TYPE_ATTRIBUTES (decl
) = NULL_TREE
;
4353 else if (DECL_ATTRIBUTES (decl
) && *attributes
)
4354 DECL_ATTRIBUTES (decl
) = NULL_TREE
;
4357 attrlist
= *attributes
? *attributes
: DECL_ATTRIBUTES (decl
);
4358 encoding
= mep_attrlist_to_encoding (attrlist
, decl
);
4359 if (!encoding
&& TYPE_P (TREE_TYPE (decl
)))
4361 attrlist
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
4362 encoding
= mep_attrlist_to_encoding (attrlist
, decl
);
4366 /* This means that the declaration has a specific section
4367 attribute, so we should not apply the default rules. */
4369 if (encoding
== 'i' || encoding
== 'I')
4371 tree attr
= lookup_attribute ("io", attrlist
);
4373 && TREE_VALUE (attr
)
4374 && TREE_VALUE (TREE_VALUE(attr
)))
4376 int location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr
)));
4377 static tree previous_value
= 0;
4378 static int previous_location
= 0;
4379 static tree previous_name
= 0;
4381 /* We take advantage of the fact that gcc will reuse the
4382 same tree pointer when applying an attribute to a
4383 list of decls, but produce a new tree for attributes
4384 on separate source lines, even when they're textually
4385 identical. This is the behavior we want. */
4386 if (TREE_VALUE (attr
) == previous_value
4387 && location
== previous_location
)
4389 warning(0, "__io address 0x%x is the same for %qE and %qE",
4390 location
, previous_name
, DECL_NAME (decl
));
4392 previous_name
= DECL_NAME (decl
);
4393 previous_location
= location
;
4394 previous_value
= TREE_VALUE (attr
);
4401 /* Declarations of arrays can change size. Don't trust them. */
4402 if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
4405 size
= int_size_in_bytes (TREE_TYPE (decl
));
4407 if (TARGET_RAND_TPGP
&& size
<= 4 && size
> 0)
4409 if (TREE_PUBLIC (decl
)
4410 || DECL_EXTERNAL (decl
)
4411 || TREE_STATIC (decl
))
4413 const char *name
= IDENTIFIER_POINTER (DECL_NAME (decl
));
4437 if (size
<= mep_based_cutoff
&& size
> 0)
4439 else if (size
<= mep_tiny_cutoff
&& size
> 0)
4445 if (mep_const_section
&& TREE_READONLY (decl
))
4447 if (strcmp (mep_const_section
, "tiny") == 0)
4449 else if (strcmp (mep_const_section
, "near") == 0)
4451 else if (strcmp (mep_const_section
, "far") == 0)
4458 if (!mep_multiple_address_regions (*attributes
, true)
4459 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl
), false))
4461 attrib
= build_tree_list (get_identifier (secname
), NULL_TREE
);
4463 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4464 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4465 and mep_validate_based_tiny. */
4466 DECL_ATTRIBUTES (decl
) = chainon (DECL_ATTRIBUTES (decl
), attrib
);
4471 mep_encode_section_info (tree decl
, rtx rtl
, int first
)
4474 const char *oldname
;
4475 const char *secname
;
4481 tree mep_attributes
;
4486 if (TREE_CODE (decl
) != VAR_DECL
4487 && TREE_CODE (decl
) != FUNCTION_DECL
)
4490 rtlname
= XEXP (rtl
, 0);
4491 if (GET_CODE (rtlname
) == SYMBOL_REF
)
4492 oldname
= XSTR (rtlname
, 0);
4493 else if (GET_CODE (rtlname
) == MEM
4494 && GET_CODE (XEXP (rtlname
, 0)) == SYMBOL_REF
)
4495 oldname
= XSTR (XEXP (rtlname
, 0), 0);
4499 type
= TREE_TYPE (decl
);
4500 if (type
== error_mark_node
)
4502 mep_attributes
= MEP_ATTRIBUTES (decl
);
4504 encoding
= mep_attrlist_to_encoding (mep_attributes
, decl
);
4508 newname
= (char *) alloca (strlen (oldname
) + 4);
4509 sprintf (newname
, "@%c.%s", encoding
, oldname
);
4510 idp
= get_identifier (newname
);
4512 gen_rtx_SYMBOL_REF (Pmode
, IDENTIFIER_POINTER (idp
));
4513 SYMBOL_REF_WEAK (XEXP (rtl
, 0)) = DECL_WEAK (decl
);
4514 SET_SYMBOL_REF_DECL (XEXP (rtl
, 0), decl
);
4527 maxsize
= 0x1000000;
4535 if (maxsize
&& int_size_in_bytes (TREE_TYPE (decl
)) > maxsize
)
4537 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4539 (long) int_size_in_bytes (TREE_TYPE (decl
)),
4547 mep_strip_name_encoding (const char *sym
)
4553 else if (*sym
== '@' && sym
[2] == '.')
4561 mep_select_section (tree decl
, int reloc ATTRIBUTE_UNUSED
,
4562 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
4567 switch (TREE_CODE (decl
))
4570 if (!TREE_READONLY (decl
)
4571 || TREE_SIDE_EFFECTS (decl
)
4572 || !DECL_INITIAL (decl
)
4573 || (DECL_INITIAL (decl
) != error_mark_node
4574 && !TREE_CONSTANT (DECL_INITIAL (decl
))))
4578 if (! TREE_CONSTANT (decl
))
4586 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4588 const char *name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4590 if (name
[0] == '@' && name
[2] == '.')
4595 if (flag_function_sections
|| DECL_COMDAT_GROUP (decl
))
4596 mep_unique_section (decl
, 0);
4597 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4599 if (encoding
== 'f')
4600 return vftext_section
;
4602 return vtext_section
;
4604 else if (encoding
== 'f')
4605 return ftext_section
;
4607 return text_section
;
4610 if (TREE_CODE (decl
) == VAR_DECL
)
4612 const char *name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4614 if (name
[0] == '@' && name
[2] == '.')
4618 return based_section
;
4622 return srodata_section
;
4623 if (DECL_INITIAL (decl
))
4624 return sdata_section
;
4625 return tinybss_section
;
4629 return frodata_section
;
4634 error_at (DECL_SOURCE_LOCATION (decl
),
4635 "variable %D of type %<io%> must be uninitialized", decl
);
4636 return data_section
;
4639 error_at (DECL_SOURCE_LOCATION (decl
),
4640 "variable %D of type %<cb%> must be uninitialized", decl
);
4641 return data_section
;
4646 return readonly_data_section
;
4648 return data_section
;
4652 mep_unique_section (tree decl
, int reloc
)
4654 static const char *prefixes
[][2] =
4656 { ".text.", ".gnu.linkonce.t." },
4657 { ".rodata.", ".gnu.linkonce.r." },
4658 { ".data.", ".gnu.linkonce.d." },
4659 { ".based.", ".gnu.linkonce.based." },
4660 { ".sdata.", ".gnu.linkonce.s." },
4661 { ".far.", ".gnu.linkonce.far." },
4662 { ".ftext.", ".gnu.linkonce.ft." },
4663 { ".frodata.", ".gnu.linkonce.frd." },
4664 { ".srodata.", ".gnu.linkonce.srd." },
4665 { ".vtext.", ".gnu.linkonce.v." },
4666 { ".vftext.", ".gnu.linkonce.vf." }
4668 int sec
= 2; /* .data */
4670 const char *name
, *prefix
;
4673 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
4674 if (DECL_RTL (decl
))
4675 name
= XSTR (XEXP (DECL_RTL (decl
), 0), 0);
4677 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4679 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4680 sec
= 9; /* .vtext */
4682 sec
= 0; /* .text */
4684 else if (decl_readonly_section (decl
, reloc
))
4685 sec
= 1; /* .rodata */
4687 if (name
[0] == '@' && name
[2] == '.')
4692 sec
= 3; /* .based */
4696 sec
= 8; /* .srodata */
4698 sec
= 4; /* .sdata */
4702 sec
= 6; /* .ftext */
4704 sec
= 10; /* .vftext */
4706 sec
= 7; /* .frodata */
4708 sec
= 5; /* .far. */
4714 prefix
= prefixes
[sec
][DECL_COMDAT_GROUP(decl
) != NULL
];
4715 len
= strlen (name
) + strlen (prefix
);
4716 string
= (char *) alloca (len
+ 1);
4718 sprintf (string
, "%s%s", prefix
, name
);
4720 set_decl_section_name (decl
, string
);
4723 /* Given a decl, a section name, and whether the decl initializer
4724 has relocs, choose attributes for the section. */
4726 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4729 mep_section_type_flags (tree decl
, const char *name
, int reloc
)
4731 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
4733 if (decl
&& TREE_CODE (decl
) == FUNCTION_DECL
4734 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))))
4735 flags
|= SECTION_MEP_VLIW
;
4740 /* Switch to an arbitrary section NAME with attributes as specified
4741 by FLAGS. ALIGN specifies any known alignment requirements for
4742 the section; 0 if the default should be used.
4744 Differs from the standard ELF version only in support of VLIW mode. */
4747 mep_asm_named_section (const char *name
, unsigned int flags
, tree decl ATTRIBUTE_UNUSED
)
4749 char flagchars
[8], *f
= flagchars
;
4752 if (!(flags
& SECTION_DEBUG
))
4754 if (flags
& SECTION_WRITE
)
4756 if (flags
& SECTION_CODE
)
4758 if (flags
& SECTION_SMALL
)
4760 if (flags
& SECTION_MEP_VLIW
)
4764 if (flags
& SECTION_BSS
)
4769 fprintf (asm_out_file
, "\t.section\t%s,\"%s\",@%s\n",
4770 name
, flagchars
, type
);
4772 if (flags
& SECTION_CODE
)
4773 fputs ((flags
& SECTION_MEP_VLIW
? "\t.vliw\n" : "\t.core\n"),
4778 mep_output_aligned_common (FILE *stream
, tree decl
, const char *name
,
4779 int size
, int align
, int global
)
4781 /* We intentionally don't use mep_section_tag() here. */
4783 && (name
[1] == 'i' || name
[1] == 'I' || name
[1] == 'c')
4787 tree attr
= lookup_attribute ((name
[1] == 'c' ? "cb" : "io"),
4788 DECL_ATTRIBUTES (decl
));
4790 && TREE_VALUE (attr
)
4791 && TREE_VALUE (TREE_VALUE(attr
)))
4792 location
= TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr
)));
4797 fprintf (stream
, "\t.globl\t");
4798 assemble_name (stream
, name
);
4799 fprintf (stream
, "\n");
4801 assemble_name (stream
, name
);
4802 fprintf (stream
, " = %d\n", location
);
4805 if (name
[0] == '@' && name
[2] == '.')
4807 const char *sec
= 0;
4811 switch_to_section (based_section
);
4815 switch_to_section (tinybss_section
);
4819 switch_to_section (farbss_section
);
4828 while (align
> BITS_PER_UNIT
)
4833 name2
= targetm
.strip_name_encoding (name
);
4835 fprintf (stream
, "\t.globl\t%s\n", name2
);
4836 fprintf (stream
, "\t.p2align %d\n", p2align
);
4837 fprintf (stream
, "\t.type\t%s,@object\n", name2
);
4838 fprintf (stream
, "\t.size\t%s,%d\n", name2
, size
);
4839 fprintf (stream
, "%s:\n\t.zero\t%d\n", name2
, size
);
4846 fprintf (stream
, "\t.local\t");
4847 assemble_name (stream
, name
);
4848 fprintf (stream
, "\n");
4850 fprintf (stream
, "\t.comm\t");
4851 assemble_name (stream
, name
);
4852 fprintf (stream
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
4858 mep_trampoline_init (rtx m_tramp
, tree fndecl
, rtx static_chain
)
4860 rtx addr
= XEXP (m_tramp
, 0);
4861 rtx fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
4863 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__mep_trampoline_helper"),
4864 LCT_NORMAL
, VOIDmode
, 3,
4867 static_chain
, Pmode
);
4870 /* Experimental Reorg. */
4873 mep_mentioned_p (rtx in
,
4874 rtx reg
, /* NULL for mem */
4875 int modes_too
) /* if nonzero, modes must match also. */
4883 if (reg
&& GET_CODE (reg
) != REG
)
4886 if (GET_CODE (in
) == LABEL_REF
)
4889 code
= GET_CODE (in
);
4895 return mep_mentioned_p (XEXP (in
, 0), reg
, modes_too
);
4901 if (modes_too
&& (GET_MODE (in
) != GET_MODE (reg
)))
4903 return (REGNO (in
) == REGNO (reg
));
4916 /* Set's source should be read-only. */
4917 if (code
== SET
&& !reg
)
4918 return mep_mentioned_p (SET_DEST (in
), reg
, modes_too
);
4920 fmt
= GET_RTX_FORMAT (code
);
4922 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4927 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
4928 if (mep_mentioned_p (XVECEXP (in
, i
, j
), reg
, modes_too
))
4931 else if (fmt
[i
] == 'e'
4932 && mep_mentioned_p (XEXP (in
, i
), reg
, modes_too
))
4938 #define EXPERIMENTAL_REGMOVE_REORG 1
4940 #if EXPERIMENTAL_REGMOVE_REORG
4943 mep_compatible_reg_class (int r1
, int r2
)
4945 if (GR_REGNO_P (r1
) && GR_REGNO_P (r2
))
4947 if (CR_REGNO_P (r1
) && CR_REGNO_P (r2
))
4953 mep_reorg_regmove (rtx_insn
*insns
)
4955 rtx_insn
*insn
, *next
, *follow
;
4957 int count
= 0, done
= 0, replace
, before
= 0;
4960 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
4961 if (NONJUMP_INSN_P (insn
))
4964 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4965 set that uses the r2 and r2 dies there. We replace r2 with r1
4966 and see if it's still a valid insn. If so, delete the first set.
4967 Copied from reorg.c. */
4972 for (insn
= insns
; insn
; insn
= next
)
4974 next
= next_nonnote_nondebug_insn (insn
);
4975 if (! NONJUMP_INSN_P (insn
))
4977 pat
= PATTERN (insn
);
4981 if (GET_CODE (pat
) == SET
4982 && GET_CODE (SET_SRC (pat
)) == REG
4983 && GET_CODE (SET_DEST (pat
)) == REG
4984 && find_regno_note (insn
, REG_DEAD
, REGNO (SET_SRC (pat
)))
4985 && mep_compatible_reg_class (REGNO (SET_SRC (pat
)), REGNO (SET_DEST (pat
))))
4987 follow
= next_nonnote_nondebug_insn (insn
);
4989 fprintf (dump_file
, "superfluous moves: considering %d\n", INSN_UID (insn
));
4991 while (follow
&& NONJUMP_INSN_P (follow
)
4992 && GET_CODE (PATTERN (follow
)) == SET
4993 && !dead_or_set_p (follow
, SET_SRC (pat
))
4994 && !mep_mentioned_p (PATTERN (follow
), SET_SRC (pat
), 0)
4995 && !mep_mentioned_p (PATTERN (follow
), SET_DEST (pat
), 0))
4998 fprintf (dump_file
, "\tskipping %d\n", INSN_UID (follow
));
4999 follow
= next_nonnote_insn (follow
);
5003 fprintf (dump_file
, "\tfollow is %d\n", INSN_UID (follow
));
5004 if (follow
&& NONJUMP_INSN_P (follow
)
5005 && GET_CODE (PATTERN (follow
)) == SET
5006 && find_regno_note (follow
, REG_DEAD
, REGNO (SET_DEST (pat
))))
5008 if (GET_CODE (SET_DEST (PATTERN (follow
))) == REG
)
5010 if (mep_mentioned_p (SET_SRC (PATTERN (follow
)), SET_DEST (pat
), 1))
5013 where
= & SET_SRC (PATTERN (follow
));
5016 else if (GET_CODE (SET_DEST (PATTERN (follow
))) == MEM
)
5018 if (mep_mentioned_p (PATTERN (follow
), SET_DEST (pat
), 1))
5021 where
= & PATTERN (follow
);
5027 /* If so, follow is the corresponding insn */
5034 fprintf (dump_file
, "----- Candidate for superfluous move deletion:\n\n");
5035 for (x
= insn
; x
;x
= NEXT_INSN (x
))
5037 print_rtl_single (dump_file
, x
);
5040 fprintf (dump_file
, "\n");
5044 if (validate_replace_rtx_subexp (SET_DEST (pat
), SET_SRC (pat
),
5051 fprintf (dump_file
, "\n----- Success! new insn:\n\n");
5052 print_rtl_single (dump_file
, follow
);
5062 fprintf (dump_file
, "\n%d insn%s deleted out of %d.\n\n", count
, count
== 1 ? "" : "s", before
);
5063 fprintf (dump_file
, "=====\n");
5069 /* Figure out where to put LABEL, which is the label for a repeat loop.
5070 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5071 the loop ends just before LAST_INSN. If SHARED, insns other than the
5072 "repeat" might use LABEL to jump to the loop's continuation point.
5074 Return the last instruction in the adjusted loop. */
5077 mep_insert_repeat_label_last (rtx_insn
*last_insn
, rtx_code_label
*label
,
5078 bool including
, bool shared
)
5080 rtx_insn
*next
, *prev
;
5081 int count
= 0, code
, icode
;
5084 fprintf (dump_file
, "considering end of repeat loop at insn %d\n",
5085 INSN_UID (last_insn
));
5087 /* Set PREV to the last insn in the loop. */
5090 prev
= PREV_INSN (prev
);
5092 /* Set NEXT to the next insn after the repeat label. */
5097 code
= GET_CODE (prev
);
5098 if (code
== CALL_INSN
|| code
== CODE_LABEL
|| code
== BARRIER
)
5103 if (GET_CODE (PATTERN (prev
)) == SEQUENCE
)
5104 prev
= as_a
<rtx_insn
*> (XVECEXP (PATTERN (prev
), 0, 1));
5106 /* Other insns that should not be in the last two opcodes. */
5107 icode
= recog_memoized (prev
);
5109 || icode
== CODE_FOR_repeat
5110 || icode
== CODE_FOR_erepeat
5111 || get_attr_may_trap (prev
) == MAY_TRAP_YES
)
5114 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5115 is the second instruction in a VLIW bundle. In that case,
5116 loop again: if the first instruction also satisfies the
5117 conditions above then we will reach here again and put
5118 both of them into the repeat epilogue. Otherwise both
5119 should remain outside. */
5120 if (GET_MODE (prev
) != BImode
)
5125 print_rtl_single (dump_file
, next
);
5130 prev
= PREV_INSN (prev
);
5133 /* See if we're adding the label immediately after the repeat insn.
5134 If so, we need to separate them with a nop. */
5135 prev
= prev_real_insn (next
);
5137 switch (recog_memoized (prev
))
5139 case CODE_FOR_repeat
:
5140 case CODE_FOR_erepeat
:
5142 fprintf (dump_file
, "Adding nop inside loop\n");
5143 emit_insn_before (gen_nop (), next
);
5150 /* Insert the label. */
5151 emit_label_before (label
, next
);
5153 /* Insert the nops. */
5154 if (dump_file
&& count
< 2)
5155 fprintf (dump_file
, "Adding %d nop%s\n\n",
5156 2 - count
, count
== 1 ? "" : "s");
5158 for (; count
< 2; count
++)
5160 last_insn
= emit_insn_after (gen_nop (), last_insn
);
5162 emit_insn_before (gen_nop (), last_insn
);
5169 mep_emit_doloop (rtx
*operands
, int is_end
)
5173 if (cfun
->machine
->doloop_tags
== 0
5174 || cfun
->machine
->doloop_tag_from_end
== is_end
)
5176 cfun
->machine
->doloop_tags
++;
5177 cfun
->machine
->doloop_tag_from_end
= is_end
;
5180 tag
= GEN_INT (cfun
->machine
->doloop_tags
- 1);
5182 emit_jump_insn (gen_doloop_end_internal (operands
[0], operands
[1], tag
));
5184 emit_insn (gen_doloop_begin_internal (operands
[0], operands
[0], tag
));
5188 /* Code for converting doloop_begins and doloop_ends into valid
5189 MeP instructions. A doloop_begin is just a placeholder:
5191 $count = unspec ($count)
5193 where $count is initially the number of iterations - 1.
5194 doloop_end has the form:
5196 if ($count-- == 0) goto label
5198 The counter variable is private to the doloop insns, nothing else
5199 relies on its value.
5201 There are three cases, in decreasing order of preference:
5203 1. A loop has exactly one doloop_begin and one doloop_end.
5204 The doloop_end branches to the first instruction after
5207 In this case we can replace the doloop_begin with a repeat
5208 instruction and remove the doloop_end. I.e.:
5210 $count1 = unspec ($count1)
5215 if ($count2-- == 0) goto label
5219 repeat $count1,repeat_label
5227 2. As for (1), except there are several doloop_ends. One of them
5228 (call it X) falls through to a label L. All the others fall
5229 through to branches to L.
5231 In this case, we remove X and replace the other doloop_ends
5232 with branches to the repeat label. For example:
5234 $count1 = unspec ($count1)
5237 if ($count2-- == 0) goto label
5240 if ($count3-- == 0) goto label
5245 repeat $count1,repeat_label
5256 3. The fallback case. Replace doloop_begins with:
5260 Replace doloop_ends with the equivalent of:
5263 if ($count == 0) goto label
5265 Note that this might need a scratch register if $count
5266 is stored in memory. */
5268 /* A structure describing one doloop_begin. */
5269 struct mep_doloop_begin
{
5270 /* The next doloop_begin with the same tag. */
5271 struct mep_doloop_begin
*next
;
5273 /* The instruction itself. */
5276 /* The initial counter value. This is known to be a general register. */
5280 /* A structure describing a doloop_end. */
5281 struct mep_doloop_end
{
5282 /* The next doloop_end with the same loop tag. */
5283 struct mep_doloop_end
*next
;
5285 /* The instruction itself. */
5288 /* The first instruction after INSN when the branch isn't taken. */
5289 rtx_insn
*fallthrough
;
5291 /* The location of the counter value. Since doloop_end_internal is a
5292 jump instruction, it has to allow the counter to be stored anywhere
5293 (any non-fixed register or memory location). */
5296 /* The target label (the place where the insn branches when the counter
5300 /* A scratch register. Only available when COUNTER isn't stored
5301 in a general register. */
5306 /* One do-while loop. */
5308 /* All the doloop_begins for this loop (in no particular order). */
5309 struct mep_doloop_begin
*begin
;
5311 /* All the doloop_ends. When there is more than one, arrange things
5312 so that the first one is the most likely to be X in case (2) above. */
5313 struct mep_doloop_end
*end
;
5317 /* Return true if LOOP can be converted into repeat/repeat_end form
5318 (that is, if it matches cases (1) or (2) above). */
5321 mep_repeat_loop_p (struct mep_doloop
*loop
)
5323 struct mep_doloop_end
*end
;
5326 /* There must be exactly one doloop_begin and at least one doloop_end. */
5327 if (loop
->begin
== 0 || loop
->end
== 0 || loop
->begin
->next
!= 0)
5330 /* The first doloop_end (X) must branch back to the insn after
5331 the doloop_begin. */
5332 if (prev_real_insn (loop
->end
->label
) != loop
->begin
->insn
)
5335 /* All the other doloop_ends must branch to the same place as X.
5336 When the branch isn't taken, they must jump to the instruction
5338 fallthrough
= loop
->end
->fallthrough
;
5339 for (end
= loop
->end
->next
; end
!= 0; end
= end
->next
)
5340 if (end
->label
!= loop
->end
->label
5341 || !simplejump_p (end
->fallthrough
)
5342 || next_real_insn (JUMP_LABEL (end
->fallthrough
)) != fallthrough
)
5349 /* The main repeat reorg function. See comment above for details. */
5352 mep_reorg_repeat (rtx_insn
*insns
)
5355 struct mep_doloop
*loops
, *loop
;
5356 struct mep_doloop_begin
*begin
;
5357 struct mep_doloop_end
*end
;
5359 /* Quick exit if we haven't created any loops. */
5360 if (cfun
->machine
->doloop_tags
== 0)
5363 /* Create an array of mep_doloop structures. */
5364 loops
= (struct mep_doloop
*) alloca (sizeof (loops
[0]) * cfun
->machine
->doloop_tags
);
5365 memset (loops
, 0, sizeof (loops
[0]) * cfun
->machine
->doloop_tags
);
5367 /* Search the function for do-while insns and group them by loop tag. */
5368 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5370 switch (recog_memoized (insn
))
5372 case CODE_FOR_doloop_begin_internal
:
5373 insn_extract (insn
);
5374 loop
= &loops
[INTVAL (recog_data
.operand
[2])];
5376 begin
= (struct mep_doloop_begin
*) alloca (sizeof (struct mep_doloop_begin
));
5377 begin
->next
= loop
->begin
;
5379 begin
->counter
= recog_data
.operand
[0];
5381 loop
->begin
= begin
;
5384 case CODE_FOR_doloop_end_internal
:
5385 insn_extract (insn
);
5386 loop
= &loops
[INTVAL (recog_data
.operand
[2])];
5388 end
= (struct mep_doloop_end
*) alloca (sizeof (struct mep_doloop_end
));
5390 end
->fallthrough
= next_real_insn (insn
);
5391 end
->counter
= recog_data
.operand
[0];
5392 end
->label
= recog_data
.operand
[1];
5393 end
->scratch
= recog_data
.operand
[3];
5395 /* If this insn falls through to an unconditional jump,
5396 give it a lower priority than the others. */
5397 if (loop
->end
!= 0 && simplejump_p (end
->fallthrough
))
5399 end
->next
= loop
->end
->next
;
5400 loop
->end
->next
= end
;
5404 end
->next
= loop
->end
;
5410 /* Convert the insns for each loop in turn. */
5411 for (loop
= loops
; loop
< loops
+ cfun
->machine
->doloop_tags
; loop
++)
5412 if (mep_repeat_loop_p (loop
))
5414 /* Case (1) or (2). */
5415 rtx_code_label
*repeat_label
;
5418 /* Create a new label for the repeat insn. */
5419 repeat_label
= gen_label_rtx ();
5421 /* Replace the doloop_begin with a repeat. */
5422 label_ref
= gen_rtx_LABEL_REF (VOIDmode
, repeat_label
);
5423 emit_insn_before (gen_repeat (loop
->begin
->counter
, label_ref
),
5425 delete_insn (loop
->begin
->insn
);
5427 /* Insert the repeat label before the first doloop_end.
5428 Fill the gap with nops if there are other doloop_ends. */
5429 mep_insert_repeat_label_last (loop
->end
->insn
, repeat_label
,
5430 false, loop
->end
->next
!= 0);
5432 /* Emit a repeat_end (to improve the readability of the output). */
5433 emit_insn_before (gen_repeat_end (), loop
->end
->insn
);
5435 /* Delete the first doloop_end. */
5436 delete_insn (loop
->end
->insn
);
5438 /* Replace the others with branches to REPEAT_LABEL. */
5439 for (end
= loop
->end
->next
; end
!= 0; end
= end
->next
)
5441 emit_jump_insn_before (gen_jump (repeat_label
), end
->insn
);
5442 delete_insn (end
->insn
);
5443 delete_insn (end
->fallthrough
);
5448 /* Case (3). First replace all the doloop_begins with increment
5450 for (begin
= loop
->begin
; begin
!= 0; begin
= begin
->next
)
5452 emit_insn_before (gen_add3_insn (copy_rtx (begin
->counter
),
5453 begin
->counter
, const1_rtx
),
5455 delete_insn (begin
->insn
);
5458 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5459 for (end
= loop
->end
; end
!= 0; end
= end
->next
)
5465 /* Load the counter value into a general register. */
5467 if (!REG_P (reg
) || REGNO (reg
) > 15)
5470 emit_move_insn (copy_rtx (reg
), copy_rtx (end
->counter
));
5473 /* Decrement the counter. */
5474 emit_insn (gen_add3_insn (copy_rtx (reg
), copy_rtx (reg
),
5477 /* Copy it back to its original location. */
5478 if (reg
!= end
->counter
)
5479 emit_move_insn (copy_rtx (end
->counter
), copy_rtx (reg
));
5481 /* Jump back to the start label. */
5482 insn
= emit_jump_insn (gen_mep_bne_true (reg
, const0_rtx
,
5484 JUMP_LABEL (insn
) = end
->label
;
5485 LABEL_NUSES (end
->label
)++;
5487 /* Emit the whole sequence before the doloop_end. */
5488 insn
= get_insns ();
5490 emit_insn_before (insn
, end
->insn
);
5492 /* Delete the doloop_end. */
5493 delete_insn (end
->insn
);
5500 mep_invertable_branch_p (rtx_insn
*insn
)
5503 enum rtx_code old_code
;
5506 set
= PATTERN (insn
);
5507 if (GET_CODE (set
) != SET
)
5509 if (GET_CODE (XEXP (set
, 1)) != IF_THEN_ELSE
)
5511 cond
= XEXP (XEXP (set
, 1), 0);
5512 old_code
= GET_CODE (cond
);
5516 PUT_CODE (cond
, NE
);
5519 PUT_CODE (cond
, EQ
);
5522 PUT_CODE (cond
, GE
);
5525 PUT_CODE (cond
, LT
);
5530 INSN_CODE (insn
) = -1;
5531 i
= recog_memoized (insn
);
5532 PUT_CODE (cond
, old_code
);
5533 INSN_CODE (insn
) = -1;
5538 mep_invert_branch (rtx_insn
*insn
, rtx_insn
*after
)
5540 rtx cond
, set
, label
;
5543 set
= PATTERN (insn
);
5545 gcc_assert (GET_CODE (set
) == SET
);
5546 gcc_assert (GET_CODE (XEXP (set
, 1)) == IF_THEN_ELSE
);
5548 cond
= XEXP (XEXP (set
, 1), 0);
5549 switch (GET_CODE (cond
))
5552 PUT_CODE (cond
, NE
);
5555 PUT_CODE (cond
, EQ
);
5558 PUT_CODE (cond
, GE
);
5561 PUT_CODE (cond
, LT
);
5566 label
= gen_label_rtx ();
5567 emit_label_after (label
, after
);
5568 for (i
=1; i
<=2; i
++)
5569 if (GET_CODE (XEXP (XEXP (set
, 1), i
)) == LABEL_REF
)
5571 rtx ref
= XEXP (XEXP (set
, 1), i
);
5572 if (LABEL_NUSES (XEXP (ref
, 0)) == 1)
5573 delete_insn (XEXP (ref
, 0));
5574 XEXP (ref
, 0) = label
;
5575 LABEL_NUSES (label
) ++;
5576 JUMP_LABEL (insn
) = label
;
5578 INSN_CODE (insn
) = -1;
5579 i
= recog_memoized (insn
);
5580 gcc_assert (i
>= 0);
5584 mep_reorg_erepeat (rtx_insn
*insns
)
5586 rtx_insn
*insn
, *prev
;
5591 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5593 && mep_invertable_branch_p (insn
))
5597 fprintf (dump_file
, "\n------------------------------\n");
5598 fprintf (dump_file
, "erepeat: considering this jump:\n");
5599 print_rtl_single (dump_file
, insn
);
5601 count
= simplejump_p (insn
) ? 0 : 1;
5602 for (prev
= PREV_INSN (insn
); prev
; prev
= PREV_INSN (prev
))
5604 if (CALL_P (prev
) || BARRIER_P (prev
))
5607 if (prev
== JUMP_LABEL (insn
))
5611 fprintf (dump_file
, "found loop top, %d insns\n", count
);
5613 if (LABEL_NUSES (prev
) == 1)
5614 /* We're the only user, always safe */ ;
5615 else if (LABEL_NUSES (prev
) == 2)
5617 /* See if there's a barrier before this label. If
5618 so, we know nobody inside the loop uses it.
5619 But we must be careful to put the erepeat
5620 *after* the label. */
5622 for (barrier
= PREV_INSN (prev
);
5623 barrier
&& NOTE_P (barrier
);
5624 barrier
= PREV_INSN (barrier
))
5626 if (barrier
&& ! BARRIER_P (barrier
))
5631 /* We don't know who else, within or without our loop, uses this */
5633 fprintf (dump_file
, "... but there are multiple users, too risky.\n");
5637 /* Generate a label to be used by the erepat insn. */
5638 l
= gen_label_rtx ();
5640 /* Insert the erepeat after INSN's target label. */
5641 x
= gen_erepeat (gen_rtx_LABEL_REF (VOIDmode
, l
));
5643 emit_insn_after (x
, prev
);
5645 /* Insert the erepeat label. */
5646 newlast
= (mep_insert_repeat_label_last
5647 (insn
, l
, !simplejump_p (insn
), false));
5648 if (simplejump_p (insn
))
5650 emit_insn_before (gen_erepeat_end (), insn
);
5655 mep_invert_branch (insn
, newlast
);
5656 emit_insn_after (gen_erepeat_end (), newlast
);
5663 /* A label is OK if there is exactly one user, and we
5664 can find that user before the next label. */
5667 if (LABEL_NUSES (prev
) == 1)
5669 for (user
= PREV_INSN (prev
);
5670 user
&& (INSN_P (user
) || NOTE_P (user
));
5671 user
= PREV_INSN (user
))
5672 if (JUMP_P (user
) && JUMP_LABEL (user
) == prev
)
5674 safe
= INSN_UID (user
);
5681 fprintf (dump_file
, "... ignoring jump from insn %d to %d\n",
5682 safe
, INSN_UID (prev
));
5692 fprintf (dump_file
, "\n==============================\n");
5695 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5696 always do this on its own. */
5699 mep_jmp_return_reorg (rtx_insn
*insns
)
5701 rtx_insn
*insn
, *label
, *ret
;
5704 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5705 if (simplejump_p (insn
))
5707 /* Find the fist real insn the jump jumps to. */
5708 label
= ret
= safe_as_a
<rtx_insn
*> (JUMP_LABEL (insn
));
5712 || GET_CODE (PATTERN (ret
)) == USE
))
5713 ret
= NEXT_INSN (ret
);
5717 /* Is it a return? */
5718 ret_code
= recog_memoized (ret
);
5719 if (ret_code
== CODE_FOR_return_internal
5720 || ret_code
== CODE_FOR_eh_return_internal
)
5722 /* It is. Replace the jump with a return. */
5723 LABEL_NUSES (label
) --;
5724 if (LABEL_NUSES (label
) == 0)
5725 delete_insn (label
);
5726 PATTERN (insn
) = copy_rtx (PATTERN (ret
));
5727 INSN_CODE (insn
) = -1;
5735 mep_reorg_addcombine (rtx_insn
*insns
)
5739 for (i
= insns
; i
; i
= NEXT_INSN (i
))
5741 && INSN_CODE (i
) == CODE_FOR_addsi3
5742 && GET_CODE (SET_DEST (PATTERN (i
))) == REG
5743 && GET_CODE (XEXP (SET_SRC (PATTERN (i
)), 0)) == REG
5744 && REGNO (SET_DEST (PATTERN (i
))) == REGNO (XEXP (SET_SRC (PATTERN (i
)), 0))
5745 && GET_CODE (XEXP (SET_SRC (PATTERN (i
)), 1)) == CONST_INT
)
5749 && INSN_CODE (n
) == CODE_FOR_addsi3
5750 && GET_CODE (SET_DEST (PATTERN (n
))) == REG
5751 && GET_CODE (XEXP (SET_SRC (PATTERN (n
)), 0)) == REG
5752 && REGNO (SET_DEST (PATTERN (n
))) == REGNO (XEXP (SET_SRC (PATTERN (n
)), 0))
5753 && GET_CODE (XEXP (SET_SRC (PATTERN (n
)), 1)) == CONST_INT
)
5755 int ic
= INTVAL (XEXP (SET_SRC (PATTERN (i
)), 1));
5756 int nc
= INTVAL (XEXP (SET_SRC (PATTERN (n
)), 1));
5757 if (REGNO (SET_DEST (PATTERN (i
))) == REGNO (SET_DEST (PATTERN (n
)))
5759 && ic
+ nc
> -32768)
5761 XEXP (SET_SRC (PATTERN (i
)), 1) = GEN_INT (ic
+ nc
);
5762 SET_NEXT_INSN (i
) = NEXT_INSN (n
);
5764 SET_PREV_INSN (NEXT_INSN (i
)) = i
;
5770 /* If this insn adjusts the stack, return the adjustment, else return
5773 add_sp_insn_p (rtx_insn
*insn
)
5777 if (! single_set (insn
))
5779 pat
= PATTERN (insn
);
5780 if (GET_CODE (SET_DEST (pat
)) != REG
)
5782 if (REGNO (SET_DEST (pat
)) != SP_REGNO
)
5784 if (GET_CODE (SET_SRC (pat
)) != PLUS
)
5786 if (GET_CODE (XEXP (SET_SRC (pat
), 0)) != REG
)
5788 if (REGNO (XEXP (SET_SRC (pat
), 0)) != SP_REGNO
)
5790 if (GET_CODE (XEXP (SET_SRC (pat
), 1)) != CONST_INT
)
5792 return INTVAL (XEXP (SET_SRC (pat
), 1));
5795 /* Check for trivial functions that set up an unneeded stack
5798 mep_reorg_noframe (rtx_insn
*insns
)
5800 rtx_insn
*start_frame_insn
;
5801 rtx_insn
*end_frame_insn
= 0;
5805 /* The first insn should be $sp = $sp + N */
5806 while (insns
&& ! INSN_P (insns
))
5807 insns
= NEXT_INSN (insns
);
5811 sp_adjust
= add_sp_insn_p (insns
);
5815 start_frame_insn
= insns
;
5816 sp
= SET_DEST (PATTERN (start_frame_insn
));
5818 insns
= next_real_insn (insns
);
5822 rtx_insn
*next
= next_real_insn (insns
);
5826 sp2
= add_sp_insn_p (insns
);
5831 end_frame_insn
= insns
;
5832 if (sp2
!= -sp_adjust
)
5835 else if (mep_mentioned_p (insns
, sp
, 0))
5837 else if (CALL_P (insns
))
5845 delete_insn (start_frame_insn
);
5846 delete_insn (end_frame_insn
);
5853 rtx_insn
*insns
= get_insns ();
5855 /* We require accurate REG_DEAD notes. */
5856 compute_bb_for_insn ();
5857 df_note_add_problem ();
5860 mep_reorg_addcombine (insns
);
5861 #if EXPERIMENTAL_REGMOVE_REORG
5862 /* VLIW packing has been done already, so we can't just delete things. */
5863 if (!mep_vliw_function_p (cfun
->decl
))
5864 mep_reorg_regmove (insns
);
5866 mep_jmp_return_reorg (insns
);
5867 mep_bundle_insns (insns
);
5868 mep_reorg_repeat (insns
);
5871 && !profile_arc_flag
5872 && TARGET_OPT_REPEAT
5873 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO
)))
5874 mep_reorg_erepeat (insns
);
5876 /* This may delete *insns so make sure it's last. */
5877 mep_reorg_noframe (insns
);
5879 df_finish_pass (false);
5884 /*----------------------------------------------------------------------*/
5886 /*----------------------------------------------------------------------*/
5888 /* Element X gives the index into cgen_insns[] of the most general
5889 implementation of intrinsic X. Unimplemented intrinsics are
5891 int mep_intrinsic_insn
[ARRAY_SIZE (cgen_intrinsics
)];
5893 /* Element X gives the index of another instruction that is mapped to
5894 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5897 Things are set up so that mep_intrinsic_chain[X] < X. */
5898 static int mep_intrinsic_chain
[ARRAY_SIZE (cgen_insns
)];
5900 /* The bitmask for the current ISA. The ISA masks are declared
5902 unsigned int mep_selected_isa
;
5905 const char *config_name
;
5909 static struct mep_config mep_configs
[] = {
5910 #ifdef COPROC_SELECTION_TABLE
5911 COPROC_SELECTION_TABLE
,
5916 /* Initialize the global intrinsics variables above. */
5919 mep_init_intrinsics (void)
5923 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5924 mep_selected_isa
= mep_configs
[0].isa
;
5925 if (mep_config_string
!= 0)
5926 for (i
= 0; mep_configs
[i
].config_name
; i
++)
5927 if (strcmp (mep_config_string
, mep_configs
[i
].config_name
) == 0)
5929 mep_selected_isa
= mep_configs
[i
].isa
;
5933 /* Assume all intrinsics are unavailable. */
5934 for (i
= 0; i
< ARRAY_SIZE (mep_intrinsic_insn
); i
++)
5935 mep_intrinsic_insn
[i
] = -1;
5937 /* Build up the global intrinsic tables. */
5938 for (i
= 0; i
< ARRAY_SIZE (cgen_insns
); i
++)
5939 if ((cgen_insns
[i
].isas
& mep_selected_isa
) != 0)
5941 mep_intrinsic_chain
[i
] = mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
];
5942 mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
] = i
;
5944 /* See whether we can directly move values between one coprocessor
5945 register and another. */
5946 for (i
= 0; i
< ARRAY_SIZE (mep_cmov_insns
); i
++)
5947 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns
[i
]))
5948 mep_have_copro_copro_moves_p
= true;
5950 /* See whether we can directly move values between core and
5951 coprocessor registers. */
5952 mep_have_core_copro_moves_p
= (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1
)
5953 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2
));
5955 mep_have_core_copro_moves_p
= 1;
5958 /* Declare all available intrinsic functions. Called once only. */
5960 static tree cp_data_bus_int_type_node
;
5961 static tree opaque_vector_type_node
;
5962 static tree v8qi_type_node
;
5963 static tree v4hi_type_node
;
5964 static tree v2si_type_node
;
5965 static tree v8uqi_type_node
;
5966 static tree v4uhi_type_node
;
5967 static tree v2usi_type_node
;
5970 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr
)
5974 case cgen_regnum_operand_type_POINTER
: return ptr_type_node
;
5975 case cgen_regnum_operand_type_LONG
: return long_integer_type_node
;
5976 case cgen_regnum_operand_type_ULONG
: return long_unsigned_type_node
;
5977 case cgen_regnum_operand_type_SHORT
: return short_integer_type_node
;
5978 case cgen_regnum_operand_type_USHORT
: return short_unsigned_type_node
;
5979 case cgen_regnum_operand_type_CHAR
: return char_type_node
;
5980 case cgen_regnum_operand_type_UCHAR
: return unsigned_char_type_node
;
5981 case cgen_regnum_operand_type_SI
: return intSI_type_node
;
5982 case cgen_regnum_operand_type_DI
: return intDI_type_node
;
5983 case cgen_regnum_operand_type_VECTOR
: return opaque_vector_type_node
;
5984 case cgen_regnum_operand_type_V8QI
: return v8qi_type_node
;
5985 case cgen_regnum_operand_type_V4HI
: return v4hi_type_node
;
5986 case cgen_regnum_operand_type_V2SI
: return v2si_type_node
;
5987 case cgen_regnum_operand_type_V8UQI
: return v8uqi_type_node
;
5988 case cgen_regnum_operand_type_V4UHI
: return v4uhi_type_node
;
5989 case cgen_regnum_operand_type_V2USI
: return v2usi_type_node
;
5990 case cgen_regnum_operand_type_CP_DATA_BUS_INT
: return cp_data_bus_int_type_node
;
5992 return void_type_node
;
5997 mep_init_builtins (void)
6001 if (TARGET_64BIT_CR_REGS
)
6002 cp_data_bus_int_type_node
= long_long_integer_type_node
;
6004 cp_data_bus_int_type_node
= long_integer_type_node
;
6006 opaque_vector_type_node
= build_opaque_vector_type (intQI_type_node
, 8);
6007 v8qi_type_node
= build_vector_type (intQI_type_node
, 8);
6008 v4hi_type_node
= build_vector_type (intHI_type_node
, 4);
6009 v2si_type_node
= build_vector_type (intSI_type_node
, 2);
6010 v8uqi_type_node
= build_vector_type (unsigned_intQI_type_node
, 8);
6011 v4uhi_type_node
= build_vector_type (unsigned_intHI_type_node
, 4);
6012 v2usi_type_node
= build_vector_type (unsigned_intSI_type_node
, 2);
6014 add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node
);
6016 add_builtin_type ("cp_vector", opaque_vector_type_node
);
6018 add_builtin_type ("cp_v8qi", v8qi_type_node
);
6019 add_builtin_type ("cp_v4hi", v4hi_type_node
);
6020 add_builtin_type ("cp_v2si", v2si_type_node
);
6022 add_builtin_type ("cp_v8uqi", v8uqi_type_node
);
6023 add_builtin_type ("cp_v4uhi", v4uhi_type_node
);
6024 add_builtin_type ("cp_v2usi", v2usi_type_node
);
6026 /* Intrinsics like mep_cadd3 are implemented with two groups of
6027 instructions, one which uses UNSPECs and one which uses a specific
6028 rtl code such as PLUS. Instructions in the latter group belong
6029 to GROUP_KNOWN_CODE.
6031 In such cases, the intrinsic will have two entries in the global
6032 tables above. The unspec form is accessed using builtin functions
6033 while the specific form is accessed using the mep_* enum in
6036 The idea is that __cop arithmetic and builtin functions have
6037 different optimization requirements. If mep_cadd3() appears in
6038 the source code, the user will surely except gcc to use cadd3
6039 rather than a work-alike such as add3. However, if the user
6040 just writes "a + b", where a or b are __cop variables, it is
6041 reasonable for gcc to choose a core instruction rather than
6042 cadd3 if it believes that is more optimal. */
6043 for (i
= 0; i
< ARRAY_SIZE (cgen_insns
); i
++)
6044 if ((cgen_insns
[i
].groups
& GROUP_KNOWN_CODE
) == 0
6045 && mep_intrinsic_insn
[cgen_insns
[i
].intrinsic
] >= 0)
6047 tree ret_type
= void_type_node
;
6050 if (i
> 0 && cgen_insns
[i
].intrinsic
== cgen_insns
[i
-1].intrinsic
)
6053 if (cgen_insns
[i
].cret_p
)
6054 ret_type
= mep_cgen_regnum_to_type (cgen_insns
[i
].regnums
[0].type
);
6056 bi_type
= build_function_type_list (ret_type
, NULL_TREE
);
6057 add_builtin_function (cgen_intrinsics
[cgen_insns
[i
].intrinsic
],
6059 cgen_insns
[i
].intrinsic
, BUILT_IN_MD
, NULL
, NULL
);
6063 /* Report the unavailablity of the given intrinsic. */
6067 mep_intrinsic_unavailable (int intrinsic
)
6069 static int already_reported_p
[ARRAY_SIZE (cgen_intrinsics
)];
6071 if (already_reported_p
[intrinsic
])
6074 if (mep_intrinsic_insn
[intrinsic
] < 0)
6075 error ("coprocessor intrinsic %qs is not available in this configuration",
6076 cgen_intrinsics
[intrinsic
]);
6077 else if (CGEN_CURRENT_GROUP
== GROUP_VLIW
)
6078 error ("%qs is not available in VLIW functions",
6079 cgen_intrinsics
[intrinsic
]);
6081 error ("%qs is not available in non-VLIW functions",
6082 cgen_intrinsics
[intrinsic
]);
6084 already_reported_p
[intrinsic
] = 1;
6089 /* See if any implementation of INTRINSIC is available to the
6090 current function. If so, store the most general implementation
6091 in *INSN_PTR and return true. Return false otherwise. */
6094 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED
, const struct cgen_insn
**insn_ptr ATTRIBUTE_UNUSED
)
6098 i
= mep_intrinsic_insn
[intrinsic
];
6099 while (i
>= 0 && !CGEN_ENABLE_INSN_P (i
))
6100 i
= mep_intrinsic_chain
[i
];
6104 *insn_ptr
= &cgen_insns
[i
];
6111 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6112 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6113 try using a work-alike instead. In this case, the returned insn
6114 may have three operands rather than two. */
6117 mep_get_move_insn (int intrinsic
, const struct cgen_insn
**cgen_insn
)
6121 if (intrinsic
== mep_cmov
)
6123 for (i
= 0; i
< ARRAY_SIZE (mep_cmov_insns
); i
++)
6124 if (mep_get_intrinsic_insn (mep_cmov_insns
[i
], cgen_insn
))
6128 return mep_get_intrinsic_insn (intrinsic
, cgen_insn
);
6132 /* If ARG is a register operand that is the same size as MODE, convert it
6133 to MODE using a subreg. Otherwise return ARG as-is. */
6136 mep_convert_arg (machine_mode mode
, rtx arg
)
6138 if (GET_MODE (arg
) != mode
6139 && register_operand (arg
, VOIDmode
)
6140 && GET_MODE_SIZE (GET_MODE (arg
)) == GET_MODE_SIZE (mode
))
6141 return simplify_gen_subreg (mode
, arg
, GET_MODE (arg
), 0);
6146 /* Apply regnum conversions to ARG using the description given by REGNUM.
6147 Return the new argument on success and null on failure. */
6150 mep_convert_regnum (const struct cgen_regnum_operand
*regnum
, rtx arg
)
6152 if (regnum
->count
== 0)
6155 if (GET_CODE (arg
) != CONST_INT
6157 || INTVAL (arg
) >= regnum
->count
)
6160 return gen_rtx_REG (SImode
, INTVAL (arg
) + regnum
->base
);
6164 /* Try to make intrinsic argument ARG match the given operand.
6165 UNSIGNED_P is true if the argument has an unsigned type. */
6168 mep_legitimize_arg (const struct insn_operand_data
*operand
, rtx arg
,
6171 if (GET_CODE (arg
) == CONST_INT
)
6173 /* CONST_INTs can only be bound to integer operands. */
6174 if (GET_MODE_CLASS (operand
->mode
) != MODE_INT
)
6177 else if (GET_CODE (arg
) == CONST_DOUBLE
)
6178 /* These hold vector constants. */;
6179 else if (GET_MODE_SIZE (GET_MODE (arg
)) != GET_MODE_SIZE (operand
->mode
))
6181 /* If the argument is a different size from what's expected, we must
6182 have a value in the right mode class in order to convert it. */
6183 if (GET_MODE_CLASS (operand
->mode
) != GET_MODE_CLASS (GET_MODE (arg
)))
6186 /* If the operand is an rvalue, promote or demote it to match the
6187 operand's size. This might not need extra instructions when
6188 ARG is a register value. */
6189 if (operand
->constraint
[0] != '=')
6190 arg
= convert_to_mode (operand
->mode
, arg
, unsigned_p
);
6193 /* If the operand is an lvalue, bind the operand to a new register.
6194 The caller will copy this value into ARG after the main
6195 instruction. By doing this always, we produce slightly more
6197 /* But not for control registers. */
6198 if (operand
->constraint
[0] == '='
6200 || ! (CONTROL_REGNO_P (REGNO (arg
))
6201 || CCR_REGNO_P (REGNO (arg
))
6202 || CR_REGNO_P (REGNO (arg
)))
6204 return gen_reg_rtx (operand
->mode
);
6206 /* Try simple mode punning. */
6207 arg
= mep_convert_arg (operand
->mode
, arg
);
6208 if (operand
->predicate (arg
, operand
->mode
))
6211 /* See if forcing the argument into a register will make it match. */
6212 if (GET_CODE (arg
) == CONST_INT
|| GET_CODE (arg
) == CONST_DOUBLE
)
6213 arg
= force_reg (operand
->mode
, arg
);
6215 arg
= mep_convert_arg (operand
->mode
, force_reg (GET_MODE (arg
), arg
));
6216 if (operand
->predicate (arg
, operand
->mode
))
6223 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6224 function FNNAME. OPERAND describes the operand to which ARGNUM
6228 mep_incompatible_arg (const struct insn_operand_data
*operand
, rtx arg
,
6229 int argnum
, tree fnname
)
6233 if (GET_CODE (arg
) == CONST_INT
)
6234 for (i
= 0; i
< ARRAY_SIZE (cgen_immediate_predicates
); i
++)
6235 if (operand
->predicate
== cgen_immediate_predicates
[i
].predicate
)
6237 const struct cgen_immediate_predicate
*predicate
;
6238 HOST_WIDE_INT argval
;
6240 predicate
= &cgen_immediate_predicates
[i
];
6241 argval
= INTVAL (arg
);
6242 if (argval
< predicate
->lower
|| argval
>= predicate
->upper
)
6243 error ("argument %d of %qE must be in the range %d...%d",
6244 argnum
, fnname
, predicate
->lower
, predicate
->upper
- 1);
6246 error ("argument %d of %qE must be a multiple of %d",
6247 argnum
, fnname
, predicate
->align
);
6251 error ("incompatible type for argument %d of %qE", argnum
, fnname
);
6255 mep_expand_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
6256 rtx subtarget ATTRIBUTE_UNUSED
,
6257 machine_mode mode ATTRIBUTE_UNUSED
,
6258 int ignore ATTRIBUTE_UNUSED
)
6260 rtx pat
, op
[10], arg
[10];
6262 int opindex
, unsigned_p
[10];
6264 unsigned int n_args
;
6266 const struct cgen_insn
*cgen_insn
;
6267 const struct insn_data_d
*idata
;
6268 unsigned int first_arg
= 0;
6269 unsigned int builtin_n_args
;
6271 fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
6272 fnname
= DECL_NAME (fndecl
);
6274 /* Find out which instruction we should emit. Note that some coprocessor
6275 intrinsics may only be available in VLIW mode, or only in normal mode. */
6276 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl
), &cgen_insn
))
6278 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl
));
6281 idata
= &insn_data
[cgen_insn
->icode
];
6283 builtin_n_args
= cgen_insn
->num_args
;
6285 if (cgen_insn
->cret_p
)
6287 if (cgen_insn
->cret_p
> 1)
6290 mep_cgen_regnum_to_type (cgen_insn
->regnums
[0].type
);
6294 /* Evaluate each argument. */
6295 n_args
= call_expr_nargs (exp
);
6297 if (n_args
< builtin_n_args
)
6299 error ("too few arguments to %qE", fnname
);
6302 if (n_args
> builtin_n_args
)
6304 error ("too many arguments to %qE", fnname
);
6308 for (a
= first_arg
; a
< builtin_n_args
+ first_arg
; a
++)
6312 args
= CALL_EXPR_ARG (exp
, a
- first_arg
);
6317 if (cgen_insn
->regnums
[a
].reference_p
)
6319 if (TREE_CODE (value
) != ADDR_EXPR
)
6322 error ("argument %d of %qE must be an address", a
+1, fnname
);
6325 value
= TREE_OPERAND (value
, 0);
6329 /* If the argument has been promoted to int, get the unpromoted
6330 value. This is necessary when sub-int memory values are bound
6331 to reference parameters. */
6332 if (TREE_CODE (value
) == NOP_EXPR
6333 && TREE_TYPE (value
) == integer_type_node
6334 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value
, 0)))
6335 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value
, 0)))
6336 < TYPE_PRECISION (TREE_TYPE (value
))))
6337 value
= TREE_OPERAND (value
, 0);
6339 /* If the argument has been promoted to double, get the unpromoted
6340 SFmode value. This is necessary for FMAX support, for example. */
6341 if (TREE_CODE (value
) == NOP_EXPR
6342 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value
))
6343 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value
, 0)))
6344 && TYPE_MODE (TREE_TYPE (value
)) == DFmode
6345 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value
, 0))) == SFmode
)
6346 value
= TREE_OPERAND (value
, 0);
6348 unsigned_p
[a
] = TYPE_UNSIGNED (TREE_TYPE (value
));
6349 arg
[a
] = expand_expr (value
, NULL
, VOIDmode
, EXPAND_NORMAL
);
6350 arg
[a
] = mep_convert_regnum (&cgen_insn
->regnums
[a
], arg
[a
]);
6351 if (cgen_insn
->regnums
[a
].reference_p
)
6353 tree pointed_to
= TREE_TYPE (TREE_TYPE (value
));
6354 machine_mode pointed_mode
= TYPE_MODE (pointed_to
);
6356 arg
[a
] = gen_rtx_MEM (pointed_mode
, arg
[a
]);
6360 error ("argument %d of %qE must be in the range %d...%d",
6361 a
+ 1, fnname
, 0, cgen_insn
->regnums
[a
].count
- 1);
6366 for (a
= 0; a
< first_arg
; a
++)
6368 if (a
== 0 && target
&& GET_MODE (target
) == idata
->operand
[0].mode
)
6371 arg
[a
] = gen_reg_rtx (idata
->operand
[0].mode
);
6374 /* Convert the arguments into a form suitable for the intrinsic.
6375 Report an error if this isn't possible. */
6376 for (opindex
= 0; opindex
< idata
->n_operands
; opindex
++)
6378 a
= cgen_insn
->op_mapping
[opindex
];
6379 op
[opindex
] = mep_legitimize_arg (&idata
->operand
[opindex
],
6380 arg
[a
], unsigned_p
[a
]);
6381 if (op
[opindex
] == 0)
6383 mep_incompatible_arg (&idata
->operand
[opindex
],
6384 arg
[a
], a
+ 1 - first_arg
, fnname
);
6389 /* Emit the instruction. */
6390 pat
= idata
->genfun (op
[0], op
[1], op
[2], op
[3], op
[4],
6391 op
[5], op
[6], op
[7], op
[8], op
[9]);
6393 if (GET_CODE (pat
) == SET
6394 && GET_CODE (SET_DEST (pat
)) == PC
6395 && GET_CODE (SET_SRC (pat
)) == IF_THEN_ELSE
)
6396 emit_jump_insn (pat
);
6400 /* Copy lvalues back to their final locations. */
6401 for (opindex
= 0; opindex
< idata
->n_operands
; opindex
++)
6402 if (idata
->operand
[opindex
].constraint
[0] == '=')
6404 a
= cgen_insn
->op_mapping
[opindex
];
6407 if (GET_MODE_CLASS (GET_MODE (arg
[a
]))
6408 != GET_MODE_CLASS (GET_MODE (op
[opindex
])))
6409 emit_move_insn (arg
[a
], gen_lowpart (GET_MODE (arg
[a
]),
6413 /* First convert the operand to the right mode, then copy it
6414 into the destination. Doing the conversion as a separate
6415 step (rather than using convert_move) means that we can
6416 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6417 refer to the same register. */
6418 op
[opindex
] = convert_to_mode (GET_MODE (arg
[a
]),
6419 op
[opindex
], unsigned_p
[a
]);
6420 if (!rtx_equal_p (arg
[a
], op
[opindex
]))
6421 emit_move_insn (arg
[a
], op
[opindex
]);
6426 if (first_arg
> 0 && target
&& target
!= op
[0])
6428 emit_move_insn (target
, op
[0]);
6435 mep_vector_mode_supported_p (machine_mode mode ATTRIBUTE_UNUSED
)
6440 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6441 a global register. */
6444 global_reg_mentioned_p_1 (const_rtx x
)
6448 switch (GET_CODE (x
))
6451 if (REG_P (SUBREG_REG (x
)))
6453 if (REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
6454 && global_regs
[subreg_regno (x
)])
6462 if (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
6467 /* A non-constant call might use a global register. */
6477 /* Returns nonzero if X mentions a global register. */
6480 global_reg_mentioned_p (rtx x
)
6486 if (! RTL_CONST_OR_PURE_CALL_P (x
))
6488 x
= CALL_INSN_FUNCTION_USAGE (x
);
6496 subrtx_iterator::array_type array
;
6497 FOR_EACH_SUBRTX (iter
, array
, x
, NONCONST
)
6498 if (global_reg_mentioned_p_1 (*iter
))
6502 /* Scheduling hooks for VLIW mode.
6504 Conceptually this is very simple: we have a two-pack architecture
6505 that takes one core insn and one coprocessor insn to make up either
6506 a 32- or 64-bit instruction word (depending on the option bit set in
6507 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6508 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6509 and one 48-bit cop insn or two 32-bit core/cop insns.
6511 In practice, instruction selection will be a bear. Consider in
6512 VL64 mode the following insns
6517 these cannot pack, since the add is a 16-bit core insn and cmov
6518 is a 32-bit cop insn. However,
6523 packs just fine. For good VLIW code generation in VL64 mode, we
6524 will have to have 32-bit alternatives for many of the common core
6525 insns. Not implemented. */
6528 mep_adjust_cost (rtx_insn
*insn
, rtx link
, rtx_insn
*dep_insn
, int cost
)
6532 if (REG_NOTE_KIND (link
) != 0)
6534 /* See whether INSN and DEP_INSN are intrinsics that set the same
6535 hard register. If so, it is more important to free up DEP_INSN
6536 than it is to free up INSN.
6538 Note that intrinsics like mep_mulr are handled differently from
6539 the equivalent mep.md patterns. In mep.md, if we don't care
6540 about the value of $lo and $hi, the pattern will just clobber
6541 the registers, not set them. Since clobbers don't count as
6542 output dependencies, it is often possible to reorder two mulrs,
6545 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6546 so any pair of mep_mulr()s will be inter-dependent. We should
6547 therefore give the first mep_mulr() a higher priority. */
6548 if (REG_NOTE_KIND (link
) == REG_DEP_OUTPUT
6549 && global_reg_mentioned_p (PATTERN (insn
))
6550 && global_reg_mentioned_p (PATTERN (dep_insn
)))
6553 /* If the dependence is an anti or output dependence, assume it
6558 /* If we can't recognize the insns, we can't really do anything. */
6559 if (recog_memoized (dep_insn
) < 0)
6562 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6563 attribute instead. */
6566 cost_specified
= get_attr_latency (dep_insn
);
6567 if (cost_specified
!= 0)
6568 return cost_specified
;
6574 /* ??? We don't properly compute the length of a load/store insn,
6575 taking into account the addressing mode. */
6578 mep_issue_rate (void)
6580 return TARGET_IVC2
? 3 : 2;
6583 /* Return true if function DECL was declared with the vliw attribute. */
6586 mep_vliw_function_p (tree decl
)
6588 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl
))) != 0;
6592 mep_find_ready_insn (rtx_insn
**ready
, int nready
, enum attr_slot slot
,
6597 for (i
= nready
- 1; i
>= 0; --i
)
6599 rtx_insn
*insn
= ready
[i
];
6600 if (recog_memoized (insn
) >= 0
6601 && get_attr_slot (insn
) == slot
6602 && get_attr_length (insn
) == length
)
6610 mep_move_ready_insn (rtx_insn
**ready
, int nready
, rtx_insn
*insn
)
6614 for (i
= 0; i
< nready
; ++i
)
6615 if (ready
[i
] == insn
)
6617 for (; i
< nready
- 1; ++i
)
6618 ready
[i
] = ready
[i
+ 1];
6627 mep_print_sched_insn (FILE *dump
, rtx_insn
*insn
)
6629 const char *slots
= "none";
6630 const char *name
= NULL
;
6634 if (GET_CODE (PATTERN (insn
)) == SET
6635 || GET_CODE (PATTERN (insn
)) == PARALLEL
)
6637 switch (get_attr_slots (insn
))
6639 case SLOTS_CORE
: slots
= "core"; break;
6640 case SLOTS_C3
: slots
= "c3"; break;
6641 case SLOTS_P0
: slots
= "p0"; break;
6642 case SLOTS_P0_P0S
: slots
= "p0,p0s"; break;
6643 case SLOTS_P0_P1
: slots
= "p0,p1"; break;
6644 case SLOTS_P0S
: slots
= "p0s"; break;
6645 case SLOTS_P0S_P1
: slots
= "p0s,p1"; break;
6646 case SLOTS_P1
: slots
= "p1"; break;
6648 sprintf(buf
, "%d", get_attr_slots (insn
));
6653 if (GET_CODE (PATTERN (insn
)) == USE
)
6656 code
= INSN_CODE (insn
);
6658 name
= get_insn_name (code
);
6663 "insn %4d %4d %8s %s\n",
6671 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED
,
6672 int sched_verbose ATTRIBUTE_UNUSED
, rtx_insn
**ready
,
6673 int *pnready
, int clock ATTRIBUTE_UNUSED
)
6675 int nready
= *pnready
;
6676 rtx_insn
*core_insn
, *cop_insn
;
6679 if (dump
&& sched_verbose
> 1)
6681 fprintf (dump
, "\nsched_reorder: clock %d nready %d\n", clock
, nready
);
6682 for (i
=0; i
<nready
; i
++)
6683 mep_print_sched_insn (dump
, ready
[i
]);
6684 fprintf (dump
, "\n");
6687 if (!mep_vliw_function_p (cfun
->decl
))
6692 /* IVC2 uses a DFA to determine what's ready and what's not. */
6696 /* We can issue either a core or coprocessor instruction.
6697 Look for a matched pair of insns to reorder. If we don't
6698 find any, don't second-guess the scheduler's priorities. */
6700 if ((core_insn
= mep_find_ready_insn (ready
, nready
, SLOT_CORE
, 2))
6701 && (cop_insn
= mep_find_ready_insn (ready
, nready
, SLOT_COP
,
6702 TARGET_OPT_VL64
? 6 : 2)))
6704 else if (TARGET_OPT_VL64
6705 && (core_insn
= mep_find_ready_insn (ready
, nready
, SLOT_CORE
, 4))
6706 && (cop_insn
= mep_find_ready_insn (ready
, nready
, SLOT_COP
, 4)))
6709 /* We didn't find a pair. Issue the single insn at the head
6710 of the ready list. */
6713 /* Reorder the two insns first. */
6714 mep_move_ready_insn (ready
, nready
, core_insn
);
6715 mep_move_ready_insn (ready
, nready
- 1, cop_insn
);
6719 /* Return true if X contains a register that is set by insn PREV. */
6722 mep_store_find_set (const_rtx x
, const rtx_insn
*prev
)
6724 subrtx_iterator::array_type array
;
6725 FOR_EACH_SUBRTX (iter
, array
, x
, NONCONST
)
6726 if (REG_P (x
) && reg_set_p (x
, prev
))
6731 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6732 not the containing insn. */
6735 mep_store_data_bypass_1 (rtx_insn
*prev
, rtx pat
)
6737 /* Cope with intrinsics like swcpa. */
6738 if (GET_CODE (pat
) == PARALLEL
)
6742 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
6743 if (mep_store_data_bypass_p (prev
,
6744 as_a
<rtx_insn
*> (XVECEXP (pat
, 0, i
))))
6750 /* Check for some sort of store. */
6751 if (GET_CODE (pat
) != SET
6752 || GET_CODE (SET_DEST (pat
)) != MEM
)
6755 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6756 The first operand to the unspec is the store data and the other operands
6757 are used to calculate the address. */
6758 if (GET_CODE (SET_SRC (pat
)) == UNSPEC
)
6763 src
= SET_SRC (pat
);
6764 for (i
= 1; i
< XVECLEN (src
, 0); i
++)
6765 if (mep_store_find_set (XVECEXP (src
, 0, i
), prev
))
6771 /* Otherwise just check that PREV doesn't modify any register mentioned
6772 in the memory destination. */
6773 return !mep_store_find_set (SET_DEST (pat
), prev
);
6776 /* Return true if INSN is a store instruction and if the store address
6777 has no true dependence on PREV. */
6780 mep_store_data_bypass_p (rtx_insn
*prev
, rtx_insn
*insn
)
6782 return INSN_P (insn
) ? mep_store_data_bypass_1 (prev
, PATTERN (insn
)) : false;
6785 /* Return true if, apart from HI/LO, there are no true dependencies
6786 between multiplication instructions PREV and INSN. */
6789 mep_mul_hilo_bypass_p (rtx_insn
*prev
, rtx_insn
*insn
)
6793 pat
= PATTERN (insn
);
6794 if (GET_CODE (pat
) == PARALLEL
)
6795 pat
= XVECEXP (pat
, 0, 0);
6796 if (GET_CODE (pat
) != SET
)
6798 subrtx_iterator::array_type array
;
6799 FOR_EACH_SUBRTX (iter
, array
, SET_SRC (pat
), NONCONST
)
6801 const_rtx x
= *iter
;
6803 && REGNO (x
) != LO_REGNO
6804 && REGNO (x
) != HI_REGNO
6805 && reg_set_p (x
, prev
))
6811 /* Return true if INSN is an ldc instruction that issues to the
6812 MeP-h1 integer pipeline. This is true for instructions that
6813 read from PSW, LP, SAR, HI and LO. */
6816 mep_ipipe_ldc_p (rtx_insn
*insn
)
6820 pat
= PATTERN (insn
);
6822 /* Cope with instrinsics that set both a hard register and its shadow.
6823 The set of the hard register comes first. */
6824 if (GET_CODE (pat
) == PARALLEL
)
6825 pat
= XVECEXP (pat
, 0, 0);
6827 if (GET_CODE (pat
) == SET
)
6829 src
= SET_SRC (pat
);
6831 /* Cope with intrinsics. The first operand to the unspec is
6832 the source register. */
6833 if (GET_CODE (src
) == UNSPEC
|| GET_CODE (src
) == UNSPEC_VOLATILE
)
6834 src
= XVECEXP (src
, 0, 0);
6837 switch (REGNO (src
))
6850 /* Create a VLIW bundle from core instruction CORE and coprocessor
6851 instruction COP. COP always satisfies INSN_P, but CORE can be
6852 either a new pattern or an existing instruction.
6854 Emit the bundle in place of COP and return it. */
6857 mep_make_bundle (rtx core_insn_or_pat
, rtx_insn
*cop
)
6860 rtx_insn
*core_insn
;
6863 /* If CORE is an existing instruction, remove it, otherwise put
6864 the new pattern in an INSN harness. */
6865 if (INSN_P (core_insn_or_pat
))
6867 core_insn
= as_a
<rtx_insn
*> (core_insn_or_pat
);
6868 remove_insn (core_insn
);
6871 core_insn
= make_insn_raw (core_insn_or_pat
);
6873 /* Generate the bundle sequence and replace COP with it. */
6874 seq
= gen_rtx_SEQUENCE (VOIDmode
, gen_rtvec (2, core_insn
, cop
));
6875 insn
= emit_insn_after (seq
, cop
);
6878 /* Set up the links of the insns inside the SEQUENCE. */
6879 SET_PREV_INSN (core_insn
) = PREV_INSN (insn
);
6880 SET_NEXT_INSN (core_insn
) = cop
;
6881 SET_PREV_INSN (cop
) = core_insn
;
6882 SET_NEXT_INSN (cop
) = NEXT_INSN (insn
);
6884 /* Set the VLIW flag for the coprocessor instruction. */
6885 PUT_MODE (core_insn
, VOIDmode
);
6886 PUT_MODE (cop
, BImode
);
6888 /* Derive a location for the bundle. Individual instructions cannot
6889 have their own location because there can be no assembler labels
6890 between CORE_INSN and COP. */
6891 INSN_LOCATION (insn
) = INSN_LOCATION (INSN_LOCATION (core_insn
) ? core_insn
: cop
);
6892 INSN_LOCATION (core_insn
) = 0;
6893 INSN_LOCATION (cop
) = 0;
6898 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6901 mep_insn_dependent_p_1 (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
, void *data
)
6903 rtx
* pinsn
= (rtx
*) data
;
6905 if (*pinsn
&& reg_mentioned_p (x
, *pinsn
))
/* NOTE(review): damaged extract -- the declaration/initialization of TMP
   (presumably `tmp = PATTERN (y);` before the first note_stores call and
   `tmp = PATTERN (x);` before the second -- TODO confirm) and the
   `return true;` / `return false;` bodies are missing.  Only comments
   added; code bytes untouched.
   The visible logic: walk the stores of X against Y's pattern, then the
   stores of Y against X's pattern, using mep_insn_dependent_p_1, which
   signals a hit by clearing TMP to NULL_RTX.  */
6909 /* Return true if anything in insn X is (anti,output,true) dependent on
6910 anything in insn Y. */
6913 mep_insn_dependent_p (rtx x
, rtx y
)
6917 gcc_assert (INSN_P (x
));
6918 gcc_assert (INSN_P (y
));
6921 note_stores (PATTERN (x
), mep_insn_dependent_p_1
, &tmp
);
6922 if (tmp
== NULL_RTX
)
6926 note_stores (PATTERN (y
), mep_insn_dependent_p_1
, &tmp
);
6927 if (tmp
== NULL_RTX
)
/* Predicate: does INSN occupy the core (as opposed to coprocessor) slot
   of a VLIW bundle?  USE patterns are excluded first, then the insn's
   `slot` attribute is compared against SLOT_CORE.
   NOTE(review): damaged extract -- the return type (presumably
   `static bool` -- TODO confirm), braces and the actual `return`
   statements are missing; code bytes left untouched.  */
6934 core_insn_p (rtx_insn
*insn
)
6936 if (GET_CODE (PATTERN (insn
)) == USE
)
6938 if (get_attr_slot (insn
) == SLOT_CORE
)
/* NOTE(review): damaged extract of the VLIW bundling pass.  Original
   line numbers are fused into the text and many interior lines are
   missing: braces, `else` keywords, several loop headers (e.g. the
   `for (last = insn; ...` opener before original line 7052), the
   `case` labels of the cop-length switch (7105), the TARGET_IVC2
   branch selector, and the NOTE-skipping logic around 6995.  Do not
   compile as-is; only comments are added, code bytes untouched.

   Visible structure, for orientation:
     1. Bail out unless the function is compiled in VLIW mode.
     2. First pass over INSNS: hoist NOTEs out of the middle of bundles
        (bundle membership is encoded in insn modes: TImode = first insn
        of a bundle, VOIDmode = follower) and copy the bundle head's
        location onto followers.
     3. Second pass: on IVC2, rotate the core insn (core_insn_p) to the
        front of each scheduler-produced bundle and fix up modes
        (TImode head, BImode/VOIDmode followers); on other cores, pair
        each SLOT_COP insn with the preceding SLOT_CORE insn via
        mep_make_bundle, padding with gen_nop ()/gen_nop32 () when the
        lengths do not add up to the bundle width (8 bytes with
        TARGET_OPT_VL64, else 4) or when the two insns are dependent
        (mep_insn_dependent_p).  2-byte cop insns are rejected with
        error () in 64-bit VLIW mode.  */
6943 /* Mark coprocessor instructions that can be bundled together with
6944 the immediately preceding core instruction. This is later used
6945 to emit the "+" that tells the assembler to create a VLIW insn.
6947 For unbundled insns, the assembler will automatically add coprocessor
6948 nops, and 16-bit core nops. Due to an apparent oversight in the
6949 spec, the assembler will _not_ automatically add 32-bit core nops,
6950 so we have to emit those here.
6952 Called from mep_insn_reorg. */
6955 mep_bundle_insns (rtx_insn
*insns
)
6957 rtx_insn
*insn
, *last
= NULL
, *first
= NULL
;
6958 int saw_scheduling
= 0;
6960 /* Only do bundling if we're in vliw mode. */
6961 if (!mep_vliw_function_p (cfun
->decl
))
6964 /* The first insn in a bundle are TImode, the remainder are
6965 VOIDmode. After this function, the first has VOIDmode and the
6966 rest have BImode. */
6968 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6970 /* First, move any NOTEs that are within a bundle, to the beginning
6972 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
6974 if (NOTE_P (insn
) && first
)
6975 /* Don't clear FIRST. */;
6977 else if (NONJUMP_INSN_P (insn
) && GET_MODE (insn
) == TImode
)
6980 else if (NONJUMP_INSN_P (insn
) && GET_MODE (insn
) == VOIDmode
&& first
)
6982 rtx_insn
*note
, *prev
;
6984 /* INSN is part of a bundle; FIRST is the first insn in that
6985 bundle. Move all intervening notes out of the bundle.
6986 In addition, since the debug pass may insert a label
6987 whenever the current line changes, set the location info
6988 for INSN to match FIRST. */
6990 INSN_LOCATION (insn
) = INSN_LOCATION (first
);
6992 note
= PREV_INSN (insn
);
6993 while (note
&& note
!= first
)
6995 prev
= PREV_INSN (note
);
6999 /* Remove NOTE from here... */
7000 SET_PREV_INSN (NEXT_INSN (note
)) = PREV_INSN (note
);
7001 SET_NEXT_INSN (PREV_INSN (note
)) = NEXT_INSN (note
);
7002 /* ...and put it in here. */
7003 SET_NEXT_INSN (note
) = first
;
7004 SET_PREV_INSN (note
) = PREV_INSN (first
);
7005 SET_NEXT_INSN (PREV_INSN (note
)) = note
;
7006 SET_PREV_INSN (NEXT_INSN (note
)) = note
;
7013 else if (!NONJUMP_INSN_P (insn
))
7017 /* Now fix up the bundles. */
7018 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
7023 if (!NONJUMP_INSN_P (insn
))
7029 /* If we're not optimizing enough, there won't be scheduling
7030 info. We detect that here. */
7031 if (GET_MODE (insn
) == TImode
)
7033 if (!saw_scheduling
)
7038 rtx_insn
*core_insn
= NULL
;
7040 /* IVC2 slots are scheduled by DFA, so we just accept
7041 whatever the scheduler gives us. However, we must make
7042 sure the core insn (if any) is the first in the bundle.
7043 The IVC2 assembler can insert whatever NOPs are needed,
7044 and allows a COP insn to be first. */
7046 if (NONJUMP_INSN_P (insn
)
7047 && GET_CODE (PATTERN (insn
)) != USE
7048 && GET_MODE (insn
) == TImode
)
7052 && GET_MODE (NEXT_INSN (last
)) == VOIDmode
7053 && NONJUMP_INSN_P (NEXT_INSN (last
));
7054 last
= NEXT_INSN (last
))
7056 if (core_insn_p (last
))
7059 if (core_insn_p (last
))
7062 if (core_insn
&& core_insn
!= insn
)
7064 /* Swap core insn to first in the bundle. */
7066 /* Remove core insn. */
7067 if (PREV_INSN (core_insn
))
7068 SET_NEXT_INSN (PREV_INSN (core_insn
)) = NEXT_INSN (core_insn
);
7069 if (NEXT_INSN (core_insn
))
7070 SET_PREV_INSN (NEXT_INSN (core_insn
)) = PREV_INSN (core_insn
);
7072 /* Re-insert core insn. */
7073 SET_PREV_INSN (core_insn
) = PREV_INSN (insn
);
7074 SET_NEXT_INSN (core_insn
) = insn
;
7076 if (PREV_INSN (core_insn
))
7077 SET_NEXT_INSN (PREV_INSN (core_insn
)) = core_insn
;
7078 SET_PREV_INSN (insn
) = core_insn
;
7080 PUT_MODE (core_insn
, TImode
);
7081 PUT_MODE (insn
, VOIDmode
);
7085 /* The first insn has TImode, the rest have VOIDmode */
7086 if (GET_MODE (insn
) == TImode
)
7087 PUT_MODE (insn
, VOIDmode
);
7089 PUT_MODE (insn
, BImode
);
7093 PUT_MODE (insn
, VOIDmode
);
7094 if (recog_memoized (insn
) >= 0
7095 && get_attr_slot (insn
) == SLOT_COP
)
7099 || recog_memoized (last
) < 0
7100 || get_attr_slot (last
) != SLOT_CORE
7101 || (get_attr_length (insn
)
7102 != (TARGET_OPT_VL64
? 8 : 4) - get_attr_length (last
))
7103 || mep_insn_dependent_p (insn
, last
))
7105 switch (get_attr_length (insn
))
7110 insn
= mep_make_bundle (gen_nop (), insn
);
7113 if (TARGET_OPT_VL64
)
7114 insn
= mep_make_bundle (gen_nop32 (), insn
);
7117 if (TARGET_OPT_VL64
)
7118 error ("2 byte cop instructions are"
7119 " not allowed in 64-bit VLIW mode");
7121 insn
= mep_make_bundle (gen_nop (), insn
);
7124 error ("unexpected %d byte cop instruction",
7125 get_attr_length (insn
));
7130 insn
= mep_make_bundle (last
, insn
);
/* NOTE(review): damaged extract -- the return type (presumably `bool`),
   the declarations of I and NEWOP (an rtx array of at least 9 entries,
   matching the 9-argument genfun call below -- TODO confirm), the early
   `return false;` paths and the final `return true;` are missing.
   Only comments added; code bytes untouched.
   Visible flow: look up the cgen insn for INTRINSIC; if unavailable,
   fail.  Convert each operand to the mode the insn pattern expects via
   mep_convert_arg and check it against the operand predicate, failing
   if any predicate rejects.  Finally emit the instruction through the
   pattern's generator function with all nine operand slots.  */
7138 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7139 Return true on success. This function can fail if the intrinsic
7140 is unavailable or if the operands don't satisfy their predicates. */
7143 mep_emit_intrinsic (int intrinsic
, const rtx
*operands
)
7145 const struct cgen_insn
*cgen_insn
;
7146 const struct insn_data_d
*idata
;
7150 if (!mep_get_intrinsic_insn (intrinsic
, &cgen_insn
))
7153 idata
= &insn_data
[cgen_insn
->icode
];
7154 for (i
= 0; i
< idata
->n_operands
; i
++)
7156 newop
[i
] = mep_convert_arg (idata
->operand
[i
].mode
, operands
[i
]);
7157 if (!idata
->operand
[i
].predicate (newop
[i
], idata
->operand
[i
].mode
))
7161 emit_insn (idata
->genfun (newop
[0], newop
[1], newop
[2],
7162 newop
[3], newop
[4], newop
[5],
7163 newop
[6], newop
[7], newop
[8]));
/* NOTE(review): damaged extract -- only the comment and parameter list
   survive; the return type and entire body are missing.  Both
   parameters are marked ATTRIBUTE_UNUSED, so the visible remnant is
   consistent with a stub body (possibly gated by generated-intrinsic
   support) -- TODO confirm against upstream.  Code bytes untouched.  */
7169 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7170 OPERANDS[0]. Report an error if the instruction could not
7171 be synthesized. OPERANDS[1] is a register_operand. For sign
7172 and zero extensions, it may be smaller than SImode. */
7175 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic
,
7176 rtx
* operands ATTRIBUTE_UNUSED
)
/* NOTE(review): damaged extract -- only the comment and parameter list
   survive; return type and body are missing.  All five parameters are
   ATTRIBUTE_UNUSED, consistent with a stub body as in the unary case
   above -- TODO confirm against upstream.  Code bytes untouched.  */
7182 /* Likewise, but apply a binary operation to OPERANDS[1] and
7183 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7184 can be a general_operand.
7186 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7187 third operand. REG and REG3 take register operands only. */
7190 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate
,
7191 int ATTRIBUTE_UNUSED immediate3
,
7192 int ATTRIBUTE_UNUSED reg
,
7193 int ATTRIBUTE_UNUSED reg3
,
7194 rtx
* operands ATTRIBUTE_UNUSED
)
/* TARGET_RTX_COSTS hook: estimate the cost of rtx X, writing the result
   through *TOTAL.
   NOTE(review): damaged extract -- the switch over CODE, the `case`
   labels, most `*total = ...` assignments and the `return` statements
   are missing; only a few condition/assignment fragments survive.
   Code bytes untouched.
   Visible fragments: small CONST_INT operands are binned by magnitude
   (8-bit vs 16-bit immediate ranges -- note the asymmetric-looking
   `< 127` upper bound, presumably meant to be `<= 127`; this oddity
   exists upstream too -- TODO confirm before changing); some operation
   costs 0 or 1 insns depending on optimize_size; another costs more
   when its second operand is not a CONST_INT.  */
7200 mep_rtx_cost (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
,
7201 int opno ATTRIBUTE_UNUSED
, int *total
,
7202 bool ATTRIBUTE_UNUSED speed_t
)
7207 if (INTVAL (x
) >= -128 && INTVAL (x
) < 127)
7209 else if (INTVAL (x
) >= -32768 && INTVAL (x
) < 65536)
7216 *total
= optimize_size
? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7220 *total
= (GET_CODE (XEXP (x
, 1)) == CONST_INT
7222 : COSTS_N_INSNS (2));
/* TARGET_ADDRESS_COST hook.  Every parameter is ATTRIBUTE_UNUSED, so
   the (missing) body presumably returns a constant cost for all
   addresses -- TODO confirm against upstream.
   NOTE(review): damaged extract -- return type and body are missing;
   code bytes untouched.  */
7229 mep_address_cost (rtx addr ATTRIBUTE_UNUSED
,
7230 machine_mode mode ATTRIBUTE_UNUSED
,
7231 addr_space_t as ATTRIBUTE_UNUSED
,
7232 bool ATTRIBUTE_UNUSED speed_p
)
/* TARGET_ASM_INIT_SECTIONS hook: create the MeP-specific output
   sections.  Each get_unnamed_section call pairs SECTION_* flags with
   the literal assembler directive that switches to that section:
   .based / .sbss / .sdata / .far / .farbss (writable data, BSS where
   flagged), .frodata / .srodata (read-only), .vtext / .vftext (VLIW
   code, SECTION_MEP_VLIW, followed by a `.vliw` directive) and .ftext
   (core-mode code, followed by `.core`).
   NOTE(review): damaged extract -- the left-hand sides of the
   assignments (the section variables being initialized, e.g. the
   lines naming based_section, sbss_section, ...) are missing, as are
   braces.  Code bytes untouched; the string literals must stay
   byte-identical since the assembler consumes them.  */
7238 mep_asm_init_sections (void)
7241 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7242 "\t.section .based,\"aw\"");
7245 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
, output_section_asm_op
,
7246 "\t.section .sbss,\"aw\"");
7249 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7250 "\t.section .sdata,\"aw\",@progbits");
7253 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
7254 "\t.section .far,\"aw\"");
7257 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
, output_section_asm_op
,
7258 "\t.section .farbss,\"aw\"");
7261 = get_unnamed_section (0, output_section_asm_op
,
7262 "\t.section .frodata,\"a\"");
7265 = get_unnamed_section (0, output_section_asm_op
,
7266 "\t.section .srodata,\"a\"");
7269 = get_unnamed_section (SECTION_CODE
| SECTION_MEP_VLIW
, output_section_asm_op
,
7270 "\t.section .vtext,\"axv\"\n\t.vliw");
7273 = get_unnamed_section (SECTION_CODE
| SECTION_MEP_VLIW
, output_section_asm_op
,
7274 "\t.section .vftext,\"axv\"\n\t.vliw");
7277 = get_unnamed_section (SECTION_CODE
, output_section_asm_op
,
7278 "\t.section .ftext,\"ax\"\n\t.core");
/* Target-hook vector for the MeP port: each #undef/#define pair below
   overrides one slot of TARGET_INITIALIZER with a mep_* implementation
   defined earlier in this file, and the trailing definition of
   `targetm` instantiates the hook table the middle end calls through.
   NOTE(review): this extract fuses original line numbers into the text
   (the directives themselves are otherwise intact); only grouping
   comments are added, code bytes untouched.  */
7282 /* Initialize the GCC target structure. */
/* Assembly output, attributes and inlining.  */
7284 #undef TARGET_ASM_FUNCTION_PROLOGUE
7285 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
7286 #undef TARGET_ATTRIBUTE_TABLE
7287 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
7288 #undef TARGET_COMP_TYPE_ATTRIBUTES
7289 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
7290 #undef TARGET_INSERT_ATTRIBUTES
7291 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
7292 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
7293 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
7294 #undef TARGET_CAN_INLINE_P
7295 #define TARGET_CAN_INLINE_P mep_can_inline_p
/* Sections and builtins.  */
7296 #undef TARGET_SECTION_TYPE_FLAGS
7297 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
7298 #undef TARGET_ASM_NAMED_SECTION
7299 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
7300 #undef TARGET_INIT_BUILTINS
7301 #define TARGET_INIT_BUILTINS mep_init_builtins
7302 #undef TARGET_EXPAND_BUILTIN
7303 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
/* Scheduling.  */
7304 #undef TARGET_SCHED_ADJUST_COST
7305 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
7306 #undef TARGET_SCHED_ISSUE_RATE
7307 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
7308 #undef TARGET_SCHED_REORDER
7309 #define TARGET_SCHED_REORDER mep_sched_reorder
/* Symbol encoding and section selection.  */
7310 #undef TARGET_STRIP_NAME_ENCODING
7311 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
7312 #undef TARGET_ASM_SELECT_SECTION
7313 #define TARGET_ASM_SELECT_SECTION mep_select_section
7314 #undef TARGET_ASM_UNIQUE_SECTION
7315 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
7316 #undef TARGET_ENCODE_SECTION_INFO
7317 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
/* Calls, costs and reorg.  */
7318 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
7319 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
7320 #undef TARGET_RTX_COSTS
7321 #define TARGET_RTX_COSTS mep_rtx_cost
7322 #undef TARGET_ADDRESS_COST
7323 #define TARGET_ADDRESS_COST mep_address_cost
7324 #undef TARGET_MACHINE_DEPENDENT_REORG
7325 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
/* Argument passing and varargs.  */
7326 #undef TARGET_SETUP_INCOMING_VARARGS
7327 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
7328 #undef TARGET_PASS_BY_REFERENCE
7329 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
7330 #undef TARGET_FUNCTION_ARG
7331 #define TARGET_FUNCTION_ARG mep_function_arg
7332 #undef TARGET_FUNCTION_ARG_ADVANCE
7333 #define TARGET_FUNCTION_ARG_ADVANCE mep_function_arg_advance
7334 #undef TARGET_VECTOR_MODE_SUPPORTED_P
7335 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
7336 #undef TARGET_OPTION_OVERRIDE
7337 #define TARGET_OPTION_OVERRIDE mep_option_override
7338 #undef TARGET_ALLOCATE_INITIAL_VALUE
7339 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
7340 #undef TARGET_ASM_INIT_SECTIONS
7341 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
7342 #undef TARGET_RETURN_IN_MEMORY
7343 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
7344 #undef TARGET_NARROW_VOLATILE_BITFIELD
7345 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
7346 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
7347 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
7348 #undef TARGET_BUILD_BUILTIN_VA_LIST
7349 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
7350 #undef TARGET_EXPAND_BUILTIN_VA_START
7351 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
7352 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
7353 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
/* Frame, trampolines and loops.  */
7354 #undef TARGET_CAN_ELIMINATE
7355 #define TARGET_CAN_ELIMINATE mep_can_eliminate
7356 #undef TARGET_CONDITIONAL_REGISTER_USAGE
7357 #define TARGET_CONDITIONAL_REGISTER_USAGE mep_conditional_register_usage
7358 #undef TARGET_TRAMPOLINE_INIT
7359 #define TARGET_TRAMPOLINE_INIT mep_trampoline_init
7360 #undef TARGET_LEGITIMATE_CONSTANT_P
7361 #define TARGET_LEGITIMATE_CONSTANT_P mep_legitimate_constant_p
7362 #undef TARGET_CAN_USE_DOLOOP_P
7363 #define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
7365 struct gcc_target targetm
= TARGET_INITIALIZER
;